mirror of https://github.com/mikefarah/yq.git (synced 2024-12-19 20:19:04 +00:00)

commit 4138c3b005
parent 44552c151b

    wip
@@ -3,8 +3,6 @@ package yqlib
 import (
     "fmt"
     "io"
-
-    yaml "gopkg.in/yaml.v3"
 )
 
 type InputFormat uint
@@ -20,8 +18,8 @@ const (
 )
 
 type Decoder interface {
-    Init(reader io.Reader)
-    Decode(node *yaml.Node) error
+    Init(reader io.Reader) error
+    Decode() (*CandidateNode, error)
 }
 
 func InputFormatFromString(format string) (InputFormat, error) {
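The Decoder interface now owns node construction: Init can fail, and Decode returns a ready-made *CandidateNode instead of filling in a caller-supplied yaml.Node. Below is a minimal sketch of how a caller would drive the new interface, written as if it sat alongside this package's code; handleNode is a hypothetical callback, not part of the commit.

    // Sketch only: drain a Decoder under the new interface.
    func drainDecoder(dec Decoder, reader io.Reader, handleNode func(*CandidateNode) error) error {
        if err := dec.Init(reader); err != nil {
            return err
        }
        for {
            candidateNode, err := dec.Decode()
            if errors.Is(err, io.EOF) {
                return nil // end of stream
            } else if err != nil {
                return err
            }
            if err := handleNode(candidateNode); err != nil {
                return err
            }
        }
    }

This mirrors the loops in the stream evaluator and readDocuments further down.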
@@ -1,23 +1,92 @@
 package yqlib
 
 import (
+    "bufio"
+    "errors"
     "io"
+    "regexp"
+    "strings"
 
     yaml "gopkg.in/yaml.v3"
 )
 
 type yamlDecoder struct {
     decoder yaml.Decoder
+    // work around of various parsing issues by yaml.v3 with document headers
+    prefs          yamlPreferences
+    leadingContent string
 }
 
-func NewYamlDecoder() Decoder {
-    return &yamlDecoder{}
+func NewYamlDecoder(prefs yamlPreferences) Decoder {
+    return &yamlDecoder{prefs: prefs}
 }
 
-func (dec *yamlDecoder) Init(reader io.Reader) {
-    dec.decoder = *yaml.NewDecoder(reader)
+func (dec *yamlDecoder) processReadStream(reader *bufio.Reader) (io.Reader, string, error) {
+    var commentLineRegEx = regexp.MustCompile(`^\s*#`)
+    var sb strings.Builder
+    for {
+        peekBytes, err := reader.Peek(3)
+        if errors.Is(err, io.EOF) {
+            // EOF are handled else where..
+            return reader, sb.String(), nil
+        } else if err != nil {
+            return reader, sb.String(), err
+        } else if string(peekBytes) == "---" {
+            _, err := reader.ReadString('\n')
+            sb.WriteString("$yqDocSeperator$\n")
+            if errors.Is(err, io.EOF) {
+                return reader, sb.String(), nil
+            } else if err != nil {
+                return reader, sb.String(), err
+            }
+        } else if commentLineRegEx.MatchString(string(peekBytes)) {
+            line, err := reader.ReadString('\n')
+            sb.WriteString(line)
+            if errors.Is(err, io.EOF) {
+                return reader, sb.String(), nil
+            } else if err != nil {
+                return reader, sb.String(), err
+            }
+        } else {
+            return reader, sb.String(), nil
+        }
+    }
 }
 
-func (dec *yamlDecoder) Decode(rootYamlNode *yaml.Node) error {
-    return dec.decoder.Decode(rootYamlNode)
+func (dec *yamlDecoder) Init(reader io.Reader) error {
+    readerToUse := reader
+    leadingContent := ""
+    var err error
+    if dec.leadingContentPreProcessing {
+        readerToUse, leadingContent, err = dec.processReadStream(bufio.NewReader(reader))
+        if err != nil {
+            return err
+        }
+    }
+    dec.leadingContent = leadingContent
+    dec.decoder = *yaml.NewDecoder(readerToUse)
+    return nil
+}
+
+func (dec *yamlDecoder) Decode() (*CandidateNode, error) {
+    var dataBucket yaml.Node
+
+    err := dec.decoder.Decode(&dataBucket)
+    if err != nil {
+        return nil, err
+    }
+
+    candidateNode := &CandidateNode{
+        Node: &dataBucket,
+    }
+
+    if dec.leadingContent != "" {
+        candidateNode.LeadingContent = dec.leadingContent
+        dec.leadingContent = ""
+    }
+    // move document comments into candidate node
+    // otherwise unwrap drops them.
+    candidateNode.TrailingContent = dataBucket.FootComment
+    dataBucket.FootComment = ""
+    return candidateNode, nil
 }
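processReadStream peeks three bytes at a time: leading comment lines are copied verbatim into a buffer, a "---" document separator is replaced with the $yqDocSeperator$ placeholder, and scanning stops at the first line that is neither, leaving the reader positioned at the real YAML. A rough trace of that behaviour, assuming package context; note the wip still references dec.leadingContentPreProcessing in Init, which presumably resolves to the LeadingContentPreProcessing flag on prefs once the field access is wired up.

    // Sketch only: what the pre-processing step is expected to capture.
    func exampleLeadingContent() (string, error) {
        input := "# top comment\n---\na: 1\n"
        dec := &yamlDecoder{}
        _, leading, err := dec.processReadStream(bufio.NewReader(strings.NewReader(input)))
        // leading should now be "# top comment\n$yqDocSeperator$\n",
        // and the returned reader is left positioned at "a: 1".
        return leading, err
    }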
@@ -11,10 +11,9 @@ import (
 )
 
 type yamlEncoder struct {
     indent   int
     colorise bool
-    printDocSeparators bool
-    unwrapScalar       bool
+    prefs yamlPreferences
 }
 
 func NewYamlEncoder(indent int, colorise bool, printDocSeparators bool, unwrapScalar bool) Encoder {
@@ -21,6 +21,14 @@ func InitExpressionParser() {
     }
 }
 
+type yamlPreferences struct {
+    LeadingContentPreProcessing bool
+    printDocSeparators          bool
+    unwrapScalar                bool
+}
+
+var YamlPreferences = NewDefaultYamlPreferences()
+
 var log = logging.MustGetLogger("yq-lib")
 
 var PrettyPrintExp = `(... | (select(tag != "!!str"), select(tag == "!!str") | select(test("(?i)^(y|yes|n|no|on|off)$") | not)) ) style=""`
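yamlPreferences gathers the flags that were previously threaded through as separate booleans. NewDefaultYamlPreferences is referenced above but its body is not part of this diff; a plausible shape, guessed only from the fields it needs to populate, would be:

    // Assumption: not shown in this diff, guessed from the fields above.
    func NewDefaultYamlPreferences() yamlPreferences {
        return yamlPreferences{
            LeadingContentPreProcessing: true,
            printDocSeparators:          true,
            unwrapScalar:                true,
        }
    }

The package-level YamlPreferences value is then what callers pass to NewYamlDecoder.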
@@ -59,18 +59,13 @@ func (s *streamEvaluator) EvaluateFiles(expression string, filenames []string, p
 
     var firstFileLeadingContent string
 
-    for index, filename := range filenames {
-        reader, leadingContent, err := readStream(filename, leadingContentPreProcessing)
-        log.Debug("leadingContent: %v", leadingContent)
+    for _, filename := range filenames {
+        reader, err := readStream(filename)
 
-        if index == 0 {
-            firstFileLeadingContent = leadingContent
-        }
-
         if err != nil {
             return err
         }
-        processedDocs, err := s.Evaluate(filename, reader, node, printer, leadingContent, decoder)
+        processedDocs, err := s.Evaluate(filename, reader, node, printer, decoder)
         if err != nil {
             return err
         }
@@ -89,13 +84,12 @@ func (s *streamEvaluator) EvaluateFiles(expression string, filenames []string, p
     return nil
 }
 
-func (s *streamEvaluator) Evaluate(filename string, reader io.Reader, node *ExpressionNode, printer Printer, leadingContent string, decoder Decoder) (uint, error) {
+func (s *streamEvaluator) Evaluate(filename string, reader io.Reader, node *ExpressionNode, printer Printer, decoder Decoder) (uint, error) {
 
     var currentIndex uint
     decoder.Init(reader)
     for {
-        var dataBucket yaml.Node
-        errorReading := decoder.Decode(&dataBucket)
+        candidateNode, errorReading := decoder.Decode()
 
         if errors.Is(errorReading, io.EOF) {
             s.fileIndex = s.fileIndex + 1
|
|||||||
} else if errorReading != nil {
|
} else if errorReading != nil {
|
||||||
return currentIndex, fmt.Errorf("bad file '%v': %w", filename, errorReading)
|
return currentIndex, fmt.Errorf("bad file '%v': %w", filename, errorReading)
|
||||||
}
|
}
|
||||||
|
candidateNode.Document = currentIndex
|
||||||
|
candidateNode.Filename = filename
|
||||||
|
candidateNode.FileIndex = s.fileIndex
|
||||||
|
|
||||||
candidateNode := &CandidateNode{
|
|
||||||
Document: currentIndex,
|
|
||||||
Filename: filename,
|
|
||||||
Node: &dataBucket,
|
|
||||||
FileIndex: s.fileIndex,
|
|
||||||
}
|
|
||||||
// move document comments into candidate node
|
|
||||||
// otherwise unwrap drops them.
|
|
||||||
candidateNode.TrailingContent = dataBucket.FootComment
|
|
||||||
dataBucket.FootComment = ""
|
|
||||||
|
|
||||||
if currentIndex == 0 {
|
|
||||||
candidateNode.LeadingContent = leadingContent
|
|
||||||
}
|
|
||||||
inputList := list.New()
|
inputList := list.New()
|
||||||
inputList.PushBack(candidateNode)
|
inputList.PushBack(candidateNode)
|
||||||
|
|
||||||
|
@@ -7,13 +7,10 @@ import (
     "fmt"
     "io"
     "os"
-    "regexp"
     "strings"
-
-    yaml "gopkg.in/yaml.v3"
 )
 
-func readStream(filename string, leadingContentPreProcessing bool) (io.Reader, string, error) {
+func readStream(filename string) (io.Reader, error) {
     var reader *bufio.Reader
     if filename == "-" {
         reader = bufio.NewReader(os.Stdin)
@@ -22,23 +19,16 @@ func readStream(filename string, leadingContentPreProcessing bool) (io.Reader, s
         // and ensuring that it's not possible to give a path to a file outside thar directory.
         file, err := os.Open(filename) // #nosec
         if err != nil {
-            return nil, "", err
+            return nil, err
         }
         reader = bufio.NewReader(file)
     }
-
-    if !leadingContentPreProcessing {
-        return reader, "", nil
-    }
-    return processReadStream(reader)
+    return reader, nil
 }
 
-func readString(input string, leadingContentPreProcessing bool) (io.Reader, string, error) {
-    reader := bufio.NewReader(strings.NewReader(input))
-    if !leadingContentPreProcessing {
-        return reader, "", nil
-    }
-    return processReadStream(reader)
+func readString(input string) (io.Reader, error) {
+    return bufio.NewReader(strings.NewReader(input)), nil
 }
 
 func writeString(writer io.Writer, txt string) error {
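With readStream and readString reduced to opening the stream, leading-content handling now happens inside the decoder's Init. A small sketch of the resulting call pattern, assuming package context; decodeFromString is an illustrative helper, not part of the commit.

    // Sketch only: pairing the simplified readString with the new decoder.
    func decodeFromString(input string) (*CandidateNode, error) {
        reader, err := readString(input)
        if err != nil {
            return nil, err
        }
        decoder := NewYamlDecoder(YamlPreferences)
        if err := decoder.Init(reader); err != nil {
            return nil, err
        }
        return decoder.Decode()
    }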
@@ -46,46 +36,13 @@ func writeString(writer io.Writer, txt string) error {
     return errorWriting
 }
 
-func processReadStream(reader *bufio.Reader) (io.Reader, string, error) {
-    var commentLineRegEx = regexp.MustCompile(`^\s*#`)
-    var sb strings.Builder
-    for {
-        peekBytes, err := reader.Peek(3)
-        if errors.Is(err, io.EOF) {
-            // EOF are handled else where..
-            return reader, sb.String(), nil
-        } else if err != nil {
-            return reader, sb.String(), err
-        } else if string(peekBytes) == "---" {
-            _, err := reader.ReadString('\n')
-            sb.WriteString("$yqDocSeperator$\n")
-            if errors.Is(err, io.EOF) {
-                return reader, sb.String(), nil
-            } else if err != nil {
-                return reader, sb.String(), err
-            }
-        } else if commentLineRegEx.MatchString(string(peekBytes)) {
-            line, err := reader.ReadString('\n')
-            sb.WriteString(line)
-            if errors.Is(err, io.EOF) {
-                return reader, sb.String(), nil
-            } else if err != nil {
-                return reader, sb.String(), err
-            }
-        } else {
-            return reader, sb.String(), nil
-        }
-    }
-}
-
 func readDocuments(reader io.Reader, filename string, fileIndex int, decoder Decoder) (*list.List, error) {
     decoder.Init(reader)
     inputList := list.New()
     var currentIndex uint
 
     for {
-        var dataBucket yaml.Node
-        errorReading := decoder.Decode(&dataBucket)
+        candidateNode, errorReading := decoder.Decode()
 
         if errors.Is(errorReading, io.EOF) {
             switch reader := reader.(type) {
|
|||||||
} else if errorReading != nil {
|
} else if errorReading != nil {
|
||||||
return nil, fmt.Errorf("bad file '%v': %w", filename, errorReading)
|
return nil, fmt.Errorf("bad file '%v': %w", filename, errorReading)
|
||||||
}
|
}
|
||||||
candidateNode := &CandidateNode{
|
candidateNode.Document = currentIndex
|
||||||
Document: currentIndex,
|
candidateNode.Filename = filename
|
||||||
Filename: filename,
|
candidateNode.FileIndex = fileIndex
|
||||||
Node: &dataBucket,
|
candidateNode.EvaluateTogether = true
|
||||||
FileIndex: fileIndex,
|
|
||||||
EvaluateTogether: true,
|
|
||||||
}
|
|
||||||
|
|
||||||
//move document comments into candidate node
|
|
||||||
// otherwise unwrap drops them.
|
|
||||||
candidateNode.TrailingContent = dataBucket.FootComment
|
|
||||||
dataBucket.FootComment = ""
|
|
||||||
|
|
||||||
inputList.PushBack(candidateNode)
|
inputList.PushBack(candidateNode)
|
||||||
|
|
||||||
|
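Taken together, callers no longer build CandidateNodes: they stamp per-file metadata onto whatever the decoder returns. A consolidated sketch of the readDocuments shape after this change, assuming package context; the EOF branch that closes file readers is elided, and Init's error is checked here even though the wip call sites still ignore it.

    // Sketch only: the post-change document-reading loop.
    func readDocumentsSketch(reader io.Reader, filename string, fileIndex int, decoder Decoder) (*list.List, error) {
        if err := decoder.Init(reader); err != nil {
            return nil, err
        }
        inputList := list.New()
        var currentIndex uint
        for {
            candidateNode, errorReading := decoder.Decode()
            if errors.Is(errorReading, io.EOF) {
                return inputList, nil
            } else if errorReading != nil {
                return nil, fmt.Errorf("bad file '%v': %w", filename, errorReading)
            }
            candidateNode.Document = currentIndex
            candidateNode.Filename = filename
            candidateNode.FileIndex = fileIndex
            candidateNode.EvaluateTogether = true
            inputList.PushBack(candidateNode)
            currentIndex++
        }
    }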