mirror of https://github.com/mikefarah/yq.git (synced 2025-01-12 11:05:37 +00:00)
wip

This commit is contained in:
parent dbe3921f5d
commit d2fd086289
@@ -8,6 +8,9 @@ var outputToJSON = false
var outputFormat = "yaml"
+var inputFormat = "yaml"

+var xmlAttributePrefix = "+"
+var xmlContentName = "+content"

var exitStatus = false
var forceColor = false
var forceNoColor = false
@@ -75,6 +75,11 @@ func evaluateAll(cmd *cobra.Command, args []string) error {
		return err
	}

+	decoder, err := configureDecoder()
+	if err != nil {
+		return err
+	}
+
	printerWriter := configurePrinterWriter(format, out)

	printer := yqlib.NewPrinter(printerWriter, format, unwrapScalar, colorsEnabled, indent, !noDocSeparators)
@@ -99,7 +104,7 @@ func evaluateAll(cmd *cobra.Command, args []string) error {
	switch len(args) {
	case 0:
		if pipingStdIn {
-			err = allAtOnceEvaluator.EvaluateFiles(processExpression(""), []string{"-"}, printer, leadingContentPreProcessing)
+			err = allAtOnceEvaluator.EvaluateFiles(processExpression(""), []string{"-"}, printer, leadingContentPreProcessing, decoder)
		} else {
			cmd.Println(cmd.UsageString())
			return nil
@@ -108,10 +113,10 @@ func evaluateAll(cmd *cobra.Command, args []string) error {
		if nullInput {
			err = yqlib.NewStreamEvaluator().EvaluateNew(processExpression(args[0]), printer, "")
		} else {
-			err = allAtOnceEvaluator.EvaluateFiles(processExpression(""), []string{args[0]}, printer, leadingContentPreProcessing)
+			err = allAtOnceEvaluator.EvaluateFiles(processExpression(""), []string{args[0]}, printer, leadingContentPreProcessing, decoder)
		}
	default:
-		err = allAtOnceEvaluator.EvaluateFiles(processExpression(args[0]), args[1:], printer, leadingContentPreProcessing)
+		err = allAtOnceEvaluator.EvaluateFiles(processExpression(args[0]), args[1:], printer, leadingContentPreProcessing, decoder)
	}

	completedSuccessfully = err == nil
@@ -92,6 +92,11 @@ func evaluateSequence(cmd *cobra.Command, args []string) error {

	printer := yqlib.NewPrinter(printerWriter, format, unwrapScalar, colorsEnabled, indent, !noDocSeparators)

+	decoder, err := configureDecoder()
+	if err != nil {
+		return err
+	}
+
	streamEvaluator := yqlib.NewStreamEvaluator()

	if frontMatter != "" {
@@ -113,7 +118,7 @@ func evaluateSequence(cmd *cobra.Command, args []string) error {
	switch len(args) {
	case 0:
		if pipingStdIn {
-			err = streamEvaluator.EvaluateFiles(processExpression(""), []string{"-"}, printer, leadingContentPreProcessing)
+			err = streamEvaluator.EvaluateFiles(processExpression(""), []string{"-"}, printer, leadingContentPreProcessing, decoder)
		} else {
			cmd.Println(cmd.UsageString())
			return nil
@@ -122,10 +127,10 @@ func evaluateSequence(cmd *cobra.Command, args []string) error {
		if nullInput {
			err = streamEvaluator.EvaluateNew(processExpression(args[0]), printer, "")
		} else {
-			err = streamEvaluator.EvaluateFiles(processExpression(""), []string{args[0]}, printer, leadingContentPreProcessing)
+			err = streamEvaluator.EvaluateFiles(processExpression(""), []string{args[0]}, printer, leadingContentPreProcessing, decoder)
		}
	default:
-		err = streamEvaluator.EvaluateFiles(processExpression(args[0]), args[1:], printer, leadingContentPreProcessing)
+		err = streamEvaluator.EvaluateFiles(processExpression(args[0]), args[1:], printer, leadingContentPreProcessing, decoder)
	}
	completedSuccessfully = err == nil
@@ -49,6 +49,11 @@ See https://mikefarah.gitbook.io/yq/ for detailed documentation and examples.`,
	}

	rootCmd.PersistentFlags().StringVarP(&outputFormat, "output-format", "o", "yaml", "[yaml|y|json|j|props|p] output format type.")
+	rootCmd.PersistentFlags().StringVarP(&inputFormat, "input-format", "p", "yaml", "[yaml|y|xml|x] input format type.")
+
+	rootCmd.PersistentFlags().StringVar(&xmlAttributePrefix, "xml-attribute-prefix", "+", "prefix for xml attributes")
+	rootCmd.PersistentFlags().StringVar(&xmlContentName, "xml-content-name", "+content", "name for xml content (if no attribute name is present).")
+
	rootCmd.PersistentFlags().BoolVarP(&nullInput, "null-input", "n", false, "Don't read input, simply evaluate the expression given. Useful for creating yaml docs from scratch.")
	rootCmd.PersistentFlags().BoolVarP(&noDocSeparators, "no-doc", "N", false, "Don't print document separators (---)")
cmd/utils.go
@@ -45,6 +45,18 @@ func initCommand(cmd *cobra.Command, args []string) (firstFileIndex int, err err
	return firstFileIndex, nil
}

+func configureDecoder() (yqlib.Decoder, error) {
+	yqlibInputFormat, err := yqlib.InputFormatFromString(inputFormat)
+	if err != nil {
+		return nil, err
+	}
+	switch yqlibInputFormat {
+	case yqlib.XmlInputFormat:
+		return yqlib.NewXmlDecoder(xmlAttributePrefix, xmlContentName), nil
+	}
+	return yqlib.NewYamlDecoder(), nil
+}
+
func configurePrinterWriter(format yqlib.PrinterOutputFormat, out io.Writer) yqlib.PrinterWriter {

	var printerWriter yqlib.PrinterWriter
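Not part of this diff, but for context: configureDecoder leans on yqlib.InputFormatFromString (added further down in decoder_xml.go), so the expected mapping can be sketched as a small test. Illustrative only; the import path assumes yq's v4 module layout.

package yqlib_test

import (
	"testing"

	"github.com/mikefarah/yq/v4/pkg/yqlib"
)

// Sketch only: "yaml"/"y" and "xml"/"x" are accepted, anything else should
// surface an error back to the CLI, which otherwise falls back to YAML.
func TestInputFormatFromString(t *testing.T) {
	format, err := yqlib.InputFormatFromString("x")
	if err != nil || format != yqlib.XmlInputFormat {
		t.Fatalf("expected XmlInputFormat, got %v (err: %v)", format, err)
	}
	if _, err := yqlib.InputFormatFromString("toml"); err == nil {
		t.Fatal("expected an error for an unsupported input format")
	}
}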
go.mod
@@ -14,7 +14,6 @@ require (
)

require (
-	github.com/basgys/goxml2json v1.1.0 // indirect
	github.com/inconshreveable/mousetrap v1.0.0 // indirect
	github.com/mattn/go-colorable v0.1.12 // indirect
	github.com/mattn/go-isatty v0.0.14 // indirect
go.sum
@@ -60,8 +60,6 @@ github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmV
github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
-github.com/basgys/goxml2json v1.1.0 h1:4ln5i4rseYfXNd86lGEB+Vi652IsIXIvggKM/BhUKVw=
-github.com/basgys/goxml2json v1.1.0/go.mod h1:wH7a5Np/Q4QoECFIU8zTQlZwZkrilY0itPfecMw41Dw=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
@@ -8,7 +8,7 @@ import (

// A yaml expression evaluator that runs the expression once against all files/nodes in memory.
type Evaluator interface {
-	EvaluateFiles(expression string, filenames []string, printer Printer, leadingContentPreProcessing bool) error
+	EvaluateFiles(expression string, filenames []string, printer Printer, leadingContentPreProcessing bool, decoder Decoder) error

	// EvaluateNodes takes an expression and one or more yaml nodes, returning a list of matching candidate nodes
	EvaluateNodes(expression string, nodes ...*yaml.Node) (*list.List, error)
@@ -46,7 +46,7 @@ func (e *allAtOnceEvaluator) EvaluateCandidateNodes(expression string, inputCand
	return context.MatchingNodes, nil
}

-func (e *allAtOnceEvaluator) EvaluateFiles(expression string, filenames []string, printer Printer, leadingContentPreProcessing bool) error {
+func (e *allAtOnceEvaluator) EvaluateFiles(expression string, filenames []string, printer Printer, leadingContentPreProcessing bool, decoder Decoder) error {
	fileIndex := 0
	firstFileLeadingContent := ""

@@ -61,7 +61,7 @@ func (e *allAtOnceEvaluator) EvaluateFiles(expression string, filenames []string
		firstFileLeadingContent = leadingContent
	}

-	fileDocuments, err := readDocuments(reader, filename, fileIndex)
+	fileDocuments, err := readDocuments(reader, filename, fileIndex, decoder)
	if err != nil {
		return err
	}
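For library callers, the practical effect of the signature change is that the decoder is chosen up front and handed to the evaluator instead of being constructed inside it. A hedged sketch, not part of this commit; NewAllAtOnceEvaluator and NewPrinterWithSingleWriter are existing yqlib constructors, while file.yml and the .a.b expression are placeholders.

package main

import (
	"bufio"
	"fmt"
	"os"

	"github.com/mikefarah/yq/v4/pkg/yqlib"
)

func main() {
	writer := bufio.NewWriter(os.Stdout)
	defer writer.Flush()

	printer := yqlib.NewPrinterWithSingleWriter(writer, yqlib.YamlOutputFormat, true, false, 2, true)

	// The caller now decides how input is decoded; swap in
	// yqlib.NewXmlDecoder("+", "+content") to read XML instead.
	decoder := yqlib.NewYamlDecoder()

	evaluator := yqlib.NewAllAtOnceEvaluator()
	if err := evaluator.EvaluateFiles(".a.b", []string{"file.yml"}, printer, true, decoder); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}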
@@ -2,6 +2,7 @@ package yqlib

import (
	"encoding/xml"
+	"fmt"
	"io"
	"unicode"
@@ -9,6 +10,24 @@ import (
	yaml "gopkg.in/yaml.v3"
)

+type InputFormat uint
+
+const (
+	YamlInputFormat = 1 << iota
+	XmlInputFormat
+)
+
+func InputFormatFromString(format string) (InputFormat, error) {
+	switch format {
+	case "yaml", "y":
+		return YamlInputFormat, nil
+	case "xml", "x":
+		return XmlInputFormat, nil
+	default:
+		return 0, fmt.Errorf("unknown format '%v' please use [yaml|xml]", format)
+	}
+}
+
type xmlDecoder struct {
	reader io.Reader
	attributePrefix string
@@ -16,11 +35,16 @@ type xmlDecoder struct {
	finished bool
}

-func NewXmlDecoder(reader io.Reader, attributePrefix string, contentPrefix string) Decoder {
+func NewXmlDecoder(attributePrefix string, contentPrefix string) Decoder {
	if contentPrefix == "" {
		contentPrefix = "content"
	}
-	return &xmlDecoder{reader: reader, attributePrefix: attributePrefix, contentPrefix: contentPrefix, finished: false}
+	return &xmlDecoder{attributePrefix: attributePrefix, contentPrefix: contentPrefix, finished: false}
}

+func (dec *xmlDecoder) Init(reader io.Reader) {
+	dec.reader = reader
+	dec.finished = false
+}
+
func (dec *xmlDecoder) createSequence(nodes []*xmlNode) (*yaml.Node, error) {
@@ -172,7 +196,7 @@ func (dec *xmlDecoder) decodeXml(root *xmlNode) error {
		}
	case xml.CharData:
		// Extract XML data (if any)
-		elem.n.Data = trimNonGraphic(string(xml.CharData(se)))
+		elem.n.Data = trimNonGraphic(string(se))
	case xml.EndElement:
		// And add it to its parent list
		if elem.parent != nil {
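Illustrative sketch (not in this diff) of the new construct-then-Init flow for the XML decoder; the output shape noted in the comment is an assumption based on the "+" attribute prefix and "+content" content name used above.

package main

import (
	"fmt"
	"strings"

	"github.com/mikefarah/yq/v4/pkg/yqlib"
	yaml "gopkg.in/yaml.v3"
)

func main() {
	decoder := yqlib.NewXmlDecoder("+", "+content")              // construct once...
	decoder.Init(strings.NewReader(`<cat legs="4">meow</cat>`)) // ...bind to an input later

	var node yaml.Node
	if err := decoder.Decode(&node); err != nil {
		panic(err)
	}
	out, err := yaml.Marshal(&node)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out)) // roughly: cat: {+legs: "4", +content: meow}
}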
@@ -7,15 +7,20 @@ import (
)

type Decoder interface {
+	Init(reader io.Reader)
	Decode(node *yaml.Node) error
}

type yamlDecoder struct {
-	decoder *yaml.Decoder
+	decoder yaml.Decoder
}

-func NewYamlDecoder(reader io.Reader) Decoder {
-	return &yamlDecoder{decoder: yaml.NewDecoder(reader)}
+func NewYamlDecoder() Decoder {
+	return &yamlDecoder{}
}

+func (dec *yamlDecoder) Init(reader io.Reader) {
+	dec.decoder = *yaml.NewDecoder(reader)
+}
+
func (dec *yamlDecoder) Decode(rootYamlNode *yaml.Node) error {
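The split between construction and Init means one Decoder can be re-bound per input. A minimal sketch of the intended call pattern, mirroring the readDocuments change at the end of this diff and assuming Decode signals end of input with io.EOF as gopkg.in/yaml.v3 does.

package yqlib

import (
	"io"

	yaml "gopkg.in/yaml.v3"
)

// decodeAll is illustrative only: bind a Decoder to a reader, then pull
// documents until the stream is exhausted.
func decodeAll(decoder Decoder, reader io.Reader) ([]*yaml.Node, error) {
	decoder.Init(reader)
	var docs []*yaml.Node
	for {
		var node yaml.Node
		err := decoder.Decode(&node)
		if err == io.EOF {
			return docs, nil
		}
		if err != nil {
			return nil, err
		}
		docs = append(docs, &node)
	}
}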
@@ -14,7 +14,7 @@ func yamlToCsv(sampleYaml string, separator rune) string {
	writer := bufio.NewWriter(&output)

	var jsonEncoder = NewCsvEncoder(writer, separator)
-	inputs, err := readDocuments(strings.NewReader(sampleYaml), "sample.yml", 0)
+	inputs, err := readDocuments(strings.NewReader(sampleYaml), "sample.yml", 0, NewYamlDecoder())
	if err != nil {
		panic(err)
	}
@@ -14,7 +14,7 @@ func yamlToProps(sampleYaml string) string {
	writer := bufio.NewWriter(&output)

	var propsEncoder = NewPropertiesEncoder(writer)
-	inputs, err := readDocuments(strings.NewReader(sampleYaml), "sample.yml", 0)
+	inputs, err := readDocuments(strings.NewReader(sampleYaml), "sample.yml", 0, NewYamlDecoder())
	if err != nil {
		panic(err)
	}
@@ -14,7 +14,7 @@ func yamlToJson(sampleYaml string, indent int) string {
	writer := bufio.NewWriter(&output)

	var jsonEncoder = NewJsonEncoder(writer, indent)
-	inputs, err := readDocuments(strings.NewReader(sampleYaml), "sample.yml", 0)
+	inputs, err := readDocuments(strings.NewReader(sampleYaml), "sample.yml", 0, NewYamlDecoder())
	if err != nil {
		panic(err)
	}
@@ -326,25 +326,26 @@ func initLexer() (*lex.Lexer, error) {
	lexer.Add([]byte(`to_json\([0-9]+\)`), encodeWithIndent(JsonOutputFormat))

	lexer.Add([]byte(`toyaml`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: YamlOutputFormat, indent: 2}))
	lexer.Add([]byte(`to_yaml`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: YamlOutputFormat, indent: 2}))
	// 0 indent doesn't work with yaml.
	lexer.Add([]byte(`@yaml`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: YamlOutputFormat, indent: 2}))

	lexer.Add([]byte(`tojson`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: JsonOutputFormat, indent: 2}))
	lexer.Add([]byte(`toprops`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: PropsOutputFormat, indent: 2}))
	lexer.Add([]byte(`@props`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: PropsOutputFormat, indent: 2}))

	lexer.Add([]byte(`to_yaml`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: YamlOutputFormat, indent: 2}))
	lexer.Add([]byte(`to_json`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: JsonOutputFormat, indent: 2}))
	lexer.Add([]byte(`@json`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: JsonOutputFormat, indent: 0}))

	lexer.Add([]byte(`toprops`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: PropsOutputFormat, indent: 2}))
	lexer.Add([]byte(`to_props`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: PropsOutputFormat, indent: 2}))
	lexer.Add([]byte(`@props`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: PropsOutputFormat, indent: 2}))

	lexer.Add([]byte(`tocsv`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: CsvOutputFormat}))
	lexer.Add([]byte(`to_csv`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: CsvOutputFormat}))
	lexer.Add([]byte(`@csv`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: CsvOutputFormat}))

	lexer.Add([]byte(`totsv`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: TsvOutputFormat}))
	lexer.Add([]byte(`to_tsv`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: TsvOutputFormat}))
	lexer.Add([]byte(`@tsv`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: TsvOutputFormat}))

	lexer.Add([]byte(`to_props`), opTokenWithPrefs(encodeOpType, nil, encoderPreferences{format: PropsOutputFormat, indent: 2}))

	lexer.Add([]byte(`fromyaml`), opToken(decodeOpType))
	lexer.Add([]byte(`fromjson`), opToken(decodeOpType))
@@ -354,8 +355,16 @@ func initLexer() (*lex.Lexer, error) {
	lexer.Add([]byte(`sortKeys`), opToken(sortKeysOpType))
	lexer.Add([]byte(`sort_keys`), opToken(sortKeysOpType))

-	lexer.Add([]byte(`load`), opTokenWithPrefs(loadOpType, nil, loadPrefs{loadAsString: false}))
+	lexer.Add([]byte(`load`), opTokenWithPrefs(loadOpType, nil, loadPrefs{loadAsString: false, decoder: NewYamlDecoder()}))
+
+	lexer.Add([]byte(`xmlload`), opTokenWithPrefs(loadOpType, nil, loadPrefs{loadAsString: false, decoder: NewXmlDecoder("+", "+content")}))
+	lexer.Add([]byte(`load_xml`), opTokenWithPrefs(loadOpType, nil, loadPrefs{loadAsString: false, decoder: NewXmlDecoder("+", "+content")}))
+	lexer.Add([]byte(`loadxml`), opTokenWithPrefs(loadOpType, nil, loadPrefs{loadAsString: false, decoder: NewXmlDecoder("+", "+content")}))
+
+	lexer.Add([]byte(`strload`), opTokenWithPrefs(loadOpType, nil, loadPrefs{loadAsString: true}))
+	lexer.Add([]byte(`load_str`), opTokenWithPrefs(loadOpType, nil, loadPrefs{loadAsString: true}))
+	lexer.Add([]byte(`loadstr`), opTokenWithPrefs(loadOpType, nil, loadPrefs{loadAsString: true}))

	lexer.Add([]byte(`select`), opToken(selectOpType))
	lexer.Add([]byte(`has`), opToken(hasOpType))
	lexer.Add([]byte(`unique`), opToken(uniqueOpType))
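For illustration only (not part of this diff): with these tokens carrying a decoder in loadPrefs, an expression such as load_xml("pod.xml") should resolve the file through the XML decoder. A hedged sketch driving it via the stream evaluator's EvaluateNew; pod.xml is a placeholder file name.

package main

import (
	"bufio"
	"fmt"
	"os"

	"github.com/mikefarah/yq/v4/pkg/yqlib"
)

func main() {
	writer := bufio.NewWriter(os.Stdout)
	defer writer.Flush()
	printer := yqlib.NewPrinterWithSingleWriter(writer, yqlib.YamlOutputFormat, true, false, 2, true)

	// EvaluateNew runs the expression without reading any input document,
	// so the whole result comes from the load_xml call.
	evaluator := yqlib.NewStreamEvaluator()
	if err := evaluator.EvaluateNew(`load_xml("pod.xml")`, printer, ""); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}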
@@ -12,6 +12,7 @@ import (

type loadPrefs struct {
	loadAsString bool
+	decoder Decoder
}

func loadString(filename string) (*CandidateNode, error) {
@@ -26,7 +27,7 @@ func loadString(filename string) (*CandidateNode, error) {
	return &CandidateNode{Node: &yaml.Node{Kind: yaml.ScalarNode, Tag: "!!str", Value: string(filebytes)}}, nil
}

-func loadYaml(filename string) (*CandidateNode, error) {
+func loadYaml(filename string, decoder Decoder) (*CandidateNode, error) {

	file, err := os.Open(filename) // #nosec
	if err != nil {
@@ -34,7 +35,7 @@ func loadYaml(filename string) (*CandidateNode, error) {
	}
	reader := bufio.NewReader(file)

-	documents, err := readDocuments(reader, filename, 0)
+	documents, err := readDocuments(reader, filename, 0, decoder)
	if err != nil {
		return nil, err
	}
@@ -83,7 +84,7 @@ func loadYamlOperator(d *dataTreeNavigator, context Context, expressionNode *Exp
	if loadPrefs.loadAsString {
		contentsCandidate, err = loadString(filename)
	} else {
-		contentsCandidate, err = loadYaml(filename)
+		contentsCandidate, err = loadYaml(filename, loadPrefs.decoder)
	}
	if err != nil {
		return Context{}, fmt.Errorf("Failed to load %v: %w", filename, err)
@@ -32,7 +32,7 @@ func readDocumentWithLeadingContent(content string, fakefilename string, fakeFil
		return nil, err
	}

-	inputs, err := readDocuments(reader, fakefilename, fakeFileIndex)
+	inputs, err := readDocuments(reader, fakefilename, fakeFileIndex, NewYamlDecoder())
	if err != nil {
		return nil, err
	}
@@ -147,7 +147,7 @@ func formatYaml(yaml string, filename string) string {
		panic(err)
	}
	streamEvaluator := NewStreamEvaluator()
-	_, err = streamEvaluator.Evaluate(filename, strings.NewReader(yaml), node, printer, "")
+	_, err = streamEvaluator.Evaluate(filename, strings.NewReader(yaml), node, printer, "", NewYamlDecoder())
	if err != nil {
		panic(err)
	}
@@ -38,7 +38,7 @@ func TestPrinterMultipleDocsInSequence(t *testing.T) {
	var writer = bufio.NewWriter(&output)
	printer := NewPrinterWithSingleWriter(writer, YamlOutputFormat, true, false, 2, true)

-	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0)
+	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
	if err != nil {
		panic(err)
	}
@@ -76,7 +76,7 @@ func TestPrinterMultipleDocsInSequenceWithLeadingContent(t *testing.T) {
	var writer = bufio.NewWriter(&output)
	printer := NewPrinterWithSingleWriter(writer, YamlOutputFormat, true, false, 2, true)

-	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0)
+	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
	if err != nil {
		panic(err)
	}
@@ -118,7 +118,7 @@ func TestPrinterMultipleFilesInSequence(t *testing.T) {
	var writer = bufio.NewWriter(&output)
	printer := NewPrinterWithSingleWriter(writer, YamlOutputFormat, true, false, 2, true)

-	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0)
+	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
	if err != nil {
		panic(err)
	}
@@ -165,7 +165,7 @@ func TestPrinterMultipleFilesInSequenceWithLeadingContent(t *testing.T) {
	var writer = bufio.NewWriter(&output)
	printer := NewPrinterWithSingleWriter(writer, YamlOutputFormat, true, false, 2, true)

-	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0)
+	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
	if err != nil {
		panic(err)
	}
@@ -215,7 +215,7 @@ func TestPrinterMultipleDocsInSinglePrint(t *testing.T) {
	var writer = bufio.NewWriter(&output)
	printer := NewPrinterWithSingleWriter(writer, YamlOutputFormat, true, false, 2, true)

-	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0)
+	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
	if err != nil {
		panic(err)
	}
@@ -234,7 +234,7 @@ func TestPrinterMultipleDocsInSinglePrintWithLeadingDoc(t *testing.T) {
	var writer = bufio.NewWriter(&output)
	printer := NewPrinterWithSingleWriter(writer, YamlOutputFormat, true, false, 2, true)

-	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0)
+	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
	if err != nil {
		panic(err)
	}
@@ -263,7 +263,7 @@ func TestPrinterMultipleDocsInSinglePrintWithLeadingDocTrailing(t *testing.T) {
	var writer = bufio.NewWriter(&output)
	printer := NewPrinterWithSingleWriter(writer, YamlOutputFormat, true, false, 2, true)

-	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0)
+	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
	if err != nil {
		panic(err)
	}
@@ -294,7 +294,7 @@ func TestPrinterScalarWithLeadingCont(t *testing.T) {
		panic(err)
	}
	streamEvaluator := NewStreamEvaluator()
-	_, err = streamEvaluator.Evaluate("sample", strings.NewReader(multiDocSample), node, printer, "# blah\n")
+	_, err = streamEvaluator.Evaluate("sample", strings.NewReader(multiDocSample), node, printer, "# blah\n", NewYamlDecoder())
	if err != nil {
		panic(err)
	}
@@ -316,7 +316,7 @@ func TestPrinterMultipleDocsJson(t *testing.T) {
	// when outputing JSON.
	printer := NewPrinterWithSingleWriter(writer, JsonOutputFormat, true, false, 0, true)

-	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0)
+	inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
	if err != nil {
		panic(err)
	}
@@ -14,8 +14,8 @@ import (
// Uses less memory than loading all documents and running the expression once, but this cannot process
// cross document expressions.
type StreamEvaluator interface {
-	Evaluate(filename string, reader io.Reader, node *ExpressionNode, printer Printer, leadingContent string) (uint, error)
-	EvaluateFiles(expression string, filenames []string, printer Printer, leadingContentPreProcessing bool) error
+	Evaluate(filename string, reader io.Reader, node *ExpressionNode, printer Printer, leadingContent string, decoder Decoder) (uint, error)
+	EvaluateFiles(expression string, filenames []string, printer Printer, leadingContentPreProcessing bool, decoder Decoder) error
	EvaluateNew(expression string, printer Printer, leadingContent string) error
}
@@ -23,7 +23,6 @@ type streamEvaluator struct {
	treeNavigator DataTreeNavigator
	treeCreator ExpressionParser
	fileIndex int
-	decoder Decoder
}

func NewStreamEvaluator() StreamEvaluator {
@@ -52,7 +51,7 @@ func (s *streamEvaluator) EvaluateNew(expression string, printer Printer, leadin
	return printer.PrintResults(result.MatchingNodes)
}

-func (s *streamEvaluator) EvaluateFiles(expression string, filenames []string, printer Printer, leadingContentPreProcessing bool) error {
+func (s *streamEvaluator) EvaluateFiles(expression string, filenames []string, printer Printer, leadingContentPreProcessing bool, decoder Decoder) error {
	var totalProcessDocs uint
	node, err := s.treeCreator.ParseExpression(expression)
	if err != nil {
@@ -71,7 +70,7 @@ func (s *streamEvaluator) EvaluateFiles(expression string, filenames []string, p
		if err != nil {
			return err
		}
-		processedDocs, err := s.Evaluate(filename, reader, node, printer, leadingContent)
+		processedDocs, err := s.Evaluate(filename, reader, node, printer, leadingContent, decoder)
		if err != nil {
			return err
		}
@@ -90,10 +89,10 @@ func (s *streamEvaluator) EvaluateFiles(expression string, filenames []string, p
	return nil
}

-func (s *streamEvaluator) Evaluate(filename string, reader io.Reader, node *ExpressionNode, printer Printer, leadingContent string) (uint, error) {
+func (s *streamEvaluator) Evaluate(filename string, reader io.Reader, node *ExpressionNode, printer Printer, leadingContent string, decoder Decoder) (uint, error) {

	var currentIndex uint
-	decoder := NewXmlDecoder(reader, "+", "")
+	decoder.Init(reader)
	for {
		var dataBucket yaml.Node
		errorReading := decoder.Decode(&dataBucket)
@@ -107,8 +107,8 @@ func processReadStream(reader *bufio.Reader) (io.Reader, string, error) {
	}
}

-func readDocuments(reader io.Reader, filename string, fileIndex int) (*list.List, error) {
-	decoder := yaml.NewDecoder(reader)
+func readDocuments(reader io.Reader, filename string, fileIndex int, decoder Decoder) (*list.List, error) {
+	decoder.Init(reader)
	inputList := list.New()
	var currentIndex uint