Fixed collect object for multi doc

Mike Farah 2020-11-13 13:19:54 +11:00
parent 2edf64182b
commit 708ff02e8d
17 changed files with 406 additions and 118 deletions

View File

@@ -0,0 +1,64 @@
package cmd
import (
"os"
"github.com/mikefarah/yq/v4/pkg/yqlib"
"github.com/spf13/cobra"
)
func createEvaluateAllCommand() *cobra.Command {
var cmdEvalAll = &cobra.Command{
Use: "eval-all [expression] [yaml_file1]...",
Aliases: []string{"ea"},
Short: "Loads all yaml documents of all yaml files and runs expression once",
Example: `
yq ea '.a.b | length' file1.yml file2.yml
yq ea < sample.yaml
yq ea -n '{"a": "b"}'
`,
Long: "Evaluate All:\nUseful when you need to run an expression across several yaml documents or files. Consumes more memory than eval-seq",
RunE: evaluateAll,
}
return cmdEvalAll
}
func evaluateAll(cmd *cobra.Command, args []string) error {
// 0 args, read std in
// 1 arg, null input, process expression
// 1 arg, read file in sequence
// 2+ args, [0] = expression, file the rest
var err error
stat, _ := os.Stdin.Stat()
pipingStdIn := (stat.Mode() & os.ModeCharDevice) == 0
out := cmd.OutOrStdout()
fileInfo, _ := os.Stdout.Stat()
if forceColor || (!forceNoColor && (fileInfo.Mode()&os.ModeCharDevice) != 0) {
colorsEnabled = true
}
printer := yqlib.NewPrinter(out, outputToJSON, unwrapScalar, colorsEnabled, indent, printDocSeparators)
switch len(args) {
case 0:
if pipingStdIn {
err = yqlib.EvaluateAllFileStreams("", []string{"-"}, printer)
} else {
cmd.Println(cmd.UsageString())
return nil
}
case 1:
if nullInput {
err = yqlib.EvaluateAllFileStreams(args[0], []string{}, printer)
} else {
err = yqlib.EvaluateAllFileStreams("", []string{args[0]}, printer)
}
default:
err = yqlib.EvaluateAllFileStreams(args[0], args[1:len(args)], printer)
}
cmd.SilenceUsage = true
return err
}
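For reference, a minimal sketch of driving the same eval-all behaviour directly from Go, assuming only the yqlib API as added in this commit (NewPrinter now takes the writer first; EvaluateAllFileStreams takes the expression, the file names and a printer). The file names are placeholder paths:

package main

import (
	"os"

	"github.com/mikefarah/yq/v4/pkg/yqlib"
)

func main() {
	// Same defaults the CLI uses: YAML output, unwrap scalars, no colors,
	// indent of 2, print document separators.
	printer := yqlib.NewPrinter(os.Stdout, false, true, false, 2, true)
	// All documents from all files are read into memory and the expression is
	// evaluated once across them (hence "consumes more memory than eval-seq").
	if err := yqlib.EvaluateAllFileStreams(".a.b | length", []string{"file1.yml", "file2.yml"}, printer); err != nil {
		os.Exit(1)
	}
}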

View File

@@ -1,7 +1,6 @@
package cmd
import (
"container/list"
"os"
"github.com/mikefarah/yq/v4/pkg/yqlib"
@@ -29,30 +28,10 @@ func evaluateSequence(cmd *cobra.Command, args []string) error {
// 1 arg, read file in sequence
// 2+ args, [0] = expression, file the rest
var matchingNodes *list.List
var err error
stat, _ := os.Stdin.Stat()
pipingStdIn := (stat.Mode() & os.ModeCharDevice) == 0
switch len(args) {
case 0:
if pipingStdIn {
matchingNodes, err = yqlib.Evaluate("-", "")
} else {
cmd.Println(cmd.UsageString())
return nil
}
case 1:
if nullInput {
matchingNodes, err = yqlib.EvaluateExpression(args[0])
} else {
matchingNodes, err = yqlib.Evaluate(args[0], "")
}
}
cmd.SilenceUsage = true
if err != nil {
return err
}
out := cmd.OutOrStdout()
fileInfo, _ := os.Stdout.Stat()
@@ -60,7 +39,26 @@ func evaluateSequence(cmd *cobra.Command, args []string) error {
if forceColor || (!forceNoColor && (fileInfo.Mode()&os.ModeCharDevice) != 0) {
colorsEnabled = true
}
printer := yqlib.NewPrinter(outputToJSON, unwrapScalar, colorsEnabled, indent, printDocSeparators)
printer := yqlib.NewPrinter(out, outputToJSON, unwrapScalar, colorsEnabled, indent, printDocSeparators)
return printer.PrintResults(matchingNodes, out)
switch len(args) {
case 0:
if pipingStdIn {
err = yqlib.EvaluateFileStreamsSequence("", []string{"-"}, printer)
} else {
cmd.Println(cmd.UsageString())
return nil
}
case 1:
if nullInput {
err = yqlib.EvaluateAllFileStreams(args[0], []string{}, printer)
} else {
err = yqlib.EvaluateFileStreamsSequence("", []string{args[0]}, printer)
}
default:
err = yqlib.EvaluateFileStreamsSequence(args[0], args[1:len(args)], printer)
}
cmd.SilenceUsage = true
return err
}

View File

@@ -65,6 +65,6 @@ func New() *cobra.Command {
rootCmd.PersistentFlags().BoolVarP(&forceColor, "colors", "C", false, "force print with colors")
rootCmd.PersistentFlags().BoolVarP(&forceNoColor, "no-colors", "M", false, "force print with no colors")
rootCmd.AddCommand(createEvaluateSequenceCommand())
rootCmd.AddCommand(createEvaluateSequenceCommand(), createEvaluateAllCommand())
return rootCmd
}

View File

@@ -25,10 +25,10 @@ var documentIndexScenarios = []expressionScenario{
{
description: "Print Document Index with matches",
document: "a: cat\n---\na: frog\n",
expression: `.a | {"match": ., "doc": (. | documentIndex)}`,
expression: `.a | ({"match": ., "doc": (. | documentIndex)})`,
expected: []string{
"D0, P[], (!!map)::match: cat\ndoc: 0\n",
"D1, P[], (!!map)::match: frog\ndoc: 1\n",
"D0, P[], (!!map)::match: frog\ndoc: 1\n",
},
},
}

View File

@@ -62,6 +62,7 @@ var DocumentFilter = &OperationType{Type: "DOCUMENT_FILTER", NumArgs: 0, Precede
var SelfReference = &OperationType{Type: "SELF", NumArgs: 0, Precedence: 50, Handler: SelfOperator}
var ValueOp = &OperationType{Type: "VALUE", NumArgs: 0, Precedence: 50, Handler: ValueOperator}
var Not = &OperationType{Type: "NOT", NumArgs: 0, Precedence: 50, Handler: NotOperator}
var Empty = &OperationType{Type: "EMPTY", NumArgs: 0, Precedence: 50, Handler: EmptyOperator}
var RecursiveDescent = &OperationType{Type: "RECURSIVE_DESCENT", NumArgs: 0, Precedence: 50, Handler: RecursiveDescentOperator}

View File

@@ -17,8 +17,44 @@ import (
func CollectObjectOperator(d *dataTreeNavigator, matchMap *list.List, pathNode *PathTreeNode) (*list.List, error) {
log.Debugf("-- collectObjectOperation")
return collect(d, list.New(), matchMap)
if matchMap.Len() == 0 {
return list.New(), nil
}
first := matchMap.Front().Value.(*CandidateNode)
var rotated []*list.List = make([]*list.List, len(first.Node.Content))
for i := 0; i < len(first.Node.Content); i++ {
rotated[i] = list.New()
}
for el := matchMap.Front(); el != nil; el = el.Next() {
candidateNode := el.Value.(*CandidateNode)
for i := 0; i < len(first.Node.Content); i++ {
rotated[i].PushBack(createChildCandidate(candidateNode, i))
}
}
newObject := list.New()
for i := 0; i < len(first.Node.Content); i++ {
additions, err := collect(d, list.New(), rotated[i])
if err != nil {
return nil, err
}
newObject.PushBackList(additions)
}
return newObject, nil
}
func createChildCandidate(candidate *CandidateNode, index int) *CandidateNode {
return &CandidateNode{
Document: candidate.Document,
Path: append(candidate.Path, index),
Filename: candidate.Filename,
Node: candidate.Node.Content[index],
}
}
func collect(d *dataTreeNavigator, aggregate *list.List, remainingMatches *list.List) (*list.List, error) {
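The rewrite above regroups matches by child index before collecting: the i-th child of every match goes into its own group, and each group is collected separately, which is what keeps multiple documents from being merged into a single object (see the new multi-document test cases in the next file). A toy, self-contained sketch of that regrouping step, with plain strings standing in for the *CandidateNode children:

package main

import "fmt"

func main() {
	// Two matches (e.g. one per source document), each with two children.
	matches := [][]string{
		{"doc0-child0", "doc0-child1"},
		{"doc1-child0", "doc1-child1"},
	}
	// Group the i-th child of every match together, as CollectObjectOperator does,
	// so each group can be collected into its own object.
	rotated := make([][]string, len(matches[0]))
	for _, match := range matches {
		for i, child := range match {
			rotated[i] = append(rotated[i], child)
		}
	}
	fmt.Println(rotated) // [[doc0-child0 doc1-child0] [doc0-child1 doc1-child1]]
}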

View File

@@ -5,6 +5,26 @@ import (
)
var collectObjectOperatorScenarios = []expressionScenario{
{
document: ``,
expression: `{}`,
expected: []string{},
},
{
document: "{name: Mike}\n",
expression: `{"wrap": .}`,
expected: []string{
"D0, P[], (!!map)::wrap: {name: Mike}\n",
},
},
{
document: "{name: Mike}\n---\n{name: Bob}",
expression: `{"wrap": .}`,
expected: []string{
"D0, P[], (!!map)::wrap: {name: Mike}\n",
"D0, P[], (!!map)::wrap: {name: Bob}\n",
},
},
{
document: `{name: Mike, age: 32}`,
expression: `{.name: .age}`,
@@ -20,6 +40,16 @@ var collectObjectOperatorScenarios = []expressionScenario{
"D0, P[], (!!map)::Mike: dog\n",
},
},
{
document: "{name: Mike, pets: [cat, dog]}\n---\n{name: Rosey, pets: [monkey, sheep]}",
expression: `{.name: .pets[]}`,
expected: []string{
"D0, P[], (!!map)::Mike: cat\n",
"D0, P[], (!!map)::Mike: dog\n",
"D0, P[], (!!map)::Rosey: monkey\n",
"D0, P[], (!!map)::Rosey: sheep\n",
},
},
{
document: `{name: Mike, pets: [cat, dog], food: [hotdog, burger]}`,
expression: `{.name: .pets[], "f":.food[]}`,
@@ -55,11 +85,9 @@ b: {cows: [apl, bba]}
},
{
document: `{name: Mike}`,
expression: `{"wrap": {"further": .}}`,
expression: `{"wrap": {"further": .}} | (.. style= "flow")`,
expected: []string{
`D0, P[], (!!map)::wrap:
further: {name: Mike}
`,
"D0, P[], (!!map)::{wrap: {further: {name: Mike}}}\n",
},
},
}
@@ -68,4 +96,5 @@ func TestCollectObjectOperatorScenarios(t *testing.T) {
for _, tt := range collectObjectOperatorScenarios {
testScenario(t, &tt)
}
documentScenarios(t, "Collect into Object", collectObjectOperatorScenarios)
}

View File

@@ -6,25 +6,30 @@ import (
var collectOperatorScenarios = []expressionScenario{
{
document: `{}`,
document: ``,
expression: `[]`,
expected: []string{},
},
{
document: ``,
expression: `["cat"]`,
expected: []string{
"D0, P[], (!!seq)::- cat\n",
},
}, {
document: `{}`,
document: ``,
expression: `[true]`,
expected: []string{
"D0, P[], (!!seq)::- true\n",
},
}, {
document: `{}`,
document: ``,
expression: `["cat", "dog"]`,
expected: []string{
"D0, P[], (!!seq)::- cat\n- dog\n",
},
}, {
document: `{}`,
document: ``,
expression: `1 | collect`,
expected: []string{
"D0, P[], (!!seq)::- 1\n",
@@ -48,4 +53,5 @@ func TestCollectOperatorScenarios(t *testing.T) {
for _, tt := range collectOperatorScenarios {
testScenario(t, &tt)
}
documentScenarios(t, "Collect into Array", collectOperatorScenarios)
}

View File

@@ -8,15 +8,50 @@ import (
func CreateMapOperator(d *dataTreeNavigator, matchingNodes *list.List, pathNode *PathTreeNode) (*list.List, error) {
log.Debugf("-- createMapOperation")
var path []interface{} = nil
//each matchingNodes entry should turn into a sequence of keys to create.
//then collect object should do a cross function of the same index sequence for all matches.
var path []interface{}
var document uint = 0
if matchingNodes.Front() != nil {
sample := matchingNodes.Front().Value.(*CandidateNode)
path = sample.Path
document = sample.Document
sequences := list.New()
if matchingNodes.Len() > 0 {
for matchingNodeEl := matchingNodes.Front(); matchingNodeEl != nil; matchingNodeEl = matchingNodeEl.Next() {
matchingNode := matchingNodeEl.Value.(*CandidateNode)
sequenceNode, err := sequenceFor(d, matchingNode, pathNode)
if err != nil {
return nil, err
}
sequences.PushBack(sequenceNode)
}
} else {
sequenceNode, err := sequenceFor(d, nil, pathNode)
if err != nil {
return nil, err
}
sequences.PushBack(sequenceNode)
}
mapPairs, err := crossFunction(d, matchingNodes, pathNode,
return nodeToMap(&CandidateNode{Node: listToNodeSeq(sequences), Document: document, Path: path}), nil
}
func sequenceFor(d *dataTreeNavigator, matchingNode *CandidateNode, pathNode *PathTreeNode) (*CandidateNode, error) {
var path []interface{}
var document uint = 0
var matches = list.New()
if matchingNode != nil {
path = matchingNode.Path
document = matchingNode.Document
matches = nodeToMap(matchingNode)
}
mapPairs, err := crossFunction(d, matches, pathNode,
func(d *dataTreeNavigator, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
node := yaml.Node{Kind: yaml.MappingNode, Tag: "!!map"}
log.Debugf("LHS:", NodeToString(lhs))
@@ -32,12 +67,19 @@ func CreateMapOperator(d *dataTreeNavigator, matchingNodes *list.List, pathNode *PathTreeNode) (*list.List, error) {
if err != nil {
return nil, err
}
//wrap up all the pairs into an array
node := yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
for mapPair := mapPairs.Front(); mapPair != nil; mapPair = mapPair.Next() {
mapPairCandidate := mapPair.Value.(*CandidateNode)
log.Debugf("Collecting %v into sequence", NodeToString(mapPairCandidate))
node.Content = append(node.Content, mapPairCandidate.Node)
}
return nodeToMap(&CandidateNode{Node: &node, Document: document, Path: path}), nil
innerList := listToNodeSeq(mapPairs)
innerList.Style = yaml.FlowStyle
return &CandidateNode{Node: innerList, Document: document, Path: path}, nil
}
//NOTE: here the document index gets dropped so we
// no longer know where the node originates from.
func listToNodeSeq(list *list.List) *yaml.Node {
node := yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
for entry := list.Front(); entry != nil; entry = entry.Next() {
entryCandidate := entry.Value.(*CandidateNode)
log.Debugf("Collecting %v into sequence", NodeToString(entryCandidate))
node.Content = append(node.Content, entryCandidate.Node)
}
return &node
}
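The map pairs for each match are now wrapped in an extra flow-style sequence node, which is why the expected strings in the create-map tests below change from "- Mike: 32" to "- [{Mike: 32}]". A standalone sketch of that shape using gopkg.in/yaml.v3 (the YAML library yq builds on); the node values are made up for illustration:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	// One key/value pair, as produced by the cross function above.
	pair := &yaml.Node{Kind: yaml.MappingNode, Tag: "!!map", Content: []*yaml.Node{
		{Kind: yaml.ScalarNode, Tag: "!!str", Value: "Mike"},
		{Kind: yaml.ScalarNode, Tag: "!!int", Value: "32"},
	}}
	// The pairs for one match, wrapped in a flow-style sequence (listToNodeSeq + FlowStyle).
	inner := &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq", Style: yaml.FlowStyle, Content: []*yaml.Node{pair}}
	// The outer sequence holds one entry per match.
	outer := &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq", Content: []*yaml.Node{inner}}
	out, err := yaml.Marshal(outer)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out)) // - [{Mike: 32}]
}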

View File

@@ -5,41 +5,71 @@ import (
)
var createMapOperatorScenarios = []expressionScenario{
{
document: ``,
expression: `"frog": "jumps"`,
expected: []string{
"D0, P[], (!!seq)::- [{frog: jumps}]\n",
},
},
{
document: `{name: Mike, age: 32}`,
expression: `.name: .age`,
expected: []string{
"D0, P[], (!!seq)::- Mike: 32\n",
"D0, P[], (!!seq)::- [{Mike: 32}]\n",
},
},
{
document: `{name: Mike, pets: [cat, dog]}`,
expression: `.name: .pets[]`,
expected: []string{
"D0, P[], (!!seq)::- Mike: cat\n- Mike: dog\n",
"D0, P[], (!!seq)::- [{Mike: cat}, {Mike: dog}]\n",
},
},
{
document: `{name: Mike, pets: [cat, dog], food: [hotdog, burger]}`,
expression: `.name: .pets[], "f":.food[]`,
expected: []string{
"D0, P[], (!!seq)::- Mike: cat\n- Mike: dog\n",
"D0, P[], (!!seq)::- f: hotdog\n- f: burger\n",
"D0, P[], (!!seq)::- [{Mike: cat}, {Mike: dog}]\n",
"D0, P[], (!!seq)::- [{f: hotdog}, {f: burger}]\n",
},
},
{
document: "{name: Mike, pets: [cat, dog], food: [hotdog, burger]}\n---\n{name: Fred, pets: [mouse], food: [pizza, onion, apple]}",
expression: `.name: .pets[], "f":.food[]`,
expected: []string{
"D0, P[], (!!seq)::- [{Mike: cat}, {Mike: dog}]\n- [{Fred: mouse}]\n",
"D0, P[], (!!seq)::- [{f: hotdog}, {f: burger}]\n- [{f: pizza}, {f: onion}, {f: apple}]\n",
},
},
{
document: `{name: Mike, pets: {cows: [apl, bba]}}`,
expression: `"a":.name, "b":.pets`,
expected: []string{
"D0, P[], (!!seq)::- a: Mike\n",
"D0, P[], (!!seq)::- b: {cows: [apl, bba]}\n",
"D0, P[], (!!seq)::- [{a: Mike}]\n",
"D0, P[], (!!seq)::- [{b: {cows: [apl, bba]}}]\n",
},
},
{
document: `{name: Mike}`,
expression: `"wrap": .`,
expected: []string{
"D0, P[], (!!seq)::- wrap: {name: Mike}\n",
"D0, P[], (!!seq)::- [{wrap: {name: Mike}}]\n",
},
},
{
document: "{name: Mike}\n---\n{name: Bob}",
expression: `"wrap": .`,
expected: []string{
"D0, P[], (!!seq)::- [{wrap: {name: Mike}}]\n- [{wrap: {name: Bob}}]\n",
},
},
{
document: "{name: Mike}\n---\n{name: Bob}",
expression: `"wrap": ., .name: "great"`,
expected: []string{
"D0, P[], (!!seq)::- [{wrap: {name: Mike}}]\n- [{wrap: {name: Bob}}]\n",
"D0, P[], (!!seq)::- [{Mike: great}]\n- [{Bob: great}]\n",
},
},
}

View File

@@ -16,6 +16,10 @@ func UnwrapDoc(node *yaml.Node) *yaml.Node {
return node
}
func EmptyOperator(d *dataTreeNavigator, matchingNodes *list.List, pathNode *PathTreeNode) (*list.List, error) {
return list.New(), nil
}
func PipeOperator(d *dataTreeNavigator, matchingNodes *list.List, pathNode *PathTreeNode) (*list.List, error) {
lhs, err := d.GetMatchingNodes(matchingNodes, pathNode.Lhs)
if err != nil {

View File

@@ -23,11 +23,23 @@ type expressionScenario struct {
func testScenario(t *testing.T, s *expressionScenario) {
var results *list.List
var err error
if s.document != "" {
results, err = EvaluateStream("sample.yaml", strings.NewReader(s.document), s.expression)
} else {
results, err = EvaluateExpression(s.expression)
node, err := treeCreator.ParsePath(s.expression)
if err != nil {
t.Error(err)
return
}
inputs := list.New()
if s.document != "" {
inputs, err = readDocuments(strings.NewReader(s.document), "sample.yml")
if err != nil {
t.Error(err)
return
}
}
results, err = treeNavigator.GetMatchingNodes(inputs, node)
if err != nil {
t.Error(err)
@@ -40,15 +52,13 @@ func documentScenarios(t *testing.T, title string, scenarios []expressionScenario) {
f, err := os.Create(fmt.Sprintf("doc/%v.md", title))
if err != nil {
panic(err)
t.Error(err)
}
defer f.Close()
w := bufio.NewWriter(f)
w.WriteString(fmt.Sprintf("# %v\n", title))
w.WriteString(fmt.Sprintf("## Examples\n"))
printer := NewPrinter(false, true, false, 2, true)
for index, s := range scenarios {
if !s.skipDoc {
@@ -69,20 +79,23 @@ func documentScenarios(t *testing.T, title string, scenarios []expressionScenario) {
w.WriteString(fmt.Sprintf("Result\n"))
var output bytes.Buffer
var results *list.List
var err error
if s.document != "" {
results, err = EvaluateStream("sample.yaml", strings.NewReader(s.document), s.expression)
} else {
results, err = EvaluateExpression(s.expression)
}
printer := NewPrinter(bufio.NewWriter(&output), false, true, false, 2, true)
printer.PrintResults(results, bufio.NewWriter(&output))
if s.document != "" {
node, err := treeCreator.ParsePath(s.expression)
if err != nil {
t.Error(err)
}
err = EvaluateStream("sample.yaml", strings.NewReader(s.document), node, printer)
} else {
err = EvaluateAllFileStreams(s.expression, []string{}, printer)
}
w.WriteString(fmt.Sprintf("```yaml\n%v```\n", output.String()))
if err != nil {
panic(err)
t.Error(err)
}
}

View File

@@ -114,6 +114,11 @@ var pathTests = []struct {
append(make([]interface{}, 0), "foo*", "PIPE", "(", "SELF", "ASSIGN_STYLE", "flow (string)", ")"),
append(make([]interface{}, 0), "foo*", "SELF", "flow (string)", "ASSIGN_STYLE", "PIPE"),
},
{
`{}`,
append(make([]interface{}, 0), "{", "}"),
append(make([]interface{}, 0), "EMPTY", "COLLECT_OBJECT", "PIPE"),
},
// {".animals | .==cat", append(make([]interface{}, 0), "animals", "TRAVERSE", "SELF", "EQUALS", "cat")},
// {".animals | (. == cat)", append(make([]interface{}, 0), "animals", "TRAVERSE", "(", "SELF", "EQUALS", "cat", ")")},

View File

@@ -41,8 +41,14 @@ func (p *pathPostFixer) ConvertToPostfix(infixTokens []*Token) ([]*Operation, error) {
opener = OpenCollectObject
collectOperator = CollectObject
}
itemsInMiddle := false
for len(opStack) > 0 && opStack[len(opStack)-1].TokenType != opener {
opStack, result = popOpToResult(opStack, result)
itemsInMiddle = true
}
if !itemsInMiddle {
// must be an empty collection, add the empty object as a LHS parameter
result = append(result, &Operation{OperationType: Empty})
}
if len(opStack) == 0 {
return nil, errors.New("Bad path expression, got close collect brackets without matching opening bracket")
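With this change an empty collection no longer confuses the postfix converter: when nothing sits between the open and close tokens, an EMPTY operation is pushed as the left-hand side, so "{}" post-fixes to EMPTY, COLLECT_OBJECT, PIPE (per the new tokeniser test above) and evaluates to no results (per the new collect-object test). A quick parse check, assuming NewPathTreeCreator and ParsePath are exported exactly as the test helpers in this diff suggest:

package main

import (
	"fmt"

	"github.com/mikefarah/yq/v4/pkg/yqlib"
)

func main() {
	// "{}" (and "[]") should now parse into a path tree instead of failing.
	if _, err := yqlib.NewPathTreeCreator().ParsePath("{}"); err != nil {
		panic(err)
	}
	fmt.Println("parsed {} ok")
}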

View File

@@ -174,7 +174,6 @@ func selfToken() lex.Action {
}
}
// Creates the lexer object and compiles the NFA.
func initLexer() (*lex.Lexer, error) {
lexer := lex.NewLexer()
lexer.Add([]byte(`\(`), literalToken(OpenBracket, false))
@@ -206,9 +205,7 @@ func initLexer() (*lex.Lexer, error) {
lexer.Add([]byte(`footComment`), opTokenWithPrefs(GetComment, &CommentOpPreferences{FootComment: true}))
lexer.Add([]byte(`comments\s*=`), opTokenWithPrefs(AssignComment, &CommentOpPreferences{LineComment: true, HeadComment: true, FootComment: true}))
// lexer.Add([]byte(`style`), opToken(GetStyle))
// lexer.Add([]byte(`and`), opToken())
lexer.Add([]byte(`collect`), opToken(Collect))
lexer.Add([]byte(`\s*==\s*`), opToken(Equals))
@@ -222,8 +219,7 @@ func initLexer() (*lex.Lexer, error) {
lexer.Add([]byte("( |\t|\n|\r)+"), skip)
lexer.Add([]byte(`d[0-9]+`), documentToken()) // $0
lexer.Add([]byte(`d[0-9]+`), documentToken())
lexer.Add([]byte(`\."[^ "]+"`), pathToken(true))
lexer.Add([]byte(`\.[^ \}\{\:\[\],\|\.\[\(\)=]+`), pathToken(false))
lexer.Add([]byte(`\.`), selfToken())
@@ -248,7 +244,6 @@ func initLexer() (*lex.Lexer, error) {
lexer.Add([]byte(`\}`), literalToken(CloseCollectObject, true))
lexer.Add([]byte(`\*`), opToken(Multiply))
// lexer.Add([]byte(`[^ \,\|\.\[\(\)=]+`), stringValue(false))
err := lexer.Compile()
if err != nil {
return nil, err

View File

@@ -9,7 +9,7 @@ import (
)
type Printer interface {
PrintResults(matchingNodes *list.List, writer io.Writer) error
PrintResults(matchingNodes *list.List) error
}
type resultsPrinter struct {
@@ -18,10 +18,20 @@ type resultsPrinter struct {
colorsEnabled bool
indent int
printDocSeparators bool
writer io.Writer
firstTimePrinting bool
}
func NewPrinter(outputToJSON bool, unwrapScalar bool, colorsEnabled bool, indent int, printDocSeparators bool) Printer {
return &resultsPrinter{outputToJSON, unwrapScalar, colorsEnabled, indent, printDocSeparators}
func NewPrinter(writer io.Writer, outputToJSON bool, unwrapScalar bool, colorsEnabled bool, indent int, printDocSeparators bool) Printer {
return &resultsPrinter{
writer: writer,
outputToJSON: outputToJSON,
unwrapScalar: unwrapScalar,
colorsEnabled: colorsEnabled,
indent: indent,
printDocSeparators: printDocSeparators,
firstTimePrinting: true,
}
}
func (p *resultsPrinter) printNode(node *yaml.Node, writer io.Writer) error {
@@ -42,7 +52,7 @@ func (p *resultsPrinter) writeString(writer io.Writer, txt string) error {
return errorWriting
}
func (p *resultsPrinter) PrintResults(matchingNodes *list.List, writer io.Writer) error {
func (p *resultsPrinter) PrintResults(matchingNodes *list.List) error {
var err error
if p.outputToJSON {
explodeOp := Operation{OperationType: Explode}
@@ -53,7 +63,7 @@ func (p *resultsPrinter) PrintResults(matchingNodes *list.List, writer io.Writer) error {
}
}
bufferedWriter := bufio.NewWriter(writer)
bufferedWriter := bufio.NewWriter(p.writer)
defer safelyFlush(bufferedWriter)
if matchingNodes.Len() == 0 {
@@ -61,12 +71,12 @@ func (p *resultsPrinter) PrintResults(matchingNodes *list.List, writer io.Writer) error {
return nil
}
var previousDocIndex uint = matchingNodes.Front().Value.(*CandidateNode).Document
previousDocIndex := matchingNodes.Front().Value.(*CandidateNode).Document
for el := matchingNodes.Front(); el != nil; el = el.Next() {
mappedDoc := el.Value.(*CandidateNode)
if previousDocIndex != mappedDoc.Document && p.printDocSeparators {
if (!p.firstTimePrinting || (previousDocIndex != mappedDoc.Document)) && p.printDocSeparators {
p.writeString(bufferedWriter, "---\n")
}
@@ -76,6 +86,7 @@ func (p *resultsPrinter) PrintResults(matchingNodes *list.List, writer io.Writer) error {
previousDocIndex = mappedDoc.Document
}
p.firstTimePrinting = false
return nil
}

View File

@@ -13,35 +13,14 @@ var treeNavigator = NewDataTreeNavigator(NavigationPrefs{})
var treeCreator = NewPathTreeCreator()
func readStream(filename string) (io.Reader, error) {
var stream io.Reader
if filename == "-" {
stream = bufio.NewReader(os.Stdin)
return bufio.NewReader(os.Stdin), nil
} else {
file, err := os.Open(filename) // nolint gosec
if err != nil {
return nil, err
}
defer safelyCloseFile(file)
stream = file
return os.Open(filename) // nolint gosec
}
return stream, nil
}
func EvaluateExpression(expression string) (*list.List, error) {
node, err := treeCreator.ParsePath(expression)
if err != nil {
return nil, err
}
return treeNavigator.GetMatchingNodes(list.New(), node)
}
func EvaluateStream(filename string, reader io.Reader, expression string) (*list.List, error) {
node, err := treeCreator.ParsePath(expression)
if err != nil {
return nil, err
}
var matchingNodes = list.New()
func EvaluateStream(filename string, reader io.Reader, node *PathTreeNode, printer Printer) error {
var currentIndex uint = 0
@@ -51,9 +30,9 @@ func EvaluateStream(filename string, reader io.Reader, expression string) (*list.List, error) {
errorReading := decoder.Decode(&dataBucket)
if errorReading == io.EOF {
return matchingNodes, nil
return nil
} else if errorReading != nil {
return nil, errorReading
return errorReading
}
candidateNode := &CandidateNode{
Document: currentIndex,
@@ -63,23 +42,92 @@ func EvaluateStream(filename string, reader io.Reader, expression string) (*list.List, error) {
inputList := list.New()
inputList.PushBack(candidateNode)
newMatches, errorParsing := treeNavigator.GetMatchingNodes(inputList, node)
matches, errorParsing := treeNavigator.GetMatchingNodes(inputList, node)
if errorParsing != nil {
return nil, errorParsing
return errorParsing
}
matchingNodes.PushBackList(newMatches)
printer.PrintResults(matches)
currentIndex = currentIndex + 1
}
}
func Evaluate(filename string, expression string) (*list.List, error) {
func readDocuments(reader io.Reader, filename string) (*list.List, error) {
decoder := yaml.NewDecoder(reader)
inputList := list.New()
var currentIndex uint = 0
var reader, err = readStream(filename)
if err != nil {
return nil, err
for {
var dataBucket yaml.Node
errorReading := decoder.Decode(&dataBucket)
if errorReading == io.EOF {
switch reader.(type) {
case *os.File:
safelyCloseFile(reader.(*os.File))
}
return inputList, nil
} else if errorReading != nil {
return nil, errorReading
}
candidateNode := &CandidateNode{
Document: currentIndex,
Filename: filename,
Node: &dataBucket,
}
inputList.PushBack(candidateNode)
currentIndex = currentIndex + 1
}
return EvaluateStream(filename, reader, expression)
}
func EvaluateAllFileStreams(expression string, filenames []string, printer Printer) error {
node, err := treeCreator.ParsePath(expression)
if err != nil {
return err
}
var allDocuments *list.List = list.New()
for _, filename := range filenames {
reader, err := readStream(filename)
if err != nil {
return err
}
fileDocuments, err := readDocuments(reader, filename)
if err != nil {
return err
}
allDocuments.PushBackList(fileDocuments)
}
matches, err := treeNavigator.GetMatchingNodes(allDocuments, node)
if err != nil {
return err
}
return printer.PrintResults(matches)
}
func EvaluateFileStreamsSequence(expression string, filenames []string, printer Printer) error {
node, err := treeCreator.ParsePath(expression)
if err != nil {
return err
}
for _, filename := range filenames {
reader, err := readStream(filename)
if err != nil {
return err
}
err = EvaluateStream(filename, reader, node, printer)
if err != nil {
return err
}
switch reader.(type) {
case *os.File:
safelyCloseFile(reader.(*os.File))
}
}
return nil
}
func safelyRenameFile(from string, to string) {
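For contrast with the eval-all sketch earlier: the sequence variant parses the expression once, then decodes, evaluates and prints one document at a time, so memory stays bounded by the largest single document. A minimal sketch, again assuming only the signatures added in this diff (the file names are placeholders):

package main

import (
	"os"

	"github.com/mikefarah/yq/v4/pkg/yqlib"
)

func main() {
	printer := yqlib.NewPrinter(os.Stdout, false, true, false, 2, true)
	// Each document is printed as soon as it is evaluated; the printer's
	// firstTimePrinting flag handles the "---" separators between calls.
	if err := yqlib.EvaluateFileStreamsSequence(".a", []string{"file1.yml", "file2.yml"}, printer); err != nil {
		os.Exit(1)
	}
}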