Traverse Array Operator

mirror of https://github.com/mikefarah/yq.git
commit a88c2dc5d3 (parent ea231006ed)
@@ -112,7 +112,7 @@ a:
 ```
 then
 ```bash
-yq eval '(.a.[] | select(. == "apple")) = "frog"' sample.yml
+yq eval '(.a[] | select(. == "apple")) = "frog"' sample.yml
 ```
 will output
 ```yaml
@@ -366,3 +366,21 @@ bar_thing
 foobarList_c
 ```
+
+## Select multiple indices
+Given a sample.yml file of:
+```yaml
+a:
+- a
+- b
+- c
+```
+then
+```bash
+yq eval '.a[0, 2]' sample.yml
+```
+will output
+```yaml
+a
+c
+```
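All of the operator code in this commit manipulates yaml.v3's node tree directly, so the `.a[0, 2]` selection documented above ultimately reduces to a small walk over `yaml.Node.Content`. A minimal, runnable sketch of that walk for this particular document (an illustration of the node model only, not yq's implementation):

```go
package main

import (
    "fmt"

    yaml "gopkg.in/yaml.v3"
)

func main() {
    // parse the sample document into yaml.v3's node tree
    var doc yaml.Node
    if err := yaml.Unmarshal([]byte("a:\n- a\n- b\n- c\n"), &doc); err != nil {
        panic(err)
    }

    mapping := doc.Content[0]      // the document node wraps the top-level map
    sequence := mapping.Content[1] // Content alternates key, value; index 1 is the value of "a"

    for _, i := range []int{0, 2} { // the collected indices from `.a[0, 2]`
        fmt.Println(sequence.Content[i].Value)
    }
    // prints:
    // a
    // c
}
```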
@@ -65,6 +65,7 @@ var SortKeys = &OperationType{Type: "SORT_KEYS", NumArgs: 1, Precedence: 50, Han

 var CollectObject = &OperationType{Type: "COLLECT_OBJECT", NumArgs: 0, Precedence: 50, Handler: CollectObjectOperator}
 var TraversePath = &OperationType{Type: "TRAVERSE_PATH", NumArgs: 0, Precedence: 50, Handler: TraversePathOperator}
+var TraverseArray = &OperationType{Type: "TRAVERSE_ARRAY", NumArgs: 1, Precedence: 50, Handler: TraverseArrayOperator}

 var DocumentFilter = &OperationType{Type: "DOCUMENT_FILTER", NumArgs: 0, Precedence: 50, Handler: TraversePathOperator}
 var SelfReference = &OperationType{Type: "SELF", NumArgs: 0, Precedence: 50, Handler: SelfOperator}
@@ -80,7 +80,15 @@ var assignOperatorScenarios = []expressionScenario{
     {
         description: "Update selected results",
         document:    `{a: {b: apple, c: cactus}}`,
-        expression:  `(.a.[] | select(. == "apple")) = "frog"`,
+        expression:  `(.a[] | select(. == "apple")) = "frog"`,
+        expected: []string{
+            "D0, P[], (doc)::{a: {b: frog, c: cactus}}\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: {b: apple, c: cactus}}`,
+        expression: `(.a.[] | select(. == "apple")) = "frog"`,
         expected: []string{
             "D0, P[], (doc)::{a: {b: frog, c: cactus}}\n",
         },
@@ -1,9 +1,9 @@
 package yqlib

 import (
-    "fmt"
-
     "container/list"
+    "fmt"
+    "strconv"

     "github.com/elliotchance/orderedmap"
     yaml "gopkg.in/yaml.v3"
@@ -14,10 +14,7 @@ type TraversePreferences struct {
 }

 func Splat(d *dataTreeNavigator, matches *list.List) (*list.List, error) {
-    preferences := &TraversePreferences{DontFollowAlias: true}
-    splatOperation := &Operation{OperationType: TraversePath, Value: "[]", Preferences: preferences}
-    splatTreeNode := &PathTreeNode{Operation: splatOperation}
-    return TraversePathOperator(d, matches, splatTreeNode)
+    return traverseNodesWithArrayIndices(matches, make([]*yaml.Node, 0), false)
 }

 func TraversePathOperator(d *dataTreeNavigator, matchMap *list.List, pathNode *PathTreeNode) (*list.List, error) {
@@ -56,7 +53,12 @@ func traverse(d *dataTreeNavigator, matchingNode *CandidateNode, operation *Oper
     switch value.Kind {
     case yaml.MappingNode:
         log.Debug("its a map with %v entries", len(value.Content)/2)
-        return traverseMap(matchingNode, operation)
+        followAlias := true
+
+        if operation.Preferences != nil {
+            followAlias = !operation.Preferences.(*TraversePreferences).DontFollowAlias
+        }
+        return traverseMap(matchingNode, operation.StringValue, followAlias, false)

     case yaml.SequenceNode:
         log.Debug("its a sequence of %v things!", len(value.Content))
@@ -69,20 +71,144 @@ func traverse(d *dataTreeNavigator, matchingNode *CandidateNode, operation *Oper
     case yaml.DocumentNode:
         log.Debug("digging into doc node")
         return traverse(d, &CandidateNode{
             Node:      matchingNode.Node.Content[0],
-            Document:  matchingNode.Document}, operation)
+            Filename:  matchingNode.Filename,
+            FileIndex: matchingNode.FileIndex,
+            Document:  matchingNode.Document}, operation)
     default:
         return list.New(), nil
     }
 }

-func keyMatches(key *yaml.Node, pathNode *Operation) bool {
-    return pathNode.Value == "[]" || Match(key.Value, pathNode.StringValue)
+func TraverseArrayOperator(d *dataTreeNavigator, matchingNodes *list.List, pathNode *PathTreeNode) (*list.List, error) {
+    // rhs is a collect expression that will yield indexes to retreive of the arrays
+
+    rhs, err := d.GetMatchingNodes(matchingNodes, pathNode.Rhs)
+    if err != nil {
+        return nil, err
+    }
+
+    var indicesToTraverse = rhs.Front().Value.(*CandidateNode).Node.Content
+
+    return traverseNodesWithArrayIndices(matchingNodes, indicesToTraverse, true)
 }

-func traverseMap(matchingNode *CandidateNode, operation *Operation) (*list.List, error) {
+func traverseNodesWithArrayIndices(matchingNodes *list.List, indicesToTraverse []*yaml.Node, followAlias bool) (*list.List, error) {
+    var matchingNodeMap = list.New()
+    for el := matchingNodes.Front(); el != nil; el = el.Next() {
+        candidate := el.Value.(*CandidateNode)
+        newNodes, err := traverseArrayIndices(candidate, indicesToTraverse, followAlias)
+        if err != nil {
+            return nil, err
+        }
+        matchingNodeMap.PushBackList(newNodes)
+    }
+
+    return matchingNodeMap, nil
+}
+
+func traverseArrayIndices(matchingNode *CandidateNode, indicesToTraverse []*yaml.Node, followAlias bool) (*list.List, error) { // call this if doc / alias like the other traverse
+    node := matchingNode.Node
+    if node.Tag == "!!null" {
+        log.Debugf("OperatorArrayTraverse got a null - turning it into an empty array")
+        // auto vivification, make it into an empty array
+        node.Tag = ""
+        node.Kind = yaml.SequenceNode
+    }
+
+    if node.Kind == yaml.AliasNode {
+        matchingNode.Node = node.Alias
+        return traverseArrayIndices(matchingNode, indicesToTraverse, followAlias)
+    } else if node.Kind == yaml.SequenceNode {
+        return traverseArrayWithIndices(matchingNode, indicesToTraverse)
+    } else if node.Kind == yaml.MappingNode {
+        return traverseMapWithIndices(matchingNode, indicesToTraverse, followAlias)
+    } else if node.Kind == yaml.DocumentNode {
+        return traverseArrayIndices(&CandidateNode{
+            Node:      matchingNode.Node.Content[0],
+            Filename:  matchingNode.Filename,
+            FileIndex: matchingNode.FileIndex,
+            Document:  matchingNode.Document}, indicesToTraverse, followAlias)
+    }
+    log.Debugf("OperatorArrayTraverse skipping %v as its a %v", matchingNode, node.Tag)
+    return list.New(), nil
+}
+
+func traverseMapWithIndices(candidate *CandidateNode, indices []*yaml.Node, followAlias bool) (*list.List, error) {
+    if len(indices) == 0 {
+        return traverseMap(candidate, "", followAlias, true)
+    }
+
+    var matchingNodeMap = list.New()
+
+    for _, indexNode := range indices {
+        log.Debug("traverseMapWithIndices: %v", indexNode.Value)
+        newNodes, err := traverseMap(candidate, indexNode.Value, followAlias, false)
+        if err != nil {
+            return nil, err
+        }
+        matchingNodeMap.PushBackList(newNodes)
+    }
+
+    return matchingNodeMap, nil
+}
+
+func traverseArrayWithIndices(candidate *CandidateNode, indices []*yaml.Node) (*list.List, error) {
+    log.Debug("traverseArrayWithIndices")
+    var newMatches = list.New()
+    node := UnwrapDoc(candidate.Node)
+    if len(indices) == 0 {
+        log.Debug("splatting")
+        var index int64
+        for index = 0; index < int64(len(node.Content)); index = index + 1 {
+
+            newMatches.PushBack(&CandidateNode{
+                Document: candidate.Document,
+                Path:     candidate.CreateChildPath(index),
+                Node:     node.Content[index],
+            })
+        }
+        return newMatches, nil
+
+    }
+
+    for _, indexNode := range indices {
+        log.Debug("traverseArrayWithIndices: '%v'", indexNode.Value)
+        index, err := strconv.ParseInt(indexNode.Value, 10, 64)
+        if err != nil {
+            return nil, fmt.Errorf("Cannot index array with '%v' (%v)", indexNode.Value, err)
+        }
+        indexToUse := index
+        contentLength := int64(len(node.Content))
+        for contentLength <= index {
+            node.Content = append(node.Content, &yaml.Node{Tag: "!!null", Kind: yaml.ScalarNode, Value: "null"})
+            contentLength = int64(len(node.Content))
+        }
+
+        if indexToUse < 0 {
+            indexToUse = contentLength + indexToUse
+        }
+
+        if indexToUse < 0 {
+            return nil, fmt.Errorf("Index [%v] out of range, array size is %v", index, contentLength)
+        }
+
+        newMatches.PushBack(&CandidateNode{
+            Node:     node.Content[indexToUse],
+            Document: candidate.Document,
+            Path:     candidate.CreateChildPath(index),
+        })
+    }
+    return newMatches, nil
+}
+
+func keyMatches(key *yaml.Node, wantedKey string) bool {
+    return Match(key.Value, wantedKey)
+}
+
+func traverseMap(matchingNode *CandidateNode, key string, followAlias bool, splat bool) (*list.List, error) {
     var newMatches = orderedmap.NewOrderedMap()
-    err := doTraverseMap(newMatches, matchingNode, operation)
+    err := doTraverseMap(newMatches, matchingNode, key, followAlias, splat)

     if err != nil {
         return nil, err
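Most of the new behaviour sits in traverseArrayWithIndices above: an empty index list behaves as a splat, an index past the end auto-extends the array with null entries, and a negative index counts back from the end. A standalone sketch of just that index-handling logic on plain string slices rather than yqlib's *yaml.Node values (the names here are illustrative only, not part of yq):

```go
package main

import "fmt"

// pickIndices mirrors the selection rules of traverseArrayWithIndices on a
// plain slice: no indices means splat (take everything), an index past the
// end grows the slice with empty placeholders, and a negative index counts
// back from the end.
func pickIndices(items []string, indices []int) ([]string, error) {
    if len(indices) == 0 {
        return items, nil // splat
    }
    var picked []string
    for _, idx := range indices {
        for len(items) <= idx { // auto-extend, like the "!!null" padding above
            items = append(items, "")
        }
        use := idx
        if use < 0 {
            use = len(items) + use // -1 selects the last element
        }
        if use < 0 {
            return nil, fmt.Errorf("index [%v] out of range, array size is %v", idx, len(items))
        }
        picked = append(picked, items[use])
    }
    return picked, nil
}

func main() {
    out, _ := pickIndices([]string{"a", "b", "c"}, []int{0, 2})
    fmt.Println(out) // [a c]
    out, _ = pickIndices([]string{"a", "b", "c"}, []int{-1})
    fmt.Println(out) // [c]
    out, _ = pickIndices([]string{"a", "b", "c"}, nil)
    fmt.Println(out) // [a b c]
}
```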
@@ -92,10 +218,10 @@ func traverseMap(matchingNode *CandidateNode, operation *Operation) (*list.List,
         //no matches, create one automagically
         valueNode := &yaml.Node{Tag: "!!null", Kind: yaml.ScalarNode, Value: "null"}
         node := matchingNode.Node
-        node.Content = append(node.Content, &yaml.Node{Kind: yaml.ScalarNode, Value: operation.StringValue}, valueNode)
+        node.Content = append(node.Content, &yaml.Node{Kind: yaml.ScalarNode, Value: key}, valueNode)
         candidateNode := &CandidateNode{
             Node:     valueNode,
-            Path:     append(matchingNode.Path, operation.StringValue),
+            Path:     append(matchingNode.Path, key),
             Document: matchingNode.Document,
         }
         newMatches.Set(candidateNode.GetKey(), candidateNode)
@@ -111,21 +237,14 @@ func traverseMap(matchingNode *CandidateNode, operation *Operation) (*list.List,
     return results, nil
 }

-func doTraverseMap(newMatches *orderedmap.OrderedMap, candidate *CandidateNode, operation *Operation) error {
+func doTraverseMap(newMatches *orderedmap.OrderedMap, candidate *CandidateNode, wantedKey string, followAlias bool, splat bool) error {
     // value.Content is a concatenated array of key, value,
     // so keys are in the even indexes, values in odd.
     // merge aliases are defined first, but we only want to traverse them
     // if we don't find a match directly on this node first.
-    //TODO ALIASES, auto creation?

     node := candidate.Node

-    followAlias := true
-
-    if operation.Preferences != nil {
-        followAlias = !operation.Preferences.(*TraversePreferences).DontFollowAlias
-    }
-
     var contents = node.Content
     for index := 0; index < len(contents); index = index + 2 {
         key := contents[index]
@@ -135,11 +254,11 @@ func doTraverseMap(newMatches *orderedmap.OrderedMap, candidate *CandidateNode,
         //skip the 'merge' tag, find a direct match first
         if key.Tag == "!!merge" && followAlias {
             log.Debug("Merge anchor")
-            err := traverseMergeAnchor(newMatches, candidate, value, operation)
+            err := traverseMergeAnchor(newMatches, candidate, value, wantedKey, splat)
             if err != nil {
                 return err
             }
-        } else if keyMatches(key, operation) {
+        } else if splat || keyMatches(key, wantedKey) {
             log.Debug("MATCHED")
             candidateNode := &CandidateNode{
                 Node: value,
@@ -153,7 +272,7 @@ func doTraverseMap(newMatches *orderedmap.OrderedMap, candidate *CandidateNode,
     return nil
 }

-func traverseMergeAnchor(newMatches *orderedmap.OrderedMap, originalCandidate *CandidateNode, value *yaml.Node, operation *Operation) error {
+func traverseMergeAnchor(newMatches *orderedmap.OrderedMap, originalCandidate *CandidateNode, value *yaml.Node, wantedKey string, splat bool) error {
     switch value.Kind {
     case yaml.AliasNode:
         candidateNode := &CandidateNode{
@@ -161,10 +280,10 @@ func traverseMergeAnchor(newMatches *orderedmap.OrderedMap, originalCandidate *C
             Path:     originalCandidate.Path,
             Document: originalCandidate.Document,
         }
-        return doTraverseMap(newMatches, candidateNode, operation)
+        return doTraverseMap(newMatches, candidateNode, wantedKey, true, splat)
     case yaml.SequenceNode:
         for _, childValue := range value.Content {
-            err := traverseMergeAnchor(newMatches, originalCandidate, childValue, operation)
+            err := traverseMergeAnchor(newMatches, originalCandidate, childValue, wantedKey, splat)
             if err != nil {
                 return err
             }
@@ -175,49 +294,6 @@ func traverseMergeAnchor(newMatches *orderedmap.OrderedMap, originalCandidate *C

 func traverseArray(candidate *CandidateNode, operation *Operation) (*list.List, error) {
     log.Debug("operation Value %v", operation.Value)
-    if operation.Value == "[]" {
-        var contents = candidate.Node.Content
-        var newMatches = list.New()
-        var index int64
-        for index = 0; index < int64(len(contents)); index = index + 1 {
-
-            newMatches.PushBack(&CandidateNode{
-                Document: candidate.Document,
-                Path:     candidate.CreateChildPath(index),
-                Node:     contents[index],
-            })
-        }
-        return newMatches, nil
-
-    }
-
-    switch operation.Value.(type) {
-    case int64:
-        index := operation.Value.(int64)
-        indexToUse := index
-        contentLength := int64(len(candidate.Node.Content))
-        for contentLength <= index {
-            candidate.Node.Content = append(candidate.Node.Content, &yaml.Node{Tag: "!!null", Kind: yaml.ScalarNode, Value: "null"})
-            contentLength = int64(len(candidate.Node.Content))
-        }
-
-        if indexToUse < 0 {
-            indexToUse = contentLength + indexToUse
-        }
-
-        if indexToUse < 0 {
-            return nil, fmt.Errorf("Index [%v] out of range, array size is %v", index, contentLength)
-        }
-
-        return nodeToMap(&CandidateNode{
-            Node:     candidate.Node.Content[indexToUse],
-            Document: candidate.Document,
-            Path:     candidate.CreateChildPath(index),
-        }), nil
-    default:
-        log.Debug("argument not an int (%v), no array matches", operation.Value)
-        return list.New(), nil
-    }
-
+    indices := []*yaml.Node{&yaml.Node{Value: operation.StringValue}}
+    return traverseArrayWithIndices(candidate, indices)
 }
@@ -65,7 +65,7 @@ var traversePathOperatorScenarios = []expressionScenario{
     },
     {
         skipDoc:    true,
-        document:   `{}`,
+        document:   ``,
         expression: `.[1].a`,
         expected: []string{
             "D0, P[1 a], (!!null)::null\n",
@@ -150,12 +150,6 @@ var traversePathOperatorScenarios = []expressionScenario{
             "D0, P[b c], (!!str)::frog\n",
         },
     },
-    {
-        skipDoc:    true,
-        document:   `[1,2,3]`,
-        expression: `.b`,
-        expected:   []string{},
-    },
     {
         description: "Traversing arrays by index",
         document:    `[1,2,3]`,
@@ -274,6 +268,120 @@ var traversePathOperatorScenarios = []expressionScenario{
             "D0, P[foobarList c], (!!str)::foobarList_c\n",
         },
     },
+    {
+        skipDoc:    true,
+        document:   `[a,b,c]`,
+        expression: `.[]`,
+        expected: []string{
+            "D0, P[0], (!!str)::a\n",
+            "D0, P[1], (!!str)::b\n",
+            "D0, P[2], (!!str)::c\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `[a,b,c]`,
+        expression: `[]`,
+        expected: []string{
+            "D0, P[], (!!seq)::[]\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: [a,b,c]}`,
+        expression: `.a[0]`,
+        expected: []string{
+            "D0, P[a 0], (!!str)::a\n",
+        },
+    },
+    {
+        description: "Select multiple indices",
+        document:    `{a: [a,b,c]}`,
+        expression:  `.a[0, 2]`,
+        expected: []string{
+            "D0, P[a 0], (!!str)::a\n",
+            "D0, P[a 2], (!!str)::c\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: [a,b,c]}`,
+        expression: `.a.[0, 2]`,
+        expected: []string{
+            "D0, P[a 0], (!!str)::a\n",
+            "D0, P[a 2], (!!str)::c\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: [a,b,c]}`,
+        expression: `.a.[0]`,
+        expected: []string{
+            "D0, P[a 0], (!!str)::a\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: [a,b,c]}`,
+        expression: `.a[-1]`,
+        expected: []string{
+            "D0, P[a -1], (!!str)::c\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: [a,b,c]}`,
+        expression: `.a.[-1]`,
+        expected: []string{
+            "D0, P[a -1], (!!str)::c\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: [a,b,c]}`,
+        expression: `.a[-2]`,
+        expected: []string{
+            "D0, P[a -2], (!!str)::b\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: [a,b,c]}`,
+        expression: `.a.[-2]`,
+        expected: []string{
+            "D0, P[a -2], (!!str)::b\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: [a,b,c]}`,
+        expression: `.a[]`,
+        expected: []string{
+            "D0, P[a 0], (!!str)::a\n",
+            "D0, P[a 1], (!!str)::b\n",
+            "D0, P[a 2], (!!str)::c\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: [a,b,c]}`,
+        expression: `.a.[]`,
+        expected: []string{
+            "D0, P[a 0], (!!str)::a\n",
+            "D0, P[a 1], (!!str)::b\n",
+            "D0, P[a 2], (!!str)::c\n",
+        },
+    },
+    {
+        skipDoc:    true,
+        document:   `{a: [a,b,c]}`,
+        expression: `.a | .[]`,
+        expected: []string{
+            "D0, P[a 0], (!!str)::a\n",
+            "D0, P[a 1], (!!str)::b\n",
+            "D0, P[a 2], (!!str)::c\n",
+        },
+    },
 }

 func TestTraversePathOperatorScenarios(t *testing.T) {
@@ -38,7 +38,7 @@ func testScenario(t *testing.T, s *expressionScenario) {
     if s.document != "" {
         inputs, err = readDocuments(strings.NewReader(s.document), "sample.yml", 0)
         if err != nil {
-            t.Error(err, s.document)
+            t.Error(err, s.document, s.expression)
             return
         }
     } else {
@@ -55,7 +55,7 @@ func testScenario(t *testing.T, s *expressionScenario) {
     results, err = treeNavigator.GetMatchingNodes(inputs, node)

     if err != nil {
-        t.Error(err)
+        t.Error(fmt.Errorf("%v: %v", err, s.expression))
         return
     }
     test.AssertResultComplexWithContext(t, s.expected, resultsToString(results), fmt.Sprintf("exp: %v\ndoc: %v", s.expression, s.document))
@@ -167,17 +167,17 @@ func documentScenarios(t *testing.T, title string, scenarios []expressionScenari
         if s.document != "" {
             node, err := treeCreator.ParsePath(s.expression)
             if err != nil {
-                t.Error(err)
+                t.Error(err, s.expression)
             }
             err = streamEvaluator.Evaluate("sample.yaml", strings.NewReader(formattedDoc), node, printer)

             if err != nil {
-                t.Error(err)
+                t.Error(err, s.expression)
             }
         } else {
             err = streamEvaluator.EvaluateNew(s.expression, printer)
             if err != nil {
-                t.Error(err)
+                t.Error(err, s.expression)
             }
         }

@@ -17,20 +17,35 @@ var pathTests = []struct {
         append(make([]interface{}, 0), "[", "]"),
         append(make([]interface{}, 0), "EMPTY", "COLLECT", "SHORT_PIPE"),
     },
+    {
+        `.[]`,
+        append(make([]interface{}, 0), "TRAVERSE_ARRAY", "[", "]"),
+        append(make([]interface{}, 0), "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY"),
+    },
     {
         `.a[]`,
-        append(make([]interface{}, 0), "a", "SHORT_PIPE", "[]"),
-        append(make([]interface{}, 0), "a", "[]", "SHORT_PIPE"),
+        append(make([]interface{}, 0), "a", "SHORT_PIPE", "TRAVERSE_ARRAY", "[", "]"),
+        append(make([]interface{}, 0), "a", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE"),
     },
     {
         `.a.[]`,
-        append(make([]interface{}, 0), "a", "SHORT_PIPE", "[]"),
-        append(make([]interface{}, 0), "a", "[]", "SHORT_PIPE"),
+        append(make([]interface{}, 0), "a", "SHORT_PIPE", "TRAVERSE_ARRAY", "[", "]"),
+        append(make([]interface{}, 0), "a", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE"),
+    },
+    {
+        `.a[0]`,
+        append(make([]interface{}, 0), "a", "SHORT_PIPE", "TRAVERSE_ARRAY", "[", "0 (int64)", "]"),
+        append(make([]interface{}, 0), "a", "0 (int64)", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE"),
+    },
+    {
+        `.a.[0]`,
+        append(make([]interface{}, 0), "a", "SHORT_PIPE", "TRAVERSE_ARRAY", "[", "0 (int64)", "]"),
+        append(make([]interface{}, 0), "a", "0 (int64)", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE"),
     },
     {
         `.a[].c`,
-        append(make([]interface{}, 0), "a", "SHORT_PIPE", "[]", "SHORT_PIPE", "c"),
-        append(make([]interface{}, 0), "a", "[]", "SHORT_PIPE", "c", "SHORT_PIPE"),
+        append(make([]interface{}, 0), "a", "SHORT_PIPE", "TRAVERSE_ARRAY", "[", "]", "SHORT_PIPE", "c"),
+        append(make([]interface{}, 0), "a", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE", "c", "SHORT_PIPE"),
     },
     {
         `[3]`,
@@ -44,18 +59,18 @@ var pathTests = []struct {
     },
     {
         `.a | .[].b == "apple"`,
-        append(make([]interface{}, 0), "a", "PIPE", "[]", "SHORT_PIPE", "b", "EQUALS", "apple (string)"),
-        append(make([]interface{}, 0), "a", "[]", "b", "SHORT_PIPE", "apple (string)", "EQUALS", "PIPE"),
+        append(make([]interface{}, 0), "a", "PIPE", "TRAVERSE_ARRAY", "[", "]", "SHORT_PIPE", "b", "EQUALS", "apple (string)"),
+        append(make([]interface{}, 0), "a", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "b", "SHORT_PIPE", "apple (string)", "EQUALS", "PIPE"),
     },
     {
         `(.a | .[].b) == "apple"`,
-        append(make([]interface{}, 0), "(", "a", "PIPE", "[]", "SHORT_PIPE", "b", ")", "EQUALS", "apple (string)"),
-        append(make([]interface{}, 0), "a", "[]", "b", "SHORT_PIPE", "PIPE", "apple (string)", "EQUALS"),
+        append(make([]interface{}, 0), "(", "a", "PIPE", "TRAVERSE_ARRAY", "[", "]", "SHORT_PIPE", "b", ")", "EQUALS", "apple (string)"),
+        append(make([]interface{}, 0), "a", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "b", "SHORT_PIPE", "PIPE", "apple (string)", "EQUALS"),
     },
     {
         `.[] | select(. == "*at")`,
-        append(make([]interface{}, 0), "[]", "PIPE", "SELECT", "(", "SELF", "EQUALS", "*at (string)", ")"),
-        append(make([]interface{}, 0), "[]", "SELF", "*at (string)", "EQUALS", "SELECT", "PIPE"),
+        append(make([]interface{}, 0), "TRAVERSE_ARRAY", "[", "]", "PIPE", "SELECT", "(", "SELF", "EQUALS", "*at (string)", ")"),
+        append(make([]interface{}, 0), "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SELF", "*at (string)", "EQUALS", "SELECT", "PIPE"),
     },
     {
         `[true]`,
@@ -89,8 +104,8 @@ var pathTests = []struct {
     },
     {
         `{.a: .c, .b.[]: .f.g.[]}`,
-        append(make([]interface{}, 0), "{", "a", "CREATE_MAP", "c", "UNION", "b", "SHORT_PIPE", "[]", "CREATE_MAP", "f", "SHORT_PIPE", "g", "SHORT_PIPE", "[]", "}"),
-        append(make([]interface{}, 0), "a", "c", "CREATE_MAP", "b", "[]", "SHORT_PIPE", "f", "g", "SHORT_PIPE", "[]", "SHORT_PIPE", "CREATE_MAP", "UNION", "COLLECT_OBJECT", "SHORT_PIPE"),
+        append(make([]interface{}, 0), "{", "a", "CREATE_MAP", "c", "UNION", "b", "SHORT_PIPE", "TRAVERSE_ARRAY", "[", "]", "CREATE_MAP", "f", "SHORT_PIPE", "g", "SHORT_PIPE", "TRAVERSE_ARRAY", "[", "]", "}"),
+        append(make([]interface{}, 0), "a", "c", "CREATE_MAP", "b", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE", "f", "g", "SHORT_PIPE", "EMPTY", "COLLECT", "SHORT_PIPE", "TRAVERSE_ARRAY", "SHORT_PIPE", "CREATE_MAP", "UNION", "COLLECT_OBJECT", "SHORT_PIPE"),
     },
     {
         `explode(.a.b)`,
@@ -122,7 +137,6 @@ var pathTests = []struct {
         append(make([]interface{}, 0), "SELF", "ASSIGN_COMMENT", "str (string)"),
         append(make([]interface{}, 0), "SELF", "str (string)", "ASSIGN_COMMENT"),
     },
-
     {
         `.a.b tag="!!str"`,
         append(make([]interface{}, 0), "a", "SHORT_PIPE", "b", "ASSIGN_TAG", "!!str (string)"),
@@ -22,7 +22,7 @@ const (
     CloseCollect
     OpenCollectObject
     CloseCollectObject
-    SplatOrEmptyCollect
+    TraverseArrayCollect
 )

 type Token struct {
@@ -49,8 +49,9 @@ func (t *Token) toString() string {
         return "{"
     } else if t.TokenType == CloseCollectObject {
         return "}"
-    } else if t.TokenType == SplatOrEmptyCollect {
-        return "[]?"
+    } else if t.TokenType == TraverseArrayCollect {
+        return ".["
+
     } else {
         return "NFI"
     }
@@ -114,23 +115,6 @@ func unwrap(value string) string {
     return value[1 : len(value)-1]
 }

-func arrayIndextoken(precedingDot bool) lex.Action {
-    return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
-        var numberString = string(m.Bytes)
-        startIndex := 1
-        if precedingDot {
-            startIndex = 2
-        }
-        numberString = numberString[startIndex : len(numberString)-1]
-        var number, errParsingInt = strconv.ParseInt(numberString, 10, 64) // nolint
-        if errParsingInt != nil {
-            return nil, errParsingInt
-        }
-        op := &Operation{OperationType: TraversePath, Value: number, StringValue: numberString}
-        return &Token{TokenType: OperationToken, Operation: op, CheckForPostTraverse: true}, nil
-    }
-}
-
 func numberValue() lex.Action {
     return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
         var numberString = string(m.Bytes)
@@ -188,7 +172,8 @@ func initLexer() (*lex.Lexer, error) {
     lexer.Add([]byte(`\(`), literalToken(OpenBracket, false))
     lexer.Add([]byte(`\)`), literalToken(CloseBracket, true))

-    lexer.Add([]byte(`\.\[\]`), pathToken(false))
+    // lexer.Add([]byte(`\.\[\]`), pathToken(false)) // traverseCollect(false)
+    lexer.Add([]byte(`\.\[`), literalToken(TraverseArrayCollect, false))
     lexer.Add([]byte(`\.\.`), opToken(RecursiveDescent))

     lexer.Add([]byte(`,`), opToken(Union))
|
|||||||
|
|
||||||
lexer.Add([]byte(`\s*\|=\s*`), opTokenWithPrefs(Assign, nil, &AssignOpPreferences{true}))
|
lexer.Add([]byte(`\s*\|=\s*`), opTokenWithPrefs(Assign, nil, &AssignOpPreferences{true}))
|
||||||
|
|
||||||
lexer.Add([]byte(`\.\[-?[0-9]+\]`), arrayIndextoken(true))
|
// lexer.Add([]byte(`\.\[-?[0-9]+\]`), arrayIndextoken(true)) // traverseCollect(true)
|
||||||
|
|
||||||
lexer.Add([]byte("( |\t|\n|\r)+"), skip)
|
lexer.Add([]byte("( |\t|\n|\r)+"), skip)
|
||||||
|
|
||||||
@@ -254,8 +239,6 @@ func initLexer() (*lex.Lexer, error) {

     lexer.Add([]byte(`"[^"]*"`), stringValue(true))

-    lexer.Add([]byte(`\[\]`), literalToken(SplatOrEmptyCollect, true))
-
     lexer.Add([]byte(`\[`), literalToken(OpenCollect, false))
     lexer.Add([]byte(`\]`), literalToken(CloseCollect, true))
     lexer.Add([]byte(`\{`), literalToken(OpenCollectObject, false))
@@ -324,24 +307,16 @@ func (p *pathTokeniser) Tokenise(path string) ([]*Token, error) {
 func (p *pathTokeniser) handleToken(tokens []*Token, index int, postProcessedTokens []*Token) (tokensAccum []*Token, skipNextToken bool) {
     skipNextToken = false
     token := tokens[index]
-    if token.TokenType == SplatOrEmptyCollect {
-        if index > 0 && tokens[index-1].TokenType == OperationToken &&
-            tokens[index-1].Operation.OperationType == TraversePath {
-            // must be a splat without a preceding dot , e.g. .a[]
-            // lets put a pipe in front of it, and convert it to a traverse "[]" token
-            pipeOp := &Operation{OperationType: ShortPipe, Value: "PIPE"}
-
-            postProcessedTokens = append(postProcessedTokens, &Token{TokenType: OperationToken, Operation: pipeOp})
-            traverseOp := &Operation{OperationType: TraversePath, Value: "[]", StringValue: "[]"}
-            token = &Token{TokenType: OperationToken, Operation: traverseOp, CheckForPostTraverse: true}
-        } else {
-            // gotta be a collect empty array, we need to split this into two tokens
-            // one OpenCollect, the other CloseCollect
-            postProcessedTokens = append(postProcessedTokens, &Token{TokenType: OpenCollect})
-            token = &Token{TokenType: CloseCollect, CheckForPostTraverse: true}
-        }
+    if token.TokenType == TraverseArrayCollect {
+        //need to put a traverse array then a collect token
+        // do this by adding traverse then converting token to collect
+
+        op := &Operation{OperationType: TraverseArray, StringValue: "TRAVERSE_ARRAY"}
+        postProcessedTokens = append(postProcessedTokens, &Token{TokenType: OperationToken, Operation: op})
+
+        token = &Token{TokenType: OpenCollect}
+
     }

     if index != len(tokens)-1 && token.AssignOperation != nil &&
@@ -359,5 +334,21 @@ func (p *pathTokeniser) handleToken(tokens []*Token, index int, postProcessedTok
         op := &Operation{OperationType: ShortPipe, Value: "PIPE"}
         postProcessedTokens = append(postProcessedTokens, &Token{TokenType: OperationToken, Operation: op})
     }
+    if index != len(tokens)-1 && token.CheckForPostTraverse &&
+        tokens[index+1].TokenType == OpenCollect {
+
+        op := &Operation{OperationType: ShortPipe, Value: "PIPE"}
+        postProcessedTokens = append(postProcessedTokens, &Token{TokenType: OperationToken, Operation: op})
+
+        op = &Operation{OperationType: TraverseArray}
+        postProcessedTokens = append(postProcessedTokens, &Token{TokenType: OperationToken, Operation: op})
+    }
+    if index != len(tokens)-1 && token.CheckForPostTraverse &&
+        tokens[index+1].TokenType == TraverseArrayCollect {
+
+        op := &Operation{OperationType: ShortPipe, Value: "PIPE"}
+        postProcessedTokens = append(postProcessedTokens, &Token{TokenType: OperationToken, Operation: op})
+
+    }
     return postProcessedTokens, skipNextToken
 }
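On the lexing side, `.[` now produces a TraverseArrayCollect token, and handleToken rewrites it (and a bare `[` that directly follows a path segment) into a TRAVERSE_ARRAY operation followed by an open-collect, inserting a short pipe so that `.a[0]`, `.a.[0]` and `.a[]` all yield the same token stream (compare the pathTests expectations earlier in this diff). A toy version of that rewrite over plain string tokens, assuming a much-simplified token shape instead of yqlib's real Token and Operation types:

```go
package main

import "fmt"

// isPathSegment is a stand-in for CheckForPostTraverse on the real tokens:
// only an ordinary path segment (like "a") triggers the injected pipe.
func isPathSegment(tok string) bool {
    switch tok {
    case "[", "]", ".[", "SHORT_PIPE", "TRAVERSE_ARRAY":
        return false
    }
    return true
}

// rewrite sketches the post-processing: ".[" becomes TRAVERSE_ARRAY + "[",
// and either bracket form directly after a path segment gets a SHORT_PIPE
// (and, for a bare "[", a TRAVERSE_ARRAY) inserted in front of it.
func rewrite(tokens []string) []string {
    var out []string
    for i, tok := range tokens {
        prevIsPath := i > 0 && isPathSegment(tokens[i-1])
        switch {
        case tok == ".[": // TraverseArrayCollect literal from the lexer
            if prevIsPath {
                out = append(out, "SHORT_PIPE")
            }
            out = append(out, "TRAVERSE_ARRAY", "[")
        case tok == "[" && prevIsPath: // e.g. the "[" in .a[0]
            out = append(out, "SHORT_PIPE", "TRAVERSE_ARRAY", "[")
        default:
            out = append(out, tok)
        }
    }
    return out
}

func main() {
    fmt.Println(rewrite([]string{"a", "[", "0", "]"}))  // .a[0]  -> [a SHORT_PIPE TRAVERSE_ARRAY [ 0 ]]
    fmt.Println(rewrite([]string{"a", ".[", "0", "]"})) // .a.[0] -> [a SHORT_PIPE TRAVERSE_ARRAY [ 0 ]]
    fmt.Println(rewrite([]string{".[", "]"}))           // .[]    -> [TRAVERSE_ARRAY [ ]]
}
```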
yq_test.go (60 lines removed)
@@ -1,60 +0,0 @@
-package main
-
-// import (
-// "fmt"
-// "runtime"
-// "testing"
-
-// "github.com/mikefarah/yq/v2/pkg/marshal"
-// "github.com/mikefarah/yq/v2/test"
-// )
-
-// func TestMultilineString(t *testing.T) {
-// testString := `
-// abcd
-// efg`
-// formattedResult, _ := marshal.NewYamlConverter().YamlToString(testString, false)
-// test.AssertResult(t, testString, formattedResult)
-// }
-
-// func TestNewYaml(t *testing.T) {
-// result, _ := newYaml([]string{"b.c", "3"})
-// formattedResult := fmt.Sprintf("%v", result)
-// test.AssertResult(t,
-// "[{b [{c 3}]}]",
-// formattedResult)
-// }
-
-// func TestNewYamlArray(t *testing.T) {
-// result, _ := newYaml([]string{"[0].cat", "meow"})
-// formattedResult := fmt.Sprintf("%v", result)
-// test.AssertResult(t,
-// "[[{cat meow}]]",
-// formattedResult)
-// }
-
-// func TestNewYaml_WithScript(t *testing.T) {
-// writeScript = "examples/instruction_sample.yaml"
-// expectedResult := `b:
-// c: cat
-// e:
-// - name: Mike Farah`
-// result, _ := newYaml([]string{""})
-// actualResult, _ := marshal.NewYamlConverter().YamlToString(result, true)
-// test.AssertResult(t, expectedResult, actualResult)
-// }
-
-// func TestNewYaml_WithUnknownScript(t *testing.T) {
-// writeScript = "fake-unknown"
-// _, err := newYaml([]string{""})
-// if err == nil {
-// t.Error("Expected error due to unknown file")
-// }
-// var expectedOutput string
-// if runtime.GOOS == "windows" {
-// expectedOutput = `open fake-unknown: The system cannot find the file specified.`
-// } else {
-// expectedOutput = `open fake-unknown: no such file or directory`
-// }
-// test.AssertResult(t, expectedOutput, err.Error())
-// }