Mirror of https://github.com/mikefarah/yq.git, synced 2025-01-27 17:05:35 +00:00
fixed equals number issue
This commit is contained in:
parent 93aaa8ccee
commit 23083ed974
@@ -390,6 +390,34 @@ func TestDataTreeNavigatorArrayEquals(t *testing.T) {
 	test.AssertResult(t, expected, resultsToString(results))
 }
 
+func TestDataTreeNavigatorArrayEqualsDeep(t *testing.T) {
+
+	nodes := readDoc(t, `apples:
+- { b: apple, animal: {legs: 2} }
+- { b: banana, animal: {legs: 4} }
+- { b: corn, animal: {legs: 6} }
+`)
+
+	path, errPath := treeCreator.ParsePath("apples(animal.legs == 4)")
+	if errPath != nil {
+		t.Error(errPath)
+	}
+	results, errNav := treeNavigator.GetMatchingNodes(nodes, path)
+
+	if errNav != nil {
+		t.Error(errNav)
+	}
+
+	expected := `
+-- Node --
+Document 0, path: [apples 1]
+Tag: !!map, Kind: MappingNode, Anchor:
+{b: banana, animal: {legs: 4}}
+`
+
+	test.AssertResult(t, expected, resultsToString(results))
+}
+
 func TestDataTreeNavigatorEqualsTrickey(t *testing.T) {
 
 	nodes := readDoc(t, `a:
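For readers unfamiliar with the path syntax exercised above, the new test selects the `apples` entry whose `animal.legs` equals 4. The snippet below is a minimal, standalone sketch of that same selection written directly against gopkg.in/yaml.v3 rather than yq's own tree navigator; the struct and variable names are illustrative only and are not part of this commit.

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

type entry struct {
	B      string `yaml:"b"`
	Animal struct {
		Legs int `yaml:"legs"`
	} `yaml:"animal"`
}

func main() {
	doc := `apples:
- { b: apple, animal: {legs: 2} }
- { b: banana, animal: {legs: 4} }
- { b: corn, animal: {legs: 6} }
`
	var parsed struct {
		Apples []entry `yaml:"apples"`
	}
	if err := yaml.Unmarshal([]byte(doc), &parsed); err != nil {
		panic(err)
	}
	for _, e := range parsed.Apples {
		// the filter expressed by apples(animal.legs == 4)
		if e.Animal.Legs == 4 {
			fmt.Printf("matched: %+v\n", e) // expect the banana entry
		}
	}
}

Running this prints only the banana entry, which is the node the new test expects back from GetMatchingNodes.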
@@ -25,6 +25,20 @@ func testExpression(expression string) (string, error) {
 	return formatted, nil
 }
 
+func TestPostFixArrayEquals(t *testing.T) {
+	var infix = "a"
+	var expectedOutput = `PathKey - 'a'
+--------
+`
+
+	actual, err := testExpression(infix)
+	if err != nil {
+		t.Error(err)
+	}
+
+	test.AssertResultComplex(t, expectedOutput, actual)
+}
+
 func TestPostFixSimpleExample(t *testing.T) {
 	var infix = "a"
 	var expectedOutput = `PathKey - 'a'
@@ -167,6 +181,24 @@ Operation - OR
 	test.AssertResultComplex(t, expectedOutput, actual)
 }
 
+func TestPostFixEqualsNumberExample(t *testing.T) {
+	var infix = "(animal == 3)"
+	var expectedOutput = `PathKey - 'animal'
+--------
+PathKey - '3'
+--------
+Operation - EQUALS
+--------
+`
+
+	actual, err := testExpression(infix)
+	if err != nil {
+		t.Error(err)
+	}
+
+	test.AssertResultComplex(t, expectedOutput, actual)
+}
+
 func TestPostFixOrWithEqualsExample(t *testing.T) {
 	var infix = "a==thing OR b==thongs"
 	var expectedOutput = `PathKey - 'a'
@@ -142,14 +142,14 @@ func (p *pathTokeniser) Tokenise(path string) ([]*lex.Token, error) {
 
 	for index, token := range tokens {
 		for _, literalTokenDef := range append(Literals, "ARRAY_INDEX", "(") {
-			if index > 0 && token.Type == TokenIds[literalTokenDef] && tokens[index-1].Type != TokenIds["TRAVERSE_OPERATOR"] {
+			if index > 0 && token.Type == TokenIds[literalTokenDef] && tokens[index-1].Type != TokenIds["TRAVERSE_OPERATOR"] && tokens[index-1].Type != TokenIds["EQUALS_OPERATOR"] && tokens[index-1].Type != TokenIds["EQUALS_SELF_OPERATOR"] {
 				postProcessedTokens = append(postProcessedTokens, &lex.Token{Type: TokenIds["TRAVERSE_OPERATOR"], Value: "."})
 			}
 		}
 
 		postProcessedTokens = append(postProcessedTokens, token)
 		for _, literalTokenDef := range append(Literals, "ARRAY_INDEX", ")") {
-			if index != len(tokens)-1 && token.Type == TokenIds[literalTokenDef] && tokens[index+1].Type != TokenIds["TRAVERSE_OPERATOR"] {
+			if index != len(tokens)-1 && token.Type == TokenIds[literalTokenDef] && tokens[index+1].Type != TokenIds["TRAVERSE_OPERATOR"] && tokens[index+1].Type != TokenIds[")"] {
 				postProcessedTokens = append(postProcessedTokens, &lex.Token{Type: TokenIds["TRAVERSE_OPERATOR"], Value: "."})
 			}
 		}
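The two extra guards above are the heart of the fix: the tokeniser's post-processing pass inserts an implicit traverse operator ("."), and before this change it would also inject one right after "==" whenever the right-hand side lexed as one of the listed literal types (for example a bare number), splitting the operand away from its comparison; the symmetric guard on the look-ahead rule likewise stops a "." from being inserted just before a closing ")". The following standalone sketch distils the "insert before" rule using plain strings; the function name and token spellings are illustrative, not yq's actual types.

package main

import "fmt"

// needsImplicitTraverse reports whether a "." should be inserted between prev and cur.
// A distilled version of the guard changed above: cur stands in for the Literals /
// ARRAY_INDEX / "(" cases, and prev == "==" is the newly excluded equals operator.
func needsImplicitTraverse(prev, cur string) bool {
	isLiteralOrOpen := cur != "." && cur != "==" && cur != ")"
	alreadySeparated := prev == "."
	afterEquals := prev == "==" // the guard this commit adds
	return isLiteralOrOpen && !alreadySeparated && !afterEquals
}

func main() {
	fmt.Println(needsImplicitTraverse("a", "("))    // true:  a(...) still gains an implicit "."
	fmt.Println(needsImplicitTraverse("==", "3"))   // false: 3 stays the operand of "=="
	fmt.Println(needsImplicitTraverse(".", "legs")) // false: already separated
}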
@@ -11,6 +11,8 @@ var tokeniserTests = []struct {
 	expectedTokens []interface{}
 }{ // TODO: Ensure ALL documented examples have tests! sheesh
 
+	{"(animal==3)", append(make([]interface{}, 0), "(", "animal", "==", int64(3), ")")},
+	{"(animal==f3)", append(make([]interface{}, 0), "(", "animal", "==", "f3", ")")},
 	{"apples.BANANAS", append(make([]interface{}, 0), "apples", ".", "BANANAS")},
 	{"appl*.BANA*", append(make([]interface{}, 0), "appl*", ".", "BANA*")},
 	{"a.b.**", append(make([]interface{}, 0), "a", ".", "b", ".", "**")},