Mirror of https://github.com/mikefarah/yq.git (synced 2024-11-12 13:48:06 +00:00)

Commit 5ee52f9506, parent c2159d9861: wip
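In brief: `(` and `)` move out of the traverse literal lists into a dedicated bracketLiterals slice (ClosingLiterals is removed), the postfix converter now folds array indices, `[+]`, `[*]`, and `**` into the plain PATH_KEY case, and new postfix and tokeniser tests pin down the behaviour.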
@@ -125,7 +125,7 @@ func (p *pathPostFixer) ConvertToPostfix(infixTokens []*lex.Token) ([]*PathEleme
 	for _, token := range tokens {
 		switch token.Type {
-		case TokenIds["PATH_KEY"]: // handle splats and array appends here too
+		case TokenIds["PATH_KEY"], TokenIds["ARRAY_INDEX"], TokenIds["[+]"], TokenIds["[*]"], TokenIds["**"]:
 			var pathElement = PathElement{PathElementType: PathKey, Value: token.Value}
 			result = append(result, &pathElement)
 		case TokenIds["("]:
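With this change, array indices, [+], [*], and ** flow through the same branch as plain map keys: each becomes a PathKey element whose Value is the raw token text. The tests in the next hunk pin down the expected postfix ordering for each of these token types.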
@@ -61,6 +61,94 @@ Operation - TRAVERSE
 	test.AssertResultComplex(t, expectedOutput, actual)
 }
 
+func TestPostFixSimplePathNumbersExample(t *testing.T) {
+	var infix = "apples[0].cat"
+	var expectedOutput = `PathKey - 'apples'
+--------
+PathKey - '0'
+--------
+Operation - TRAVERSE
+--------
+PathKey - 'cat'
+--------
+Operation - TRAVERSE
+--------
+`
+
+	actual, err := testExpression(infix)
+	if err != nil {
+		t.Error(err)
+	}
+
+	test.AssertResultComplex(t, expectedOutput, actual)
+}
+
+func TestPostFixSimplePathAppendArrayExample(t *testing.T) {
+	var infix = "apples[+].cat"
+	var expectedOutput = `PathKey - 'apples'
+--------
+PathKey - '[+]'
+--------
+Operation - TRAVERSE
+--------
+PathKey - 'cat'
+--------
+Operation - TRAVERSE
+--------
+`
+
+	actual, err := testExpression(infix)
+	if err != nil {
+		t.Error(err)
+	}
+
+	test.AssertResultComplex(t, expectedOutput, actual)
+}
+
+func TestPostFixSimplePathSplatArrayExample(t *testing.T) {
+	var infix = "apples.[*]cat"
+	var expectedOutput = `PathKey - 'apples'
+--------
+PathKey - '[*]'
+--------
+Operation - TRAVERSE
+--------
+PathKey - 'cat'
+--------
+Operation - TRAVERSE
+--------
+`
+
+	actual, err := testExpression(infix)
+	if err != nil {
+		t.Error(err)
+	}
+
+	test.AssertResultComplex(t, expectedOutput, actual)
+}
+
+func TestPostFixDeepMatchExample(t *testing.T) {
+	var infix = "apples.**.cat"
+	var expectedOutput = `PathKey - 'apples'
+--------
+PathKey - '**'
+--------
+Operation - TRAVERSE
+--------
+PathKey - 'cat'
+--------
+Operation - TRAVERSE
+--------
+`
+
+	actual, err := testExpression(infix)
+	if err != nil {
+		t.Error(err)
+	}
+
+	test.AssertResultComplex(t, expectedOutput, actual)
+}
+
 func TestPostFixOrExample(t *testing.T) {
 	var infix = "a OR b"
 	var expectedOutput = `PathKey - 'a'
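Each test feeds an infix path through testExpression and asserts the printed postfix form. Assuming the repository's standard Go layout, they can be run selectively with, e.g., go test -run TestPostFix ./...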
@@ -8,22 +8,20 @@ import (
 	"github.com/timtadh/lexmachine/machines"
 )
 
 var Literals []string // The tokens representing literal strings
-var ClosingLiterals []string // The tokens representing literal strings
 var Keywords []string // The keyword tokens
 var Tokens []string // All of the tokens (including literals and keywords)
 var TokenIds map[string]int // A map from the token names to their int ids
 
+var bracketLiterals []string
+
 func initTokens() {
+	bracketLiterals = []string{"(", ")"}
 	Literals = []string{ // these need a traverse operator infront
-		"(",
 		"[+]",
 		"[*]",
 		"**",
 	}
-	ClosingLiterals = []string{ // these need a traverse operator after
-		")",
-	}
 	Tokens = []string{
 		"OR_OPERATOR",
 		"AND_OPERATOR",
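The bracket pair cannot live in a single literals list: an opening `(` needs a traverse operator in front of it, while a closing `)` needs one after it. Rather than keeping two mirrored lists (Literals/ClosingLiterals), the brackets move into their own bracketLiterals slice, and the before/after handling becomes explicit in Tokenise (last code hunk below).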
@@ -33,8 +31,8 @@ func initTokens() {
 		"PATH_KEY", // apples
 		"ARRAY_INDEX", // 123
 	}
+	Tokens = append(Tokens, bracketLiterals...)
 	Tokens = append(Tokens, Literals...)
-	Tokens = append(Tokens, ClosingLiterals...)
 	TokenIds = make(map[string]int)
 	for i, tok := range Tokens {
 		TokenIds[tok] = i
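bracketLiterals still get appended to Tokens so that `(` and `)` receive entries in TokenIds; since ids are always looked up by name, the changed append order is harmless.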
@@ -80,11 +78,11 @@ func numberToken(name string, wrapped bool) lex.Action {
 // Creates the lexer object and compiles the NFA.
 func initLexer() (*lex.Lexer, error) {
 	lexer := lex.NewLexer()
-	for _, lit := range Literals {
+	for _, lit := range bracketLiterals {
 		r := "\\" + strings.Join(strings.Split(lit, ""), "\\")
 		lexer.Add([]byte(r), token(lit))
 	}
-	for _, lit := range ClosingLiterals {
+	for _, lit := range Literals {
 		r := "\\" + strings.Join(strings.Split(lit, ""), "\\")
 		lexer.Add([]byte(r), token(lit))
 	}
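Both loops register literals with the same escaping trick: every character is prefixed with a backslash so the lexer's regex engine matches the literal verbatim. A minimal standalone sketch (stdlib only, literal list taken from this commit) of what the expression produces:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Backslash-escape every character so the lexer regex matches the
	// literal verbatim: "[+]" becomes \[\+\].
	for _, lit := range []string{"(", ")", "[+]", "[*]", "**"} {
		r := "\\" + strings.Join(strings.Split(lit, ""), "\\")
		fmt.Printf("%-3s -> %s\n", lit, r)
	}
}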
@@ -143,14 +141,14 @@ func (p *pathTokeniser) Tokenise(path string) ([]*lex.Token, error) {
 	var postProcessedTokens []*lex.Token = make([]*lex.Token, 0)
 
 	for index, token := range tokens {
-		for _, literalTokenDef := range append(Literals, "ARRAY_INDEX") {
+		for _, literalTokenDef := range append(Literals, "ARRAY_INDEX", "(") {
 			if index > 0 && token.Type == TokenIds[literalTokenDef] && tokens[index-1].Type != TokenIds["TRAVERSE_OPERATOR"] {
 				postProcessedTokens = append(postProcessedTokens, &lex.Token{Type: TokenIds["TRAVERSE_OPERATOR"], Value: "."})
 			}
 		}
 
 		postProcessedTokens = append(postProcessedTokens, token)
-		for _, literalTokenDef := range append(ClosingLiterals, "ARRAY_INDEX") {
+		for _, literalTokenDef := range append(Literals, "ARRAY_INDEX", ")") {
 			if index != len(tokens)-1 && token.Type == TokenIds[literalTokenDef] && tokens[index+1].Type != TokenIds["TRAVERSE_OPERATOR"] {
 				postProcessedTokens = append(postProcessedTokens, &lex.Token{Type: TokenIds["TRAVERSE_OPERATOR"], Value: "."})
 			}
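This pass inserts a "." traverse token wherever a literal needs one and the neighbour is not already a traverse operator: before "(", after ")", and on both sides of [+], [*], **, and array indices. A simplified, self-contained sketch of the same logic; the real Tokenise works on lexmachine tokens and TokenIds, whereas this sketch uses plain strings and hypothetical needsDotBefore/needsDotAfter maps standing in for the two literal lists:

package main

import "fmt"

func main() {
	// Stand-ins for the two literal lists in Tokenise: tokens that need a
	// "." inserted before / after them. (ARRAY_INDEX is omitted because
	// this sketch carries no token types, only literal strings.)
	needsDotBefore := map[string]bool{"[+]": true, "[*]": true, "**": true, "(": true}
	needsDotAfter := map[string]bool{"[+]": true, "[*]": true, "**": true, ")": true}

	tokens := []string{"a", ".", "b", "[+]", "c"} // lexed from "a.b[+]c"
	var out []string
	for i, tok := range tokens {
		if i > 0 && needsDotBefore[tok] && tokens[i-1] != "." {
			out = append(out, ".")
		}
		out = append(out, tok)
		if i != len(tokens)-1 && needsDotAfter[tok] && tokens[i+1] != "." {
			out = append(out, ".")
		}
	}
	fmt.Println(out) // [a . b . [+] . c]
}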
@@ -43,6 +43,7 @@ var tokeniserTests = []struct {
 	{"a.[0].c", append(make([]interface{}, 0), "a", ".", int64(0), ".", "c")},
 	{"[0]", append(make([]interface{}, 0), int64(0))},
 	{"0", append(make([]interface{}, 0), int64(0))},
+	{"a.b[+]c", append(make([]interface{}, 0), "a", ".", "b", ".", "[+]", ".", "c")},
 	{"a.cool(s.d.f == cool)", append(make([]interface{}, 0), "a", ".", "cool", ".", "(", "s", ".", "d", ".", "f", " == ", "cool", ")")},
 	{"a.cool.(s.d.f==cool OR t.b.h==frog).caterpillar", append(make([]interface{}, 0), "a", ".", "cool", ".", "(", "s", ".", "d", ".", "f", "==", "cool", "OR", "t", ".", "b", ".", "h", "==", "frog", ")", ".", "caterpillar")},
 	{"a.cool(s.d.f==cool and t.b.h==frog)*", append(make([]interface{}, 0), "a", ".", "cool", ".", "(", "s", ".", "d", ".", "f", "==", "cool", "and", "t", ".", "b", ".", "h", "==", "frog", ")", ".", "*")},
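The added case exercises the symmetric insertion: a.b[+]c tokenises with a traverse operator on both sides of [+] (a . b . [+] . c), matching the reworked before/after handling in Tokenise.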