From 5ee52f950646f08e4363957d7862b4b390930ec3 Mon Sep 17 00:00:00 2001
From: Mike Farah
Date: Thu, 24 Sep 2020 13:28:47 +1000
Subject: [PATCH] wip

---
 pkg/yqlib/path_postfix.go        |  2 +-
 pkg/yqlib/path_postfix_test.go   | 88 ++++++++++++++++++++++++++++++++
 pkg/yqlib/path_tokeniser.go      | 26 +++++-----
 pkg/yqlib/path_tokeniser_test.go |  1 +
 4 files changed, 102 insertions(+), 15 deletions(-)

diff --git a/pkg/yqlib/path_postfix.go b/pkg/yqlib/path_postfix.go
index 50eddd03..631076b1 100644
--- a/pkg/yqlib/path_postfix.go
+++ b/pkg/yqlib/path_postfix.go
@@ -125,7 +125,7 @@ func (p *pathPostFixer) ConvertToPostfix(infixTokens []*lex.Token) ([]*PathEleme
 	for _, token := range tokens {
 		switch token.Type {
-		case TokenIds["PATH_KEY"]: // handle splats and array appends here too
+		case TokenIds["PATH_KEY"], TokenIds["ARRAY_INDEX"], TokenIds["[+]"], TokenIds["[*]"], TokenIds["**"]:
 			var pathElement = PathElement{PathElementType: PathKey, Value: token.Value}
 			result = append(result, &pathElement)
 		case TokenIds["("]:
diff --git a/pkg/yqlib/path_postfix_test.go b/pkg/yqlib/path_postfix_test.go
index 39ab7b7a..60ba9060 100644
--- a/pkg/yqlib/path_postfix_test.go
+++ b/pkg/yqlib/path_postfix_test.go
@@ -61,6 +61,94 @@ Operation - TRAVERSE
 	test.AssertResultComplex(t, expectedOutput, actual)
 }
 
+func TestPostFixSimplePathNumbersExample(t *testing.T) {
+	var infix = "apples[0].cat"
+	var expectedOutput = `PathKey - 'apples'
+--------
+PathKey - '0'
+--------
+Operation - TRAVERSE
+--------
+PathKey - 'cat'
+--------
+Operation - TRAVERSE
+--------
+`
+
+	actual, err := testExpression(infix)
+	if err != nil {
+		t.Error(err)
+	}
+
+	test.AssertResultComplex(t, expectedOutput, actual)
+}
+
+func TestPostFixSimplePathAppendArrayExample(t *testing.T) {
+	var infix = "apples[+].cat"
+	var expectedOutput = `PathKey - 'apples'
+--------
+PathKey - '[+]'
+--------
+Operation - TRAVERSE
+--------
+PathKey - 'cat'
+--------
+Operation - TRAVERSE
+--------
+`
+
+	actual, err := testExpression(infix)
+	if err != nil {
+		t.Error(err)
+	}
+
+	test.AssertResultComplex(t, expectedOutput, actual)
+}
+
+func TestPostFixSimplePathSplatArrayExample(t *testing.T) {
+	var infix = "apples.[*]cat"
+	var expectedOutput = `PathKey - 'apples'
+--------
+PathKey - '[*]'
+--------
+Operation - TRAVERSE
+--------
+PathKey - 'cat'
+--------
+Operation - TRAVERSE
+--------
+`
+
+	actual, err := testExpression(infix)
+	if err != nil {
+		t.Error(err)
+	}
+
+	test.AssertResultComplex(t, expectedOutput, actual)
+}
+
+func TestPostFixDeepMatchExample(t *testing.T) {
+	var infix = "apples.**.cat"
+	var expectedOutput = `PathKey - 'apples'
+--------
+PathKey - '**'
+--------
+Operation - TRAVERSE
+--------
+PathKey - 'cat'
+--------
+Operation - TRAVERSE
+--------
+`
+
+	actual, err := testExpression(infix)
+	if err != nil {
+		t.Error(err)
+	}
+
+	test.AssertResultComplex(t, expectedOutput, actual)
+}
+
 func TestPostFixOrExample(t *testing.T) {
 	var infix = "a OR b"
 	var expectedOutput = `PathKey - 'a'
diff --git a/pkg/yqlib/path_tokeniser.go b/pkg/yqlib/path_tokeniser.go
index d7e22932..cbd6e5e2 100644
--- a/pkg/yqlib/path_tokeniser.go
+++ b/pkg/yqlib/path_tokeniser.go
@@ -8,22 +8,20 @@ import (
 	"github.com/timtadh/lexmachine/machines"
 )
 
-var Literals []string // The tokens representing literal strings
-var ClosingLiterals []string // The tokens representing literal strings
-var Keywords []string // The keyword tokens
-var Tokens []string // All of the tokens (including literals and keywords)
-var TokenIds map[string]int // A map from the token names to their int ids
+var Literals []string // The tokens representing literal strings
+var Keywords []string // The keyword tokens
+var Tokens []string // All of the tokens (including literals and keywords)
+var TokenIds map[string]int // A map from the token names to their int ids
+
+var bracketLiterals []string
 
 func initTokens() {
+	bracketLiterals = []string{"(", ")"}
 	Literals = []string{ // these need a traverse operator infront
-		"(",
 		"[+]",
 		"[*]",
 		"**",
 	}
-	ClosingLiterals = []string{ // these need a traverse operator after
-		")",
-	}
 	Tokens = []string{
 		"OR_OPERATOR",
 		"AND_OPERATOR",
 		"EQUALS_OPERATOR",
 		"TRAVERSE_OPERATOR",
@@ -33,8 +31,8 @@ func initTokens() {
 		"PATH_KEY", // apples
 		"ARRAY_INDEX", // 123
 	}
+	Tokens = append(Tokens, bracketLiterals...)
 	Tokens = append(Tokens, Literals...)
-	Tokens = append(Tokens, ClosingLiterals...)
 	TokenIds = make(map[string]int)
 	for i, tok := range Tokens {
 		TokenIds[tok] = i
@@ -80,11 +78,11 @@ func numberToken(name string, wrapped bool) lex.Action {
 // Creates the lexer object and compiles the NFA.
 func initLexer() (*lex.Lexer, error) {
 	lexer := lex.NewLexer()
-	for _, lit := range Literals {
+	for _, lit := range bracketLiterals {
 		r := "\\" + strings.Join(strings.Split(lit, ""), "\\")
 		lexer.Add([]byte(r), token(lit))
 	}
-	for _, lit := range ClosingLiterals {
+	for _, lit := range Literals {
 		r := "\\" + strings.Join(strings.Split(lit, ""), "\\")
 		lexer.Add([]byte(r), token(lit))
 	}
@@ -143,14 +141,14 @@ func (p *pathTokeniser) Tokenise(path string) ([]*lex.Token, error) {
 	var postProcessedTokens []*lex.Token = make([]*lex.Token, 0)
 
 	for index, token := range tokens {
-		for _, literalTokenDef := range append(Literals, "ARRAY_INDEX") {
+		for _, literalTokenDef := range append(Literals, "ARRAY_INDEX", "(") {
 			if index > 0 && token.Type == TokenIds[literalTokenDef] && tokens[index-1].Type != TokenIds["TRAVERSE_OPERATOR"] {
 				postProcessedTokens = append(postProcessedTokens, &lex.Token{Type: TokenIds["TRAVERSE_OPERATOR"], Value: "."})
 			}
 		}
 		postProcessedTokens = append(postProcessedTokens, token)
 
-		for _, literalTokenDef := range append(ClosingLiterals, "ARRAY_INDEX") {
+		for _, literalTokenDef := range append(Literals, "ARRAY_INDEX", ")") {
 			if index != len(tokens)-1 && token.Type == TokenIds[literalTokenDef] && tokens[index+1].Type != TokenIds["TRAVERSE_OPERATOR"] {
 				postProcessedTokens = append(postProcessedTokens, &lex.Token{Type: TokenIds["TRAVERSE_OPERATOR"], Value: "."})
 			}
diff --git a/pkg/yqlib/path_tokeniser_test.go b/pkg/yqlib/path_tokeniser_test.go
index d5b24811..31b6379e 100644
--- a/pkg/yqlib/path_tokeniser_test.go
+++ b/pkg/yqlib/path_tokeniser_test.go
@@ -43,6 +43,7 @@ var tokeniserTests = []struct {
 	{"a.[0].c", append(make([]interface{}, 0), "a", ".", int64(0), ".", "c")},
 	{"[0]", append(make([]interface{}, 0), int64(0))},
 	{"0", append(make([]interface{}, 0), int64(0))},
+	{"a.b[+]c", append(make([]interface{}, 0), "a", ".", "b", ".", "[+]", ".", "c")},
 	{"a.cool(s.d.f == cool)", append(make([]interface{}, 0), "a", ".", "cool", ".", "(", "s", ".", "d", ".", "f", " == ", "cool", ")")},
 	{"a.cool.(s.d.f==cool OR t.b.h==frog).caterpillar", append(make([]interface{}, 0), "a", ".", "cool", ".", "(", "s", ".", "d", ".", "f", "==", "cool", "OR", "t", ".", "b", ".", "h", "==", "frog", ")", ".", "caterpillar")},
 	{"a.cool(s.d.f==cool and t.b.h==frog)*", append(make([]interface{}, 0), "a", ".", "cool", ".", "(", "s", ".", "d", ".", "f", "==", "cool", "and", "t", ".", "b", ".", "h", "==", "frog", ")", ".", "*")},
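
Note (editor's sketch, not part of the patch): the tokeniser change above folds ClosingLiterals into a new bracketLiterals list, so "[+]", "[*]" and "**" now get an implicit traverse operator inserted both before and after them, while "(" only gets one in front and ")" only gets one after. The standalone Go program below illustrates that post-processing pass under those assumptions; the token struct, the needsDotBefore/needsDotAfter maps and insertTraverseOperators are illustrative names invented for this sketch, not yq's actual API.

package main

import "fmt"

// token is a simplified stand-in for lexmachine's *lex.Token.
type token struct {
	typ   string
	value string
}

// Mirrors append(Literals, "ARRAY_INDEX", "(") in the patch: token types
// that need an implicit "." inserted in front of them.
var needsDotBefore = map[string]bool{
	"[+]": true, "[*]": true, "**": true, "ARRAY_INDEX": true, "(": true,
}

// Mirrors append(Literals, "ARRAY_INDEX", ")") in the patch: token types
// that need an implicit "." inserted after them.
var needsDotAfter = map[string]bool{
	"[+]": true, "[*]": true, "**": true, "ARRAY_INDEX": true, ")": true,
}

// insertTraverseOperators walks the raw token stream once, looking one
// token back and one token ahead, and inserts a TRAVERSE token wherever
// a literal is missing its dot -- e.g. "b[+]c" becomes b . [+] . c.
func insertTraverseOperators(tokens []token) []token {
	dot := token{typ: "TRAVERSE", value: "."}
	var result []token
	for i, tok := range tokens {
		if i > 0 && needsDotBefore[tok.typ] && tokens[i-1].typ != "TRAVERSE" {
			result = append(result, dot)
		}
		result = append(result, tok)
		if i != len(tokens)-1 && needsDotAfter[tok.typ] && tokens[i+1].typ != "TRAVERSE" {
			result = append(result, dot)
		}
	}
	return result
}

func main() {
	// Raw lex of "a.b[+]c" -- the dots around [+] are missing.
	raw := []token{
		{"PATH_KEY", "a"},
		{"TRAVERSE", "."},
		{"PATH_KEY", "b"},
		{"[+]", "[+]"},
		{"PATH_KEY", "c"},
	}
	for _, t := range insertTraverseOperators(raw) {
		fmt.Printf("%q ", t.value)
	}
	fmt.Println() // "a" "." "b" "." "[+]" "." "c"
}

The printed stream matches the new tokeniser test case {"a.b[+]c", ... "a", ".", "b", ".", "[+]", ".", "c"} added in path_tokeniser_test.go above.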