commit 5ee52f9506 (parent c2159d9861)
Author: Mike Farah
Date:   2020-09-24 13:28:47 +10:00

4 changed files with 102 additions and 15 deletions

View File

@@ -125,7 +125,7 @@ func (p *pathPostFixer) ConvertToPostfix(infixTokens []*lex.Token) ([]*PathEleme
    for _, token := range tokens {
        switch token.Type {
-       case TokenIds["PATH_KEY"]: // handle splats and array appends here too
+       case TokenIds["PATH_KEY"], TokenIds["ARRAY_INDEX"], TokenIds["[+]"], TokenIds["[*]"], TokenIds["**"]:
            var pathElement = PathElement{PathElementType: PathKey, Value: token.Value}
            result = append(result, &pathElement)
        case TokenIds["("]:
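Note: the widened case means array indices, appends ([+]), splats ([*]) and deep matches (**) now flow through the postfix converter exactly like plain map keys. A minimal, self-contained sketch of that dispatch, using toy types rather than yq's real PathElement:

package main

import "fmt"

type tokenType int

const (
    pathKey tokenType = iota
    arrayIndex
    arrayAppend // "[+]"
    arraySplat  // "[*]"
    deepMatch   // "**"
    traverse
)

type token struct {
    typ   tokenType
    value string
}

func main() {
    tokens := []token{{pathKey, "apples"}, {traverse, "."}, {arraySplat, "[*]"}}
    for _, t := range tokens {
        switch t.typ {
        // One case now covers every token type that becomes a PathKey element.
        case pathKey, arrayIndex, arrayAppend, arraySplat, deepMatch:
            fmt.Printf("PathKey - '%s'\n", t.value)
        case traverse:
            fmt.Println("Operation - TRAVERSE")
        }
    }
}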

View File

@@ -61,6 +61,94 @@ Operation - TRAVERSE
    test.AssertResultComplex(t, expectedOutput, actual)
}
+
+func TestPostFixSimplePathNumbersExample(t *testing.T) {
+   var infix = "apples[0].cat"
+   var expectedOutput = `PathKey - 'apples'
+--------
+PathKey - '0'
+--------
+Operation - TRAVERSE
+--------
+PathKey - 'cat'
+--------
+Operation - TRAVERSE
+--------
+`
+
+   actual, err := testExpression(infix)
+   if err != nil {
+       t.Error(err)
+   }
+
+   test.AssertResultComplex(t, expectedOutput, actual)
+}
+
+func TestPostFixSimplePathAppendArrayExample(t *testing.T) {
+   var infix = "apples[+].cat"
+   var expectedOutput = `PathKey - 'apples'
+--------
+PathKey - '[+]'
+--------
+Operation - TRAVERSE
+--------
+PathKey - 'cat'
+--------
+Operation - TRAVERSE
+--------
+`
+
+   actual, err := testExpression(infix)
+   if err != nil {
+       t.Error(err)
+   }
+
+   test.AssertResultComplex(t, expectedOutput, actual)
+}
+
+func TestPostFixSimplePathSplatArrayExample(t *testing.T) {
+   var infix = "apples.[*]cat"
+   var expectedOutput = `PathKey - 'apples'
+--------
+PathKey - '[*]'
+--------
+Operation - TRAVERSE
+--------
+PathKey - 'cat'
+--------
+Operation - TRAVERSE
+--------
+`
+
+   actual, err := testExpression(infix)
+   if err != nil {
+       t.Error(err)
+   }
+
+   test.AssertResultComplex(t, expectedOutput, actual)
+}
+
+func TestPostFixDeepMatchExample(t *testing.T) {
+   var infix = "apples.**.cat"
+   var expectedOutput = `PathKey - 'apples'
+--------
+PathKey - '**'
+--------
+Operation - TRAVERSE
+--------
+PathKey - 'cat'
+--------
+Operation - TRAVERSE
+--------
+`
+
+   actual, err := testExpression(infix)
+   if err != nil {
+       t.Error(err)
+   }
+
+   test.AssertResultComplex(t, expectedOutput, actual)
+}
+
func TestPostFixOrExample(t *testing.T) {
    var infix = "a OR b"
    var expectedOutput = `PathKey - 'a'
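Note: the expected strings in these tests are the path expression in postfix form: each key is emitted as it is read, and every key after the first is followed by a TRAVERSE operation. A toy illustration of that shape, a simplification and not the real ConvertToPostfix:

package main

import "fmt"

// Emit each path element immediately; keys after the first are followed
// by a TRAVERSE, matching the expected outputs in the tests above.
func toPostfix(keys []string) []string {
    var out []string
    for i, k := range keys {
        out = append(out, fmt.Sprintf("PathKey - '%s'", k))
        if i > 0 {
            out = append(out, "Operation - TRAVERSE")
        }
    }
    return out
}

func main() {
    for _, line := range toPostfix([]string{"apples", "[+]", "cat"}) {
        fmt.Println(line)
    }
    // PathKey - 'apples'
    // PathKey - '[+]'
    // Operation - TRAVERSE
    // PathKey - 'cat'
    // Operation - TRAVERSE
}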

View File

@@ -9,21 +9,19 @@ import (
)

var Literals []string        // The tokens representing literal strings
var ClosingLiterals []string // The tokens representing literal strings
var Keywords []string        // The keyword tokens
var Tokens []string          // All of the tokens (including literals and keywords)
var TokenIds map[string]int  // A map from the token names to their int ids
+var bracketLiterals []string

func initTokens() {
+   bracketLiterals = []string{"(", ")"}
    Literals = []string{ // these need a traverse operator infront
-       "(",
        "[+]",
        "[*]",
        "**",
    }
-   ClosingLiterals = []string{ // these need a traverse operator after
-       ")",
-   }
    Tokens = []string{
        "OR_OPERATOR",
        "AND_OPERATOR",
@@ -33,8 +31,8 @@ func initTokens() {
        "PATH_KEY",    // apples
        "ARRAY_INDEX", // 123
    }
+   Tokens = append(Tokens, bracketLiterals...)
    Tokens = append(Tokens, Literals...)
-   Tokens = append(Tokens, ClosingLiterals...)
    TokenIds = make(map[string]int)
    for i, tok := range Tokens {
        TokenIds[tok] = i
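Note: token ids are positional: each name maps to its index in the combined Tokens slice, so as long as every literal list is appended before the map is built, lookups like TokenIds["[+]"] keep working regardless of which sub-list a token came from. A quick standalone illustration, with the literal values taken from the lists above:

package main

import "fmt"

func main() {
    bracketLiterals := []string{"(", ")"}
    literals := []string{"[+]", "[*]", "**"}
    tokens := []string{"OR_OPERATOR", "AND_OPERATOR", "PATH_KEY", "ARRAY_INDEX"}
    tokens = append(tokens, bracketLiterals...)
    tokens = append(tokens, literals...)

    // Build the name -> id map exactly as initTokens does.
    tokenIds := make(map[string]int)
    for i, tok := range tokens {
        tokenIds[tok] = i
    }
    fmt.Println(tokenIds["("], tokenIds["[+]"]) // 4 6 -- distinct, stable ids
}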
@@ -80,11 +78,11 @@ func numberToken(name string, wrapped bool) lex.Action {
// Creates the lexer object and compiles the NFA.
func initLexer() (*lex.Lexer, error) {
    lexer := lex.NewLexer()
-   for _, lit := range Literals {
+   for _, lit := range bracketLiterals {
        r := "\\" + strings.Join(strings.Split(lit, ""), "\\")
        lexer.Add([]byte(r), token(lit))
    }
-   for _, lit := range ClosingLiterals {
+   for _, lit := range Literals {
        r := "\\" + strings.Join(strings.Split(lit, ""), "\\")
        lexer.Add([]byte(r), token(lit))
    }
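Note: the escaping expression in these loops backslash-prefixes every character of a literal, which is enough to make regex metacharacters like ( and [ match literally. A standalone check of what it produces, using the same expression in a toy harness:

package main

import (
    "fmt"
    "strings"
)

func main() {
    for _, lit := range []string{"(", ")", "[+]", "[*]", "**"} {
        // Same expression as in initLexer: escape every character.
        r := "\\" + strings.Join(strings.Split(lit, ""), "\\")
        fmt.Printf("%-4s -> %s\n", lit, r)
    }
    // (    -> \(
    // )    -> \)
    // [+]  -> \[\+\]
    // [*]  -> \[\*\]
    // **   -> \*\*
}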
@@ -143,14 +141,14 @@ func (p *pathTokeniser) Tokenise(path string) ([]*lex.Token, error) {
    var postProcessedTokens []*lex.Token = make([]*lex.Token, 0)

    for index, token := range tokens {
-       for _, literalTokenDef := range append(Literals, "ARRAY_INDEX") {
+       for _, literalTokenDef := range append(Literals, "ARRAY_INDEX", "(") {
            if index > 0 && token.Type == TokenIds[literalTokenDef] && tokens[index-1].Type != TokenIds["TRAVERSE_OPERATOR"] {
                postProcessedTokens = append(postProcessedTokens, &lex.Token{Type: TokenIds["TRAVERSE_OPERATOR"], Value: "."})
            }
        }
        postProcessedTokens = append(postProcessedTokens, token)
-       for _, literalTokenDef := range append(ClosingLiterals, "ARRAY_INDEX") {
+       for _, literalTokenDef := range append(Literals, "ARRAY_INDEX", ")") {
            if index != len(tokens)-1 && token.Type == TokenIds[literalTokenDef] && tokens[index+1].Type != TokenIds["TRAVERSE_OPERATOR"] {
                postProcessedTokens = append(postProcessedTokens, &lex.Token{Type: TokenIds["TRAVERSE_OPERATOR"], Value: "."})
            }
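Note: this post-processing pass is what lets users omit dots around literals: a "." traverse token is spliced in before any literal that does not already follow one, and after any literal that does not already precede one. A minimal sketch of the "before" half, using plain strings instead of lex.Token:

package main

import "fmt"

// Splice a "." before any literal that is not already preceded by one,
// so "apples[+].cat" tokenises like "apples.[+].cat". The symmetric
// "after" pass is omitted for brevity.
func insertTraverse(tokens []string) []string {
    isLiteral := map[string]bool{"[+]": true, "[*]": true, "**": true, "(": true}
    out := make([]string, 0, len(tokens))
    for i, tok := range tokens {
        if i > 0 && isLiteral[tok] && tokens[i-1] != "." {
            out = append(out, ".") // missing traverse operator in front
        }
        out = append(out, tok)
    }
    return out
}

func main() {
    fmt.Println(insertTraverse([]string{"apples", "[+]", ".", "cat"}))
    // [apples . [+] . cat]
}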

View File

@@ -43,6 +43,7 @@ var tokeniserTests = []struct {
    {"a.[0].c", append(make([]interface{}, 0), "a", ".", int64(0), ".", "c")},
    {"[0]", append(make([]interface{}, 0), int64(0))},
    {"0", append(make([]interface{}, 0), int64(0))},
+   {"a.b[+]c", append(make([]interface{}, 0), "a", ".", "b", ".", "[+]", ".", "c")},
    {"a.cool(s.d.f == cool)", append(make([]interface{}, 0), "a", ".", "cool", ".", "(", "s", ".", "d", ".", "f", " == ", "cool", ")")},
    {"a.cool.(s.d.f==cool OR t.b.h==frog).caterpillar", append(make([]interface{}, 0), "a", ".", "cool", ".", "(", "s", ".", "d", ".", "f", "==", "cool", "OR", "t", ".", "b", ".", "h", "==", "frog", ")", ".", "caterpillar")},
    {"a.cool(s.d.f==cool and t.b.h==frog)*", append(make([]interface{}, 0), "a", ".", "cool", ".", "(", "s", ".", "d", ".", "f", "==", "cool", "and", "t", ".", "b", ".", "h", "==", "frog", ")", ".", "*")},