mirror of https://github.com/mikefarah/yq.git (synced 2024-11-12 13:48:06 +00:00)

fixed wrapping!

parent 4c95efa469
commit c321600afa
@@ -1,6 +1,7 @@
 package yqlib
 
 import (
+	"strconv"
 	"strings"
 
 	lex "github.com/timtadh/lexmachine"
@@ -43,6 +44,30 @@ func token(name string) lex.Action {
 	}
 }
 
+func unwrap(value string) string {
+	return value[1 : len(value)-1]
+}
+
+func wrappedToken(name string) lex.Action {
+	return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
+		return s.Token(TokenIds[name], unwrap(string(m.Bytes)), m), nil
+	}
+}
+
+func numberToken(name string, wrapped bool) lex.Action {
+	return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
+		var numberString = string(m.Bytes)
+		if wrapped {
+			numberString = unwrap(numberString)
+		}
+		var number, errParsingInt = strconv.ParseInt(numberString, 10, 64) // nolint
+		if errParsingInt != nil {
+			return nil, errParsingInt
+		}
+		return s.Token(TokenIds[name], number, m), nil
+	}
+}
+
 // Creates the lexer object and compiles the NFA.
 func initLexer() (*lex.Lexer, error) {
 	lexer := lex.NewLexer()
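
The hunk above adds three small helpers: unwrap strips one leading and one trailing character from a match, wrappedToken emits the unwrapped text of a match, and numberToken parses the (optionally unwrapped) match as an int64. A minimal standalone sketch of that unwrapping and parsing behaviour, outside the lexmachine machinery, might look like the following; the main function and the example inputs are illustrative only and are not part of the commit.

package main

import (
	"fmt"
	"strconv"
)

// unwrap drops exactly one character from each end, e.g. `"foo.bar"` -> foo.bar
// or `[-12]` -> -12, mirroring the helper introduced in this commit.
func unwrap(value string) string {
	return value[1 : len(value)-1]
}

func main() {
	fmt.Println(unwrap(`"foo.bar"`)) // foo.bar

	// A wrapped array index has its brackets stripped before parsing,
	// so the resulting token value is an int64 rather than a string.
	n, err := strconv.ParseInt(unwrap(`[-12]`), 10, 64)
	fmt.Println(n, err) // -12 <nil>
}
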
@@ -51,9 +76,10 @@ func initLexer() (*lex.Lexer, error) {
 		lexer.Add([]byte(r), token(lit))
 	}
 	lexer.Add([]byte(`([Oo][Rr]|[Aa][Nn][Dd]|==)`), token("OPERATION"))
-	lexer.Add([]byte(`\[-?[0-9]+\]`), token("ARRAY_INDEX"))
+	lexer.Add([]byte(`\[-?[0-9]+\]`), numberToken("ARRAY_INDEX", true))
+	lexer.Add([]byte(`-?[0-9]+`), numberToken("ARRAY_INDEX", false))
 	lexer.Add([]byte("( |\t|\n|\r)+"), skip)
-	lexer.Add([]byte(`"[^ "]+"`), token("PATH"))
+	lexer.Add([]byte(`"[^ "]+"`), wrappedToken("PATH"))
 	lexer.Add([]byte(`[^ \.\[\(\)=]+`), token("PATH"))
 	lexer.Add([]byte(`\.`), skip)
 	err := lexer.Compile()
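
With these rule changes, bracketed indices such as [0] go through numberToken with wrapped=true, bare numbers get a new rule with wrapped=false, and quoted path segments go through wrappedToken so the surrounding quotes are dropped. The sketch below mimics the two numeric rules with plain regular expressions to show what value each would produce; it is an illustration only and does not use lexmachine.

package main

import (
	"fmt"
	"regexp"
	"strconv"
)

var (
	wrappedIndex = regexp.MustCompile(`^\[-?[0-9]+\]$`) // e.g. [0], [-12]
	bareIndex    = regexp.MustCompile(`^-?[0-9]+$`)     // e.g. 0, -12
)

// parseIndex mirrors numberToken: strip the brackets if present, then parse
// the remainder as a base-10 int64.
func parseIndex(segment string) (int64, error) {
	switch {
	case wrappedIndex.MatchString(segment):
		return strconv.ParseInt(segment[1:len(segment)-1], 10, 64)
	case bareIndex.MatchString(segment):
		return strconv.ParseInt(segment, 10, 64)
	default:
		return 0, fmt.Errorf("not an index: %q", segment)
	}
}

func main() {
	for _, s := range []string{"[0]", "[-12]", "7"} {
		n, err := parseIndex(s)
		fmt.Println(s, "->", n, err)
	}
}
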
@@ -11,25 +11,25 @@ var tokeniserTests = []struct {
 	expectedTokens []interface{}
 }{ // TODO: Ensure ALL documented examples have tests! sheesh
 
-	// {"apples.BANANAS", append(make([]interface{}, 0), "apples", "BANANAS")},
-	// {"a.b.**", append(make([]interface{}, 0), "a", "b", "**")},
-	// {"a.\"=\".frog", append(make([]interface{}, 0), "a", "=", "frog")},
-	// {"a.b.*", append(make([]interface{}, 0), "a", "b", "*")},
-	// {"a.b.thin*", append(make([]interface{}, 0), "a", "b", "thin*")},
-	// {"a.b[0]", append(make([]interface{}, 0), "a", "b", "0")},
-	// {"a.b[*]", append(make([]interface{}, 0), "a", "b", "[*]")},
-	// {"a.b[-12]", append(make([]interface{}, 0), "a", "b", "-12")},
-	// {"a.b.0", append(make([]interface{}, 0), "a", "b", "0")},
-	// {"a.b.d[+]", append(make([]interface{}, 0), "a", "b", "d", "[+]")},
-	// {"a", append(make([]interface{}, 0), "a")},
-	// {"\"a.b\".c", append(make([]interface{}, 0), "a.b", "c")},
-	// {`b."foo.bar"`, append(make([]interface{}, 0), "b", "foo.bar")},
-	// {"animals(.==cat)", append(make([]interface{}, 0), "animals", "(", "==", "cat", ")")}, // TODO validate this dot is not a join?
-	// {"animals(.==c*)", append(make([]interface{}, 0), "animals", "(", "==", "c*", ")")}, // TODO validate this dot is not a join?
-	// {"[1].a.d", append(make([]interface{}, 0), int64(1), "a", "d")},
-	// {"a[0].c", append(make([]interface{}, 0), "a", int64(0), "c")},
-	// {"[0]", append(make([]interface{}, 0), int64(0))},
-	// {"a.cool(s.d.f==cool)", append(make([]interface{}, 0), "a", "cool", "(", "s", "d", "f", "==", "cool", ")")},
+	{"apples.BANANAS", append(make([]interface{}, 0), "apples", "BANANAS")},
+	{"a.b.**", append(make([]interface{}, 0), "a", "b", "**")},
+	{"a.\"=\".frog", append(make([]interface{}, 0), "a", "=", "frog")},
+	{"a.b.*", append(make([]interface{}, 0), "a", "b", "*")},
+	{"a.b.thin*", append(make([]interface{}, 0), "a", "b", "thin*")},
+	{"a.b[0]", append(make([]interface{}, 0), "a", "b", int64(0))},
+	{"a.b[*]", append(make([]interface{}, 0), "a", "b", "[*]")},
+	{"a.b[-12]", append(make([]interface{}, 0), "a", "b", int64(-12))},
+	{"a.b.0", append(make([]interface{}, 0), "a", "b", int64(0))},
+	{"a.b.d[+]", append(make([]interface{}, 0), "a", "b", "d", "[+]")},
+	{"a", append(make([]interface{}, 0), "a")},
+	{"\"a.b\".c", append(make([]interface{}, 0), "a.b", "c")},
+	{`b."foo.bar"`, append(make([]interface{}, 0), "b", "foo.bar")},
+	{"animals(.==cat)", append(make([]interface{}, 0), "animals", "(", "==", "cat", ")")}, // TODO validate this dot is not a join?
+	{"animals(.==c*)", append(make([]interface{}, 0), "animals", "(", "==", "c*", ")")}, // TODO validate this dot is not a join?
+	{"[1].a.d", append(make([]interface{}, 0), int64(1), "a", "d")},
+	{"a[0].c", append(make([]interface{}, 0), "a", int64(0), "c")},
+	{"[0]", append(make([]interface{}, 0), int64(0))},
+	{"a.cool(s.d.f==cool)", append(make([]interface{}, 0), "a", "cool", "(", "s", "d", "f", "==", "cool", ")")},
 	{"a.cool(s.d.f==cool OR t.b.h==frog).caterpillar", append(make([]interface{}, 0), "a", "cool", "(", "s", "d", "f", "==", "cool", "OR", "t", "b", "h", "==", "frog", ")", "caterpillar")},
 	{"a.cool(s.d.f==cool and t.b.h==frog)*", append(make([]interface{}, 0), "a", "cool", "(", "s", "d", "f", "==", "cool", "and", "t", "b", "h", "==", "frog", ")", "*")},
 	{"a.cool(s.d.f==cool and t.b.h==frog).th*", append(make([]interface{}, 0), "a", "cool", "(", "s", "d", "f", "==", "cool", "and", "t", "b", "h", "==", "frog", ")", "th*")},
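
The test table now expects numeric path segments as int64 values rather than strings, and the previously commented-out cases are re-enabled. A self-contained illustration of why the type matters for the comparison, assuming the tests check token slices with something like reflect.DeepEqual (the actual assertion code is not shown in this diff):

package main

import (
	"fmt"
	"reflect"
)

func main() {
	got := []interface{}{"a", "b", int64(0)}     // what tokenising "a.b[0]" should now yield
	wantOld := []interface{}{"a", "b", "0"}      // the old, string-based expectation
	wantNew := []interface{}{"a", "b", int64(0)} // the updated expectation from this commit

	fmt.Println(reflect.DeepEqual(got, wantOld)) // false: int64(0) != "0"
	fmt.Println(reflect.DeepEqual(got, wantNew)) // true
}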