diff --git a/pkg/yqlib/expression_processing_test.go b/pkg/yqlib/expression_processing_test.go
index 93cb2047..aac09934 100644
--- a/pkg/yqlib/expression_processing_test.go
+++ b/pkg/yqlib/expression_processing_test.go
@@ -12,6 +12,11 @@ var pathTests = []struct {
 	expectedTokens  []interface{}
 	expectedPostFix []interface{}
 }{
+	{
+		`"\""`,
+		append(make([]interface{}, 0), "\" (string)"),
+		append(make([]interface{}, 0), "\" (string)"),
+	},
 	{
 		`[]|join(".")`,
 		append(make([]interface{}, 0), "[", "EMPTY", "]", "PIPE", "JOIN", "(", ". (string)", ")"),
diff --git a/pkg/yqlib/expression_tokeniser.go b/pkg/yqlib/expression_tokeniser.go
index 888b882d..e509c848 100644
--- a/pkg/yqlib/expression_tokeniser.go
+++ b/pkg/yqlib/expression_tokeniser.go
@@ -189,6 +189,7 @@ func stringValue(wrapped bool) lex.Action {
 		if wrapped {
 			value = unwrap(value)
 		}
+		value = strings.ReplaceAll(value, "\\\"", "\"")
 		return &token{TokenType: operationToken, Operation: createValueOperation(value, value)}, nil
 	}
 }
@@ -334,7 +335,7 @@ func initLexer() (*lex.Lexer, error) {
 	lexer.Add([]byte(`[Nn][Uu][Ll][Ll]`), nullValue())
 	lexer.Add([]byte(`~`), nullValue())
 
-	lexer.Add([]byte(`"[^"]*"`), stringValue(true))
+	lexer.Add([]byte(`"([^"\\]*(\\.[^"\\]*)*)"`), stringValue(true))
 
 	lexer.Add([]byte(`strenv\([^\)]+\)`), envOp(true))
 	lexer.Add([]byte(`env\([^\)]+\)`), envOp(false))
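
For context, here is a minimal sketch of what the pattern change buys. The old rule `"[^"]*"` stops at the very first `"` it encounters, so an escaped quote like `\"` terminates the token early; the new rule `"([^"\\]*(\\.[^"\\]*)*)"` treats a backslash followed by any character as part of the string, and the added `strings.ReplaceAll` call then unescapes `\"` back to `"`. This sketch uses Go's standard `regexp` package purely for illustration (the real rules are registered through `lexer.Add` above, not `regexp`), and the sample input is invented:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Illustrative only: these anchored patterns mirror the old and new
// string rules from initLexer in the diff above.
var (
	oldString = regexp.MustCompile(`^"[^"]*"`)
	newString = regexp.MustCompile(`^"([^"\\]*(\\.[^"\\]*)*)"`)
)

func main() {
	input := `"hello \"world\""` // a string literal containing escaped quotes

	// The old rule stops at the first quote character, escaped or not,
	// so it matches only `"hello \"` and mis-tokenises the remainder.
	fmt.Printf("old: %q\n", oldString.FindString(input))

	// The new rule consumes backslash-escaped characters, so the whole
	// literal is matched as a single token.
	matched := newString.FindString(input)
	fmt.Printf("new: %q\n", matched)

	// stringValue then unwraps the surrounding quotes and unescapes
	// \" -> ", matching the strings.ReplaceAll line added in the diff.
	value := strings.ReplaceAll(matched[1:len(matched)-1], `\"`, `"`)
	fmt.Printf("value: %q\n", value)
}
```

The new test case exercises exactly this path: the expression `"\""` should tokenise to a single string token whose value is a lone `"`, which is what the unescaping step produces.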