mirror of https://github.com/mikefarah/yq.git (synced 2024-11-12 05:38:04 +00:00)

Cleaning up exposed public api

commit 49ac2bac13 (parent e28df367eb)
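The rename pattern running through this commit is the standard Go way of shrinking a package's public surface: the interface keeps the short name (pathPostFixer, pathTokeniser, writeInPlaceHandler), the concrete struct becomes an unexported ...Impl type, and the constructor returns the interface. A minimal, generic sketch of that shape, using made-up names rather than yq's actual code:

package example

// The behaviour callers rely on lives behind a small interface.
type postFixer interface {
    ConvertToPostfix(tokens []string) ([]string, error)
}

// The concrete type stays unexported, with an "Impl" suffix.
type postFixerImpl struct{}

// The constructor returns the interface, so callers never depend
// on the concrete type.
func newPostFixer() postFixer {
    return &postFixerImpl{}
}

func (p *postFixerImpl) ConvertToPostfix(tokens []string) ([]string, error) {
    // real conversion logic would go here; this sketch just echoes its input
    return tokens, nil
}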
@@ -17,8 +17,8 @@ func format(attr color.Attribute) string {
     return fmt.Sprintf("%s[%dm", escape, attr)
 }
 
-func ColorizeAndPrint(bytes []byte, writer io.Writer) error {
-    tokens := lexer.Tokenize(string(bytes))
+func colorizeAndPrint(yamlBytes []byte, writer io.Writer) error {
+    tokens := lexer.Tokenize(string(yamlBytes))
     var p printer.Printer
     p.Bool = func() *printer.Property {
         return &printer.Property{
@@ -50,7 +50,7 @@ func (ye *yamlEncoder) Encode(node *yaml.Node) error {
     }
 
     if ye.colorise {
-        return ColorizeAndPrint(tempBuffer.Bytes(), ye.destination)
+        return colorizeAndPrint(tempBuffer.Bytes(), ye.destination)
     }
     return nil
 }
@@ -86,7 +86,7 @@ type Operation struct {
     StringValue   string
     CandidateNode *CandidateNode // used for Value Path elements
     Preferences   interface{}
-    UpdateAssign  bool // used for assign ops, when true it means we evaluate the rhs given the lhs (instead of matching nodes)
+    UpdateAssign  bool // used for assign ops, when true it means we evaluate the rhs given the lhs
 }
 
 func createValueOperation(value interface{}, stringValue string) *Operation {
@@ -6,15 +6,15 @@ import (
     logging "gopkg.in/op/go-logging.v1"
 )
 
-type pathPostFixerInterface interface {
+type pathPostFixer interface {
     ConvertToPostfix([]*token) ([]*Operation, error)
 }
 
-type pathPostFixer struct {
+type pathPostFixerImpl struct {
 }
 
-func newPathPostFixer() pathPostFixerInterface {
-    return &pathPostFixer{}
+func newPathPostFixer() pathPostFixer {
+    return &pathPostFixerImpl{}
 }
 
 func popOpToResult(opStack []*token, result []*Operation) ([]*token, []*Operation) {
@@ -23,22 +23,22 @@ func popOpToResult(opStack []*token, result []*Operation) ([]*token, []*Operatio
     return opStack, append(result, newOp.Operation)
 }
 
-func (p *pathPostFixer) ConvertToPostfix(infixTokens []*token) ([]*Operation, error) {
+func (p *pathPostFixerImpl) ConvertToPostfix(infixTokens []*token) ([]*Operation, error) {
     var result []*Operation
     // surround the whole thing with quotes
-    var opStack = []*token{&token{TokenType: OpenBracket}}
-    var tokens = append(infixTokens, &token{TokenType: CloseBracket})
+    var opStack = []*token{&token{TokenType: openBracket}}
+    var tokens = append(infixTokens, &token{TokenType: closeBracket})
 
     for _, currentToken := range tokens {
         log.Debugf("postfix processing currentToken %v, %v", currentToken.toString(), currentToken.Operation)
         switch currentToken.TokenType {
-        case OpenBracket, OpenCollect, OpenCollectObject:
+        case openBracket, openCollect, openCollectObject:
             opStack = append(opStack, currentToken)
-        case CloseCollect, CloseCollectObject:
-            var opener tokenType = OpenCollect
+        case closeCollect, closeCollectObject:
+            var opener tokenType = openCollect
             var collectOperator *operationType = collectOpType
-            if currentToken.TokenType == CloseCollectObject {
-                opener = OpenCollectObject
+            if currentToken.TokenType == closeCollectObject {
+                opener = openCollectObject
                 collectOperator = collectObjectOpType
             }
             itemsInMiddle := false
@@ -56,10 +56,10 @@ func (p *pathPostFixer) ConvertToPostfix(infixTokens []*token) ([]*Operation, er
             // now we should have [] as the last element on the opStack, get rid of it
             opStack = opStack[0 : len(opStack)-1]
             //and append a collect to the opStack
-            opStack = append(opStack, &token{TokenType: OperationToken, Operation: &Operation{OperationType: shortPipeOpType}})
-            opStack = append(opStack, &token{TokenType: OperationToken, Operation: &Operation{OperationType: collectOperator}})
-        case CloseBracket:
-            for len(opStack) > 0 && opStack[len(opStack)-1].TokenType != OpenBracket {
+            opStack = append(opStack, &token{TokenType: operationToken, Operation: &Operation{OperationType: shortPipeOpType}})
+            opStack = append(opStack, &token{TokenType: operationToken, Operation: &Operation{OperationType: collectOperator}})
+        case closeBracket:
+            for len(opStack) > 0 && opStack[len(opStack)-1].TokenType != openBracket {
                 opStack, result = popOpToResult(opStack, result)
             }
             if len(opStack) == 0 {
@@ -72,7 +72,7 @@ func (p *pathPostFixer) ConvertToPostfix(infixTokens []*token) ([]*Operation, er
             var currentPrecedence = currentToken.Operation.OperationType.Precedence
             // pop off higher precedent operators onto the result
             for len(opStack) > 0 &&
-                opStack[len(opStack)-1].TokenType == OperationToken &&
+                opStack[len(opStack)-1].TokenType == operationToken &&
                 opStack[len(opStack)-1].Operation.OperationType.Precedence >= currentPrecedence {
                 opStack, result = popOpToResult(opStack, result)
             }
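ConvertToPostfix above is a shunting-yard pass: opening tokens are pushed onto opStack, closing tokens pop back to the matching opener, and operators of higher or equal precedence are popped to the result before a new operator is pushed. A self-contained, hypothetical sketch of the same idea for plain arithmetic operators (the names and operator set are illustrative, not yq's):

package main

import "fmt"

var precedence = map[string]int{"+": 1, "-": 1, "*": 2, "/": 2}

// toPostfix converts an infix token slice to postfix (RPN) using an operator stack.
func toPostfix(infix []string) []string {
    var result, opStack []string
    for _, tok := range infix {
        switch {
        case tok == "(":
            opStack = append(opStack, tok)
        case tok == ")":
            // pop operators until the matching "("
            for len(opStack) > 0 && opStack[len(opStack)-1] != "(" {
                result = append(result, opStack[len(opStack)-1])
                opStack = opStack[:len(opStack)-1]
            }
            if len(opStack) > 0 {
                opStack = opStack[:len(opStack)-1] // discard the "("
            }
        case precedence[tok] > 0:
            // pop operators of higher or equal precedence first
            for len(opStack) > 0 && precedence[opStack[len(opStack)-1]] >= precedence[tok] {
                result = append(result, opStack[len(opStack)-1])
                opStack = opStack[:len(opStack)-1]
            }
            opStack = append(opStack, tok)
        default: // operand
            result = append(result, tok)
        }
    }
    for len(opStack) > 0 { // flush any remaining operators
        result = append(result, opStack[len(opStack)-1])
        opStack = opStack[:len(opStack)-1]
    }
    return result
}

func main() {
    fmt.Println(toPostfix([]string{"a", "+", "b", "*", "(", "c", "-", "d", ")"}))
    // prints [a b c d - * +]
}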
@@ -15,14 +15,14 @@ func skip(*lex.Scanner, *machines.Match) (interface{}, error) {
 type tokenType uint32
 
 const (
-    OperationToken = 1 << iota
-    OpenBracket
-    CloseBracket
-    OpenCollect
-    CloseCollect
-    OpenCollectObject
-    CloseCollectObject
-    TraverseArrayCollect
+    operationToken = 1 << iota
+    openBracket
+    closeBracket
+    openCollect
+    closeCollect
+    openCollectObject
+    closeCollectObject
+    traverseArrayCollect
 )
 
 type token struct {
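The constant block above uses 1 << iota, so every token type occupies its own bit; with the lowercase spellings they are now visible only inside the package. A tiny, generic illustration of the same declaration style (hypothetical names, not yq's constants):

package main

import "fmt"

type flag uint32

const (
    read    flag = 1 << iota // 1
    write                    // 2
    execute                  // 4
)

func main() {
    perms := read | execute
    fmt.Println(perms&write != 0)   // false
    fmt.Println(perms&execute != 0) // true
}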
@@ -34,22 +34,22 @@ type token struct {
 }
 
 func (t *token) toString() string {
-    if t.TokenType == OperationToken {
+    if t.TokenType == operationToken {
         log.Debug("toString, its an op")
         return t.Operation.toString()
-    } else if t.TokenType == OpenBracket {
+    } else if t.TokenType == openBracket {
         return "("
-    } else if t.TokenType == CloseBracket {
+    } else if t.TokenType == closeBracket {
         return ")"
-    } else if t.TokenType == OpenCollect {
+    } else if t.TokenType == openCollect {
         return "["
-    } else if t.TokenType == CloseCollect {
+    } else if t.TokenType == closeCollect {
         return "]"
-    } else if t.TokenType == OpenCollectObject {
+    } else if t.TokenType == openCollectObject {
         return "{"
-    } else if t.TokenType == CloseCollectObject {
+    } else if t.TokenType == closeCollectObject {
         return "}"
-    } else if t.TokenType == TraverseArrayCollect {
+    } else if t.TokenType == traverseArrayCollect {
         return ".["
 
     } else {
@@ -66,7 +66,7 @@ func pathToken(wrapped bool) lex.Action {
         }
         log.Debug("PathToken %v", value)
         op := &Operation{OperationType: traversePathOpType, Value: value, StringValue: value}
-        return &token{TokenType: OperationToken, Operation: op, CheckForPostTraverse: true}, nil
+        return &token{TokenType: operationToken, Operation: op, CheckForPostTraverse: true}, nil
     }
 }
 
@@ -80,7 +80,7 @@ func documentToken() lex.Action {
         }
         log.Debug("documentToken %v", string(m.Bytes))
         op := &Operation{OperationType: documentFilterOpType, Value: number, StringValue: numberString}
-        return &token{TokenType: OperationToken, Operation: op, CheckForPostTraverse: true}, nil
+        return &token{TokenType: operationToken, Operation: op, CheckForPostTraverse: true}, nil
     }
 }
 
@@ -97,7 +97,7 @@ func assignOpToken(updateAssign bool) lex.Action {
         log.Debug("assignOpToken %v", string(m.Bytes))
         value := string(m.Bytes)
         op := &Operation{OperationType: assignOpType, Value: assignOpType.Type, StringValue: value, UpdateAssign: updateAssign}
-        return &token{TokenType: OperationToken, Operation: op}, nil
+        return &token{TokenType: operationToken, Operation: op}, nil
     }
 }
 
@@ -110,7 +110,7 @@ func opTokenWithPrefs(op *operationType, assignOpType *operationType, preference
         if assignOpType != nil {
             assign = &Operation{OperationType: assignOpType, Value: assignOpType.Type, StringValue: value, Preferences: preferences}
         }
-        return &token{TokenType: OperationToken, Operation: op, AssignOperation: assign}, nil
+        return &token{TokenType: operationToken, Operation: op, AssignOperation: assign}, nil
     }
 }
 
@@ -125,7 +125,7 @@ func assignAllCommentsOp(updateAssign bool) lex.Action {
             UpdateAssign: updateAssign,
             Preferences:  &commentOpPreferences{LineComment: true, HeadComment: true, FootComment: true},
         }
-        return &token{TokenType: OperationToken, Operation: op}, nil
+        return &token{TokenType: operationToken, Operation: op}, nil
     }
 }
 
@@ -147,7 +147,7 @@ func numberValue() lex.Action {
             return nil, errParsingInt
         }
 
-        return &token{TokenType: OperationToken, Operation: createValueOperation(number, numberString)}, nil
+        return &token{TokenType: operationToken, Operation: createValueOperation(number, numberString)}, nil
     }
 }
 
@@ -158,13 +158,13 @@ func floatValue() lex.Action {
         if errParsingInt != nil {
             return nil, errParsingInt
         }
-        return &token{TokenType: OperationToken, Operation: createValueOperation(number, numberString)}, nil
+        return &token{TokenType: operationToken, Operation: createValueOperation(number, numberString)}, nil
     }
 }
 
 func booleanValue(val bool) lex.Action {
     return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
-        return &token{TokenType: OperationToken, Operation: createValueOperation(val, string(m.Bytes))}, nil
+        return &token{TokenType: operationToken, Operation: createValueOperation(val, string(m.Bytes))}, nil
     }
 }
 
@@ -174,7 +174,7 @@ func stringValue(wrapped bool) lex.Action {
         if wrapped {
             value = unwrap(value)
         }
-        return &token{TokenType: OperationToken, Operation: createValueOperation(value, value)}, nil
+        return &token{TokenType: operationToken, Operation: createValueOperation(value, value)}, nil
     }
 }
 
@@ -196,29 +196,29 @@ func envOp(strenv bool) lex.Action {
         envOperation.OperationType = envOpType
         envOperation.Preferences = preferences
 
-        return &token{TokenType: OperationToken, Operation: envOperation}, nil
+        return &token{TokenType: operationToken, Operation: envOperation}, nil
     }
 }
 
 func nullValue() lex.Action {
     return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
-        return &token{TokenType: OperationToken, Operation: createValueOperation(nil, string(m.Bytes))}, nil
+        return &token{TokenType: operationToken, Operation: createValueOperation(nil, string(m.Bytes))}, nil
     }
 }
 
 func selfToken() lex.Action {
     return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
         op := &Operation{OperationType: selfReferenceOpType}
-        return &token{TokenType: OperationToken, Operation: op}, nil
+        return &token{TokenType: operationToken, Operation: op}, nil
     }
 }
 
 func initLexer() (*lex.Lexer, error) {
     lexer := lex.NewLexer()
-    lexer.Add([]byte(`\(`), literalToken(OpenBracket, false))
-    lexer.Add([]byte(`\)`), literalToken(CloseBracket, true))
+    lexer.Add([]byte(`\(`), literalToken(openBracket, false))
+    lexer.Add([]byte(`\)`), literalToken(closeBracket, true))
 
-    lexer.Add([]byte(`\.\[`), literalToken(TraverseArrayCollect, false))
+    lexer.Add([]byte(`\.\[`), literalToken(traverseArrayCollect, false))
     lexer.Add([]byte(`\.\.`), opTokenWithPrefs(recursiveDescentOpType, nil, &recursiveDescentPreferences{RecurseArray: true,
         TraversePreferences: &traversePreferences{FollowAlias: false, IncludeMapKeys: false}}))
 
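initLexer registers one regex-and-action rule per token via lexer.Add, in the style of the timtadh/lexmachine API this file already uses (lex.NewLexer, lex.Scanner, machines.Match, lex.Action). A minimal, hypothetical sketch of wiring up and driving such a lexer; the Compile call, the Next signature, and the skip-on-nil behaviour are recalled from lexmachine's documentation and are assumptions here, not something this diff shows:

package main

import (
    "fmt"

    lex "github.com/timtadh/lexmachine"
    "github.com/timtadh/lexmachine/machines"
)

func main() {
    lexer := lex.NewLexer()
    // one rule per pattern; the action turns the raw match into a token value
    lexer.Add([]byte(`[a-zA-Z]+`), func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
        return "WORD:" + string(m.Bytes), nil
    })
    lexer.Add([]byte(`[0-9]+`), func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
        return "NUM:" + string(m.Bytes), nil
    })
    lexer.Add([]byte(` +`), func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
        return nil, nil // assumed: returning a nil token skips the match (whitespace)
    })

    if err := lexer.Compile(); err != nil { // assumed: the lexer must be compiled before use
        panic(err)
    }
    scanner, err := lexer.Scanner([]byte("abc 123 def"))
    if err != nil {
        panic(err)
    }
    // assumed Next signature: (token, error, eof)
    for tok, err, eof := scanner.Next(); !eof; tok, err, eof = scanner.Next() {
        if err != nil {
            panic(err)
        }
        fmt.Println(tok)
    }
}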
@@ -291,10 +291,10 @@ func initLexer() (*lex.Lexer, error) {
     lexer.Add([]byte(`strenv\([^\)]+\)`), envOp(true))
     lexer.Add([]byte(`env\([^\)]+\)`), envOp(false))
 
-    lexer.Add([]byte(`\[`), literalToken(OpenCollect, false))
-    lexer.Add([]byte(`\]`), literalToken(CloseCollect, true))
-    lexer.Add([]byte(`\{`), literalToken(OpenCollectObject, false))
-    lexer.Add([]byte(`\}`), literalToken(CloseCollectObject, true))
+    lexer.Add([]byte(`\[`), literalToken(openCollect, false))
+    lexer.Add([]byte(`\]`), literalToken(closeCollect, true))
+    lexer.Add([]byte(`\{`), literalToken(openCollectObject, false))
+    lexer.Add([]byte(`\}`), literalToken(closeCollectObject, true))
     lexer.Add([]byte(`\*`), opTokenWithPrefs(multiplyOpType, nil, &multiplyPreferences{AppendArrays: false}))
     lexer.Add([]byte(`\*\+`), opTokenWithPrefs(multiplyOpType, nil, &multiplyPreferences{AppendArrays: true}))
     lexer.Add([]byte(`\+`), opToken(addOpType))
@@ -307,23 +307,23 @@ func initLexer() (*lex.Lexer, error) {
     return lexer, nil
 }
 
-type pathTokeniserInterface interface {
+type pathTokeniser interface {
     Tokenise(path string) ([]*token, error)
 }
 
-type pathTokeniser struct {
+type pathTokeniserImpl struct {
     lexer *lex.Lexer
 }
 
-func newPathTokeniser() pathTokeniserInterface {
+func newPathTokeniser() pathTokeniser {
     var lexer, err = initLexer()
     if err != nil {
         panic(err)
     }
-    return &pathTokeniser{lexer}
+    return &pathTokeniserImpl{lexer}
 }
 
-func (p *pathTokeniser) Tokenise(path string) ([]*token, error) {
+func (p *pathTokeniserImpl) Tokenise(path string) ([]*token, error) {
     scanner, err := p.lexer.Scanner([]byte(path))
 
     if err != nil {
@@ -356,23 +356,23 @@ func (p *pathTokeniser) Tokenise(path string) ([]*token, error) {
     return postProcessedTokens, nil
 }
 
-func (p *pathTokeniser) handleToken(tokens []*token, index int, postProcessedTokens []*token) (tokensAccum []*token, skipNextToken bool) {
+func (p *pathTokeniserImpl) handleToken(tokens []*token, index int, postProcessedTokens []*token) (tokensAccum []*token, skipNextToken bool) {
     skipNextToken = false
     currentToken := tokens[index]
 
-    if currentToken.TokenType == TraverseArrayCollect {
+    if currentToken.TokenType == traverseArrayCollect {
         //need to put a traverse array then a collect currentToken
         // do this by adding traverse then converting currentToken to collect
 
         op := &Operation{OperationType: traverseArrayOpType, StringValue: "TRAVERSE_ARRAY"}
-        postProcessedTokens = append(postProcessedTokens, &token{TokenType: OperationToken, Operation: op})
+        postProcessedTokens = append(postProcessedTokens, &token{TokenType: operationToken, Operation: op})
 
-        currentToken = &token{TokenType: OpenCollect}
+        currentToken = &token{TokenType: openCollect}
 
     }
 
     if index != len(tokens)-1 && currentToken.AssignOperation != nil &&
-        tokens[index+1].TokenType == OperationToken &&
+        tokens[index+1].TokenType == operationToken &&
         tokens[index+1].Operation.OperationType == assignOpType {
         currentToken.Operation = currentToken.AssignOperation
         currentToken.Operation.UpdateAssign = tokens[index+1].Operation.UpdateAssign
@@ -382,25 +382,25 @@ func (p *pathTokeniser) handleToken(tokens []*token, index int, postProcessedTok
     postProcessedTokens = append(postProcessedTokens, currentToken)
 
     if index != len(tokens)-1 && currentToken.CheckForPostTraverse &&
-        tokens[index+1].TokenType == OperationToken &&
+        tokens[index+1].TokenType == operationToken &&
         tokens[index+1].Operation.OperationType == traversePathOpType {
         op := &Operation{OperationType: shortPipeOpType, Value: "PIPE"}
-        postProcessedTokens = append(postProcessedTokens, &token{TokenType: OperationToken, Operation: op})
+        postProcessedTokens = append(postProcessedTokens, &token{TokenType: operationToken, Operation: op})
     }
     if index != len(tokens)-1 && currentToken.CheckForPostTraverse &&
-        tokens[index+1].TokenType == OpenCollect {
+        tokens[index+1].TokenType == openCollect {
 
         op := &Operation{OperationType: shortPipeOpType, Value: "PIPE"}
-        postProcessedTokens = append(postProcessedTokens, &token{TokenType: OperationToken, Operation: op})
+        postProcessedTokens = append(postProcessedTokens, &token{TokenType: operationToken, Operation: op})
 
         op = &Operation{OperationType: traverseArrayOpType}
-        postProcessedTokens = append(postProcessedTokens, &token{TokenType: OperationToken, Operation: op})
+        postProcessedTokens = append(postProcessedTokens, &token{TokenType: operationToken, Operation: op})
     }
     if index != len(tokens)-1 && currentToken.CheckForPostTraverse &&
-        tokens[index+1].TokenType == TraverseArrayCollect {
+        tokens[index+1].TokenType == traverseArrayCollect {
 
         op := &Operation{OperationType: shortPipeOpType, Value: "PIPE"}
-        postProcessedTokens = append(postProcessedTokens, &token{TokenType: OperationToken, Operation: op})
+        postProcessedTokens = append(postProcessedTokens, &token{TokenType: operationToken, Operation: op})
 
     }
     return postProcessedTokens, skipNextToken
@@ -5,22 +5,22 @@ import (
     "os"
 )
 
-type WriteInPlaceHandler interface {
+type writeInPlaceHandler interface {
     CreateTempFile() (*os.File, error)
     FinishWriteInPlace(evaluatedSuccessfully bool)
 }
 
-type writeInPlaceHandler struct {
+type writeInPlaceHandlerImpl struct {
     inputFilename string
     tempFile      *os.File
 }
 
-func NewWriteInPlaceHandler(inputFile string) WriteInPlaceHandler {
+func NewWriteInPlaceHandler(inputFile string) writeInPlaceHandler {
 
-    return &writeInPlaceHandler{inputFile, nil}
+    return &writeInPlaceHandlerImpl{inputFile, nil}
 }
 
-func (w *writeInPlaceHandler) CreateTempFile() (*os.File, error) {
+func (w *writeInPlaceHandlerImpl) CreateTempFile() (*os.File, error) {
     info, err := os.Stat(w.inputFilename)
     if err != nil {
         return nil, err
@@ -46,7 +46,7 @@ func (w *writeInPlaceHandler) CreateTempFile() (*os.File, error) {
     return file, err
 }
 
-func (w *writeInPlaceHandler) FinishWriteInPlace(evaluatedSuccessfully bool) {
+func (w *writeInPlaceHandlerImpl) FinishWriteInPlace(evaluatedSuccessfully bool) {
     log.Debug("Going to write-inplace, evaluatedSuccessfully=%v, target=%v", evaluatedSuccessfully, w.inputFilename)
     safelyCloseFile(w.tempFile)
     if evaluatedSuccessfully {
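The write-in-place handler above follows the usual pattern: stat the target to capture its permissions, write output to a temporary file, and only replace the original once evaluation succeeded. A generic, self-contained sketch of that pattern (hypothetical names, not yq's implementation):

package main

import (
    "io"
    "os"
    "path/filepath"
    "strings"
)

// writeInPlace writes the content produced by fill to path, but only
// replaces the original file if fill returns no error.
func writeInPlace(path string, fill func(w io.Writer) error) error {
    info, err := os.Stat(path)
    if err != nil {
        return err
    }
    // create the temp file in the same directory so the final rename stays on one filesystem
    tmp, err := os.CreateTemp(filepath.Dir(path), "tmp-*")
    if err != nil {
        return err
    }
    defer os.Remove(tmp.Name()) // best-effort cleanup; harmless after a successful rename

    if err := fill(tmp); err != nil {
        tmp.Close()
        return err // original file left untouched
    }
    if err := tmp.Close(); err != nil {
        return err
    }
    if err := os.Chmod(tmp.Name(), info.Mode()); err != nil {
        return err
    }
    return os.Rename(tmp.Name(), path)
}

func main() {
    // usage sketch: overwrite example.yaml (assumed to exist) only if writing succeeds
    _ = writeInPlace("example.yaml", func(w io.Writer) error {
        _, err := io.Copy(w, strings.NewReader("a: 1\n"))
        return err
    })
}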