fixing exposed functions and interfaces

Mike Farah 2021-01-11 16:46:28 +11:00
parent 30c269a66c
commit 5e2c19cc86
7 changed files with 87 additions and 76 deletions

go.sum

@@ -281,6 +281,7 @@ golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgw
 golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc h1:NCy3Ohtk6Iny5V/reW2Ktypo4zIpWBdRJ1uFMjBxdg8=
 golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=


@@ -2,11 +2,13 @@ package yqlib

 import "container/list"

-/**
-Loads all yaml documents of all files given into memory, then runs the given expression once.
-**/
+// A yaml expression evaluator that runs the expression once against all files/nodes in memory.
 type Evaluator interface {
 	EvaluateFiles(expression string, filenames []string, printer Printer) error
+
+	// Runs the expression once against the list of candidate nodes, returns the
+	// resulting nodes.
+	EvaluateNodes(expression string, inputCandidateNodes *list.List) (*list.List, error)
 }

 type allAtOnceEvaluator struct {
@@ -18,12 +20,17 @@ func NewAllAtOnceEvaluator() Evaluator {
 	return &allAtOnceEvaluator{treeNavigator: NewDataTreeNavigator(), treeCreator: NewPathTreeCreator()}
 }

-func (e *allAtOnceEvaluator) EvaluateFiles(expression string, filenames []string, printer Printer) error {
-	fileIndex := 0
+func (e *allAtOnceEvaluator) EvaluateNodes(expression string, inputCandidates *list.List) (*list.List, error) {
 	node, err := treeCreator.ParsePath(expression)
 	if err != nil {
-		return err
+		return nil, err
 	}
+	return treeNavigator.GetMatchingNodes(inputCandidates, node)
+}
+
+func (e *allAtOnceEvaluator) EvaluateFiles(expression string, filenames []string, printer Printer) error {
+	fileIndex := 0
 	var allDocuments *list.List = list.New()
 	for _, filename := range filenames {
 		reader, err := readStream(filename)
@@ -37,7 +44,7 @@ func (e *allAtOnceEvaluator) EvaluateFiles(expression string, filenames []string
 		allDocuments.PushBackList(fileDocuments)
 		fileIndex = fileIndex + 1
 	}
-	matches, err := treeNavigator.GetMatchingNodes(allDocuments, node)
+	matches, err := e.EvaluateNodes(expression, allDocuments)
 	if err != nil {
 		return err
 	}
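
With EvaluateNodes now on the Evaluator interface, callers can run an expression against nodes they already hold in memory instead of going back out to files. A minimal usage sketch (hypothetical caller code, not part of this commit; assumes a *list.List of candidate nodes, as used throughout yqlib, and would live inside the package):

	// Sketch only: evaluate an expression against in-memory candidate nodes.
	func evaluateInMemory(candidates *list.List) (*list.List, error) {
		evaluator := NewAllAtOnceEvaluator()
		return evaluator.EvaluateNodes(".a.b", candidates)
	}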


@@ -164,8 +164,8 @@ var pathTests = []struct {
 	},
 }

-var tokeniser = NewPathTokeniser()
-var postFixer = NewPathPostFixer()
+var tokeniser = newPathTokeniser()
+var postFixer = newPathPostFixer()

 func TestPathParsing(t *testing.T) {
 	for _, tt := range pathTests {


@@ -6,38 +6,38 @@ import (
 	logging "gopkg.in/op/go-logging.v1"
 )

-type PathPostFixer interface {
-	ConvertToPostfix([]*Token) ([]*Operation, error)
+type pathPostFixerInterface interface {
+	ConvertToPostfix([]*token) ([]*Operation, error)
 }

 type pathPostFixer struct {
 }

-func NewPathPostFixer() PathPostFixer {
+func newPathPostFixer() pathPostFixerInterface {
 	return &pathPostFixer{}
 }

-func popOpToResult(opStack []*Token, result []*Operation) ([]*Token, []*Operation) {
-	var newOp *Token
+func popOpToResult(opStack []*token, result []*Operation) ([]*token, []*Operation) {
+	var newOp *token
 	opStack, newOp = opStack[0:len(opStack)-1], opStack[len(opStack)-1]
 	return opStack, append(result, newOp.Operation)
 }

-func (p *pathPostFixer) ConvertToPostfix(infixTokens []*Token) ([]*Operation, error) {
+func (p *pathPostFixer) ConvertToPostfix(infixTokens []*token) ([]*Operation, error) {
 	var result []*Operation
 	// surround the whole thing with quotes
-	var opStack = []*Token{&Token{TokenType: OpenBracket}}
-	var tokens = append(infixTokens, &Token{TokenType: CloseBracket})
+	var opStack = []*token{&token{TokenType: OpenBracket}}
+	var tokens = append(infixTokens, &token{TokenType: CloseBracket})

-	for _, token := range tokens {
-		log.Debugf("postfix processing token %v, %v", token.toString(), token.Operation)
-		switch token.TokenType {
+	for _, currentToken := range tokens {
+		log.Debugf("postfix processing currentToken %v, %v", currentToken.toString(), currentToken.Operation)
+		switch currentToken.TokenType {
 		case OpenBracket, OpenCollect, OpenCollectObject:
-			opStack = append(opStack, token)
+			opStack = append(opStack, currentToken)
 		case CloseCollect, CloseCollectObject:
-			var opener TokenType = OpenCollect
+			var opener tokenType = OpenCollect
 			var collectOperator *OperationType = Collect
-			if token.TokenType == CloseCollectObject {
+			if currentToken.TokenType == CloseCollectObject {
 				opener = OpenCollectObject
 				collectOperator = CollectObject
 			}
@@ -56,8 +56,8 @@ func (p *pathPostFixer) ConvertToPostfix(infixTokens []*Token) ([]*Operation, er
 			// now we should have [] as the last element on the opStack, get rid of it
 			opStack = opStack[0 : len(opStack)-1]
 			//and append a collect to the opStack
-			opStack = append(opStack, &Token{TokenType: OperationToken, Operation: &Operation{OperationType: ShortPipe}})
-			opStack = append(opStack, &Token{TokenType: OperationToken, Operation: &Operation{OperationType: collectOperator}})
+			opStack = append(opStack, &token{TokenType: OperationToken, Operation: &Operation{OperationType: ShortPipe}})
+			opStack = append(opStack, &token{TokenType: OperationToken, Operation: &Operation{OperationType: collectOperator}})
 		case CloseBracket:
 			for len(opStack) > 0 && opStack[len(opStack)-1].TokenType != OpenBracket {
 				opStack, result = popOpToResult(opStack, result)
@@ -69,7 +69,7 @@ func (p *pathPostFixer) ConvertToPostfix(infixTokens []*Token) ([]*Operation, er
 			opStack = opStack[0 : len(opStack)-1]
 		default:
-			var currentPrecedence = token.Operation.OperationType.Precedence
+			var currentPrecedence = currentToken.Operation.OperationType.Precedence
 			// pop off higher precedent operators onto the result
 			for len(opStack) > 0 &&
 				opStack[len(opStack)-1].TokenType == OperationToken &&
@@ -77,14 +77,14 @@ func (p *pathPostFixer) ConvertToPostfix(infixTokens []*Token) ([]*Operation, er
 				opStack, result = popOpToResult(opStack, result)
 			}
 			// add this operator to the opStack
-			opStack = append(opStack, token)
+			opStack = append(opStack, currentToken)
 		}
 	}

 	if log.IsEnabledFor(logging.DEBUG) {
 		log.Debugf("PostFix Result:")
-		for _, token := range result {
-			log.Debugf("> %v", token.toString())
+		for _, currentToken := range result {
+			log.Debugf("> %v", currentToken.toString())
 		}
 	}
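
For readers following the ConvertToPostfix changes: the function is a shunting-yard pass, pushing operator tokens onto opStack and popping higher-precedence operators into the postfix result. A self-contained sketch of the same pattern on plain strings (illustrative only, not yqlib code):

	package main

	import "fmt"

	var precedence = map[string]int{"|": 1, "+": 2, "*": 3}

	// toPostfix converts an infix token slice to postfix order,
	// mirroring the precedence-based popping in ConvertToPostfix.
	func toPostfix(infix []string) []string {
		var result, opStack []string
		for _, tok := range infix {
			switch {
			case tok == "(":
				opStack = append(opStack, tok)
			case tok == ")":
				for len(opStack) > 0 && opStack[len(opStack)-1] != "(" {
					result = append(result, opStack[len(opStack)-1])
					opStack = opStack[:len(opStack)-1]
				}
				opStack = opStack[:len(opStack)-1] // discard the "("
			case precedence[tok] > 0:
				// pop operators of greater or equal precedence first
				for len(opStack) > 0 && precedence[opStack[len(opStack)-1]] >= precedence[tok] {
					result = append(result, opStack[len(opStack)-1])
					opStack = opStack[:len(opStack)-1]
				}
				opStack = append(opStack, tok)
			default: // operand
				result = append(result, tok)
			}
		}
		for len(opStack) > 0 {
			result = append(result, opStack[len(opStack)-1])
			opStack = opStack[:len(opStack)-1]
		}
		return result
	}

	func main() {
		fmt.Println(toPostfix([]string{"a", "+", "b", "*", "c"})) // [a b c * +]
	}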


@@ -12,7 +12,7 @@ func skip(*lex.Scanner, *machines.Match) (interface{}, error) {
 	return nil, nil
 }

-type TokenType uint32
+type tokenType uint32

 const (
 	OperationToken = 1 << iota
@@ -25,15 +25,15 @@ const (
 	TraverseArrayCollect
 )

-type Token struct {
-	TokenType TokenType
+type token struct {
+	TokenType tokenType
 	Operation *Operation
 	AssignOperation *Operation // e.g. tag (GetTag) op becomes AssignTag if '=' follows it
 	CheckForPostTraverse bool // e.g. [1]cat should really be [1].cat
 }

-func (t *Token) toString() string {
+func (t *token) toString() string {
 	if t.TokenType == OperationToken {
 		log.Debug("toString, its an op")
 		return t.Operation.toString()
@@ -66,7 +66,7 @@ func pathToken(wrapped bool) lex.Action {
 		}
 		log.Debug("PathToken %v", value)
 		op := &Operation{OperationType: TraversePath, Value: value, StringValue: value}
-		return &Token{TokenType: OperationToken, Operation: op, CheckForPostTraverse: true}, nil
+		return &token{TokenType: OperationToken, Operation: op, CheckForPostTraverse: true}, nil
 	}
 }
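
pathToken and the helpers below are factories that return lex.Action closures (the lexmachine callback type); each builds a *token when its pattern matches, which is why every return site in this file changes from &Token to &token. A hypothetical factory in the same style (sketch only, not in the commit; simpleOpToken is an invented name):

	// Sketch: wrap the matched bytes in a token for a given operation type.
	func simpleOpToken(opType *OperationType) lex.Action {
		return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
			op := &Operation{OperationType: opType, StringValue: string(m.Bytes)}
			return &token{TokenType: OperationToken, Operation: op}, nil
		}
	}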
@@ -80,7 +80,7 @@ func documentToken() lex.Action {
 		}
 		log.Debug("documentToken %v", string(m.Bytes))
 		op := &Operation{OperationType: DocumentFilter, Value: number, StringValue: numberString}
-		return &Token{TokenType: OperationToken, Operation: op, CheckForPostTraverse: true}, nil
+		return &token{TokenType: OperationToken, Operation: op, CheckForPostTraverse: true}, nil
 	}
 }
@@ -97,7 +97,7 @@ func assignOpToken(updateAssign bool) lex.Action {
 		log.Debug("assignOpToken %v", string(m.Bytes))
 		value := string(m.Bytes)
 		op := &Operation{OperationType: Assign, Value: Assign.Type, StringValue: value, UpdateAssign: updateAssign}
-		return &Token{TokenType: OperationToken, Operation: op}, nil
+		return &token{TokenType: OperationToken, Operation: op}, nil
 	}
 }
@@ -110,7 +110,7 @@ func opTokenWithPrefs(op *OperationType, assignOpType *OperationType, preference
 		if assignOpType != nil {
 			assign = &Operation{OperationType: assignOpType, Value: assignOpType.Type, StringValue: value, Preferences: preferences}
 		}
-		return &Token{TokenType: OperationToken, Operation: op, AssignOperation: assign}, nil
+		return &token{TokenType: OperationToken, Operation: op, AssignOperation: assign}, nil
 	}
 }
@@ -125,13 +125,13 @@ func assignAllCommentsOp(updateAssign bool) lex.Action {
 			UpdateAssign: updateAssign,
 			Preferences: &CommentOpPreferences{LineComment: true, HeadComment: true, FootComment: true},
 		}
-		return &Token{TokenType: OperationToken, Operation: op}, nil
+		return &token{TokenType: OperationToken, Operation: op}, nil
 	}
 }

-func literalToken(pType TokenType, checkForPost bool) lex.Action {
+func literalToken(pType tokenType, checkForPost bool) lex.Action {
 	return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
-		return &Token{TokenType: pType, CheckForPostTraverse: checkForPost}, nil
+		return &token{TokenType: pType, CheckForPostTraverse: checkForPost}, nil
 	}
 }
@@ -147,7 +147,7 @@ func numberValue() lex.Action {
 			return nil, errParsingInt
 		}

-		return &Token{TokenType: OperationToken, Operation: CreateValueOperation(number, numberString)}, nil
+		return &token{TokenType: OperationToken, Operation: CreateValueOperation(number, numberString)}, nil
 	}
 }
@@ -158,13 +158,13 @@ func floatValue() lex.Action {
 		if errParsingInt != nil {
 			return nil, errParsingInt
 		}
-		return &Token{TokenType: OperationToken, Operation: CreateValueOperation(number, numberString)}, nil
+		return &token{TokenType: OperationToken, Operation: CreateValueOperation(number, numberString)}, nil
 	}
 }

 func booleanValue(val bool) lex.Action {
 	return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
-		return &Token{TokenType: OperationToken, Operation: CreateValueOperation(val, string(m.Bytes))}, nil
+		return &token{TokenType: OperationToken, Operation: CreateValueOperation(val, string(m.Bytes))}, nil
 	}
 }
@@ -174,7 +174,7 @@ func stringValue(wrapped bool) lex.Action {
 		if wrapped {
 			value = unwrap(value)
 		}
-		return &Token{TokenType: OperationToken, Operation: CreateValueOperation(value, value)}, nil
+		return &token{TokenType: OperationToken, Operation: CreateValueOperation(value, value)}, nil
 	}
 }
@@ -196,20 +196,20 @@ func envOp(strenv bool) lex.Action {
 		envOperation.OperationType = EnvOp
 		envOperation.Preferences = preferences
-		return &Token{TokenType: OperationToken, Operation: envOperation}, nil
+		return &token{TokenType: OperationToken, Operation: envOperation}, nil
 	}
 }

 func nullValue() lex.Action {
 	return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
-		return &Token{TokenType: OperationToken, Operation: CreateValueOperation(nil, string(m.Bytes))}, nil
+		return &token{TokenType: OperationToken, Operation: CreateValueOperation(nil, string(m.Bytes))}, nil
 	}
 }

 func selfToken() lex.Action {
 	return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
 		op := &Operation{OperationType: SelfReference}
-		return &Token{TokenType: OperationToken, Operation: op}, nil
+		return &token{TokenType: OperationToken, Operation: op}, nil
 	}
 }
@@ -307,15 +307,15 @@ func initLexer() (*lex.Lexer, error) {
 	return lexer, nil
 }

-type PathTokeniser interface {
-	Tokenise(path string) ([]*Token, error)
+type pathTokeniserInterface interface {
+	Tokenise(path string) ([]*token, error)
 }

 type pathTokeniser struct {
 	lexer *lex.Lexer
 }

-func NewPathTokeniser() PathTokeniser {
+func newPathTokeniser() pathTokeniserInterface {
 	var lexer, err = initLexer()
 	if err != nil {
 		panic(err)
@@ -323,25 +323,25 @@ func NewPathTokeniser() PathTokeniser {
 	return &pathTokeniser{lexer}
 }

-func (p *pathTokeniser) Tokenise(path string) ([]*Token, error) {
+func (p *pathTokeniser) Tokenise(path string) ([]*token, error) {
 	scanner, err := p.lexer.Scanner([]byte(path))
 	if err != nil {
 		return nil, fmt.Errorf("Parsing expression: %v", err)
 	}
-	var tokens []*Token
+	var tokens []*token

 	for tok, err, eof := scanner.Next(); !eof; tok, err, eof = scanner.Next() {
 		if tok != nil {
-			token := tok.(*Token)
-			log.Debugf("Tokenising %v", token.toString())
-			tokens = append(tokens, token)
+			currentToken := tok.(*token)
+			log.Debugf("Tokenising %v", currentToken.toString())
+			tokens = append(tokens, currentToken)
 		}
 		if err != nil {
 			return nil, fmt.Errorf("Parsing expression: %v", err)
 		}
 	}

-	var postProcessedTokens = make([]*Token, 0)
+	var postProcessedTokens = make([]*token, 0)
 	skipNextToken := false
@@ -356,51 +356,51 @@ func (p *pathTokeniser) Tokenise(path string) ([]*Token, error) {
 	return postProcessedTokens, nil
 }

-func (p *pathTokeniser) handleToken(tokens []*Token, index int, postProcessedTokens []*Token) (tokensAccum []*Token, skipNextToken bool) {
+func (p *pathTokeniser) handleToken(tokens []*token, index int, postProcessedTokens []*token) (tokensAccum []*token, skipNextToken bool) {
 	skipNextToken = false
-	token := tokens[index]
+	currentToken := tokens[index]

-	if token.TokenType == TraverseArrayCollect {
-		//need to put a traverse array then a collect token
-		// do this by adding traverse then converting token to collect
+	if currentToken.TokenType == TraverseArrayCollect {
+		//need to put a traverse array then a collect currentToken
+		// do this by adding traverse then converting currentToken to collect

 		op := &Operation{OperationType: TraverseArray, StringValue: "TRAVERSE_ARRAY"}
-		postProcessedTokens = append(postProcessedTokens, &Token{TokenType: OperationToken, Operation: op})
+		postProcessedTokens = append(postProcessedTokens, &token{TokenType: OperationToken, Operation: op})

-		token = &Token{TokenType: OpenCollect}
+		currentToken = &token{TokenType: OpenCollect}
 	}

-	if index != len(tokens)-1 && token.AssignOperation != nil &&
+	if index != len(tokens)-1 && currentToken.AssignOperation != nil &&
 		tokens[index+1].TokenType == OperationToken &&
 		tokens[index+1].Operation.OperationType == Assign {
-		token.Operation = token.AssignOperation
-		token.Operation.UpdateAssign = tokens[index+1].Operation.UpdateAssign
+		currentToken.Operation = currentToken.AssignOperation
+		currentToken.Operation.UpdateAssign = tokens[index+1].Operation.UpdateAssign
 		skipNextToken = true
 	}

-	postProcessedTokens = append(postProcessedTokens, token)
+	postProcessedTokens = append(postProcessedTokens, currentToken)

-	if index != len(tokens)-1 && token.CheckForPostTraverse &&
+	if index != len(tokens)-1 && currentToken.CheckForPostTraverse &&
 		tokens[index+1].TokenType == OperationToken &&
 		tokens[index+1].Operation.OperationType == TraversePath {
 		op := &Operation{OperationType: ShortPipe, Value: "PIPE"}
-		postProcessedTokens = append(postProcessedTokens, &Token{TokenType: OperationToken, Operation: op})
+		postProcessedTokens = append(postProcessedTokens, &token{TokenType: OperationToken, Operation: op})
 	}
-	if index != len(tokens)-1 && token.CheckForPostTraverse &&
+	if index != len(tokens)-1 && currentToken.CheckForPostTraverse &&
 		tokens[index+1].TokenType == OpenCollect {

 		op := &Operation{OperationType: ShortPipe, Value: "PIPE"}
-		postProcessedTokens = append(postProcessedTokens, &Token{TokenType: OperationToken, Operation: op})
+		postProcessedTokens = append(postProcessedTokens, &token{TokenType: OperationToken, Operation: op})

 		op = &Operation{OperationType: TraverseArray}
-		postProcessedTokens = append(postProcessedTokens, &Token{TokenType: OperationToken, Operation: op})
+		postProcessedTokens = append(postProcessedTokens, &token{TokenType: OperationToken, Operation: op})
 	}
-	if index != len(tokens)-1 && token.CheckForPostTraverse &&
+	if index != len(tokens)-1 && currentToken.CheckForPostTraverse &&
 		tokens[index+1].TokenType == TraverseArrayCollect {
 		op := &Operation{OperationType: ShortPipe, Value: "PIPE"}
-		postProcessedTokens = append(postProcessedTokens, &Token{TokenType: OperationToken, Operation: op})
+		postProcessedTokens = append(postProcessedTokens, &token{TokenType: OperationToken, Operation: op})
 	}

 	return postProcessedTokens, skipNextToken
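
The handleToken post-processing above is what makes concise forms like ".a[0]" work: when a traverse token is followed by a collect, it splices in a ShortPipe (and a TraverseArray) so the stream parses roughly like ".a | [0]". A hypothetical debugging helper to inspect the post-processed stream (sketch only; would live in package yqlib to reach the unexported tokeniser):

	// Sketch: print the post-processed token stream for an expression.
	func debugTokens(expression string) error {
		tokens, err := newPathTokeniser().Tokenise(expression)
		if err != nil {
			return err
		}
		for _, t := range tokens {
			fmt.Println(t.toString())
		}
		return nil
	}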


@@ -5,8 +5,8 @@ import (
 	"strings"
 )

-var myPathTokeniser = NewPathTokeniser()
-var myPathPostfixer = NewPathPostFixer()
+var myPathTokeniser = newPathTokeniser()
+var myPathPostfixer = newPathPostFixer()

 type PathTreeNode struct {
 	Operation *Operation

@@ -8,6 +8,9 @@ import (
 	yaml "gopkg.in/yaml.v3"
 )

+// A yaml expression evaluator that runs the expression multiple times for each given yaml document.
+// Uses less memory than loading all documents and running the expression once, but this cannot process
+// cross document expressions.
 type StreamEvaluator interface {
 	Evaluate(filename string, reader io.Reader, node *PathTreeNode, printer Printer) error
 	EvaluateFiles(expression string, filenames []string, printer Printer) error
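
Taken together with the all-at-once evaluator above, the split is: StreamEvaluator for expressions that only need one document at a time (flat memory use), the all-at-once Evaluator for anything that must relate documents or files to each other. A hypothetical caller illustrating the choice (sketch only; assumes the usual NewStreamEvaluator constructor alongside NewAllAtOnceEvaluator, and the expressions shown are purely illustrative):

	func run(filenames []string, printer Printer) error {
		// Per-document query: stream it to keep memory usage flat.
		if err := NewStreamEvaluator().EvaluateFiles(".a.b", filenames, printer); err != nil {
			return err
		}
		// Cross-document query: needs every document in memory at once,
		// so it must go through the all-at-once evaluator.
		return NewAllAtOnceEvaluator().EvaluateFiles("select(documentIndex == 0) * select(documentIndex == 1)", filenames, printer)
	}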