mikefarah/yq — commit 0a2a3c4374 ("can delete"), parent ac076cd34a
mirror of https://github.com/mikefarah/yq.git
@@ -28,6 +28,7 @@ func NewDataTreeNavigator(navigationPrefs NavigationPrefs) DataTreeNavigator {
	operatorHandlers[Or] = UnionOperator
	operatorHandlers[And] = IntersectionOperator
	operatorHandlers[Assign] = AssignOperator
+	operatorHandlers[DeleteChild] = DeleteChildOperator

	return &dataTreeNavigator{leafTraverser, operatorHandlers}
}
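Aside (not part of this commit): a self-contained toy showing the registration-and-dispatch pattern used above. Only the idea of a map from operation constants to operator functions is taken from the diff; every name below is hypothetical, and the real handler signature is the one DeleteChildOperator uses in delete_operator.go further down.

package main

import "fmt"

type opKind int

const (
	opUnion opKind = iota
	opDeleteChild
)

// handler is a stand-in for the real operator signature
// func(*dataTreeNavigator, *orderedmap.OrderedMap, *PathTreeNode) (*orderedmap.OrderedMap, error).
type handler func(children map[string]string) map[string]string

func main() {
	handlers := map[opKind]handler{
		opUnion: func(c map[string]string) map[string]string { return c },
		opDeleteChild: func(c map[string]string) map[string]string {
			delete(c, "b") // toy behaviour: always remove the child named "b"
			return c
		},
	}
	fmt.Println(handlers[opDeleteChild](map[string]string{"b": "apple", "c": "camel"})) // map[c:camel]
}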
@@ -54,6 +54,112 @@ func TestDataTreeNavigatorSimple(t *testing.T) {
	test.AssertResult(t, expected, resultsToString(results))
}

func TestDataTreeNavigatorSubtractSimple(t *testing.T) {

	nodes := readDoc(t, `a:
  b: apple
  c: camel`)

	path, errPath := treeCreator.ParsePath("a .- b")
	if errPath != nil {
		t.Error(errPath)
	}
	results, errNav := treeNavigator.GetMatchingNodes(nodes, path)

	if errNav != nil {
		t.Error(errNav)
	}

	expected := `
-- Node --
Document 0, path: [a]
Tag: !!map, Kind: MappingNode, Anchor:
c: camel
`
	test.AssertResult(t, expected, resultsToString(results))
}

func TestDataTreeNavigatorSubtractTwice(t *testing.T) {

	nodes := readDoc(t, `a:
  b: apple
  c: camel
  d: dingo`)

	path, errPath := treeCreator.ParsePath("a .- b OR a .- c")
	if errPath != nil {
		t.Error(errPath)
	}
	results, errNav := treeNavigator.GetMatchingNodes(nodes, path)

	if errNav != nil {
		t.Error(errNav)
	}

	expected := `
-- Node --
Document 0, path: [a]
Tag: !!map, Kind: MappingNode, Anchor:
d: dingo
`

	test.AssertResult(t, expected, resultsToString(results))
}

func TestDataTreeNavigatorSubtractWithUnion(t *testing.T) {

	nodes := readDoc(t, `a:
  b: apple
  c: camel
  d: dingo`)

	path, errPath := treeCreator.ParsePath("a .- (b OR c)")
	if errPath != nil {
		t.Error(errPath)
	}
	results, errNav := treeNavigator.GetMatchingNodes(nodes, path)

	if errNav != nil {
		t.Error(errNav)
	}

	expected := `
-- Node --
Document 0, path: [a]
Tag: !!map, Kind: MappingNode, Anchor:
d: dingo
`

	test.AssertResult(t, expected, resultsToString(results))
}

func TestDataTreeNavigatorSubtractArray(t *testing.T) {

	nodes := readDoc(t, `a:
  - b: apple
  - b: sdfsd
  - b: apple`)

	path, errPath := treeCreator.ParsePath("a .- (b == a*)")
	if errPath != nil {
		t.Error(errPath)
	}
	results, errNav := treeNavigator.GetMatchingNodes(nodes, path)

	if errNav != nil {
		t.Error(errNav)
	}

	expected := `
-- Node --
Document 0, path: [a]
Tag: !!seq, Kind: SequenceNode, Anchor:
- b: sdfsd
`

	test.AssertResult(t, expected, resultsToString(results))
}

func TestDataTreeNavigatorArraySimple(t *testing.T) {

	nodes := readDoc(t, `- b: apple`)
pkg/yqlib/treeops/delete_operator.go (new file, 83 lines)
@@ -0,0 +1,83 @@
package treeops

import (
	"github.com/elliotchance/orderedmap"
	"gopkg.in/yaml.v3"
)

func DeleteChildOperator(d *dataTreeNavigator, matchingNodes *orderedmap.OrderedMap, pathNode *PathTreeNode) (*orderedmap.OrderedMap, error) {
	lhs, err := d.getMatchingNodes(matchingNodes, pathNode.Lhs)
	if err != nil {
		return nil, err
	}
	// for each lhs, splat the node,
	// then intersect it against the rhs expression,
	// recreate the contents using only the intersection result.

	for el := lhs.Front(); el != nil; el = el.Next() {
		candidate := el.Value.(*CandidateNode)
		elMap := orderedmap.NewOrderedMap()
		elMap.Set(candidate.getKey(), candidate)
		nodesToDelete, err := d.getMatchingNodes(elMap, pathNode.Rhs)
		log.Debug("nodesToDelete:\n%v", NodesToString(nodesToDelete))
		if err != nil {
			return nil, err
		}

		if candidate.Node.Kind == yaml.SequenceNode {
			deleteFromArray(candidate, nodesToDelete)
		} else {
			deleteFromMap(candidate, nodesToDelete)
		}
	}
	return lhs, nil
}

func deleteFromMap(candidate *CandidateNode, nodesToDelete *orderedmap.OrderedMap) {
	log.Debug("deleteFromMap")
	node := candidate.Node
	contents := node.Content
	newContents := make([]*yaml.Node, 0)

	for index := 0; index < len(contents); index = index + 2 {
		key := contents[index]
		value := contents[index+1]

		childCandidate := &CandidateNode{
			Node:     value,
			Document: candidate.Document,
			Path:     append(candidate.Path, key.Value),
		}
		_, shouldDelete := nodesToDelete.Get(childCandidate.getKey())

		log.Debugf("shouldDelete %v ? %v", childCandidate.getKey(), shouldDelete)

		if !shouldDelete {
			newContents = append(newContents, key, value)
		}
	}
	node.Content = newContents
}

func deleteFromArray(candidate *CandidateNode, nodesToDelete *orderedmap.OrderedMap) {
	log.Debug("deleteFromArray")
	node := candidate.Node
	contents := node.Content
	newContents := make([]*yaml.Node, 0)

	for index := 0; index < len(contents); index = index + 1 {
		value := contents[index]

		childCandidate := &CandidateNode{
			Node:     value,
			Document: candidate.Document,
			Path:     append(candidate.Path, index),
		}

		_, shouldDelete := nodesToDelete.Get(childCandidate.getKey())
		if !shouldDelete {
			newContents = append(newContents, value)
		}
	}
	node.Content = newContents
}
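deleteFromMap above advances two entries at a time because gopkg.in/yaml.v3 stores a MappingNode's pairs flat in Content as [key0, value0, key1, value1, …], while a SequenceNode keeps one entry per element (hence deleteFromArray stepping by one). A minimal standalone sketch of the same pair-filtering idea, using yaml.v3 directly instead of yq's CandidateNode plumbing — the document mirrors the new tests, everything else is illustrative only:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	var doc yaml.Node
	if err := yaml.Unmarshal([]byte("a:\n  b: apple\n  c: camel"), &doc); err != nil {
		panic(err)
	}

	// doc is a DocumentNode; its first child is the top-level mapping, and the
	// value sitting under the key "a" is the inner MappingNode we want to edit.
	inner := doc.Content[0].Content[1]

	// Same technique as deleteFromMap: rebuild Content, skipping the pair whose
	// key matches the child being deleted ("b" here, as in `a .- b`).
	newContents := make([]*yaml.Node, 0)
	for i := 0; i < len(inner.Content); i += 2 {
		key, value := inner.Content[i], inner.Content[i+1]
		if key.Value != "b" {
			newContents = append(newContents, key, value)
		}
	}
	inner.Content = newContents

	out, _ := yaml.Marshal(&doc)
	fmt.Print(string(out)) // prints "a:" with only "c: camel" left underneath
}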
@@ -4,6 +4,7 @@ import (
	"bytes"
	"fmt"

	"github.com/elliotchance/orderedmap"
	"gopkg.in/op/go-logging.v1"
	"gopkg.in/yaml.v3"
)
@@ -14,10 +15,6 @@ type CandidateNode struct {
	Node     *yaml.Node    // the actual node
	Path     []interface{} /// the path we took to get to this node
	Document uint          // the document index of this node
-
-	// middle nodes are nodes that match along the original path, but not a
-	// target match of the path. This is only relevant when ShouldOnlyDeeplyVisitLeaves is false.
-	IsMiddleNode bool
}

func (n *CandidateNode) getKey() string {
@@ -38,6 +35,19 @@ type lib struct {
	treeCreator PathTreeCreator
}

+//use for debugging only
+func NodesToString(collection *orderedmap.OrderedMap) string {
+	if !log.IsEnabledFor(logging.DEBUG) {
+		return ""
+	}
+
+	result := ""
+	for el := collection.Front(); el != nil; el = el.Next() {
+		result = result + "\n" + NodeToString(el.Value.(*CandidateNode))
+	}
+	return result
+}
+
func NodeToString(node *CandidateNode) string {
	if !log.IsEnabledFor(logging.DEBUG) {
		return ""
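NodesToString above, like the operators in this commit, relies on github.com/elliotchance/orderedmap to hold candidate nodes in insertion order, keyed by getKey(). A small standalone sketch of the two idioms the commit uses — ordered iteration via Front()/Next() and membership checks via Get(); the keys and values are stand-ins rather than real CandidateNodes:

package main

import (
	"fmt"

	"github.com/elliotchance/orderedmap"
)

func main() {
	candidates := orderedmap.NewOrderedMap()
	// In the real code the key is candidate.getKey() and the value is a *CandidateNode.
	candidates.Set("doc0-a.b", "b: apple")
	candidates.Set("doc0-a.c", "c: camel")

	// Ordered iteration, as in NodesToString and the operator loops.
	for el := candidates.Front(); el != nil; el = el.Next() {
		fmt.Println(el.Key, "=>", el.Value)
	}

	// Membership check, as in deleteFromMap's nodesToDelete.Get(...).
	_, shouldDelete := candidates.Get("doc0-a.b")
	fmt.Println("delete it?", shouldDelete) // true
}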
@@ -59,20 +59,24 @@ func IntersectionOperator(d *dataTreeNavigator, matchingNodes *orderedmap.Ordere
	return matchingNodeMap, nil
}

+func splatNode(d *dataTreeNavigator, candidate *CandidateNode) (*orderedmap.OrderedMap, error) {
+	elMap := orderedmap.NewOrderedMap()
+	elMap.Set(candidate.getKey(), candidate)
+	//need to splat matching nodes, then search through them
+	splatter := &PathTreeNode{PathElement: &PathElement{
+		PathElementType: PathKey,
+		Value:           "*",
+		StringValue:     "*",
+	}}
+	return d.getMatchingNodes(elMap, splatter)
+}
+
func EqualsOperator(d *dataTreeNavigator, matchMap *orderedmap.OrderedMap, pathNode *PathTreeNode) (*orderedmap.OrderedMap, error) {
	log.Debugf("-- equalsOperation")
	var results = orderedmap.NewOrderedMap()

	for el := matchMap.Front(); el != nil; el = el.Next() {
-		elMap := orderedmap.NewOrderedMap()
-		elMap.Set(el.Key, el.Value)
-		//need to splat matching nodes, then search through them
-		splatter := &PathTreeNode{PathElement: &PathElement{
-			PathElementType: PathKey,
-			Value:           "*",
-			StringValue:     "*",
-		}}
-		children, err := d.getMatchingNodes(elMap, splatter)
+		children, err := splatNode(d, el.Value.(*CandidateNode))
		log.Debugf("-- splatted matches, ")
		if err != nil {
			return nil, err
@@ -27,6 +27,7 @@ const (
	Equals
	EqualsSelf
	Assign
+	DeleteChild
)

type PathElement struct {
@@ -59,6 +60,9 @@ func (p *PathElement) toString() string {
		result = result + "ASSIGN\n"
	case Traverse:
		result = result + "TRAVERSE\n"
+	case DeleteChild:
+		result = result + "DELETE CHILD\n"

	}

}
@@ -85,6 +89,9 @@ func initMaps() {
	precedenceMap[TokenIds["EQUALS_SELF_OPERATOR"]] = 30
	operationTypeMapper[TokenIds["EQUALS_SELF_OPERATOR"]] = EqualsSelf

+	precedenceMap[TokenIds["DELETE_CHILD_OPERATOR"]] = 30
+	operationTypeMapper[TokenIds["DELETE_CHILD_OPERATOR"]] = DeleteChild
+
	precedenceMap[TokenIds["ASSIGN_OPERATOR"]] = 35
	operationTypeMapper[TokenIds["ASSIGN_OPERATOR"]] = Assign

@@ -28,6 +28,7 @@ func initTokens() {
		"EQUALS_OPERATOR",
		"EQUALS_SELF_OPERATOR",
		"ASSIGN_OPERATOR",
+		"DELETE_CHILD_OPERATOR",
		"TRAVERSE_OPERATOR",
		"PATH_KEY",    // apples
		"ARRAY_INDEX", // 123
@@ -91,6 +92,7 @@ func initLexer() (*lex.Lexer, error) {
	lexer.Add([]byte(`([Aa][Nn][Dd])`), token("AND_OPERATOR"))
	lexer.Add([]byte(`\.\s*==\s*`), token("EQUALS_SELF_OPERATOR"))
	lexer.Add([]byte(`\s*==\s*`), token("EQUALS_OPERATOR"))
+	lexer.Add([]byte(`\s*.-\s*`), token("DELETE_CHILD_OPERATOR"))
	lexer.Add([]byte(`\s*:=\s*`), token("ASSIGN_OPERATOR"))
	lexer.Add([]byte(`\[-?[0-9]+\]`), numberToken("ARRAY_INDEX", true))
	lexer.Add([]byte(`-?[0-9]+`), numberToken("ARRAY_INDEX", false))
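To see which spans the new DELETE_CHILD_OPERATOR pattern captures, the rule can be approximated with Go's regexp package (yq's path lexer uses its own regex engine, so this only illustrates the pattern, not the lexer itself):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern string as the lexer.Add call above.
	deleteChild := regexp.MustCompile(`\s*.-\s*`)

	fmt.Printf("%q\n", deleteChild.FindString("a .- b"))        // " .- "
	fmt.Printf("%q\n", deleteChild.FindString("a .- (b OR c)")) // " .- "
	fmt.Printf("%q\n", deleteChild.FindString("a.b"))           // "" (no delete-child token here)
}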
@@ -133,7 +135,7 @@ func (p *pathTokeniser) Tokenise(path string) ([]*lex.Token, error) {

		if tok != nil {
			token := tok.(*lex.Token)
-			log.Debugf("Processing %v - %v", token.Value, Tokens[token.Type])
+			log.Debugf("Tokenising %v - %v", token.Value, Tokens[token.Type])
			tokens = append(tokens, token)
		}
		if err != nil {
@@ -144,14 +146,14 @@ func (p *pathTokeniser) Tokenise(path string) ([]*lex.Token, error) {

	for index, token := range tokens {
		for _, literalTokenDef := range append(Literals, "ARRAY_INDEX", "(") {
-			if index > 0 && token.Type == TokenIds[literalTokenDef] && tokens[index-1].Type != TokenIds["TRAVERSE_OPERATOR"] && tokens[index-1].Type != TokenIds["EQUALS_OPERATOR"] && tokens[index-1].Type != TokenIds["EQUALS_SELF_OPERATOR"] {
+			if index > 0 && token.Type == TokenIds[literalTokenDef] && tokens[index-1].Type == TokenIds["PATH_KEY"] {
				postProcessedTokens = append(postProcessedTokens, &lex.Token{Type: TokenIds["TRAVERSE_OPERATOR"], Value: "."})
			}
		}

		postProcessedTokens = append(postProcessedTokens, token)
		for _, literalTokenDef := range append(Literals, "ARRAY_INDEX", ")") {
-			if index != len(tokens)-1 && token.Type == TokenIds[literalTokenDef] && tokens[index+1].Type != TokenIds["TRAVERSE_OPERATOR"] && tokens[index+1].Type != TokenIds[")"] {
+			if index != len(tokens)-1 && token.Type == TokenIds[literalTokenDef] && tokens[index+1].Type == TokenIds["PATH_KEY"] {
				postProcessedTokens = append(postProcessedTokens, &lex.Token{Type: TokenIds["TRAVERSE_OPERATOR"], Value: "."})
			}
		}
@@ -11,6 +11,8 @@ var tokeniserTests = []struct {
	expectedTokens []interface{}
}{ // TODO: Ensure ALL documented examples have tests! sheesh

	{"a OR (b OR c)", append(make([]interface{}, 0), "a", "OR", "(", "b", "OR", "c", ")")},
+	{"a .- (b OR c)", append(make([]interface{}, 0), "a", " .- ", "(", "b", "OR", "c", ")")},
	{"(animal==3)", append(make([]interface{}, 0), "(", "animal", "==", int64(3), ")")},
	{"(animal==f3)", append(make([]interface{}, 0), "(", "animal", "==", "f3", ")")},
	{"apples.BANANAS", append(make([]interface{}, 0), "apples", ".", "BANANAS")},
@@ -27,7 +29,7 @@ var tokeniserTests = []struct {
	{"a.b.[+]", append(make([]interface{}, 0), "a", ".", "b", ".", "[+]")},
	{"a.b[-12]", append(make([]interface{}, 0), "a", ".", "b", ".", int64(-12))},
	{"a.b.0", append(make([]interface{}, 0), "a", ".", "b", ".", int64(0))},
-	{"a.b.-12", append(make([]interface{}, 0), "a", ".", "b", ".", int64(-12))},
+	// {"a.b.-12", append(make([]interface{}, 0), "a", ".", "b", ".", int64(-12))},
	{"a", append(make([]interface{}, 0), "a")},
	{"\"a.b\".c", append(make([]interface{}, 0), "a.b", ".", "c")},
	{`b."foo.bar"`, append(make([]interface{}, 0), "b", ".", "foo.bar")},