Generic ast (#1829)
Remove the dependency on yaml.Node for the internal AST representation. The YAML decoder is now just another decoder.
parent 7430419413
commit 13d1bbb45f
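The heart of this change is that the evaluator and every decoder now work purely in terms of CandidateNode (with its own Kind, Tag, Value and Content fields) rather than wrapping *yaml.Node, so any format decoder can produce the AST directly. Below is a minimal, self-contained sketch of that shape, not the actual yq code: the Decoder interface and the CandidateNode fields mirror names visible in this diff, while the toy lineDecoder is purely hypothetical and exists only to illustrate how a non-YAML format can feed the same AST.

// Sketch only: a stripped-down CandidateNode AST plus a hypothetical decoder.
// Names Kind/Tag/Value/Content and Decode() (*CandidateNode, error) follow the
// diff; lineDecoder itself is invented for illustration.
package main

import (
	"bufio"
	"fmt"
	"io"
	"strings"
)

type Kind uint32

const (
	SequenceNode Kind = 1 << iota
	MappingNode
	ScalarNode
)

// CandidateNode is the format-agnostic AST node (heavily trimmed down here).
type CandidateNode struct {
	Kind    Kind
	Tag     string
	Value   string
	Content []*CandidateNode
}

// Decoder is the shape every format decoder now satisfies.
type Decoder interface {
	Init(reader io.Reader) error
	Decode() (*CandidateNode, error)
}

// lineDecoder is hypothetical: each input line becomes a !!str scalar in a
// top-level sequence, showing a non-YAML format feeding the same AST.
type lineDecoder struct{ scanner *bufio.Scanner }

func (d *lineDecoder) Init(r io.Reader) error {
	d.scanner = bufio.NewScanner(r)
	return nil
}

func (d *lineDecoder) Decode() (*CandidateNode, error) {
	root := &CandidateNode{Kind: SequenceNode, Tag: "!!seq"}
	for d.scanner.Scan() {
		root.Content = append(root.Content, &CandidateNode{
			Kind: ScalarNode, Tag: "!!str", Value: d.scanner.Text(),
		})
	}
	return root, d.scanner.Err()
}

func main() {
	var dec Decoder = &lineDecoder{}
	_ = dec.Init(strings.NewReader("foo\nbar"))
	node, _ := dec.Decode()
	fmt.Println(len(node.Content), node.Content[0].Value) // prints: 2 foo
}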
@@ -9,7 +9,7 @@ testLoadFileNotExist() {
testLoadFileExpNotExist() {
result=$(./yq e -n 'load(.a)' 2>&1)
assertEquals 1 $?
assertEquals "Error: Filename expression returned nil" "$result"
assertEquals "Error: filename expression returned nil" "$result"
}

testStrLoadFileNotExist() {
@@ -21,7 +21,7 @@ testStrLoadFileNotExist() {
testStrLoadFileExpNotExist() {
result=$(./yq e -n 'strload(.a)' 2>&1)
assertEquals 1 $?
assertEquals "Error: Filename expression returned nil" "$result"
assertEquals "Error: filename expression returned nil" "$result"
}

source ./scripts/shunit2
@@ -1 +1,2 @@
["foobar", "foobaz", "blarp"]
a: #things
meow
@@ -1 +1 @@
<cat>3</cat>
<zoo><thing><frog>boing</frog></thing></zoo>
@@ -1,11 +1,2 @@
# Some doc

a: true
b:
  c: 2
  d: [3, 4, 5]
e:
  - name: fred
    value: 3
  - name: sam
    value: 4
# things
a: apple
go.mod (4 changed lines)
@@ -25,9 +25,9 @@ require (
require (
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.17 // indirect
github.com/mattn/go-isatty v0.0.19 // indirect
golang.org/x/sys v0.13.0 // indirect
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect
)

go 1.20
go.sum (15 changed lines)
@@ -15,22 +15,27 @@ github.com/elliotchance/orderedmap v1.5.0 h1:1IsExUsjv5XNBD3ZdC7jkAAqLWOOKdbPTmk
github.com/elliotchance/orderedmap v1.5.0/go.mod h1:wsDwEaX5jEoyhbs7x93zk2H/qv0zwuhg4inXhDkYqys=
github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs=
github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw=
github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q=
github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no=
github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/goccy/go-yaml v1.11.2 h1:joq77SxuyIs9zzxEjgyLBugMQ9NEgTWxXfz2wVqwAaQ=
github.com/goccy/go-yaml v1.11.2/go.mod h1:wKnAMd44+9JAAnGQpWVEgBzGt3YuTaQ4uXoHvE4m7WU=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng=
github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgcC4zIxODThtZNPirFr42+A=
@@ -52,15 +57,17 @@ github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcU
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/yuin/gopher-lua v1.1.0 h1:BojcDhfyDWgU2f2TOzYK/g5p2gxMrku8oupLDqlnSqE=
github.com/yuin/gopher-lua v1.1.0/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM=
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f h1:uF6paiQQebLeSXkrTqHqz0MXhXXS1KgF41eUdBNvxK0=
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk=
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473 h1:6D+BvnJ/j6e222UW8s2qTSe3wGBtvo0MbVQG/c5k8RE=
@@ -2,8 +2,6 @@ package yqlib

import (
"container/list"

yaml "gopkg.in/yaml.v3"
)

// A yaml expression evaluator that runs the expression once against all files/nodes in memory.
@@ -11,7 +9,7 @@ type Evaluator interface {
EvaluateFiles(expression string, filenames []string, printer Printer, decoder Decoder) error

// EvaluateNodes takes an expression and one or more yaml nodes, returning a list of matching candidate nodes
EvaluateNodes(expression string, nodes ...*yaml.Node) (*list.List, error)
EvaluateNodes(expression string, nodes ...*CandidateNode) (*list.List, error)

// EvaluateCandidateNodes takes an expression and list of candidate nodes, returning a list of matching candidate nodes
EvaluateCandidateNodes(expression string, inputCandidateNodes *list.List) (*list.List, error)
@@ -26,10 +24,10 @@ func NewAllAtOnceEvaluator() Evaluator {
return &allAtOnceEvaluator{treeNavigator: NewDataTreeNavigator()}
}

func (e *allAtOnceEvaluator) EvaluateNodes(expression string, nodes ...*yaml.Node) (*list.List, error) {
func (e *allAtOnceEvaluator) EvaluateNodes(expression string, nodes ...*CandidateNode) (*list.List, error) {
inputCandidates := list.New()
for _, node := range nodes {
inputCandidates.PushBack(&CandidateNode{Node: node})
inputCandidates.PushBack(node)
}
return e.EvaluateCandidateNodes(expression, inputCandidates)
}
@@ -65,13 +63,7 @@ func (e *allAtOnceEvaluator) EvaluateFiles(expression string, filenames []string
}

if allDocuments.Len() == 0 {
candidateNode := &CandidateNode{
Document: 0,
Filename: "",
Node: &yaml.Node{Kind: yaml.DocumentNode, Content: []*yaml.Node{{Tag: "!!null", Kind: yaml.ScalarNode}}},
FileIndex: 0,
LeadingContent: "",
}
candidateNode := createScalarNode(nil, "")
allDocuments.PushBack(candidateNode)
}

@@ -1,6 +1,8 @@
package yqlib

import (
"bufio"
"strings"
"testing"

"github.com/mikefarah/yq/v4/test"
@@ -18,7 +20,7 @@ var evaluateNodesScenario = []expressionScenario{
document: `a: hello`,
expression: `.`,
expected: []string{
"D0, P[], (doc)::a: hello\n",
"D0, P[], (!!map)::a: hello\n",
},
},
{
@@ -32,9 +34,23 @@ var evaluateNodesScenario = []expressionScenario{

func TestAllAtOnceEvaluateNodes(t *testing.T) {
var evaluator = NewAllAtOnceEvaluator()
// logging.SetLevel(logging.DEBUG, "")
for _, tt := range evaluateNodesScenario {
node := test.ParseData(tt.document)
list, _ := evaluator.EvaluateNodes(tt.expression, &node)
decoder := NewYamlDecoder(NewDefaultYamlPreferences())
reader := bufio.NewReader(strings.NewReader(tt.document))
err := decoder.Init(reader)
if err != nil {
t.Error(err)
return
}
candidateNode, errorReading := decoder.Decode()

if errorReading != nil {
t.Error(errorReading)
return
}

list, _ := evaluator.EvaluateNodes(tt.expression, candidateNode)
test.AssertResultComplex(t, tt.expected, resultsToString(t, list))
}
}
@ -3,51 +3,189 @@ package yqlib
|
||||
import (
|
||||
"container/list"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/jinzhu/copier"
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type Kind uint32
|
||||
|
||||
const (
|
||||
SequenceNode Kind = 1 << iota
|
||||
MappingNode
|
||||
ScalarNode
|
||||
AliasNode
|
||||
)
|
||||
|
||||
type Style uint32
|
||||
|
||||
const (
|
||||
TaggedStyle Style = 1 << iota
|
||||
DoubleQuotedStyle
|
||||
SingleQuotedStyle
|
||||
LiteralStyle
|
||||
FoldedStyle
|
||||
FlowStyle
|
||||
)
|
||||
|
||||
func createStringScalarNode(stringValue string) *CandidateNode {
|
||||
var node = &CandidateNode{Kind: ScalarNode}
|
||||
node.Value = stringValue
|
||||
node.Tag = "!!str"
|
||||
return node
|
||||
}
|
||||
|
||||
func createScalarNode(value interface{}, stringValue string) *CandidateNode {
|
||||
var node = &CandidateNode{Kind: ScalarNode}
|
||||
node.Value = stringValue
|
||||
|
||||
switch value.(type) {
|
||||
case float32, float64:
|
||||
node.Tag = "!!float"
|
||||
case int, int64, int32:
|
||||
node.Tag = "!!int"
|
||||
case bool:
|
||||
node.Tag = "!!bool"
|
||||
case string:
|
||||
node.Tag = "!!str"
|
||||
case nil:
|
||||
node.Tag = "!!null"
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
type CandidateNode struct {
|
||||
Node *yaml.Node // the actual node
|
||||
Kind Kind
|
||||
Style Style
|
||||
|
||||
Tag string
|
||||
Value string
|
||||
Anchor string
|
||||
Alias *CandidateNode
|
||||
Content []*CandidateNode
|
||||
|
||||
HeadComment string
|
||||
LineComment string
|
||||
FootComment string
|
||||
|
||||
Parent *CandidateNode // parent node
|
||||
Key *yaml.Node // node key, if this is a value from a map (or index in an array)
|
||||
Key *CandidateNode // node key, if this is a value from a map (or index in an array)
|
||||
|
||||
LeadingContent string
|
||||
TrailingContent string
|
||||
|
||||
Path []interface{} /// the path we took to get to this node
|
||||
Document uint // the document index of this node
|
||||
Filename string
|
||||
FileIndex int
|
||||
document uint // the document index of this node
|
||||
filename string
|
||||
|
||||
Line int
|
||||
Column int
|
||||
|
||||
fileIndex int
|
||||
// when performing op against all nodes given, this will treat all the nodes as one
|
||||
// (e.g. top level cross document merge). This property does not propagate to child nodes.
|
||||
EvaluateTogether bool
|
||||
IsMapKey bool
|
||||
}
|
||||
|
||||
func (n *CandidateNode) CreateChild() *CandidateNode {
|
||||
return &CandidateNode{
|
||||
Parent: n,
|
||||
}
|
||||
}
|
||||
|
||||
func (n *CandidateNode) SetDocument(idx uint) {
|
||||
n.document = idx
|
||||
}
|
||||
|
||||
func (n *CandidateNode) GetDocument() uint {
|
||||
// defer to parent
|
||||
if n.Parent != nil {
|
||||
return n.Parent.GetDocument()
|
||||
}
|
||||
return n.document
|
||||
}
|
||||
|
||||
func (n *CandidateNode) SetFilename(name string) {
|
||||
n.filename = name
|
||||
}
|
||||
|
||||
func (n *CandidateNode) GetFilename() string {
|
||||
if n.Parent != nil {
|
||||
return n.Parent.GetFilename()
|
||||
}
|
||||
return n.filename
|
||||
}
|
||||
|
||||
func (n *CandidateNode) SetFileIndex(idx int) {
|
||||
n.fileIndex = idx
|
||||
}
|
||||
|
||||
func (n *CandidateNode) GetFileIndex() int {
|
||||
if n.Parent != nil {
|
||||
return n.Parent.GetFileIndex()
|
||||
}
|
||||
return n.fileIndex
|
||||
}
|
||||
|
||||
func (n *CandidateNode) GetKey() string {
|
||||
keyPrefix := ""
|
||||
if n.IsMapKey {
|
||||
keyPrefix = "key-"
|
||||
keyPrefix = fmt.Sprintf("key-%v-", n.Value)
|
||||
}
|
||||
return fmt.Sprintf("%v%v - %v", keyPrefix, n.Document, n.Path)
|
||||
key := ""
|
||||
if n.Key != nil {
|
||||
key = n.Key.Value
|
||||
}
|
||||
return fmt.Sprintf("%v%v - %v", keyPrefix, n.GetDocument(), key)
|
||||
}
|
||||
|
||||
func (n *CandidateNode) GetNiceTag() string {
|
||||
return unwrapDoc(n.Node).Tag
|
||||
func (n *CandidateNode) getParsedKey() interface{} {
|
||||
if n.IsMapKey {
|
||||
return n.Value
|
||||
}
|
||||
if n.Key == nil {
|
||||
return nil
|
||||
}
|
||||
if n.Key.Tag == "!!str" {
|
||||
return n.Key.Value
|
||||
}
|
||||
index, err := parseInt(n.Key.Value)
|
||||
if err != nil {
|
||||
return n.Key.Value
|
||||
}
|
||||
return index
|
||||
|
||||
}
|
||||
|
||||
func (n *CandidateNode) GetPath() []interface{} {
|
||||
key := n.getParsedKey()
|
||||
if n.Parent != nil && key != nil {
|
||||
return append(n.Parent.GetPath(), key)
|
||||
}
|
||||
|
||||
if key != nil {
|
||||
return []interface{}{key}
|
||||
}
|
||||
return make([]interface{}, 0)
|
||||
}
|
||||
|
||||
func (n *CandidateNode) GetNicePath() string {
|
||||
if n.Path != nil && len(n.Path) >= 0 {
|
||||
pathStr := make([]string, len(n.Path))
|
||||
for i, v := range n.Path {
|
||||
pathStr[i] = fmt.Sprintf("%v", v)
|
||||
var sb strings.Builder
|
||||
path := n.GetPath()
|
||||
for i, element := range path {
|
||||
elementStr := fmt.Sprintf("%v", element)
|
||||
switch element.(type) {
|
||||
case int:
|
||||
sb.WriteString("[" + elementStr + "]")
|
||||
default:
|
||||
if i == 0 {
|
||||
sb.WriteString(elementStr)
|
||||
} else if strings.ContainsRune(elementStr, '.') {
|
||||
sb.WriteString("[" + elementStr + "]")
|
||||
} else {
|
||||
sb.WriteString("." + elementStr)
|
||||
}
|
||||
return strings.Join(pathStr, ".")
|
||||
}
|
||||
return ""
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func (n *CandidateNode) AsList() *list.List {
|
||||
@ -56,134 +194,233 @@ func (n *CandidateNode) AsList() *list.List {
|
||||
return elMap
|
||||
}
|
||||
|
||||
func (n *CandidateNode) CreateChildInMap(key *yaml.Node, node *yaml.Node) *CandidateNode {
|
||||
var value interface{}
|
||||
if key != nil {
|
||||
value = key.Value
|
||||
func (n *CandidateNode) SetParent(parent *CandidateNode) {
|
||||
n.Parent = parent
|
||||
}
|
||||
|
||||
func (n *CandidateNode) AddKeyValueChild(rawKey *CandidateNode, rawValue *CandidateNode) (*CandidateNode, *CandidateNode) {
|
||||
key := rawKey.Copy()
|
||||
key.SetParent(n)
|
||||
key.IsMapKey = true
|
||||
|
||||
value := rawValue.Copy()
|
||||
value.SetParent(n)
|
||||
value.Key = key
|
||||
|
||||
n.Content = append(n.Content, key, value)
|
||||
return key, value
|
||||
}
|
||||
|
||||
func (n *CandidateNode) AddChild(rawChild *CandidateNode) {
|
||||
value := rawChild.Copy()
|
||||
value.SetParent(n)
|
||||
if value.Key != nil {
|
||||
value.Key.SetParent(n)
|
||||
} else {
|
||||
index := len(n.Content)
|
||||
keyNode := createScalarNode(index, fmt.Sprintf("%v", index))
|
||||
keyNode.SetParent(n)
|
||||
value.Key = keyNode
|
||||
}
|
||||
n.Content = append(n.Content, value)
|
||||
}
|
||||
|
||||
func (n *CandidateNode) AddChildren(children []*CandidateNode) {
|
||||
if n.Kind == MappingNode {
|
||||
for i := 0; i < len(children); i += 2 {
|
||||
key := children[i]
|
||||
value := children[i+1]
|
||||
n.AddKeyValueChild(key, value)
|
||||
}
|
||||
|
||||
} else {
|
||||
for _, rawChild := range children {
|
||||
n.AddChild(rawChild)
|
||||
}
|
||||
return &CandidateNode{
|
||||
Node: node,
|
||||
Path: n.createChildPath(value),
|
||||
Parent: n,
|
||||
Key: key,
|
||||
Document: n.Document,
|
||||
Filename: n.Filename,
|
||||
FileIndex: n.FileIndex,
|
||||
}
|
||||
}
|
||||
|
||||
func (n *CandidateNode) CreateChildInArray(index int, node *yaml.Node) *CandidateNode {
|
||||
return &CandidateNode{
|
||||
Node: node,
|
||||
Path: n.createChildPath(index),
|
||||
Parent: n,
|
||||
Key: &yaml.Node{Kind: yaml.ScalarNode, Value: fmt.Sprintf("%v", index), Tag: "!!int"},
|
||||
Document: n.Document,
|
||||
Filename: n.Filename,
|
||||
FileIndex: n.FileIndex,
|
||||
func (n *CandidateNode) GetValueRep() (interface{}, error) {
|
||||
log.Debugf("GetValueRep for %v value: %v", n.GetNicePath(), n.Value)
|
||||
realTag := n.guessTagFromCustomType()
|
||||
|
||||
switch realTag {
|
||||
case "!!int":
|
||||
_, val, err := parseInt64(n.Value)
|
||||
return val, err
|
||||
case "!!float":
|
||||
// need to test this
|
||||
return strconv.ParseFloat(n.Value, 64)
|
||||
case "!!bool":
|
||||
return isTruthyNode(n), nil
|
||||
case "!!null":
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return n.Value, nil
|
||||
}
|
||||
|
||||
func (n *CandidateNode) CreateReplacement(node *yaml.Node) *CandidateNode {
|
||||
return &CandidateNode{
|
||||
Node: node,
|
||||
Path: n.createChildPath(nil),
|
||||
Parent: n.Parent,
|
||||
Key: n.Key,
|
||||
IsMapKey: n.IsMapKey,
|
||||
Document: n.Document,
|
||||
Filename: n.Filename,
|
||||
FileIndex: n.FileIndex,
|
||||
func (n *CandidateNode) guessTagFromCustomType() string {
|
||||
if strings.HasPrefix(n.Tag, "!!") {
|
||||
return n.Tag
|
||||
} else if n.Value == "" {
|
||||
log.Debug("guessTagFromCustomType: node has no value to guess the type with")
|
||||
return n.Tag
|
||||
}
|
||||
dataBucket, errorReading := parseSnippet(n.Value)
|
||||
|
||||
if errorReading != nil {
|
||||
log.Debug("guessTagFromCustomType: could not guess underlying tag type %v", errorReading)
|
||||
return n.Tag
|
||||
}
|
||||
guessedTag := dataBucket.Tag
|
||||
log.Info("im guessing the tag %v is a %v", n.Tag, guessedTag)
|
||||
return guessedTag
|
||||
}
|
||||
|
||||
func (n *CandidateNode) CreateReplacementWithDocWrappers(node *yaml.Node) *CandidateNode {
|
||||
replacement := n.CreateReplacement(node)
|
||||
func (n *CandidateNode) CreateReplacement(kind Kind, tag string, value string) *CandidateNode {
|
||||
node := &CandidateNode{
|
||||
Kind: kind,
|
||||
Tag: tag,
|
||||
Value: value,
|
||||
}
|
||||
return n.CopyAsReplacement(node)
|
||||
}
|
||||
|
||||
func (n *CandidateNode) CopyAsReplacement(replacement *CandidateNode) *CandidateNode {
|
||||
newCopy := replacement.Copy()
|
||||
newCopy.Parent = n.Parent
|
||||
|
||||
if n.IsMapKey {
|
||||
newCopy.Key = n
|
||||
} else {
|
||||
newCopy.Key = n.Key
|
||||
}
|
||||
|
||||
return newCopy
|
||||
}
|
||||
|
||||
func (n *CandidateNode) CreateReplacementWithComments(kind Kind, tag string, style Style) *CandidateNode {
|
||||
replacement := n.CreateReplacement(kind, tag, "")
|
||||
replacement.LeadingContent = n.LeadingContent
|
||||
replacement.TrailingContent = n.TrailingContent
|
||||
replacement.HeadComment = n.HeadComment
|
||||
replacement.LineComment = n.LineComment
|
||||
replacement.FootComment = n.FootComment
|
||||
replacement.Style = style
|
||||
return replacement
|
||||
}
|
||||
|
||||
func (n *CandidateNode) createChildPath(path interface{}) []interface{} {
|
||||
if path == nil {
|
||||
newPath := make([]interface{}, len(n.Path))
|
||||
copy(newPath, n.Path)
|
||||
return newPath
|
||||
}
|
||||
|
||||
//don't use append as they may actually modify the path of the original node!
|
||||
newPath := make([]interface{}, len(n.Path)+1)
|
||||
copy(newPath, n.Path)
|
||||
newPath[len(n.Path)] = path
|
||||
return newPath
|
||||
func (n *CandidateNode) Copy() *CandidateNode {
|
||||
return n.doCopy(true)
|
||||
}
|
||||
|
||||
func (n *CandidateNode) Copy() (*CandidateNode, error) {
|
||||
clone := &CandidateNode{}
|
||||
err := copier.Copy(clone, n)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
func (n *CandidateNode) CopyWithoutContent() *CandidateNode {
|
||||
return n.doCopy(false)
|
||||
}
|
||||
|
||||
func (n *CandidateNode) doCopy(cloneContent bool) *CandidateNode {
|
||||
var content []*CandidateNode
|
||||
|
||||
var copyKey *CandidateNode
|
||||
if n.Key != nil {
|
||||
copyKey = n.Key.Copy()
|
||||
}
|
||||
clone.Node = deepClone(n.Node)
|
||||
return clone, nil
|
||||
|
||||
clone := &CandidateNode{
|
||||
Kind: n.Kind,
|
||||
Style: n.Style,
|
||||
|
||||
Tag: n.Tag,
|
||||
Value: n.Value,
|
||||
Anchor: n.Anchor,
|
||||
|
||||
// ok not to clone this,
|
||||
// as its a reference to somewhere else.
|
||||
Alias: n.Alias,
|
||||
Content: content,
|
||||
|
||||
HeadComment: n.HeadComment,
|
||||
LineComment: n.LineComment,
|
||||
FootComment: n.FootComment,
|
||||
|
||||
Parent: n.Parent,
|
||||
Key: copyKey,
|
||||
|
||||
LeadingContent: n.LeadingContent,
|
||||
|
||||
document: n.document,
|
||||
filename: n.filename,
|
||||
fileIndex: n.fileIndex,
|
||||
|
||||
Line: n.Line,
|
||||
Column: n.Column,
|
||||
|
||||
EvaluateTogether: n.EvaluateTogether,
|
||||
IsMapKey: n.IsMapKey,
|
||||
}
|
||||
|
||||
if cloneContent {
|
||||
clone.AddChildren(n.Content)
|
||||
}
|
||||
|
||||
return clone
|
||||
}
|
||||
|
||||
// updates this candidate from the given candidate node
|
||||
func (n *CandidateNode) UpdateFrom(other *CandidateNode, prefs assignPreferences) {
|
||||
|
||||
// if this is an empty map or empty array, use the style of other node.
|
||||
if (n.Node.Kind != yaml.ScalarNode && len(n.Node.Content) == 0) ||
|
||||
if (n.Kind != ScalarNode && len(n.Content) == 0) ||
|
||||
// if the tag has changed (e.g. from str to bool)
|
||||
(guessTagFromCustomType(n.Node) != guessTagFromCustomType(other.Node)) {
|
||||
n.Node.Style = other.Node.Style
|
||||
(n.guessTagFromCustomType() != other.guessTagFromCustomType()) {
|
||||
n.Style = other.Style
|
||||
}
|
||||
|
||||
n.Node.Content = deepCloneContent(other.Node.Content)
|
||||
n.Node.Kind = other.Node.Kind
|
||||
n.Node.Value = other.Node.Value
|
||||
n.Content = make([]*CandidateNode, 0)
|
||||
n.Kind = other.Kind
|
||||
n.AddChildren(other.Content)
|
||||
|
||||
n.Value = other.Value
|
||||
|
||||
n.UpdateAttributesFrom(other, prefs)
|
||||
|
||||
}
|
||||
|
||||
func (n *CandidateNode) UpdateAttributesFrom(other *CandidateNode, prefs assignPreferences) {
|
||||
log.Debug("UpdateAttributesFrom: n: %v other: %v", n.GetKey(), other.GetKey())
|
||||
if n.Node.Kind != other.Node.Kind {
|
||||
log.Debug("UpdateAttributesFrom: n: %v other: %v", NodeToString(n), NodeToString(other))
|
||||
if n.Kind != other.Kind {
|
||||
// clear out the contents when switching to a different type
|
||||
// e.g. map to array
|
||||
n.Node.Content = make([]*yaml.Node, 0)
|
||||
n.Node.Value = ""
|
||||
n.Content = make([]*CandidateNode, 0)
|
||||
n.Value = ""
|
||||
}
|
||||
n.Node.Kind = other.Node.Kind
|
||||
n.Kind = other.Kind
|
||||
|
||||
// don't clobber custom tags...
|
||||
if prefs.ClobberCustomTags || strings.HasPrefix(n.Node.Tag, "!!") || n.Node.Tag == "" {
|
||||
n.Node.Tag = other.Node.Tag
|
||||
if prefs.ClobberCustomTags || strings.HasPrefix(n.Tag, "!!") || n.Tag == "" {
|
||||
n.Tag = other.Tag
|
||||
}
|
||||
|
||||
n.Node.Alias = other.Node.Alias
|
||||
n.Alias = other.Alias
|
||||
|
||||
if !prefs.DontOverWriteAnchor {
|
||||
n.Node.Anchor = other.Node.Anchor
|
||||
n.Anchor = other.Anchor
|
||||
}
|
||||
|
||||
// merge will pickup the style of the new thing
|
||||
// when autocreating nodes
|
||||
|
||||
if n.Node.Style == 0 {
|
||||
n.Node.Style = other.Node.Style
|
||||
if n.Style == 0 {
|
||||
n.Style = other.Style
|
||||
}
|
||||
|
||||
if other.Node.FootComment != "" {
|
||||
n.Node.FootComment = other.Node.FootComment
|
||||
if other.FootComment != "" {
|
||||
n.FootComment = other.FootComment
|
||||
}
|
||||
if other.TrailingContent != "" {
|
||||
n.TrailingContent = other.TrailingContent
|
||||
if other.HeadComment != "" {
|
||||
n.HeadComment = other.HeadComment
|
||||
}
|
||||
if other.Node.HeadComment != "" {
|
||||
n.Node.HeadComment = other.Node.HeadComment
|
||||
}
|
||||
if other.Node.LineComment != "" {
|
||||
n.Node.LineComment = other.Node.LineComment
|
||||
if other.LineComment != "" {
|
||||
n.LineComment = other.LineComment
|
||||
}
|
||||
}
|
||||
|
pkg/yqlib/candidate_node_goccy_yaml.go (new file, 180 lines)
@@ -0,0 +1,180 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/goccy/go-yaml/ast"
|
||||
goccyToken "github.com/goccy/go-yaml/token"
|
||||
)
|
||||
|
||||
func (o *CandidateNode) goccyDecodeIntoChild(childNode ast.Node, cm yaml.CommentMap) (*CandidateNode, error) {
|
||||
newChild := o.CreateChild()
|
||||
|
||||
err := newChild.UnmarshalGoccyYAML(childNode, cm)
|
||||
return newChild, err
|
||||
}
|
||||
|
||||
func (o *CandidateNode) UnmarshalGoccyYAML(node ast.Node, cm yaml.CommentMap) error {
|
||||
log.Debugf("UnmarshalYAML %v", node)
|
||||
log.Debugf("UnmarshalYAML %v", node.Type().String())
|
||||
log.Debugf("UnmarshalYAML Node Value: %v", node.String())
|
||||
log.Debugf("UnmarshalYAML Node GetComment: %v", node.GetComment())
|
||||
|
||||
if node.GetComment() != nil {
|
||||
commentMapComments := cm[node.GetPath()]
|
||||
for _, comment := range node.GetComment().Comments {
|
||||
// need to use the comment map to find the position :/
|
||||
log.Debugf("%v has a comment of [%v]", node.GetPath(), comment.Token.Value)
|
||||
for _, commentMapComment := range commentMapComments {
|
||||
commentMapValue := strings.Join(commentMapComment.Texts, "\n")
|
||||
if commentMapValue == comment.Token.Value {
|
||||
log.Debug("found a matching entry in comment map")
|
||||
// we found the comment in the comment map,
|
||||
// now we can process the position
|
||||
switch commentMapComment.Position {
|
||||
case yaml.CommentHeadPosition:
|
||||
o.HeadComment = comment.String()
|
||||
log.Debug("its a head comment %v", comment.String())
|
||||
case yaml.CommentLinePosition:
|
||||
o.LineComment = comment.String()
|
||||
log.Debug("its a line comment %v", comment.String())
|
||||
case yaml.CommentFootPosition:
|
||||
o.FootComment = comment.String()
|
||||
log.Debug("its a foot comment %v", comment.String())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
o.Value = node.String()
|
||||
switch node.Type() {
|
||||
case ast.IntegerType:
|
||||
o.Kind = ScalarNode
|
||||
o.Tag = "!!int"
|
||||
case ast.FloatType:
|
||||
o.Kind = ScalarNode
|
||||
o.Tag = "!!float"
|
||||
case ast.BoolType:
|
||||
o.Kind = ScalarNode
|
||||
o.Tag = "!!bool"
|
||||
case ast.NullType:
|
||||
o.Kind = ScalarNode
|
||||
o.Tag = "!!null"
|
||||
o.Value = node.GetToken().Value
|
||||
case ast.StringType:
|
||||
o.Kind = ScalarNode
|
||||
o.Tag = "!!str"
|
||||
switch node.GetToken().Type {
|
||||
case goccyToken.SingleQuoteType:
|
||||
o.Style = SingleQuotedStyle
|
||||
case goccyToken.DoubleQuoteType:
|
||||
o.Style = DoubleQuotedStyle
|
||||
}
|
||||
o.Value = node.(*ast.StringNode).Value
|
||||
log.Debugf("string value %v", node.(*ast.StringNode).Value)
|
||||
case ast.LiteralType:
|
||||
o.Kind = ScalarNode
|
||||
o.Tag = "!!str"
|
||||
o.Style = LiteralStyle
|
||||
astLiteral := node.(*ast.LiteralNode)
|
||||
log.Debugf("astLiteral.Start.Type %v", astLiteral.Start.Type)
|
||||
if astLiteral.Start.Type == goccyToken.FoldedType {
|
||||
log.Debugf("folded Type %v", astLiteral.Start.Type)
|
||||
o.Style = FoldedStyle
|
||||
}
|
||||
log.Debug("start value: %v ", node.(*ast.LiteralNode).Start.Value)
|
||||
log.Debug("start value: %v ", node.(*ast.LiteralNode).Start.Type)
|
||||
// TODO: here I could put the original value with line breaks
|
||||
// to solve the multiline > problem
|
||||
o.Value = astLiteral.Value.Value
|
||||
case ast.TagType:
|
||||
if err := o.UnmarshalGoccyYAML(node.(*ast.TagNode).Value, cm); err != nil {
|
||||
return err
|
||||
}
|
||||
o.Tag = node.(*ast.TagNode).Start.Value
|
||||
case ast.MappingType:
|
||||
log.Debugf("UnmarshalYAML - a mapping node")
|
||||
o.Kind = MappingNode
|
||||
o.Tag = "!!map"
|
||||
|
||||
mappingNode := node.(*ast.MappingNode)
|
||||
if mappingNode.IsFlowStyle {
|
||||
o.Style = FlowStyle
|
||||
}
|
||||
for _, mappingValueNode := range mappingNode.Values {
|
||||
err := o.goccyProcessMappingValueNode(mappingValueNode, cm)
|
||||
if err != nil {
|
||||
return ast.ErrInvalidAnchorName
|
||||
}
|
||||
}
|
||||
if mappingNode.FootComment != nil {
|
||||
log.Debugf("mapping node has a foot comment of: %v", mappingNode.FootComment)
|
||||
o.FootComment = mappingNode.FootComment.String()
|
||||
}
|
||||
case ast.MappingValueType:
|
||||
log.Debugf("UnmarshalYAML - a mapping node")
|
||||
o.Kind = MappingNode
|
||||
o.Tag = "!!map"
|
||||
mappingValueNode := node.(*ast.MappingValueNode)
|
||||
err := o.goccyProcessMappingValueNode(mappingValueNode, cm)
|
||||
if err != nil {
|
||||
return ast.ErrInvalidAnchorName
|
||||
}
|
||||
case ast.SequenceType:
|
||||
log.Debugf("UnmarshalYAML - a sequence node")
|
||||
o.Kind = SequenceNode
|
||||
o.Tag = "!!seq"
|
||||
sequenceNode := node.(*ast.SequenceNode)
|
||||
if sequenceNode.IsFlowStyle {
|
||||
o.Style = FlowStyle
|
||||
}
|
||||
astSeq := sequenceNode.Values
|
||||
o.Content = make([]*CandidateNode, len(astSeq))
|
||||
for i := 0; i < len(astSeq); i++ {
|
||||
keyNode := o.CreateChild()
|
||||
keyNode.IsMapKey = true
|
||||
keyNode.Tag = "!!int"
|
||||
keyNode.Kind = ScalarNode
|
||||
keyNode.Value = fmt.Sprintf("%v", i)
|
||||
|
||||
valueNode, err := o.goccyDecodeIntoChild(astSeq[i], cm)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
valueNode.Key = keyNode
|
||||
o.Content[i] = valueNode
|
||||
}
|
||||
|
||||
default:
|
||||
log.Debugf("UnmarshalYAML - node idea of the type!!")
|
||||
}
|
||||
log.Debugf("KIND: %v", o.Kind)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (o *CandidateNode) goccyProcessMappingValueNode(mappingEntry *ast.MappingValueNode, cm yaml.CommentMap) error {
|
||||
log.Debug("UnmarshalYAML MAP KEY entry %v", mappingEntry.Key)
|
||||
keyNode, err := o.goccyDecodeIntoChild(mappingEntry.Key, cm)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
keyNode.IsMapKey = true
|
||||
|
||||
log.Debug("UnmarshalYAML MAP VALUE entry %v", mappingEntry.Value)
|
||||
valueNode, err := o.goccyDecodeIntoChild(mappingEntry.Value, cm)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if mappingEntry.FootComment != nil {
|
||||
valueNode.FootComment = mappingEntry.FootComment.String()
|
||||
}
|
||||
o.AddKeyValueChild(keyNode, valueNode)
|
||||
|
||||
return nil
|
||||
}
|
pkg/yqlib/candidate_node_test.go (new file, 149 lines)
@@ -0,0 +1,149 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
)
|
||||
|
||||
type valueRepScenario struct {
|
||||
input string
|
||||
tag string
|
||||
expected interface{}
|
||||
}
|
||||
|
||||
var valueRepScenarios = []valueRepScenario{
|
||||
{
|
||||
input: `"cat"`,
|
||||
expected: `"cat"`,
|
||||
},
|
||||
{
|
||||
input: `3`,
|
||||
expected: int64(3),
|
||||
},
|
||||
{
|
||||
input: `3.1`,
|
||||
expected: float64(3.1),
|
||||
},
|
||||
{
|
||||
input: `true`,
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
input: `y`,
|
||||
tag: "!!bool",
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
tag: "!!null",
|
||||
expected: nil,
|
||||
},
|
||||
}
|
||||
|
||||
func TestCandidateNodeGetValueRepScenarios(t *testing.T) {
|
||||
for _, tt := range valueRepScenarios {
|
||||
node := CandidateNode{Value: tt.input, Tag: tt.tag}
|
||||
actual, err := node.GetValueRep()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
test.AssertResult(t, tt.expected, actual)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCandidateNodeChildWhenParentUpdated(t *testing.T) {
|
||||
parent := CandidateNode{}
|
||||
child := parent.CreateChild()
|
||||
parent.SetDocument(1)
|
||||
parent.SetFileIndex(2)
|
||||
parent.SetFilename("meow")
|
||||
test.AssertResultWithContext(t, "meow", child.GetFilename(), "filename")
|
||||
test.AssertResultWithContext(t, 2, child.GetFileIndex(), "file index")
|
||||
test.AssertResultWithContext(t, uint(1), child.GetDocument(), "document index")
|
||||
}
|
||||
|
||||
type createScalarNodeScenario struct {
|
||||
value interface{}
|
||||
stringValue string
|
||||
expectedTag string
|
||||
}
|
||||
|
||||
var createScalarScenarios = []createScalarNodeScenario{
|
||||
{
|
||||
value: "mike",
|
||||
stringValue: "mike",
|
||||
expectedTag: "!!str",
|
||||
},
|
||||
{
|
||||
value: 3,
|
||||
stringValue: "3",
|
||||
expectedTag: "!!int",
|
||||
},
|
||||
{
|
||||
value: 3.1,
|
||||
stringValue: "3.1",
|
||||
expectedTag: "!!float",
|
||||
},
|
||||
{
|
||||
value: true,
|
||||
stringValue: "true",
|
||||
expectedTag: "!!bool",
|
||||
},
|
||||
{
|
||||
value: nil,
|
||||
stringValue: "~",
|
||||
expectedTag: "!!null",
|
||||
},
|
||||
}
|
||||
|
||||
func TestCreateScalarNodeScenarios(t *testing.T) {
|
||||
for _, tt := range createScalarScenarios {
|
||||
actual := createScalarNode(tt.value, tt.stringValue)
|
||||
test.AssertResultWithContext(t, tt.stringValue, actual.Value, fmt.Sprintf("Value for: Value: [%v], String: %v", tt.value, tt.stringValue))
|
||||
test.AssertResultWithContext(t, tt.expectedTag, actual.Tag, fmt.Sprintf("Value for: Value: [%v], String: %v", tt.value, tt.stringValue))
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetKeyForMapValue(t *testing.T) {
|
||||
key := createStringScalarNode("yourKey")
|
||||
n := CandidateNode{Key: key, Value: "meow", document: 3}
|
||||
test.AssertResult(t, "3 - yourKey", n.GetKey())
|
||||
}
|
||||
|
||||
func TestGetKeyForMapKey(t *testing.T) {
|
||||
key := createStringScalarNode("yourKey")
|
||||
key.IsMapKey = true
|
||||
key.document = 3
|
||||
test.AssertResult(t, "key-yourKey-3 - ", key.GetKey())
|
||||
}
|
||||
|
||||
func TestGetKeyForValue(t *testing.T) {
|
||||
n := CandidateNode{Value: "meow", document: 3}
|
||||
test.AssertResult(t, "3 - ", n.GetKey())
|
||||
}
|
||||
|
||||
func TestGetParsedKeyForMapKey(t *testing.T) {
|
||||
key := createStringScalarNode("yourKey")
|
||||
key.IsMapKey = true
|
||||
key.document = 3
|
||||
test.AssertResult(t, "yourKey", key.getParsedKey())
|
||||
}
|
||||
|
||||
func TestGetParsedKeyForLooseValue(t *testing.T) {
|
||||
n := CandidateNode{Value: "meow", document: 3}
|
||||
test.AssertResult(t, nil, n.getParsedKey())
|
||||
}
|
||||
|
||||
func TestGetParsedKeyForMapValue(t *testing.T) {
|
||||
key := createStringScalarNode("yourKey")
|
||||
n := CandidateNode{Key: key, Value: "meow", document: 3}
|
||||
test.AssertResult(t, "yourKey", n.getParsedKey())
|
||||
}
|
||||
|
||||
func TestGetParsedKeyForArrayValue(t *testing.T) {
|
||||
key := createScalarNode(4, "4")
|
||||
n := CandidateNode{Key: key, Value: "meow", document: 3}
|
||||
test.AssertResult(t, 4, n.getParsedKey())
|
||||
}
|
pkg/yqlib/candidate_node_yaml.go (new file, 218 lines)
@@ -0,0 +1,218 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
func MapYamlStyle(original yaml.Style) Style {
|
||||
switch original {
|
||||
case yaml.TaggedStyle:
|
||||
return TaggedStyle
|
||||
case yaml.DoubleQuotedStyle:
|
||||
return DoubleQuotedStyle
|
||||
case yaml.SingleQuotedStyle:
|
||||
return SingleQuotedStyle
|
||||
case yaml.LiteralStyle:
|
||||
return LiteralStyle
|
||||
case yaml.FoldedStyle:
|
||||
return FoldedStyle
|
||||
case yaml.FlowStyle:
|
||||
return FlowStyle
|
||||
case 0:
|
||||
return 0
|
||||
}
|
||||
return Style(original)
|
||||
}
|
||||
|
||||
func MapToYamlStyle(original Style) yaml.Style {
|
||||
switch original {
|
||||
case TaggedStyle:
|
||||
return yaml.TaggedStyle
|
||||
case DoubleQuotedStyle:
|
||||
return yaml.DoubleQuotedStyle
|
||||
case SingleQuotedStyle:
|
||||
return yaml.SingleQuotedStyle
|
||||
case LiteralStyle:
|
||||
return yaml.LiteralStyle
|
||||
case FoldedStyle:
|
||||
return yaml.FoldedStyle
|
||||
case FlowStyle:
|
||||
return yaml.FlowStyle
|
||||
case 0:
|
||||
return 0
|
||||
}
|
||||
return yaml.Style(original)
|
||||
}
|
||||
|
||||
func (o *CandidateNode) copyFromYamlNode(node *yaml.Node, anchorMap map[string]*CandidateNode) {
|
||||
o.Style = MapYamlStyle(node.Style)
|
||||
|
||||
o.Tag = node.Tag
|
||||
o.Value = node.Value
|
||||
o.Anchor = node.Anchor
|
||||
|
||||
if o.Anchor != "" {
|
||||
anchorMap[o.Anchor] = o
|
||||
log.Debug("set anchor %v to %v", o.Anchor, NodeToString(o))
|
||||
}
|
||||
|
||||
// its a single alias
|
||||
if node.Alias != nil && node.Alias.Anchor != "" {
|
||||
o.Alias = anchorMap[node.Alias.Anchor]
|
||||
log.Debug("set alias to %v", NodeToString(anchorMap[node.Alias.Anchor]))
|
||||
}
|
||||
o.HeadComment = node.HeadComment
|
||||
o.LineComment = node.LineComment
|
||||
o.FootComment = node.FootComment
|
||||
|
||||
o.Line = node.Line
|
||||
o.Column = node.Column
|
||||
}
|
||||
|
||||
func (o *CandidateNode) copyToYamlNode(node *yaml.Node) {
|
||||
node.Style = MapToYamlStyle(o.Style)
|
||||
|
||||
node.Tag = o.Tag
|
||||
node.Value = o.Value
|
||||
node.Anchor = o.Anchor
|
||||
|
||||
// node.Alias = TODO - find Alias in our own structure
|
||||
// might need to be a post process thing
|
||||
|
||||
node.HeadComment = o.HeadComment
|
||||
|
||||
node.LineComment = o.LineComment
|
||||
node.FootComment = o.FootComment
|
||||
|
||||
node.Line = o.Line
|
||||
node.Column = o.Column
|
||||
}
|
||||
|
||||
func (o *CandidateNode) decodeIntoChild(childNode *yaml.Node, anchorMap map[string]*CandidateNode) (*CandidateNode, error) {
|
||||
newChild := o.CreateChild()
|
||||
|
||||
// null yaml.Nodes to not end up calling UnmarshalYAML
|
||||
// so we call it explicitly
|
||||
if childNode.Tag == "!!null" {
|
||||
newChild.Kind = ScalarNode
|
||||
newChild.copyFromYamlNode(childNode, anchorMap)
|
||||
return newChild, nil
|
||||
}
|
||||
|
||||
err := newChild.UnmarshalYAML(childNode, anchorMap)
|
||||
return newChild, err
|
||||
}
|
||||
|
||||
func (o *CandidateNode) UnmarshalYAML(node *yaml.Node, anchorMap map[string]*CandidateNode) error {
|
||||
log.Debugf("UnmarshalYAML %v", node.Tag)
|
||||
switch node.Kind {
|
||||
case yaml.AliasNode:
|
||||
log.Debug("UnmarshalYAML - alias from yaml: %v", o.Tag)
|
||||
o.Kind = AliasNode
|
||||
o.copyFromYamlNode(node, anchorMap)
|
||||
return nil
|
||||
case yaml.ScalarNode:
|
||||
log.Debugf("UnmarshalYAML - a scalar")
|
||||
o.Kind = ScalarNode
|
||||
o.copyFromYamlNode(node, anchorMap)
|
||||
return nil
|
||||
case yaml.MappingNode:
|
||||
log.Debugf("UnmarshalYAML - a mapping node")
|
||||
o.Kind = MappingNode
|
||||
o.copyFromYamlNode(node, anchorMap)
|
||||
o.Content = make([]*CandidateNode, len(node.Content))
|
||||
for i := 0; i < len(node.Content); i += 2 {
|
||||
|
||||
keyNode, err := o.decodeIntoChild(node.Content[i], anchorMap)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
keyNode.IsMapKey = true
|
||||
|
||||
valueNode, err := o.decodeIntoChild(node.Content[i+1], anchorMap)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
valueNode.Key = keyNode
|
||||
|
||||
o.Content[i] = keyNode
|
||||
o.Content[i+1] = valueNode
|
||||
}
|
||||
log.Debugf("UnmarshalYAML - finished mapping node")
|
||||
return nil
|
||||
case yaml.SequenceNode:
|
||||
log.Debugf("UnmarshalYAML - a sequence: %v", len(node.Content))
|
||||
o.Kind = SequenceNode
|
||||
|
||||
o.copyFromYamlNode(node, anchorMap)
|
||||
log.Debugf("node Style: %v", node.Style)
|
||||
log.Debugf("o Style: %v", o.Style)
|
||||
o.Content = make([]*CandidateNode, len(node.Content))
|
||||
for i := 0; i < len(node.Content); i++ {
|
||||
keyNode := o.CreateChild()
|
||||
keyNode.IsMapKey = true
|
||||
keyNode.Tag = "!!int"
|
||||
keyNode.Kind = ScalarNode
|
||||
keyNode.Value = fmt.Sprintf("%v", i)
|
||||
|
||||
valueNode, err := o.decodeIntoChild(node.Content[i], anchorMap)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
valueNode.Key = keyNode
|
||||
o.Content[i] = valueNode
|
||||
}
|
||||
return nil
|
||||
case 0:
|
||||
// not sure when this happens
|
||||
o.copyFromYamlNode(node, anchorMap)
|
||||
log.Debugf("UnmarshalYAML - err.. %v", NodeToString(o))
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("orderedMap: invalid yaml node")
|
||||
}
|
||||
}
|
||||
|
||||
func (o *CandidateNode) MarshalYAML() (*yaml.Node, error) {
|
||||
log.Debug("MarshalYAML to yaml: %v", o.Tag)
|
||||
switch o.Kind {
|
||||
case AliasNode:
|
||||
log.Debug("MarshalYAML - alias to yaml: %v", o.Tag)
|
||||
target := &yaml.Node{Kind: yaml.AliasNode}
|
||||
o.copyToYamlNode(target)
|
||||
return target, nil
|
||||
case ScalarNode:
|
||||
log.Debug("MarshalYAML - scalar: %v", o.Value)
|
||||
target := &yaml.Node{Kind: yaml.ScalarNode}
|
||||
o.copyToYamlNode(target)
|
||||
return target, nil
|
||||
case MappingNode, SequenceNode:
|
||||
targetKind := yaml.MappingNode
|
||||
if o.Kind == SequenceNode {
|
||||
targetKind = yaml.SequenceNode
|
||||
}
|
||||
target := &yaml.Node{Kind: targetKind}
|
||||
o.copyToYamlNode(target)
|
||||
log.Debugf("original style: %v", o.Style)
|
||||
log.Debugf("original: %v, tag: %v, style: %v, kind: %v", NodeToString(o), target.Tag, target.Style, target.Kind == yaml.SequenceNode)
|
||||
target.Content = make([]*yaml.Node, len(o.Content))
|
||||
for i := 0; i < len(o.Content); i++ {
|
||||
|
||||
child, err := o.Content[i].MarshalYAML()
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
target.Content[i] = child
|
||||
}
|
||||
return target, nil
|
||||
}
|
||||
target := &yaml.Node{}
|
||||
o.copyToYamlNode(target)
|
||||
return target, nil
|
||||
}
|
pkg/yqlib/candidiate_node_json.go (new file, 172 lines)
@@ -0,0 +1,172 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/goccy/go-json"
|
||||
)
|
||||
|
||||
func (o *CandidateNode) setScalarFromJson(value interface{}) error {
|
||||
o.Kind = ScalarNode
|
||||
switch rawData := value.(type) {
|
||||
case nil:
|
||||
o.Tag = "!!null"
|
||||
o.Value = "null"
|
||||
case float64, float32:
|
||||
o.Value = fmt.Sprintf("%v", value)
|
||||
o.Tag = "!!float"
|
||||
// json decoder returns ints as float.
|
||||
if value == float64(int64(rawData.(float64))) {
|
||||
// aha it's an int disguised as a float
|
||||
o.Tag = "!!int"
|
||||
o.Value = fmt.Sprintf("%v", int64(value.(float64)))
|
||||
}
|
||||
case int, int64, int32:
|
||||
o.Value = fmt.Sprintf("%v", value)
|
||||
o.Tag = "!!int"
|
||||
case bool:
|
||||
o.Value = fmt.Sprintf("%v", value)
|
||||
o.Tag = "!!bool"
|
||||
case string:
|
||||
o.Value = rawData
|
||||
o.Tag = "!!str"
|
||||
default:
|
||||
return fmt.Errorf("unrecognised type :( %v", rawData)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (o *CandidateNode) UnmarshalJSON(data []byte) error {
|
||||
log.Debug("UnmarshalJSON")
|
||||
switch data[0] {
|
||||
case '{':
|
||||
log.Debug("UnmarshalJSON - its a map!")
|
||||
// its a map
|
||||
o.Kind = MappingNode
|
||||
o.Tag = "!!map"
|
||||
|
||||
dec := json.NewDecoder(bytes.NewReader(data))
|
||||
_, err := dec.Token() // open object
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// cycle through k/v
|
||||
var tok json.Token
|
||||
for tok, err = dec.Token(); err == nil; tok, err = dec.Token() {
|
||||
// we can expect two types: string or Delim. Delim automatically means
|
||||
// that it is the closing bracket of the object, whereas string means
|
||||
// that there is another key.
|
||||
if _, ok := tok.(json.Delim); ok {
|
||||
break
|
||||
}
|
||||
|
||||
childKey := o.CreateChild()
|
||||
childKey.IsMapKey = true
|
||||
childKey.Value = tok.(string)
|
||||
childKey.Kind = ScalarNode
|
||||
childKey.Tag = "!!str"
|
||||
|
||||
childValue := o.CreateChild()
|
||||
childValue.Key = childKey
|
||||
|
||||
if err := dec.Decode(childValue); err != nil {
|
||||
return err
|
||||
}
|
||||
o.Content = append(o.Content, childKey, childValue)
|
||||
}
|
||||
// unexpected error
|
||||
if err != nil && !errors.Is(err, io.EOF) {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
case '[':
|
||||
o.Kind = SequenceNode
|
||||
o.Tag = "!!seq"
|
||||
log.Debug("UnmarshalJSON - its an array!")
|
||||
var children []*CandidateNode
|
||||
if err := json.Unmarshal(data, &children); err != nil {
|
||||
return err
|
||||
}
|
||||
// now we put the children into the content, and set a key value for them
|
||||
for i, child := range children {
|
||||
|
||||
if child == nil {
|
||||
// need to represent it as a null scalar
|
||||
child = createScalarNode(nil, "null")
|
||||
}
|
||||
childKey := o.CreateChild()
|
||||
childKey.Kind = ScalarNode
|
||||
childKey.Tag = "!!int"
|
||||
childKey.Value = fmt.Sprintf("%v", i)
|
||||
childKey.IsMapKey = true
|
||||
|
||||
child.Parent = o
|
||||
child.Key = childKey
|
||||
o.Content = append(o.Content, child)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
log.Debug("UnmarshalJSON - its a scalar!")
|
||||
// otherwise, must be a scalar
|
||||
var scalar interface{}
|
||||
err := json.Unmarshal(data, &scalar)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
log.Debug("UnmarshalJSON - scalar is %v", scalar)
|
||||
|
||||
return o.setScalarFromJson(scalar)
|
||||
|
||||
}
|
||||
|
||||
func (o *CandidateNode) MarshalJSON() ([]byte, error) {
|
||||
log.Debugf("MarshalJSON %v", NodeToString(o))
|
||||
buf := new(bytes.Buffer)
|
||||
enc := json.NewEncoder(buf)
|
||||
enc.SetIndent("", " ")
|
||||
enc.SetEscapeHTML(false) // do not escape html chars e.g. &, <, >
|
||||
|
||||
switch o.Kind {
|
||||
case AliasNode:
|
||||
log.Debugf("MarshalJSON AliasNode")
|
||||
err := enc.Encode(o.Alias)
|
||||
return buf.Bytes(), err
|
||||
case ScalarNode:
|
||||
log.Debugf("MarshalJSON ScalarNode")
|
||||
value, err := o.GetValueRep()
|
||||
if err != nil {
|
||||
return buf.Bytes(), err
|
||||
}
|
||||
err = enc.Encode(value)
|
||||
return buf.Bytes(), err
|
||||
case MappingNode:
|
||||
log.Debugf("MarshalJSON MappingNode")
|
||||
buf.WriteByte('{')
|
||||
for i := 0; i < len(o.Content); i += 2 {
|
||||
if err := enc.Encode(o.Content[i].Value); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
buf.WriteByte(':')
|
||||
if err := enc.Encode(o.Content[i+1]); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if i != len(o.Content)-2 {
|
||||
buf.WriteByte(',')
|
||||
}
|
||||
}
|
||||
buf.WriteByte('}')
|
||||
return buf.Bytes(), nil
|
||||
case SequenceNode:
|
||||
log.Debugf("MarshalJSON SequenceNode")
|
||||
err := enc.Encode(o.Content)
|
||||
return buf.Bytes(), err
|
||||
default:
|
||||
err := enc.Encode(nil)
|
||||
return buf.Bytes(), err
|
||||
}
|
||||
}
|
@ -83,11 +83,7 @@ func (n *Context) DeepClone() Context {
|
||||
// copier doesn't do lists properly for some reason
|
||||
clone.MatchingNodes = list.New()
|
||||
for el := n.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
clonedNode, err := el.Value.(*CandidateNode).Copy()
|
||||
if err != nil {
|
||||
log.Error("Error cloning context :(")
|
||||
panic(err)
|
||||
}
|
||||
clonedNode := el.Value.(*CandidateNode).Copy()
|
||||
clone.MatchingNodes.PushBack(clonedNode)
|
||||
}
|
||||
|
||||
|
pkg/yqlib/cover.out (new file, 3111 lines)
File diff suppressed because it is too large
@ -119,6 +119,22 @@ var csvScenarios = []formatScenario{
|
||||
expected: csvMissing,
|
||||
scenarioType: "roundtrip-csv",
|
||||
},
|
||||
{
|
||||
description: "decode csv key",
|
||||
skipDoc: true,
|
||||
input: csvSimple,
|
||||
expression: ".[0].name | key",
|
||||
expected: "name\n",
|
||||
scenarioType: "decode-csv-object",
|
||||
},
|
||||
{
|
||||
description: "decode csv parent",
|
||||
skipDoc: true,
|
||||
input: csvSimple,
|
||||
expression: ".[0].name | parent | .height",
|
||||
expected: "168.8\n",
|
||||
scenarioType: "decode-csv-object",
|
||||
},
|
||||
{
|
||||
description: "Parse CSV into an array of objects",
|
||||
subdescription: "First row is assumed to be the header row.",
|
||||
|
@ -4,7 +4,6 @@ import (
|
||||
"fmt"
|
||||
|
||||
logging "gopkg.in/op/go-logging.v1"
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type DataTreeNavigator interface {
|
||||
@ -13,7 +12,7 @@ type DataTreeNavigator interface {
|
||||
// a new context of matching candidates
|
||||
GetMatchingNodes(context Context, expressionNode *ExpressionNode) (Context, error)
|
||||
|
||||
DeeplyAssign(context Context, path []interface{}, rhsNode *yaml.Node) error
|
||||
DeeplyAssign(context Context, path []interface{}, rhsNode *CandidateNode) error
|
||||
}
|
||||
|
||||
type dataTreeNavigator struct {
|
||||
@ -23,12 +22,7 @@ func NewDataTreeNavigator() DataTreeNavigator {
|
||||
return &dataTreeNavigator{}
|
||||
}
|
||||
|
||||
func (d *dataTreeNavigator) DeeplyAssign(context Context, path []interface{}, rhsNode *yaml.Node) error {
|
||||
|
||||
rhsCandidateNode := &CandidateNode{
|
||||
Path: path,
|
||||
Node: rhsNode,
|
||||
}
|
||||
func (d *dataTreeNavigator) DeeplyAssign(context Context, path []interface{}, rhsCandidateNode *CandidateNode) error {
|
||||
|
||||
assignmentOp := &Operation{OperationType: assignOpType, Preferences: assignPreferences{}}
|
||||
|
||||
@ -55,7 +49,6 @@ func (d *dataTreeNavigator) GetMatchingNodes(context Context, expressionNode *Ex
|
||||
log.Debug(NodeToString(el.Value.(*CandidateNode)))
|
||||
}
|
||||
}
|
||||
log.Debug(">>")
|
||||
handler := expressionNode.Operation.OperationType.Handler
|
||||
if handler != nil {
|
||||
return handler(d, context, expressionNode)
|
||||
|
@ -5,8 +5,6 @@ import (
|
||||
"encoding/base64"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type base64Padder struct {
|
||||
@ -70,11 +68,5 @@ func (dec *base64Decoder) Decode() (*CandidateNode, error) {
|
||||
}
|
||||
}
|
||||
dec.readAnything = true
|
||||
return &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: buf.String(),
|
||||
},
|
||||
}, nil
|
||||
return createStringScalarNode(buf.String()), nil
|
||||
}
|
||||
|
@ -6,7 +6,6 @@ import (
|
||||
"io"
|
||||
|
||||
"github.com/dimchansky/utfbom"
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type csvObjectDecoder struct {
|
||||
@ -28,7 +27,7 @@ func (dec *csvObjectDecoder) Init(reader io.Reader) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dec *csvObjectDecoder) convertToYamlNode(content string) *yaml.Node {
|
||||
func (dec *csvObjectDecoder) convertToNode(content string) *CandidateNode {
|
||||
node, err := parseSnippet(content)
|
||||
if err != nil {
|
||||
return createScalarNode(content, content)
|
||||
@ -36,14 +35,11 @@ func (dec *csvObjectDecoder) convertToYamlNode(content string) *yaml.Node {
|
||||
return node
|
||||
}
|
||||
|
||||
func (dec *csvObjectDecoder) createObject(headerRow []string, contentRow []string) *yaml.Node {
|
||||
objectNode := &yaml.Node{Kind: yaml.MappingNode, Tag: "!!map"}
|
||||
func (dec *csvObjectDecoder) createObject(headerRow []string, contentRow []string) *CandidateNode {
|
||||
objectNode := &CandidateNode{Kind: MappingNode, Tag: "!!map"}
|
||||
|
||||
for i, header := range headerRow {
|
||||
objectNode.Content = append(
|
||||
objectNode.Content,
|
||||
createScalarNode(header, header),
|
||||
dec.convertToYamlNode(contentRow[i]))
|
||||
objectNode.AddKeyValueChild(createScalarNode(header, header), dec.convertToNode(contentRow[i]))
|
||||
}
|
||||
return objectNode
|
||||
}
|
||||
@ -58,13 +54,13 @@ func (dec *csvObjectDecoder) Decode() (*CandidateNode, error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rootArray := &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
|
||||
rootArray := &CandidateNode{Kind: SequenceNode, Tag: "!!seq"}
|
||||
|
||||
contentRow, err := dec.reader.Read()
|
||||
|
||||
for err == nil && len(contentRow) > 0 {
|
||||
log.Debugf("Adding contentRow: %v", contentRow)
|
||||
rootArray.Content = append(rootArray.Content, dec.createObject(headerRow, contentRow))
|
||||
rootArray.AddChild(dec.createObject(headerRow, contentRow))
|
||||
contentRow, err = dec.reader.Read()
|
||||
log.Debugf("Read next contentRow: %v, %v", contentRow, err)
|
||||
}
|
||||
@ -72,10 +68,5 @@ func (dec *csvObjectDecoder) Decode() (*CandidateNode, error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.DocumentNode,
|
||||
Content: []*yaml.Node{rootArray},
|
||||
},
|
||||
}, nil
|
||||
return rootArray, nil
|
||||
}
|
||||
|
pkg/yqlib/decoder_goccy_yaml.go (new file, 42 lines)
@@ -0,0 +1,42 @@
|
||||
//go:build !yq_noyaml
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"io"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/goccy/go-yaml/ast"
|
||||
)
|
||||
|
||||
type goccyYamlDecoder struct {
|
||||
decoder yaml.Decoder
|
||||
cm yaml.CommentMap
|
||||
}
|
||||
|
||||
func NewGoccyYAMLDecoder() Decoder {
|
||||
return &goccyYamlDecoder{}
|
||||
}
|
||||
|
||||
func (dec *goccyYamlDecoder) Init(reader io.Reader) error {
|
||||
dec.cm = yaml.CommentMap{}
|
||||
dec.decoder = *yaml.NewDecoder(reader, yaml.CommentToMap(dec.cm), yaml.UseOrderedMap())
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dec *goccyYamlDecoder) Decode() (*CandidateNode, error) {
|
||||
|
||||
var ast ast.Node
|
||||
|
||||
err := dec.decoder.Decode(&ast)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
candidateNode := &CandidateNode{}
|
||||
if err := candidateNode.UnmarshalGoccyYAML(ast, dec.cm); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return candidateNode, nil
|
||||
}
|
@ -3,11 +3,9 @@
package yqlib

import (
"fmt"
"io"

"github.com/goccy/go-json"
yaml "gopkg.in/yaml.v3"
)

type jsonDecoder struct {
@ -25,76 +23,11 @@ func (dec *jsonDecoder) Init(reader io.Reader) error {

func (dec *jsonDecoder) Decode() (*CandidateNode, error) {

var dataBucket orderedMap
log.Debug("going to decode")
var dataBucket CandidateNode
err := dec.decoder.Decode(&dataBucket)
if err != nil {
return nil, err
}
node, err := dec.convertToYamlNode(&dataBucket)

if err != nil {
return nil, err
}

return &CandidateNode{
Node: &yaml.Node{
Kind: yaml.DocumentNode,
Content: []*yaml.Node{node},
},
}, nil
}

func (dec *jsonDecoder) convertToYamlNode(data *orderedMap) (*yaml.Node, error) {
if data == nil {
return createScalarNode(nil, "null"), nil
}
if data.kv == nil {
switch rawData := data.altVal.(type) {
case nil:
return createScalarNode(nil, "null"), nil
case float64, float32:
// json decoder returns ints as float.'
intNum := int(rawData.(float64))

// if the integer representation is the same as the original
// then its an int.
if float64(intNum) == rawData.(float64) {
return createScalarNode(intNum, fmt.Sprintf("%v", intNum)), nil
}

return createScalarNode(rawData, fmt.Sprintf("%v", rawData)), nil
case int, int64, int32, string, bool:
return createScalarNode(rawData, fmt.Sprintf("%v", rawData)), nil
case []*orderedMap:
return dec.parseArray(rawData)
default:
return nil, fmt.Errorf("unrecognised type :( %v", rawData)
}
}

var yamlMap = &yaml.Node{Kind: yaml.MappingNode, Tag: "!!map"}
for i, keyValuePair := range data.kv {
yamlValue, err := dec.convertToYamlNode(&data.kv[i].V)
if err != nil {
return nil, err
}
yamlMap.Content = append(yamlMap.Content, createScalarNode(keyValuePair.K, keyValuePair.K), yamlValue)
}
return yamlMap, nil

}

func (dec *jsonDecoder) parseArray(dataArray []*orderedMap) (*yaml.Node, error) {

var yamlMap = &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}

for _, value := range dataArray {
yamlValue, err := dec.convertToYamlNode(value)
if err != nil {
return nil, err
}
yamlMap.Content = append(yamlMap.Content, yamlValue)
}
return yamlMap, nil
return &dataBucket, nil
}
|
@ -8,7 +8,6 @@ import (
|
||||
"math"
|
||||
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type luaDecoder struct {
|
||||
@ -28,17 +27,17 @@ func (dec *luaDecoder) Init(reader io.Reader) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dec *luaDecoder) convertToYamlNode(ls *lua.LState, lv lua.LValue) *yaml.Node {
|
||||
func (dec *luaDecoder) convertToYamlNode(ls *lua.LState, lv lua.LValue) *CandidateNode {
|
||||
switch lv.Type() {
|
||||
case lua.LTNil:
|
||||
return &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!null",
|
||||
Value: "",
|
||||
}
|
||||
case lua.LTBool:
|
||||
return &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!bool",
|
||||
Value: lv.String(),
|
||||
}
|
||||
@ -46,22 +45,22 @@ func (dec *luaDecoder) convertToYamlNode(ls *lua.LState, lv lua.LValue) *yaml.No
|
||||
n := float64(lua.LVAsNumber(lv))
|
||||
// various special case floats
|
||||
if math.IsNaN(n) {
|
||||
return &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!float",
|
||||
Value: ".nan",
|
||||
}
|
||||
}
|
||||
if math.IsInf(n, 1) {
|
||||
return &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!float",
|
||||
Value: ".inf",
|
||||
}
|
||||
}
|
||||
if math.IsInf(n, -1) {
|
||||
return &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!float",
|
||||
Value: "-.inf",
|
||||
}
|
||||
@ -69,27 +68,27 @@ func (dec *luaDecoder) convertToYamlNode(ls *lua.LState, lv lua.LValue) *yaml.No
|
||||
|
||||
// does it look like an integer?
|
||||
if n == float64(int(n)) {
|
||||
return &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!int",
|
||||
Value: lv.String(),
|
||||
}
|
||||
}
|
||||
|
||||
return &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!float",
|
||||
Value: lv.String(),
|
||||
}
|
||||
case lua.LTString:
|
||||
return &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: lv.String(),
|
||||
}
|
||||
case lua.LTFunction:
|
||||
return &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "tag:lua.org,2006,function",
|
||||
Value: lv.String(),
|
||||
}
|
||||
@ -97,12 +96,12 @@ func (dec *luaDecoder) convertToYamlNode(ls *lua.LState, lv lua.LValue) *yaml.No
|
||||
// Simultaneously create a sequence and a map, pick which one to return
|
||||
// based on whether all keys were consecutive integers
|
||||
i := 1
|
||||
yaml_sequence := &yaml.Node{
|
||||
Kind: yaml.SequenceNode,
|
||||
yaml_sequence := &CandidateNode{
|
||||
Kind: SequenceNode,
|
||||
Tag: "!!seq",
|
||||
}
|
||||
yaml_map := &yaml.Node{
|
||||
Kind: yaml.MappingNode,
|
||||
yaml_map := &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
}
|
||||
t := lv.(*lua.LTable)
|
||||
@ -113,11 +112,13 @@ func (dec *luaDecoder) convertToYamlNode(ls *lua.LState, lv lua.LValue) *yaml.No
|
||||
} else {
|
||||
i = 0
|
||||
}
|
||||
yaml_map.Content = append(yaml_map.Content, dec.convertToYamlNode(ls, k))
|
||||
newKey := dec.convertToYamlNode(ls, k)
|
||||
|
||||
yv := dec.convertToYamlNode(ls, v)
|
||||
yaml_map.Content = append(yaml_map.Content, yv)
|
||||
yaml_map.AddKeyValueChild(newKey, yv)
|
||||
|
||||
if i != 0 {
|
||||
yaml_sequence.Content = append(yaml_sequence.Content, yv)
|
||||
yaml_sequence.AddChild(yv)
|
||||
}
|
||||
k, v = ls.Next(t, k)
|
||||
}
|
||||
@ -126,8 +127,8 @@ func (dec *luaDecoder) convertToYamlNode(ls *lua.LState, lv lua.LValue) *yaml.No
|
||||
}
|
||||
return yaml_map
|
||||
default:
|
||||
return &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
LineComment: fmt.Sprintf("Unhandled Lua type: %s", lv.Type().String()),
|
||||
Tag: "!!null",
|
||||
Value: lv.String(),
|
||||
@ -135,7 +136,7 @@ func (dec *luaDecoder) convertToYamlNode(ls *lua.LState, lv lua.LValue) *yaml.No
|
||||
}
|
||||
}
|
||||
|
||||
func (dec *luaDecoder) decideTopLevelNode(ls *lua.LState) *yaml.Node {
|
||||
func (dec *luaDecoder) decideTopLevelNode(ls *lua.LState) *CandidateNode {
|
||||
if ls.GetTop() == 0 {
|
||||
// no items were explicitly returned, encode the globals table instead
|
||||
return dec.convertToYamlNode(ls, ls.Get(lua.GlobalsIndex))
|
||||
@ -160,10 +161,5 @@ func (dec *luaDecoder) Decode() (*CandidateNode, error) {
|
||||
}
|
||||
firstNode := dec.decideTopLevelNode(ls)
|
||||
dec.finished = true
|
||||
return &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.DocumentNode,
|
||||
Content: []*yaml.Node{firstNode},
|
||||
},
|
||||
}, nil
|
||||
return firstNode, nil
|
||||
}
|
||||
|
@ -8,7 +8,6 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/magiconair/properties"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type propertiesDecoder struct {
|
||||
@ -52,16 +51,13 @@ func (dec *propertiesDecoder) applyPropertyComments(context Context, path []inte
|
||||
assignmentOp := &Operation{OperationType: assignOpType, Preferences: assignPreferences{}}
|
||||
|
||||
rhsCandidateNode := &CandidateNode{
|
||||
Path: path,
|
||||
Node: &yaml.Node{
|
||||
Tag: "!!str",
|
||||
Value: fmt.Sprintf("%v", path[len(path)-1]),
|
||||
HeadComment: dec.processComment(strings.Join(comments, "\n")),
|
||||
Kind: yaml.ScalarNode,
|
||||
},
|
||||
Kind: ScalarNode,
|
||||
}
|
||||
|
||||
rhsCandidateNode.Node.Tag = guessTagFromCustomType(rhsCandidateNode.Node)
|
||||
rhsCandidateNode.Tag = rhsCandidateNode.guessTagFromCustomType()
|
||||
|
||||
rhsOp := &Operation{OperationType: referenceOpType, CandidateNode: rhsCandidateNode}
|
||||
|
||||
@ -87,13 +83,8 @@ func (dec *propertiesDecoder) applyProperty(context Context, properties *propert
|
||||
}
|
||||
}
|
||||
|
||||
rhsNode := &yaml.Node{
|
||||
Value: value,
|
||||
Tag: "!!str",
|
||||
Kind: yaml.ScalarNode,
|
||||
}
|
||||
|
||||
rhsNode.Tag = guessTagFromCustomType(rhsNode)
|
||||
rhsNode := createStringScalarNode(value)
|
||||
rhsNode.Tag = rhsNode.guessTagFromCustomType()
|
||||
|
||||
return dec.d.DeeplyAssign(context, path, rhsNode)
|
||||
}
|
||||
@ -118,10 +109,8 @@ func (dec *propertiesDecoder) Decode() (*CandidateNode, error) {
|
||||
properties.DisableExpansion = true
|
||||
|
||||
rootMap := &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.MappingNode,
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
},
|
||||
}
|
||||
|
||||
context := Context{}
|
||||
@ -135,11 +124,6 @@ func (dec *propertiesDecoder) Decode() (*CandidateNode, error) {
|
||||
}
|
||||
dec.finished = true
|
||||
|
||||
return &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.DocumentNode,
|
||||
Content: []*yaml.Node{rootMap.Node},
|
||||
},
|
||||
}, nil
|
||||
return rootMap, nil
|
||||
|
||||
}
|
||||
|
@ -27,11 +27,14 @@ func processFormatScenario(s formatScenario, decoder Decoder, encoder Encoder) (
|
||||
decoder = NewYamlDecoder(ConfiguredYamlPreferences)
|
||||
}
|
||||
|
||||
log.Debugf("reading docs")
|
||||
inputs, err := readDocuments(strings.NewReader(s.input), "sample.yml", 0, decoder)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
log.Debugf("done reading the documents")
|
||||
|
||||
expression := s.expression
|
||||
if expression == "" {
|
||||
expression = "."
|
||||
@ -45,6 +48,8 @@ func processFormatScenario(s formatScenario, decoder Decoder, encoder Encoder) (
|
||||
|
||||
context, err := NewDataTreeNavigator().GetMatchingNodes(Context{MatchingNodes: inputs}, exp)
|
||||
|
||||
log.Debugf("Going to print: %v", NodesToString(context.MatchingNodes))
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
@ -11,7 +11,6 @@ import (
|
||||
"time"
|
||||
|
||||
toml "github.com/pelletier/go-toml/v2/unstable"
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type tomlDecoder struct {
|
||||
@ -37,10 +36,9 @@ func (dec *tomlDecoder) Init(reader io.Reader) error {
|
||||
}
|
||||
dec.parser.Reset(buf.Bytes())
|
||||
dec.rootMap = &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.MappingNode,
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
}}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -64,6 +62,7 @@ func (dec *tomlDecoder) processKeyValueIntoMap(rootMap *CandidateNode, tomlNode
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
context := Context{}
|
||||
context = context.SingleChildContext(rootMap)
|
||||
|
||||
@ -95,8 +94,8 @@ func (dec *tomlDecoder) decodeKeyValuesIntoMap(rootMap *CandidateNode, tomlNode
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) createInlineTableMap(tomlNode *toml.Node) (*yaml.Node, error) {
|
||||
content := make([]*yaml.Node, 0)
|
||||
func (dec *tomlDecoder) createInlineTableMap(tomlNode *toml.Node) (*CandidateNode, error) {
|
||||
content := make([]*CandidateNode, 0)
|
||||
log.Debug("!! createInlineTableMap")
|
||||
|
||||
iterator := tomlNode.Children()
|
||||
@ -107,28 +106,26 @@ func (dec *tomlDecoder) createInlineTableMap(tomlNode *toml.Node) (*yaml.Node, e
|
||||
}
|
||||
|
||||
keyValues := &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.MappingNode,
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
},
|
||||
}
|
||||
|
||||
if err := dec.processKeyValueIntoMap(keyValues, child); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
content = append(content, keyValues.Node.Content...)
|
||||
content = append(content, keyValues.Content...)
|
||||
}
|
||||
|
||||
return &yaml.Node{
|
||||
Kind: yaml.MappingNode,
|
||||
return &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
Content: content,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) createArray(tomlNode *toml.Node) (*yaml.Node, error) {
|
||||
content := make([]*yaml.Node, 0)
|
||||
func (dec *tomlDecoder) createArray(tomlNode *toml.Node) (*CandidateNode, error) {
|
||||
content := make([]*CandidateNode, 0)
|
||||
iterator := tomlNode.Children()
|
||||
for iterator.Next() {
|
||||
child := iterator.Node()
|
||||
@ -139,43 +136,43 @@ func (dec *tomlDecoder) createArray(tomlNode *toml.Node) (*yaml.Node, error) {
|
||||
content = append(content, yamlNode)
|
||||
}
|
||||
|
||||
return &yaml.Node{
|
||||
Kind: yaml.SequenceNode,
|
||||
return &CandidateNode{
|
||||
Kind: SequenceNode,
|
||||
Tag: "!!seq",
|
||||
Content: content,
|
||||
}, nil
|
||||
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) createStringScalar(tomlNode *toml.Node) (*yaml.Node, error) {
|
||||
func (dec *tomlDecoder) createStringScalar(tomlNode *toml.Node) (*CandidateNode, error) {
|
||||
content := string(tomlNode.Data)
|
||||
return createScalarNode(content, content), nil
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) createBoolScalar(tomlNode *toml.Node) (*yaml.Node, error) {
|
||||
func (dec *tomlDecoder) createBoolScalar(tomlNode *toml.Node) (*CandidateNode, error) {
|
||||
content := string(tomlNode.Data)
|
||||
return createScalarNode(content == "true", content), nil
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) createIntegerScalar(tomlNode *toml.Node) (*yaml.Node, error) {
|
||||
func (dec *tomlDecoder) createIntegerScalar(tomlNode *toml.Node) (*CandidateNode, error) {
|
||||
content := string(tomlNode.Data)
|
||||
_, num, err := parseInt64(content)
|
||||
return createScalarNode(num, content), err
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) createDateTimeScalar(tomlNode *toml.Node) (*yaml.Node, error) {
|
||||
func (dec *tomlDecoder) createDateTimeScalar(tomlNode *toml.Node) (*CandidateNode, error) {
|
||||
content := string(tomlNode.Data)
|
||||
val, err := parseDateTime(time.RFC3339, content)
|
||||
return createScalarNode(val, content), err
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) createFloatScalar(tomlNode *toml.Node) (*yaml.Node, error) {
|
||||
func (dec *tomlDecoder) createFloatScalar(tomlNode *toml.Node) (*CandidateNode, error) {
|
||||
content := string(tomlNode.Data)
|
||||
num, err := strconv.ParseFloat(content, 64)
|
||||
return createScalarNode(num, content), err
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) decodeNode(tomlNode *toml.Node) (*yaml.Node, error) {
|
||||
func (dec *tomlDecoder) decodeNode(tomlNode *toml.Node) (*CandidateNode, error) {
|
||||
switch tomlNode.Kind {
|
||||
case toml.Key, toml.String:
|
||||
return dec.createStringScalar(tomlNode)
|
||||
@ -241,16 +238,11 @@ func (dec *tomlDecoder) Decode() (*CandidateNode, error) {
|
||||
// must have finished
|
||||
dec.finished = true
|
||||
|
||||
if len(dec.rootMap.Node.Content) == 0 {
|
||||
if len(dec.rootMap.Content) == 0 {
|
||||
return nil, io.EOF
|
||||
}
|
||||
|
||||
return &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.DocumentNode,
|
||||
Content: []*yaml.Node{dec.rootMap.Node},
|
||||
},
|
||||
}, deferredError
|
||||
return dec.rootMap, deferredError
|
||||
|
||||
}
|
||||
|
||||
@ -259,9 +251,9 @@ func (dec *tomlDecoder) processTopLevelNode(currentNode *toml.Node) (bool, error
|
||||
var err error
|
||||
log.Debug("!!!!!!!!!!!!Going to process %v state is current %v", currentNode.Kind, NodeToString(dec.rootMap))
|
||||
if currentNode.Kind == toml.Table {
|
||||
runAgainstCurrentExp, err = dec.processTable((currentNode))
|
||||
runAgainstCurrentExp, err = dec.processTable(currentNode)
|
||||
} else if currentNode.Kind == toml.ArrayTable {
|
||||
runAgainstCurrentExp, err = dec.processArrayTable((currentNode))
|
||||
runAgainstCurrentExp, err = dec.processArrayTable(currentNode)
|
||||
} else {
|
||||
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(dec.rootMap, currentNode)
|
||||
}
|
||||
@ -281,10 +273,8 @@ func (dec *tomlDecoder) processTable(currentNode *toml.Node) (bool, error) {
|
||||
}
|
||||
|
||||
tableNodeValue := &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.MappingNode,
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
},
|
||||
}
|
||||
|
||||
tableValue := dec.parser.Expression()
|
||||
@ -301,21 +291,18 @@ func (dec *tomlDecoder) processTable(currentNode *toml.Node) (bool, error) {
|
||||
c := Context{}
|
||||
|
||||
c = c.SingleChildContext(dec.rootMap)
|
||||
err = dec.d.DeeplyAssign(c, fullPath, tableNodeValue.Node)
|
||||
err = dec.d.DeeplyAssign(c, fullPath, tableNodeValue)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return runAgainstCurrentExp, nil
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) arrayAppend(context Context, path []interface{}, rhsNode *yaml.Node) error {
|
||||
func (dec *tomlDecoder) arrayAppend(context Context, path []interface{}, rhsNode *CandidateNode) error {
|
||||
rhsCandidateNode := &CandidateNode{
|
||||
Path: path,
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.SequenceNode,
|
||||
Kind: SequenceNode,
|
||||
Tag: "!!seq",
|
||||
Content: []*yaml.Node{rhsNode},
|
||||
},
|
||||
Content: []*CandidateNode{rhsNode},
|
||||
}
|
||||
|
||||
assignmentOp := &Operation{OperationType: addAssignOpType}
|
||||
@ -346,10 +333,8 @@ func (dec *tomlDecoder) processArrayTable(currentNode *toml.Node) (bool, error)
|
||||
}
|
||||
|
||||
tableNodeValue := &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.MappingNode,
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
},
|
||||
}
|
||||
|
||||
tableValue := dec.parser.Expression()
|
||||
@ -363,7 +348,7 @@ func (dec *tomlDecoder) processArrayTable(currentNode *toml.Node) (bool, error)
|
||||
c = c.SingleChildContext(dec.rootMap)
|
||||
|
||||
// += function
|
||||
err = dec.arrayAppend(c, fullPath, tableNodeValue.Node)
|
||||
err = dec.arrayAppend(c, fullPath, tableNodeValue)
|
||||
|
||||
return runAgainstCurrentExp, err
|
||||
}
|
||||
|
@ -4,8 +4,6 @@ import (
|
||||
"bytes"
|
||||
"io"
|
||||
"net/url"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type uriDecoder struct {
|
||||
@ -50,11 +48,5 @@ func (dec *uriDecoder) Decode() (*CandidateNode, error) {
|
||||
return nil, err
|
||||
}
|
||||
dec.readAnything = true
|
||||
return &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: newValue,
|
||||
},
|
||||
}, nil
|
||||
return createStringScalarNode(newValue), nil
|
||||
}
|
||||
|
@ -12,7 +12,6 @@ import (
|
||||
"unicode"
|
||||
|
||||
"golang.org/x/net/html/charset"
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type xmlDecoder struct {
|
||||
@ -36,14 +35,14 @@ func (dec *xmlDecoder) Init(reader io.Reader) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dec *xmlDecoder) createSequence(nodes []*xmlNode) (*yaml.Node, error) {
|
||||
yamlNode := &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
|
||||
func (dec *xmlDecoder) createSequence(nodes []*xmlNode) (*CandidateNode, error) {
|
||||
yamlNode := &CandidateNode{Kind: SequenceNode, Tag: "!!seq"}
|
||||
for _, child := range nodes {
|
||||
yamlChild, err := dec.convertToYamlNode(child)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
yamlNode.Content = append(yamlNode.Content, yamlChild)
|
||||
yamlNode.AddChild(yamlChild)
|
||||
}
|
||||
|
||||
return yamlNode, nil
|
||||
@ -64,9 +63,9 @@ func (dec *xmlDecoder) processComment(c string) string {
|
||||
return replacement
|
||||
}
|
||||
|
||||
func (dec *xmlDecoder) createMap(n *xmlNode) (*yaml.Node, error) {
|
||||
func (dec *xmlDecoder) createMap(n *xmlNode) (*CandidateNode, error) {
|
||||
log.Debug("createMap: headC: %v, lineC: %v, footC: %v", n.HeadComment, n.LineComment, n.FootComment)
|
||||
yamlNode := &yaml.Node{Kind: yaml.MappingNode, Tag: "!!map"}
|
||||
yamlNode := &CandidateNode{Kind: MappingNode, Tag: "!!map"}
|
||||
|
||||
if len(n.Data) > 0 {
|
||||
log.Debugf("creating content node for map: %v", dec.prefs.ContentName)
|
||||
@ -75,14 +74,14 @@ func (dec *xmlDecoder) createMap(n *xmlNode) (*yaml.Node, error) {
|
||||
labelNode.HeadComment = dec.processComment(n.HeadComment)
|
||||
labelNode.LineComment = dec.processComment(n.LineComment)
|
||||
labelNode.FootComment = dec.processComment(n.FootComment)
|
||||
yamlNode.Content = append(yamlNode.Content, labelNode, dec.createValueNodeFromData(n.Data))
|
||||
yamlNode.AddKeyValueChild(labelNode, dec.createValueNodeFromData(n.Data))
|
||||
}
|
||||
|
||||
for i, keyValuePair := range n.Children {
|
||||
label := keyValuePair.K
|
||||
children := keyValuePair.V
|
||||
labelNode := createScalarNode(label, label)
|
||||
var valueNode *yaml.Node
|
||||
var valueNode *CandidateNode
|
||||
var err error
|
||||
|
||||
if i == 0 {
|
||||
@ -120,32 +119,32 @@ func (dec *xmlDecoder) createMap(n *xmlNode) (*yaml.Node, error) {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
yamlNode.Content = append(yamlNode.Content, labelNode, valueNode)
|
||||
yamlNode.AddKeyValueChild(labelNode, valueNode)
|
||||
}
|
||||
|
||||
return yamlNode, nil
|
||||
}
|
||||
|
||||
func (dec *xmlDecoder) createValueNodeFromData(values []string) *yaml.Node {
|
||||
func (dec *xmlDecoder) createValueNodeFromData(values []string) *CandidateNode {
|
||||
switch len(values) {
|
||||
case 0:
|
||||
return createScalarNode(nil, "")
|
||||
case 1:
|
||||
return createScalarNode(values[0], values[0])
|
||||
default:
|
||||
content := make([]*yaml.Node, 0)
|
||||
content := make([]*CandidateNode, 0)
|
||||
for _, value := range values {
|
||||
content = append(content, createScalarNode(value, value))
|
||||
}
|
||||
return &yaml.Node{
|
||||
Kind: yaml.SequenceNode,
|
||||
return &CandidateNode{
|
||||
Kind: SequenceNode,
|
||||
Tag: "!!seq",
|
||||
Content: content,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (dec *xmlDecoder) convertToYamlNode(n *xmlNode) (*yaml.Node, error) {
|
||||
func (dec *xmlDecoder) convertToYamlNode(n *xmlNode) (*CandidateNode, error) {
|
||||
if len(n.Children) > 0 {
|
||||
return dec.createMap(n)
|
||||
}
|
||||
@ -189,12 +188,7 @@ func (dec *xmlDecoder) Decode() (*CandidateNode, error) {
|
||||
dec.readAnything = true
|
||||
dec.finished = true
|
||||
|
||||
return &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.DocumentNode,
|
||||
Content: []*yaml.Node{firstNode},
|
||||
},
|
||||
}, nil
|
||||
return firstNode, nil
|
||||
}
|
||||
|
||||
type xmlNode struct {
|
||||
|
@ -22,6 +22,7 @@ type yamlDecoder struct {

readAnything bool
firstFile bool
documentIndex uint
}

func NewYamlDecoder(prefs YamlPreferences) Decoder {
@ -93,12 +94,14 @@ func (dec *yamlDecoder) Init(reader io.Reader) error {
dec.readAnything = false
dec.decoder = *yaml.NewDecoder(readerToUse)
dec.firstFile = false
dec.documentIndex = 0
return nil
}

func (dec *yamlDecoder) Decode() (*CandidateNode, error) {
var dataBucket yaml.Node
err := dec.decoder.Decode(&dataBucket)
var yamlNode yaml.Node
err := dec.decoder.Decode(&yamlNode)

if errors.Is(err, io.EOF) && dec.leadingContent != "" && !dec.readAnything {
// force returning an empty node with a comment.
dec.readAnything = true
@ -116,28 +119,27 @@ func (dec *yamlDecoder) Decode() (*CandidateNode, error) {
return nil, err
}

candidateNode := &CandidateNode{
Node: &dataBucket,
candidateNode := CandidateNode{document: dec.documentIndex}
// don't bother with the DocumentNode
err = candidateNode.UnmarshalYAML(yamlNode.Content[0], make(map[string]*CandidateNode))
if err != nil {
return nil, err
}

candidateNode.HeadComment = yamlNode.HeadComment + candidateNode.HeadComment
candidateNode.FootComment = yamlNode.FootComment + candidateNode.FootComment

if dec.leadingContent != "" {
candidateNode.LeadingContent = dec.leadingContent
dec.leadingContent = ""
}
dec.readAnything = true
// move document comments into candidate node
// otherwise unwrap drops them.
candidateNode.TrailingContent = dataBucket.FootComment
dataBucket.FootComment = ""
return candidateNode, nil
dec.documentIndex++
return &candidateNode, nil
}

func (dec *yamlDecoder) blankNodeWithComment() *CandidateNode {
return &CandidateNode{
Document: 0,
Filename: "",
Node: &yaml.Node{Kind: yaml.DocumentNode, Content: []*yaml.Node{{Tag: "!!null", Kind: yaml.ScalarNode}}},
FileIndex: 0,
LeadingContent: dec.leadingContent,
}
node := createScalarNode(nil, "")
node.LeadingContent = dec.leadingContent
return node
}
@ -187,7 +187,6 @@ yq '. head_comment="single"' sample.yml
will output
```yaml
# single

a: cat
```

@ -66,6 +66,7 @@ will output
```yaml
Mike: cat
Mike: dog
---
Rosey: monkey
Rosey: sheep
```

@ -85,6 +85,7 @@ will output
```yaml
match: cat
doc: 0
---
match: frog
doc: 1
```

@ -54,7 +54,6 @@ yq eval-all 'file_index' sample.yml another.yml
will output
```yaml
0
---
1
```

@ -14,7 +14,7 @@ Given a sample.json file of:
```
then
```bash
yq -P '.' sample.json
yq -p=json sample.json
```
will output
```yaml
@ -30,7 +30,7 @@ Given a sample.json file of:
```
then
```bash
yq -P '.' sample.json
yq -p=json sample.json
```
will output
```yaml

@ -129,15 +129,13 @@ zoo:
```

## Parse xml: force all as an array
Because of the way yq works, when updating everything you need to update the children before the parents. By default `..` will match parents first, so we reverse that before updating.

Given a sample.xml file of:
```xml
<zoo><thing><frog>boing</frog></thing></zoo>
```
then
```bash
yq -oy '([..] | reverse | .[]) |= [] + .' sample.xml
yq -oy '.. |= [] + .' sample.xml
```
will output
```yaml
@ -2,20 +2,18 @@ package yqlib

import (
"io"

yaml "gopkg.in/yaml.v3"
)

type Encoder interface {
Encode(writer io.Writer, node *yaml.Node) error
Encode(writer io.Writer, node *CandidateNode) error
PrintDocumentSeparator(writer io.Writer) error
PrintLeadingContent(writer io.Writer, content string) error
CanHandleAliases() bool
}

func mapKeysToStrings(node *yaml.Node) {
func mapKeysToStrings(node *CandidateNode) {

if node.Kind == yaml.MappingNode {
if node.Kind == MappingNode {
for index, child := range node.Content {
if index%2 == 0 { // its a map key
child.Tag = "!!str"
@ -4,8 +4,6 @@ import (
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type base64Encoder struct {
|
||||
@ -28,11 +26,10 @@ func (e *base64Encoder) PrintLeadingContent(writer io.Writer, content string) er
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *base64Encoder) Encode(writer io.Writer, originalNode *yaml.Node) error {
|
||||
node := unwrapDoc(originalNode)
|
||||
if guessTagFromCustomType(node) != "!!str" {
|
||||
func (e *base64Encoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
if node.guessTagFromCustomType() != "!!str" {
|
||||
return fmt.Errorf("cannot encode %v as base64, can only operate on strings. Please first pipe through another encoding operator to convert the value to a string", node.Tag)
|
||||
}
|
||||
_, err := writer.Write([]byte(e.encoding.EncodeToString([]byte(originalNode.Value))))
|
||||
_, err := writer.Write([]byte(e.encoding.EncodeToString([]byte(node.Value))))
|
||||
return err
|
||||
}
|
||||
|
@ -4,8 +4,6 @@ import (
|
||||
"encoding/csv"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type csvEncoder struct {
|
||||
@ -28,12 +26,12 @@ func (e *csvEncoder) PrintLeadingContent(writer io.Writer, content string) error
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *csvEncoder) encodeRow(csvWriter *csv.Writer, contents []*yaml.Node) error {
|
||||
func (e *csvEncoder) encodeRow(csvWriter *csv.Writer, contents []*CandidateNode) error {
|
||||
stringValues := make([]string, len(contents))
|
||||
|
||||
for i, child := range contents {
|
||||
|
||||
if child.Kind != yaml.ScalarNode {
|
||||
if child.Kind != ScalarNode {
|
||||
return fmt.Errorf("csv encoding only works for arrays of scalars (string/numbers/booleans), child[%v] is a %v", i, child.Tag)
|
||||
}
|
||||
stringValues[i] = child.Value
|
||||
@ -41,10 +39,10 @@ func (e *csvEncoder) encodeRow(csvWriter *csv.Writer, contents []*yaml.Node) err
|
||||
return csvWriter.Write(stringValues)
|
||||
}
|
||||
|
||||
func (e *csvEncoder) encodeArrays(csvWriter *csv.Writer, content []*yaml.Node) error {
|
||||
func (e *csvEncoder) encodeArrays(csvWriter *csv.Writer, content []*CandidateNode) error {
|
||||
for i, child := range content {
|
||||
|
||||
if child.Kind != yaml.SequenceNode {
|
||||
if child.Kind != SequenceNode {
|
||||
return fmt.Errorf("csv encoding only works for arrays of scalars (string/numbers/booleans), child[%v] is a %v", i, child.Tag)
|
||||
}
|
||||
err := e.encodeRow(csvWriter, child.Content)
|
||||
@ -55,16 +53,16 @@ func (e *csvEncoder) encodeArrays(csvWriter *csv.Writer, content []*yaml.Node) e
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *csvEncoder) extractHeader(child *yaml.Node) ([]*yaml.Node, error) {
|
||||
if child.Kind != yaml.MappingNode {
|
||||
func (e *csvEncoder) extractHeader(child *CandidateNode) ([]*CandidateNode, error) {
|
||||
if child.Kind != MappingNode {
|
||||
return nil, fmt.Errorf("csv object encoding only works for arrays of flat objects (string key => string/numbers/boolean value), child[0] is a %v", child.Tag)
|
||||
}
|
||||
mapKeys := getMapKeys(child)
|
||||
return mapKeys.Content, nil
|
||||
}
|
||||
|
||||
func (e *csvEncoder) createChildRow(child *yaml.Node, headers []*yaml.Node) []*yaml.Node {
|
||||
childRow := make([]*yaml.Node, 0)
|
||||
func (e *csvEncoder) createChildRow(child *CandidateNode, headers []*CandidateNode) []*CandidateNode {
|
||||
childRow := make([]*CandidateNode, 0)
|
||||
for _, header := range headers {
|
||||
keyIndex := findKeyInMap(child, header)
|
||||
value := createScalarNode(nil, "")
|
||||
@ -77,7 +75,7 @@ func (e *csvEncoder) createChildRow(child *yaml.Node, headers []*yaml.Node) []*y
|
||||
|
||||
}
|
||||
|
||||
func (e *csvEncoder) encodeObjects(csvWriter *csv.Writer, content []*yaml.Node) error {
|
||||
func (e *csvEncoder) encodeObjects(csvWriter *csv.Writer, content []*CandidateNode) error {
|
||||
headers, err := e.extractHeader(content[0])
|
||||
if err != nil {
|
||||
return nil
|
||||
@ -89,7 +87,7 @@ func (e *csvEncoder) encodeObjects(csvWriter *csv.Writer, content []*yaml.Node)
|
||||
}
|
||||
|
||||
for i, child := range content {
|
||||
if child.Kind != yaml.MappingNode {
|
||||
if child.Kind != MappingNode {
|
||||
return fmt.Errorf("csv object encoding only works for arrays of flat objects (string key => string/numbers/boolean value), child[%v] is a %v", i, child.Tag)
|
||||
}
|
||||
row := e.createChildRow(child, headers)
|
||||
@ -102,26 +100,25 @@ func (e *csvEncoder) encodeObjects(csvWriter *csv.Writer, content []*yaml.Node)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *csvEncoder) Encode(writer io.Writer, originalNode *yaml.Node) error {
|
||||
if originalNode.Kind == yaml.ScalarNode {
|
||||
return writeString(writer, originalNode.Value+"\n")
|
||||
func (e *csvEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
if node.Kind == ScalarNode {
|
||||
return writeString(writer, node.Value+"\n")
|
||||
}
|
||||
|
||||
csvWriter := csv.NewWriter(writer)
|
||||
csvWriter.Comma = e.separator
|
||||
|
||||
// node must be a sequence
|
||||
node := unwrapDoc(originalNode)
|
||||
if node.Kind != yaml.SequenceNode {
|
||||
if node.Kind != SequenceNode {
|
||||
return fmt.Errorf("csv encoding only works for arrays, got: %v", node.Tag)
|
||||
} else if len(node.Content) == 0 {
|
||||
return nil
|
||||
}
|
||||
if node.Content[0].Kind == yaml.ScalarNode {
|
||||
if node.Content[0].Kind == ScalarNode {
|
||||
return e.encodeRow(csvWriter, node.Content)
|
||||
}
|
||||
|
||||
if node.Content[0].Kind == yaml.MappingNode {
|
||||
if node.Content[0].Kind == MappingNode {
|
||||
return e.encodeObjects(csvWriter, node.Content)
|
||||
}
|
||||
|
||||
|
@ -7,7 +7,6 @@ import (
|
||||
"io"
|
||||
|
||||
"github.com/goccy/go-json"
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type jsonEncoder struct {
|
||||
@ -38,9 +37,11 @@ func (je *jsonEncoder) PrintLeadingContent(writer io.Writer, content string) err
|
||||
return nil
|
||||
}
|
||||
|
||||
func (je *jsonEncoder) Encode(writer io.Writer, node *yaml.Node) error {
|
||||
func (je *jsonEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
log.Debugf("I need to encode %v", NodeToString(node))
|
||||
log.Debugf("kids %v", len(node.Content))
|
||||
|
||||
if node.Kind == yaml.ScalarNode && je.UnwrapScalar {
|
||||
if node.Kind == ScalarNode && je.UnwrapScalar {
|
||||
return writeString(writer, node.Value+"\n")
|
||||
}
|
||||
|
||||
@ -54,14 +55,7 @@ func (je *jsonEncoder) Encode(writer io.Writer, node *yaml.Node) error {
|
||||
encoder.SetEscapeHTML(false) // do not escape html chars e.g. &, <, >
|
||||
encoder.SetIndent("", je.indentString)
|
||||
|
||||
var dataBucket orderedMap
|
||||
// firstly, convert all map keys to strings
|
||||
mapKeysToStrings(node)
|
||||
errorDecoding := node.Decode(&dataBucket)
|
||||
if errorDecoding != nil {
|
||||
return errorDecoding
|
||||
}
|
||||
err := encoder.Encode(dataBucket)
|
||||
err := encoder.Encode(node)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -6,8 +6,6 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type luaEncoder struct {
|
||||
@ -82,10 +80,10 @@ func (le *luaEncoder) PrintLeadingContent(writer io.Writer, content string) erro
|
||||
return nil
|
||||
}
|
||||
|
||||
func (le *luaEncoder) encodeString(writer io.Writer, node *yaml.Node) error {
|
||||
func (le *luaEncoder) encodeString(writer io.Writer, node *CandidateNode) error {
|
||||
quote := "\""
|
||||
switch node.Style {
|
||||
case yaml.LiteralStyle, yaml.FoldedStyle, yaml.FlowStyle:
|
||||
case LiteralStyle, FoldedStyle, FlowStyle:
|
||||
for i := 0; i < 10; i++ {
|
||||
if !strings.Contains(node.Value, "]"+strings.Repeat("=", i)+"]") {
|
||||
err := writeString(writer, "["+strings.Repeat("=", i)+"[\n")
|
||||
@ -99,7 +97,7 @@ func (le *luaEncoder) encodeString(writer io.Writer, node *yaml.Node) error {
|
||||
return writeString(writer, "]"+strings.Repeat("=", i)+"]")
|
||||
}
|
||||
}
|
||||
case yaml.SingleQuotedStyle:
|
||||
case SingleQuotedStyle:
|
||||
quote = "'"
|
||||
|
||||
// fallthrough to regular ol' string
|
||||
@ -118,7 +116,7 @@ func (le *luaEncoder) writeIndent(writer io.Writer) error {
|
||||
return writeString(writer, strings.Repeat(le.indentStr, le.indent))
|
||||
}
|
||||
|
||||
func (le *luaEncoder) encodeArray(writer io.Writer, node *yaml.Node) error {
|
||||
func (le *luaEncoder) encodeArray(writer io.Writer, node *CandidateNode) error {
|
||||
err := writeString(writer, "{")
|
||||
if err != nil {
|
||||
return err
|
||||
@ -181,7 +179,7 @@ func needsQuoting(s string) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (le *luaEncoder) encodeMap(writer io.Writer, node *yaml.Node, global bool) error {
|
||||
func (le *luaEncoder) encodeMap(writer io.Writer, node *CandidateNode, global bool) error {
|
||||
if !global {
|
||||
err := writeString(writer, "{")
|
||||
if err != nil {
|
||||
@ -263,14 +261,14 @@ func (le *luaEncoder) encodeMap(writer io.Writer, node *yaml.Node, global bool)
|
||||
return writeString(writer, "}")
|
||||
}
|
||||
|
||||
func (le *luaEncoder) encodeAny(writer io.Writer, node *yaml.Node) error {
|
||||
func (le *luaEncoder) encodeAny(writer io.Writer, node *CandidateNode) error {
|
||||
switch node.Kind {
|
||||
case yaml.SequenceNode:
|
||||
case SequenceNode:
|
||||
return le.encodeArray(writer, node)
|
||||
case yaml.MappingNode:
|
||||
case MappingNode:
|
||||
return le.encodeMap(writer, node, false)
|
||||
case yaml.ScalarNode:
|
||||
switch node.ShortTag() {
|
||||
case ScalarNode:
|
||||
switch node.Tag {
|
||||
case "!!str":
|
||||
return le.encodeString(writer, node)
|
||||
case "!!null":
|
||||
@ -282,8 +280,7 @@ func (le *luaEncoder) encodeAny(writer io.Writer, node *yaml.Node) error {
|
||||
return writeString(writer, strings.ToLower(node.Value))
|
||||
case "!!int":
|
||||
if strings.HasPrefix(node.Value, "0o") {
|
||||
var octalValue int
|
||||
err := node.Decode(&octalValue)
|
||||
_, octalValue, err := parseInt64(node.Value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -302,16 +299,14 @@ func (le *luaEncoder) encodeAny(writer io.Writer, node *yaml.Node) error {
|
||||
return writeString(writer, node.Value)
|
||||
}
|
||||
default:
|
||||
return fmt.Errorf("Lua encoder NYI -- %s", node.ShortTag())
|
||||
return fmt.Errorf("Lua encoder NYI -- %s", node.Tag)
|
||||
}
|
||||
case yaml.DocumentNode:
|
||||
return le.encodeAny(writer, node.Content[0])
|
||||
default:
|
||||
return fmt.Errorf("Lua encoder NYI -- %s", node.ShortTag())
|
||||
return fmt.Errorf("Lua encoder NYI -- %s", node.Tag)
|
||||
}
|
||||
}
|
||||
|
||||
func (le *luaEncoder) encodeTopLevel(writer io.Writer, node *yaml.Node) error {
|
||||
func (le *luaEncoder) encodeTopLevel(writer io.Writer, node *CandidateNode) error {
|
||||
err := writeString(writer, le.docPrefix)
|
||||
if err != nil {
|
||||
return err
|
||||
@ -323,12 +318,10 @@ func (le *luaEncoder) encodeTopLevel(writer io.Writer, node *yaml.Node) error {
|
||||
return writeString(writer, le.docSuffix)
|
||||
}
|
||||
|
||||
func (le *luaEncoder) Encode(writer io.Writer, node *yaml.Node) error {
|
||||
if node.Kind == yaml.DocumentNode {
|
||||
return le.Encode(writer, node.Content[0])
|
||||
}
|
||||
func (le *luaEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
|
||||
if le.globals {
|
||||
if node.Kind != yaml.MappingNode {
|
||||
if node.Kind != MappingNode {
|
||||
return fmt.Errorf("--lua-global requires a top level MappingNode")
|
||||
}
|
||||
return le.encodeMap(writer, node, true)
|
||||
|
@ -8,7 +8,6 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/magiconair/properties"
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type propertiesEncoder struct {
|
||||
@ -62,9 +61,9 @@ func (pe *propertiesEncoder) PrintLeadingContent(writer io.Writer, content strin
|
||||
return nil
|
||||
}
|
||||
|
||||
func (pe *propertiesEncoder) Encode(writer io.Writer, node *yaml.Node) error {
|
||||
func (pe *propertiesEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
|
||||
if node.Kind == yaml.ScalarNode {
|
||||
if node.Kind == ScalarNode {
|
||||
return writeString(writer, node.Value+"\n")
|
||||
}
|
||||
|
||||
@ -79,7 +78,7 @@ func (pe *propertiesEncoder) Encode(writer io.Writer, node *yaml.Node) error {
|
||||
return err
|
||||
}
|
||||
|
||||
func (pe *propertiesEncoder) doEncode(p *properties.Properties, node *yaml.Node, path string, keyNode *yaml.Node) error {
|
||||
func (pe *propertiesEncoder) doEncode(p *properties.Properties, node *CandidateNode, path string, keyNode *CandidateNode) error {
|
||||
|
||||
comments := ""
|
||||
if keyNode != nil {
|
||||
@ -91,7 +90,7 @@ func (pe *propertiesEncoder) doEncode(p *properties.Properties, node *yaml.Node,
|
||||
p.SetComments(path, strings.Split(commentsWithSpaces, "\n"))
|
||||
|
||||
switch node.Kind {
|
||||
case yaml.ScalarNode:
|
||||
case ScalarNode:
|
||||
var nodeValue string
|
||||
if pe.unwrapScalar || !strings.Contains(node.Value, " ") {
|
||||
nodeValue = node.Value
|
||||
@ -100,13 +99,11 @@ func (pe *propertiesEncoder) doEncode(p *properties.Properties, node *yaml.Node,
|
||||
}
|
||||
_, _, err := p.Set(path, nodeValue)
|
||||
return err
|
||||
case yaml.DocumentNode:
|
||||
return pe.doEncode(p, node.Content[0], path, node)
|
||||
case yaml.SequenceNode:
|
||||
case SequenceNode:
|
||||
return pe.encodeArray(p, node.Content, path)
|
||||
case yaml.MappingNode:
|
||||
case MappingNode:
|
||||
return pe.encodeMap(p, node.Content, path)
|
||||
case yaml.AliasNode:
|
||||
case AliasNode:
|
||||
return pe.doEncode(p, node.Alias, path, nil)
|
||||
default:
|
||||
return fmt.Errorf("Unsupported node %v", node.Tag)
|
||||
@ -120,7 +117,7 @@ func (pe *propertiesEncoder) appendPath(path string, key interface{}) string {
|
||||
return fmt.Sprintf("%v.%v", path, key)
|
||||
}
|
||||
|
||||
func (pe *propertiesEncoder) encodeArray(p *properties.Properties, kids []*yaml.Node, path string) error {
|
||||
func (pe *propertiesEncoder) encodeArray(p *properties.Properties, kids []*CandidateNode, path string) error {
|
||||
for index, child := range kids {
|
||||
err := pe.doEncode(p, child, pe.appendPath(path, index), nil)
|
||||
if err != nil {
|
||||
@ -130,7 +127,7 @@ func (pe *propertiesEncoder) encodeArray(p *properties.Properties, kids []*yaml.
|
||||
return nil
|
||||
}
|
||||
|
||||
func (pe *propertiesEncoder) encodeMap(p *properties.Properties, kids []*yaml.Node, path string) error {
|
||||
func (pe *propertiesEncoder) encodeMap(p *properties.Properties, kids []*CandidateNode, path string) error {
|
||||
for index := 0; index < len(kids); index = index + 2 {
|
||||
key := kids[index]
|
||||
value := kids[index+1]
|
||||
|
@ -18,7 +18,7 @@ func yamlToProps(sampleYaml string, unwrapScalar bool) string {
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
node := inputs.Front().Value.(*CandidateNode).Node
|
||||
node := inputs.Front().Value.(*CandidateNode)
|
||||
err = propsEncoder.Encode(writer, node)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
|
@ -5,8 +5,6 @@ import (
|
||||
"io"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
var unsafeChars = regexp.MustCompile(`[^\w@%+=:,./-]`)
|
||||
@ -31,13 +29,12 @@ func (e *shEncoder) PrintLeadingContent(writer io.Writer, content string) error
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *shEncoder) Encode(writer io.Writer, originalNode *yaml.Node) error {
|
||||
node := unwrapDoc(originalNode)
|
||||
if guessTagFromCustomType(node) != "!!str" {
|
||||
func (e *shEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
if node.guessTagFromCustomType() != "!!str" {
|
||||
return fmt.Errorf("cannot encode %v as URI, can only operate on strings. Please first pipe through another encoding operator to convert the value to a string", node.Tag)
|
||||
}
|
||||
|
||||
return writeString(writer, e.encode(originalNode.Value))
|
||||
return writeString(writer, e.encode(node.Value))
|
||||
}
|
||||
|
||||
// put any (shell-unsafe) characters into a single-quoted block, close the block lazily
|
||||
|
@ -7,7 +7,6 @@ import (
|
||||
"unicode/utf8"
|
||||
|
||||
"golang.org/x/text/unicode/norm"
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type shellVariablesEncoder struct {
|
||||
@ -29,7 +28,7 @@ func (pe *shellVariablesEncoder) PrintLeadingContent(_ io.Writer, _ string) erro
|
||||
return nil
|
||||
}
|
||||
|
||||
func (pe *shellVariablesEncoder) Encode(writer io.Writer, node *yaml.Node) error {
|
||||
func (pe *shellVariablesEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
|
||||
mapKeysToStrings(node)
|
||||
err := pe.doEncode(&writer, node, "")
|
||||
@ -40,12 +39,12 @@ func (pe *shellVariablesEncoder) Encode(writer io.Writer, node *yaml.Node) error
|
||||
return err
|
||||
}
|
||||
|
||||
func (pe *shellVariablesEncoder) doEncode(w *io.Writer, node *yaml.Node, path string) error {
|
||||
func (pe *shellVariablesEncoder) doEncode(w *io.Writer, node *CandidateNode, path string) error {
|
||||
|
||||
// Note this drops all comments.
|
||||
|
||||
switch node.Kind {
|
||||
case yaml.ScalarNode:
|
||||
case ScalarNode:
|
||||
nonemptyPath := path
|
||||
if path == "" {
|
||||
// We can't assign an empty variable "=somevalue" because that would error out if sourced in a shell,
|
||||
@ -55,9 +54,7 @@ func (pe *shellVariablesEncoder) doEncode(w *io.Writer, node *yaml.Node, path st
|
||||
}
|
||||
_, err := io.WriteString(*w, nonemptyPath+"="+quoteValue(node.Value)+"\n")
|
||||
return err
|
||||
case yaml.DocumentNode:
|
||||
return pe.doEncode(w, node.Content[0], path)
|
||||
case yaml.SequenceNode:
|
||||
case SequenceNode:
|
||||
for index, child := range node.Content {
|
||||
err := pe.doEncode(w, child, appendPath(path, index))
|
||||
if err != nil {
|
||||
@ -65,7 +62,7 @@ func (pe *shellVariablesEncoder) doEncode(w *io.Writer, node *yaml.Node, path st
|
||||
}
|
||||
}
|
||||
return nil
|
||||
case yaml.MappingNode:
|
||||
case MappingNode:
|
||||
for index := 0; index < len(node.Content); index = index + 2 {
|
||||
key := node.Content[index]
|
||||
value := node.Content[index+1]
|
||||
@ -75,7 +72,7 @@ func (pe *shellVariablesEncoder) doEncode(w *io.Writer, node *yaml.Node, path st
|
||||
}
|
||||
}
|
||||
return nil
|
||||
case yaml.AliasNode:
|
||||
case AliasNode:
|
||||
return pe.doEncode(w, node.Alias, path)
|
||||
default:
|
||||
return fmt.Errorf("Unsupported node %v", node.Tag)
|
||||
|
@ -18,7 +18,7 @@ func assertEncodesTo(t *testing.T, yaml string, shellvars string) {
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
node := inputs.Front().Value.(*CandidateNode).Node
|
||||
node := inputs.Front().Value.(*CandidateNode)
|
||||
err = encoder.Encode(writer, node)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
|
@ -11,7 +11,8 @@ import (
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
)
|
||||
|
||||
func yamlToJSON(sampleYaml string, indent int) string {
|
||||
func yamlToJSON(t *testing.T, sampleYaml string, indent int) string {
|
||||
t.Helper()
|
||||
var output bytes.Buffer
|
||||
writer := bufio.NewWriter(&output)
|
||||
|
||||
@ -20,7 +21,10 @@ func yamlToJSON(sampleYaml string, indent int) string {
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
node := inputs.Front().Value.(*CandidateNode).Node
|
||||
node := inputs.Front().Value.(*CandidateNode)
|
||||
log.Debugf("%v", NodeToString(node))
|
||||
// log.Debugf("Content[0] %v", NodeToString(node.Content[0]))
|
||||
|
||||
err = jsonEncoder.Encode(writer, node)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
@ -46,31 +50,31 @@ banana:
|
||||
}
|
||||
]
|
||||
}`
|
||||
var actualJSON = yamlToJSON(sampleYaml, 2)
|
||||
var actualJSON = yamlToJSON(t, sampleYaml, 2)
|
||||
test.AssertResult(t, expectedJSON, actualJSON)
|
||||
}
|
||||
|
||||
func TestJsonNullInArray(t *testing.T) {
|
||||
var sampleYaml = `[null]`
|
||||
var actualJSON = yamlToJSON(sampleYaml, 0)
|
||||
var actualJSON = yamlToJSON(t, sampleYaml, 0)
|
||||
test.AssertResult(t, sampleYaml, actualJSON)
|
||||
}
|
||||
|
||||
func TestJsonNull(t *testing.T) {
|
||||
var sampleYaml = `null`
|
||||
var actualJSON = yamlToJSON(sampleYaml, 0)
|
||||
var actualJSON = yamlToJSON(t, sampleYaml, 0)
|
||||
test.AssertResult(t, sampleYaml, actualJSON)
|
||||
}
|
||||
|
||||
func TestJsonNullInObject(t *testing.T) {
|
||||
var sampleYaml = `{x: null}`
|
||||
var actualJSON = yamlToJSON(sampleYaml, 0)
|
||||
var actualJSON = yamlToJSON(t, sampleYaml, 0)
|
||||
test.AssertResult(t, `{"x":null}`, actualJSON)
|
||||
}
|
||||
|
||||
func TestJsonEncoderDoesNotEscapeHTMLChars(t *testing.T) {
|
||||
var sampleYaml = `build: "( ./lint && ./format && ./compile ) < src.code"`
|
||||
var expectedJSON = `{"build":"( ./lint && ./format && ./compile ) < src.code"}`
|
||||
var actualJSON = yamlToJSON(sampleYaml, 0)
|
||||
var actualJSON = yamlToJSON(t, sampleYaml, 0)
|
||||
test.AssertResult(t, expectedJSON, actualJSON)
|
||||
}
|
||||
|
@ -3,8 +3,6 @@ package yqlib
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type tomlEncoder struct {
|
||||
@ -14,8 +12,8 @@ func NewTomlEncoder() Encoder {
|
||||
return &tomlEncoder{}
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) Encode(writer io.Writer, node *yaml.Node) error {
|
||||
if node.Kind == yaml.ScalarNode {
|
||||
func (te *tomlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
if node.Kind == ScalarNode {
|
||||
return writeString(writer, node.Value+"\n")
|
||||
}
|
||||
return fmt.Errorf("only scalars (e.g. strings, numbers, booleans) are supported for TOML output at the moment. Please use yaml output format (-oy) until the encoder has been fully implemented")
|
||||
|
@ -4,8 +4,6 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type uriEncoder struct {
|
||||
@ -27,11 +25,10 @@ func (e *uriEncoder) PrintLeadingContent(writer io.Writer, content string) error
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *uriEncoder) Encode(writer io.Writer, originalNode *yaml.Node) error {
|
||||
node := unwrapDoc(originalNode)
|
||||
if guessTagFromCustomType(node) != "!!str" {
|
||||
func (e *uriEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
if node.guessTagFromCustomType() != "!!str" {
|
||||
return fmt.Errorf("cannot encode %v as URI, can only operate on strings. Please first pipe through another encoding operator to convert the value to a string", node.Tag)
|
||||
}
|
||||
_, err := writer.Write([]byte(url.QueryEscape(originalNode.Value)))
|
||||
_, err := writer.Write([]byte(url.QueryEscape(node.Value)))
|
||||
return err
|
||||
}
|
||||
|
@ -8,8 +8,6 @@ import (
|
||||
"io"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type xmlEncoder struct {
|
||||
@ -41,19 +39,18 @@ func (e *xmlEncoder) PrintLeadingContent(writer io.Writer, content string) error
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *xmlEncoder) Encode(writer io.Writer, node *yaml.Node) error {
|
||||
func (e *xmlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
encoder := xml.NewEncoder(writer)
|
||||
// hack so we can manually add newlines to procInst and directives
|
||||
e.writer = writer
|
||||
encoder.Indent("", e.indentString)
|
||||
var newLine xml.CharData = []byte("\n")
|
||||
|
||||
mapNode := unwrapDoc(node)
|
||||
if mapNode.Tag == "!!map" {
|
||||
if node.Tag == "!!map" {
|
||||
// make sure <?xml .. ?> processing instructions are encoded first
|
||||
for i := 0; i < len(mapNode.Content); i += 2 {
|
||||
key := mapNode.Content[i]
|
||||
value := mapNode.Content[i+1]
|
||||
for i := 0; i < len(node.Content); i += 2 {
|
||||
key := node.Content[i]
|
||||
value := node.Content[i+1]
|
||||
|
||||
if key.Value == (e.prefs.ProcInstPrefix + "xml") {
|
||||
name := strings.Replace(key.Value, e.prefs.ProcInstPrefix, "", 1)
|
||||
@ -82,29 +79,12 @@ func (e *xmlEncoder) Encode(writer io.Writer, node *yaml.Node) error {
|
||||
}
|
||||
|
||||
switch node.Kind {
|
||||
case yaml.MappingNode:
|
||||
case MappingNode:
|
||||
err := e.encodeTopLevelMap(encoder, node)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case yaml.DocumentNode:
|
||||
err := e.encodeComment(encoder, headAndLineComment(node))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
unwrappedNode := unwrapDoc(node)
|
||||
if unwrappedNode.Kind != yaml.MappingNode {
|
||||
return fmt.Errorf("cannot encode %v to XML - only maps can be encoded", unwrappedNode.Tag)
|
||||
}
|
||||
err = e.encodeTopLevelMap(encoder, unwrappedNode)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = e.encodeComment(encoder, footComment(node))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case yaml.ScalarNode:
|
||||
case ScalarNode:
|
||||
var charData xml.CharData = []byte(node.Value)
|
||||
err := encoder.EncodeToken(charData)
|
||||
if err != nil {
|
||||
@ -112,14 +92,14 @@ func (e *xmlEncoder) Encode(writer io.Writer, node *yaml.Node) error {
|
||||
}
|
||||
return encoder.Flush()
|
||||
default:
|
||||
return fmt.Errorf("unsupported type %v", node.Tag)
|
||||
return fmt.Errorf("cannot encode %v to XML - only maps can be encoded", node.Tag)
|
||||
}
|
||||
|
||||
return encoder.EncodeToken(newLine)
|
||||
|
||||
}
|
||||
|
||||
func (e *xmlEncoder) encodeTopLevelMap(encoder *xml.Encoder, node *yaml.Node) error {
|
||||
func (e *xmlEncoder) encodeTopLevelMap(encoder *xml.Encoder, node *CandidateNode) error {
|
||||
err := e.encodeComment(encoder, headAndLineComment(node))
|
||||
if err != nil {
|
||||
return err
|
||||
@ -178,7 +158,7 @@ func (e *xmlEncoder) encodeTopLevelMap(encoder *xml.Encoder, node *yaml.Node) er
|
||||
return e.encodeComment(encoder, footComment(node))
|
||||
}
|
||||
|
||||
func (e *xmlEncoder) encodeStart(encoder *xml.Encoder, node *yaml.Node, start xml.StartElement) error {
|
||||
func (e *xmlEncoder) encodeStart(encoder *xml.Encoder, node *CandidateNode, start xml.StartElement) error {
|
||||
err := encoder.EncodeToken(start)
|
||||
if err != nil {
|
||||
return err
|
||||
@ -186,7 +166,7 @@ func (e *xmlEncoder) encodeStart(encoder *xml.Encoder, node *yaml.Node, start xm
|
||||
return e.encodeComment(encoder, headComment(node))
|
||||
}
|
||||
|
||||
func (e *xmlEncoder) encodeEnd(encoder *xml.Encoder, node *yaml.Node, start xml.StartElement) error {
|
||||
func (e *xmlEncoder) encodeEnd(encoder *xml.Encoder, node *CandidateNode, start xml.StartElement) error {
|
||||
err := encoder.EncodeToken(start.End())
|
||||
if err != nil {
|
||||
return err
|
||||
@ -194,13 +174,13 @@ func (e *xmlEncoder) encodeEnd(encoder *xml.Encoder, node *yaml.Node, start xml.
|
||||
return e.encodeComment(encoder, footComment(node))
|
||||
}
|
||||
|
||||
func (e *xmlEncoder) doEncode(encoder *xml.Encoder, node *yaml.Node, start xml.StartElement) error {
|
||||
func (e *xmlEncoder) doEncode(encoder *xml.Encoder, node *CandidateNode, start xml.StartElement) error {
|
||||
switch node.Kind {
|
||||
case yaml.MappingNode:
|
||||
case MappingNode:
|
||||
return e.encodeMap(encoder, node, start)
|
||||
case yaml.SequenceNode:
|
||||
case SequenceNode:
|
||||
return e.encodeArray(encoder, node, start)
|
||||
case yaml.ScalarNode:
|
||||
case ScalarNode:
|
||||
err := e.encodeStart(encoder, node, start)
|
||||
if err != nil {
|
||||
return err
|
||||
@ -258,7 +238,7 @@ func (e *xmlEncoder) encodeComment(encoder *xml.Encoder, commentStr string) erro
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *xmlEncoder) encodeArray(encoder *xml.Encoder, node *yaml.Node, start xml.StartElement) error {
|
||||
func (e *xmlEncoder) encodeArray(encoder *xml.Encoder, node *CandidateNode, start xml.StartElement) error {
|
||||
|
||||
if err := e.encodeComment(encoder, headAndLineComment(node)); err != nil {
|
||||
return err
|
||||
@ -280,7 +260,7 @@ func (e *xmlEncoder) isAttribute(name string) bool {
|
||||
!strings.HasPrefix(name, e.prefs.ProcInstPrefix)
|
||||
}
|
||||
|
||||
func (e *xmlEncoder) encodeMap(encoder *xml.Encoder, node *yaml.Node, start xml.StartElement) error {
|
||||
func (e *xmlEncoder) encodeMap(encoder *xml.Encoder, node *CandidateNode, start xml.StartElement) error {
|
||||
log.Debug("its a map")
|
||||
|
||||
//first find all the attributes and put them on the start token
|
||||
@ -289,7 +269,7 @@ func (e *xmlEncoder) encodeMap(encoder *xml.Encoder, node *yaml.Node, start xml.
|
||||
value := node.Content[i+1]
|
||||
|
||||
if e.isAttribute(key.Value) {
|
||||
if value.Kind == yaml.ScalarNode {
|
||||
if value.Kind == ScalarNode {
|
||||
attributeName := strings.Replace(key.Value, e.prefs.AttributePrefix, "", 1)
|
||||
start.Attr = append(start.Attr, xml.Attr{Name: xml.Name{Local: attributeName}, Value: value.Value})
|
||||
} else {
|
||||
|
@ -5,6 +5,7 @@ import (
"bytes"
"errors"
"io"
"regexp"
"strings"

yaml "gopkg.in/yaml.v3"
@ -40,6 +41,8 @@ func (ye *yamlEncoder) PrintDocumentSeparator(writer io.Writer) error {
func (ye *yamlEncoder) PrintLeadingContent(writer io.Writer, content string) error {
reader := bufio.NewReader(strings.NewReader(content))

var commentLineRegEx = regexp.MustCompile(`^\s*#`)

for {

readline, errReading := reader.ReadString('\n')
@ -53,6 +56,9 @@ func (ye *yamlEncoder) PrintLeadingContent(writer io.Writer, content string) err
}

} else {
if len(readline) > 0 && readline != "\n" && readline[0] != '%' && !commentLineRegEx.MatchString(readline) {
readline = "# " + readline
}
if err := writeString(writer, readline); err != nil {
return err
}
@ -72,10 +78,14 @@ func (ye *yamlEncoder) PrintLeadingContent(writer io.Writer, content string) err
return nil
}

func (ye *yamlEncoder) Encode(writer io.Writer, node *yaml.Node) error {

if node.Kind == yaml.ScalarNode && ye.prefs.UnwrapScalar {
return writeString(writer, node.Value+"\n")
func (ye *yamlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
log.Debug("encoderYaml - going to print %v", NodeToString(node))
if node.Kind == ScalarNode && ye.prefs.UnwrapScalar {
valueToPrint := node.Value
if node.LeadingContent == "" || valueToPrint != "" {
valueToPrint = valueToPrint + "\n"
}
return writeString(writer, valueToPrint)
}

destination := writer
@ -88,7 +98,20 @@ func (ye *yamlEncoder) Encode(writer io.Writer, node *yaml.Node) error {

encoder.SetIndent(ye.indent)

if err := encoder.Encode(node); err != nil {
target, err := node.MarshalYAML()

if err != nil {
return err
}

trailingContent := target.FootComment
target.FootComment = ""

if err := encoder.Encode(target); err != nil {
return err
}

if err := ye.PrintLeadingContent(destination, trailingContent); err != nil {
return err
}
170
pkg/yqlib/goccy_yaml_test.go
Normal file
170
pkg/yqlib/goccy_yaml_test.go
Normal file
@ -0,0 +1,170 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
)
|
||||
|
||||
var goccyYamlFormatScenarios = []formatScenario{
|
||||
{
|
||||
description: "basic - 3",
|
||||
skipDoc: true,
|
||||
input: "3",
|
||||
expected: "3\n",
|
||||
},
|
||||
{
|
||||
description: "basic - 3.1",
|
||||
skipDoc: true,
|
||||
input: "3.1",
|
||||
expected: "3.1\n",
|
||||
},
|
||||
{
|
||||
description: "basic - 3.1",
|
||||
skipDoc: true,
|
||||
input: "mike: 3",
|
||||
expected: "mike: 3\n",
|
||||
},
|
||||
{
|
||||
description: "basic - 3.1",
|
||||
skipDoc: true,
|
||||
input: "{mike: 3}",
|
||||
expected: "{mike: 3}\n",
|
||||
},
|
||||
{
|
||||
description: "basic - map multiple entries",
|
||||
skipDoc: true,
|
||||
input: "mike: 3\nfred: 12\n",
|
||||
expected: "mike: 3\nfred: 12\n",
|
||||
},
|
||||
{
|
||||
description: "basic - 3.1",
|
||||
skipDoc: true,
|
||||
input: "{\nmike: 3\n}",
|
||||
expected: "{mike: 3}\n",
|
||||
},
|
||||
{
|
||||
description: "basic - 3.1",
|
||||
skipDoc: true,
|
||||
input: "mike: !!cat 3",
|
||||
expected: "mike: !!cat 3\n",
|
||||
},
|
||||
{
|
||||
description: "basic - 3.1",
|
||||
skipDoc: true,
|
||||
input: "- 3",
|
||||
expected: "- 3\n",
|
||||
},
|
||||
{
|
||||
description: "basic - 3.1",
|
||||
skipDoc: true,
|
||||
input: "[3]",
|
||||
expected: "[3]\n",
|
||||
},
|
||||
{
|
||||
description: "basic - plain string",
|
||||
skipDoc: true,
|
||||
input: `a: meow`,
|
||||
expected: "a: meow\n",
|
||||
},
|
||||
{
|
||||
description: "basic - double quoted string",
|
||||
skipDoc: true,
|
||||
input: `a: "meow"`,
|
||||
expected: "a: \"meow\"\n",
|
||||
},
|
||||
{
|
||||
description: "basic - single quoted string",
|
||||
skipDoc: true,
|
||||
input: `a: 'meow'`,
|
||||
expected: "a: 'meow'\n",
|
||||
},
|
||||
{
|
||||
description: "basic - string block",
|
||||
skipDoc: true,
|
||||
input: "a: |\n meow\n",
|
||||
expected: "a: |\n meow\n",
|
||||
},
|
||||
{
|
||||
description: "basic - long string",
|
||||
skipDoc: true,
|
||||
input: "a: the cute cat wrote a long sentence that wasn't wrapped at all.\n",
|
||||
expected: "a: the cute cat wrote a long sentence that wasn't wrapped at all.\n",
|
||||
},
|
||||
{
|
||||
description: "basic - string block",
|
||||
skipDoc: true,
|
||||
input: "a: |-\n meow\n",
|
||||
expected: "a: |-\n meow\n",
|
||||
},
|
||||
{
|
||||
description: "basic - line comment",
|
||||
skipDoc: true,
|
||||
input: "a: meow # line comment\n",
|
||||
expected: "a: meow # line comment\n",
|
||||
},
|
||||
{
|
||||
description: "basic - line comment",
|
||||
skipDoc: true,
|
||||
input: "# head comment\na: #line comment\n meow\n",
|
||||
expected: "# head comment\na: meow #line comment\n", // go-yaml does this
|
||||
},
|
||||
{
|
||||
description: "basic - foot comment",
|
||||
skipDoc: true,
|
||||
input: "a: meow\n# foot comment\n",
|
||||
expected: "a: meow\n# foot comment\n",
|
||||
},
|
||||
{
|
||||
description: "basic - foot comment",
|
||||
skipDoc: true,
|
||||
input: "a: meow\nb: woof\n# foot comment\n",
|
||||
expected: "a: meow\nb: woof\n# foot comment\n",
|
||||
},
|
||||
{
|
||||
description: "basic - boolean",
|
||||
skipDoc: true,
|
||||
input: "true\n",
|
||||
expected: "true\n",
|
||||
},
|
||||
{
|
||||
description: "basic - null",
|
||||
skipDoc: true,
|
||||
input: "a: null\n",
|
||||
expected: "a: null\n",
|
||||
},
|
||||
{
|
||||
description: "basic - ~",
|
||||
skipDoc: true,
|
||||
input: "a: ~\n",
|
||||
expected: "a: ~\n",
|
||||
},
|
||||
// {
|
||||
// description: "basic - ~",
|
||||
// skipDoc: true,
|
||||
// input: "null\n",
|
||||
// expected: "null\n",
|
||||
// },
|
||||
// {
|
||||
// skipDoc: true,
|
||||
// description: "trailing comment",
|
||||
// input: "test:",
|
||||
// expected: "test:",
|
||||
// },
|
||||
// {
|
||||
// skipDoc: true,
|
||||
// description: "trailing comment",
|
||||
// input: "test:\n# this comment will be removed",
|
||||
// expected: "test:\n# this comment will be removed",
|
||||
// },
|
||||
}
|
||||
|
||||
func testGoccyYamlScenario(t *testing.T, s formatScenario) {
|
||||
test.AssertResultWithContext(t, s.expected, mustProcessFormatScenario(s, NewGoccyYAMLDecoder(), NewYamlEncoder(2, false, ConfiguredYamlPreferences)), s.description)
|
||||
}
|
||||
|
||||
func TestGoccyYmlFormatScenarios(t *testing.T) {
|
||||
for _, tt := range goccyYamlFormatScenarios {
|
||||
testGoccyYamlScenario(t, tt)
|
||||
}
|
||||
}
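These scenarios drive the new goccy-based decoder through the same formatScenario harness as the other formats: the input is parsed with github.com/goccy/go-yaml and the resulting tree is re-encoded with yq's yaml encoder. A minimal standalone sketch of that decode/re-encode round trip, using the goccy library directly rather than the internal test harness (the package layout and the single-document input here are illustrative assumptions):

package main

import (
	"fmt"

	yaml "github.com/goccy/go-yaml"
)

// Decode a document with the goccy parser and marshal it straight back out.
// This only illustrates the decode/encode pairing the scenarios exercise; the
// real tests route the parsed document through yq's CandidateNode AST and its
// own yaml encoder instead of a plain interface{} value.
func main() {
	input := "mike: 3\n"

	var doc interface{}
	if err := yaml.Unmarshal([]byte(input), &doc); err != nil {
		panic(err)
	}

	out, err := yaml.Marshal(doc)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out)) // mike: 3
}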
|
@ -11,7 +11,7 @@ import (
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
)
|
||||
|
||||
const complexExpectYaml = `D0, P[], (!!map)::a: Easy! as one two three
|
||||
const complexExpectYaml = `a: Easy! as one two three
|
||||
b:
|
||||
c: 2
|
||||
d:
|
||||
@ -99,13 +99,54 @@ var jsonScenarios = []formatScenario{
|
||||
description: "Parse json: simple",
|
||||
subdescription: "JSON is a subset of yaml, so all you need to do is prettify the output",
|
||||
input: `{"cat": "meow"}`,
|
||||
expected: "D0, P[], (!!map)::cat: meow\n",
|
||||
scenarioType: "decode-ndjson",
|
||||
expected: "cat: meow\n",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "Parse json: simple: key",
|
||||
input: `{"cat": "meow"}`,
|
||||
expression: ".cat | key",
|
||||
expected: "\"cat\"\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "Parse json: simple: parent",
|
||||
input: `{"cat": "meow"}`,
|
||||
expression: ".cat | parent",
|
||||
expected: "{\"cat\":\"meow\"}\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "Parse json: simple: path",
|
||||
input: `{"cat": "meow"}`,
|
||||
expression: ".cat | path",
|
||||
expected: "[\"cat\"]\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "Parse json: deeper: path",
|
||||
input: `{"cat": {"noises": "meow"}}`,
|
||||
expression: ".cat.noises | path",
|
||||
expected: "[\"cat\",\"noises\"]\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "Parse json: array path",
|
||||
input: `{"cat": {"noises": ["meow"]}}`,
|
||||
expression: ".cat.noises[0] | path",
|
||||
expected: "[\"cat\",\"noises\",0]\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "bad json",
|
||||
skipDoc: true,
|
||||
input: `{"a": 1 "b": 2}`,
|
||||
expectedError: `bad file 'sample.yml': invalid character '"' after object key:value pair`,
|
||||
input: `{"a": 1 b": 2}`,
|
||||
expectedError: `bad file 'sample.yml': json: string of object unexpected end of JSON input`,
|
||||
scenarioType: "decode-error",
|
||||
},
|
||||
{
|
||||
@ -113,6 +154,7 @@ var jsonScenarios = []formatScenario{
|
||||
subdescription: "JSON is a subset of yaml, so all you need to do is prettify the output",
|
||||
input: `{"a":"Easy! as one two three","b":{"c":2,"d":[3,4]}}`,
|
||||
expected: complexExpectYaml,
|
||||
scenarioType: "decode-ndjson",
|
||||
},
|
||||
{
|
||||
description: "Encode json: simple",
|
||||
@ -213,7 +255,7 @@ var jsonScenarios = []formatScenario{
|
||||
description: "empty string",
|
||||
skipDoc: true,
|
||||
input: `""`,
|
||||
expected: "\"\"\n",
|
||||
expected: "\n",
|
||||
scenarioType: "decode-ndjson",
|
||||
},
|
||||
{
|
||||
@ -316,11 +358,10 @@ func decodeJSON(t *testing.T, jsonString string) *CandidateNode {
|
||||
|
||||
func testJSONScenario(t *testing.T, s formatScenario) {
|
||||
switch s.scenarioType {
|
||||
case "encode", "decode":
|
||||
case "encode":
|
||||
test.AssertResultWithContext(t, s.expected, mustProcessFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewJSONEncoder(s.indent, false, false)), s.description)
|
||||
case "":
|
||||
var actual = resultToString(t, decodeJSON(t, s.input))
|
||||
test.AssertResultWithContext(t, s.expected, actual, s.description)
|
||||
case "decode":
|
||||
test.AssertResultWithContext(t, s.expected, mustProcessFormatScenario(s, NewJSONDecoder(), NewJSONEncoder(s.indent, false, false)), s.description)
|
||||
case "decode-ndjson":
|
||||
test.AssertResultWithContext(t, s.expected, mustProcessFormatScenario(s, NewJSONDecoder(), NewYamlEncoder(2, false, ConfiguredYamlPreferences)), s.description)
|
||||
case "roundtrip-ndjson":
|
||||
|
@ -5,7 +5,6 @@ import (
|
||||
|
||||
"github.com/alecthomas/repr"
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type participleLexerScenario struct {
|
||||
@ -64,14 +63,12 @@ var participleLexerScenarios = []participleLexerScenario{
|
||||
Value: 0,
|
||||
StringValue: "0",
|
||||
CandidateNode: &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!int",
|
||||
Value: "0",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
TokenType: operationToken,
|
||||
Operation: &Operation{
|
||||
@ -87,14 +84,12 @@ var participleLexerScenarios = []participleLexerScenario{
|
||||
Value: int64(3),
|
||||
StringValue: "3",
|
||||
CandidateNode: &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.Kind(8),
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!int",
|
||||
Value: "3",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
TokenType: closeCollect,
|
||||
CheckForPostTraverse: true,
|
||||
@ -129,14 +124,12 @@ var participleLexerScenarios = []participleLexerScenario{
|
||||
Value: int64(-2),
|
||||
StringValue: "-2",
|
||||
CandidateNode: &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!int",
|
||||
Value: "-2",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
TokenType: operationToken,
|
||||
Operation: &Operation{
|
||||
@ -654,8 +647,7 @@ var participleLexerScenarios = []participleLexerScenario{
|
||||
StringValue: "string with a\n",
|
||||
Preferences: nil,
|
||||
CandidateNode: &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: "string with a\n",
|
||||
},
|
||||
@ -663,7 +655,6 @@ var participleLexerScenarios = []participleLexerScenario{
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expression: `"string with a \""`,
|
||||
tokens: []*token{
|
||||
@ -675,8 +666,7 @@ var participleLexerScenarios = []participleLexerScenario{
|
||||
StringValue: `string with a "`,
|
||||
Preferences: nil,
|
||||
CandidateNode: &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: `string with a "`,
|
||||
},
|
||||
@ -684,7 +674,6 @@ var participleLexerScenarios = []participleLexerScenario{
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func TestParticipleLexer(t *testing.T) {
|
||||
|
197
pkg/yqlib/lib.go
197
pkg/yqlib/lib.go
@ -2,7 +2,6 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"container/list"
|
||||
"fmt"
|
||||
"math"
|
||||
@ -10,7 +9,6 @@ import (
|
||||
"strings"
|
||||
|
||||
logging "gopkg.in/op/go-logging.v1"
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
var ExpressionParser ExpressionParserInterface
|
||||
@ -191,7 +189,7 @@ type Operation struct {
|
||||
UpdateAssign bool // used for assign ops, when true it means we evaluate the rhs given the lhs
|
||||
}
|
||||
|
||||
func recurseNodeArrayEqual(lhs *yaml.Node, rhs *yaml.Node) bool {
|
||||
func recurseNodeArrayEqual(lhs *CandidateNode, rhs *CandidateNode) bool {
|
||||
if len(lhs.Content) != len(rhs.Content) {
|
||||
return false
|
||||
}
|
||||
@ -204,7 +202,7 @@ func recurseNodeArrayEqual(lhs *yaml.Node, rhs *yaml.Node) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func findInArray(array *yaml.Node, item *yaml.Node) int {
|
||||
func findInArray(array *CandidateNode, item *CandidateNode) int {
|
||||
|
||||
for index := 0; index < len(array.Content); index = index + 1 {
|
||||
if recursiveNodeEqual(array.Content[index], item) {
|
||||
@ -214,7 +212,7 @@ func findInArray(array *yaml.Node, item *yaml.Node) int {
|
||||
return -1
|
||||
}
|
||||
|
||||
func findKeyInMap(dataMap *yaml.Node, item *yaml.Node) int {
|
||||
func findKeyInMap(dataMap *CandidateNode, item *CandidateNode) int {
|
||||
|
||||
for index := 0; index < len(dataMap.Content); index = index + 2 {
|
||||
if recursiveNodeEqual(dataMap.Content[index], item) {
|
||||
@ -224,7 +222,7 @@ func findKeyInMap(dataMap *yaml.Node, item *yaml.Node) int {
|
||||
return -1
|
||||
}
|
||||
|
||||
func recurseNodeObjectEqual(lhs *yaml.Node, rhs *yaml.Node) bool {
|
||||
func recurseNodeObjectEqual(lhs *CandidateNode, rhs *CandidateNode) bool {
|
||||
if len(lhs.Content) != len(rhs.Content) {
|
||||
return false
|
||||
}
|
||||
@ -242,28 +240,10 @@ func recurseNodeObjectEqual(lhs *yaml.Node, rhs *yaml.Node) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func guessTagFromCustomType(node *yaml.Node) string {
if strings.HasPrefix(node.Tag, "!!") {
return node.Tag
} else if node.Value == "" {
log.Debug("guessTagFromCustomType: node has no value to guess the type with")
return node.Tag
}
dataBucket, errorReading := parseSnippet(node.Value)

if errorReading != nil {
log.Debug("guessTagFromCustomType: could not guess underlying tag type %v", errorReading)
return node.Tag
}
guessedTag := unwrapDoc(dataBucket).Tag
log.Info("im guessing the tag %v is a %v", node.Tag, guessedTag)
return guessedTag
}

func parseSnippet(value string) (*yaml.Node, error) {
func parseSnippet(value string) (*CandidateNode, error) {
if value == "" {
return &yaml.Node{
Kind: yaml.ScalarNode,
return &CandidateNode{
Kind: ScalarNode,
Tag: "!!null",
}, nil
}
@ -272,30 +252,26 @@ func parseSnippet(value string) (*yaml.Node, error) {
if err != nil {
return nil, err
}
parsedNode, err := decoder.Decode()
result, err := decoder.Decode()
if err != nil {
return nil, err
}
if len(parsedNode.Node.Content) == 0 {
return nil, fmt.Errorf("bad data")
}
result := unwrapDoc(parsedNode.Node)
result.Line = 0
result.Column = 0
return result, err
}
|
||||
|
||||
func recursiveNodeEqual(lhs *yaml.Node, rhs *yaml.Node) bool {
|
||||
func recursiveNodeEqual(lhs *CandidateNode, rhs *CandidateNode) bool {
|
||||
if lhs.Kind != rhs.Kind {
|
||||
return false
|
||||
}
|
||||
|
||||
if lhs.Kind == yaml.ScalarNode {
|
||||
if lhs.Kind == ScalarNode {
|
||||
//process custom tags of scalar nodes.
|
||||
//dont worry about matching tags of maps or arrays.
|
||||
|
||||
lhsTag := guessTagFromCustomType(lhs)
|
||||
rhsTag := guessTagFromCustomType(rhs)
|
||||
lhsTag := lhs.guessTagFromCustomType()
|
||||
rhsTag := rhs.guessTagFromCustomType()
|
||||
|
||||
if lhsTag != rhsTag {
|
||||
return false
|
||||
@ -305,75 +281,32 @@ func recursiveNodeEqual(lhs *yaml.Node, rhs *yaml.Node) bool {
|
||||
if lhs.Tag == "!!null" {
|
||||
return true
|
||||
|
||||
} else if lhs.Kind == yaml.ScalarNode {
|
||||
} else if lhs.Kind == ScalarNode {
|
||||
return lhs.Value == rhs.Value
|
||||
} else if lhs.Kind == yaml.SequenceNode {
|
||||
} else if lhs.Kind == SequenceNode {
|
||||
return recurseNodeArrayEqual(lhs, rhs)
|
||||
} else if lhs.Kind == yaml.MappingNode {
|
||||
} else if lhs.Kind == MappingNode {
|
||||
return recurseNodeObjectEqual(lhs, rhs)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func deepCloneContent(content []*yaml.Node) []*yaml.Node {
|
||||
clonedContent := make([]*yaml.Node, len(content))
|
||||
for i, child := range content {
|
||||
clonedContent[i] = deepClone(child)
|
||||
}
|
||||
return clonedContent
|
||||
}
|
||||
|
||||
func deepCloneNoContent(node *yaml.Node) *yaml.Node {
|
||||
return deepCloneWithOptions(node, false)
|
||||
}
|
||||
func deepClone(node *yaml.Node) *yaml.Node {
|
||||
return deepCloneWithOptions(node, true)
|
||||
}
|
||||
|
||||
func deepCloneWithOptions(node *yaml.Node, cloneContent bool) *yaml.Node {
|
||||
if node == nil {
|
||||
return nil
|
||||
}
|
||||
var clonedContent []*yaml.Node
|
||||
if cloneContent {
|
||||
clonedContent = deepCloneContent(node.Content)
|
||||
}
|
||||
return &yaml.Node{
|
||||
Content: clonedContent,
|
||||
Kind: node.Kind,
|
||||
Style: node.Style,
|
||||
Tag: node.Tag,
|
||||
Value: node.Value,
|
||||
Anchor: node.Anchor,
|
||||
Alias: node.Alias,
|
||||
HeadComment: node.HeadComment,
|
||||
LineComment: node.LineComment,
|
||||
FootComment: node.FootComment,
|
||||
Line: node.Line,
|
||||
Column: node.Column,
|
||||
}
|
||||
}
|
||||
|
||||
// yaml numbers can be hex encoded...
// yaml numbers can be hex and octal encoded...
func parseInt64(numberString string) (string, int64, error) {
if strings.HasPrefix(numberString, "0x") ||
strings.HasPrefix(numberString, "0X") {
num, err := strconv.ParseInt(numberString[2:], 16, 64)
return "0x%X", num, err
} else if strings.HasPrefix(numberString, "0o") {
num, err := strconv.ParseInt(numberString[2:], 8, 64)
return "0o%o", num, err
}
num, err := strconv.ParseInt(numberString, 10, 64)
return "%v", num, err
}

func parseInt(numberString string) (int, error) {
var err error
var parsed int64
if strings.HasPrefix(numberString, "0x") ||
strings.HasPrefix(numberString, "0X") {
parsed, err = strconv.ParseInt(numberString[2:], 16, 64)
} else {
parsed, err = strconv.ParseInt(numberString, 10, 64)
}
_, parsed, err := parseInt64(numberString)

if err != nil {
return 0, err
@ -384,45 +317,19 @@ func parseInt(numberString string) (int, error) {
return int(parsed), err
}
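parseInt64 now handles both hex (0x/0X) and octal (0o) notation in one place and returns a printf-style format string alongside the parsed value, so callers can re-print the number in its original notation; parseInt simply delegates to it and narrows the result. A small self-contained sketch of the same idea, using only the standard library (the function and variable names are illustrative, not yq's API):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseAndFormat mirrors the idea behind parseInt64: detect the base from the
// prefix, parse with strconv.ParseInt, and remember a format string so the
// value can be re-printed in the notation it arrived in.
func parseAndFormat(s string) (string, int64, error) {
	switch {
	case strings.HasPrefix(s, "0x"), strings.HasPrefix(s, "0X"):
		n, err := strconv.ParseInt(s[2:], 16, 64)
		return "0x%X", n, err
	case strings.HasPrefix(s, "0o"):
		n, err := strconv.ParseInt(s[2:], 8, 64)
		return "0o%o", n, err
	default:
		n, err := strconv.ParseInt(s, 10, 64)
		return "%v", n, err
	}
}

func main() {
	for _, s := range []string{"34", "0x10", "0o10"} {
		format, n, err := parseAndFormat(s)
		if err != nil {
			panic(err)
		}
		// e.g. "0x10 -> 16, round trips as 0x10"
		fmt.Printf("%s -> %d, round trips as %s\n", s, n, fmt.Sprintf(format, n))
	}
}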
|
||||
|
||||
func createStringScalarNode(stringValue string) *yaml.Node {
|
||||
var node = &yaml.Node{Kind: yaml.ScalarNode}
|
||||
node.Value = stringValue
|
||||
node.Tag = "!!str"
|
||||
return node
|
||||
}
|
||||
|
||||
func createScalarNode(value interface{}, stringValue string) *yaml.Node {
|
||||
var node = &yaml.Node{Kind: yaml.ScalarNode}
|
||||
node.Value = stringValue
|
||||
|
||||
switch value.(type) {
|
||||
case float32, float64:
|
||||
node.Tag = "!!float"
|
||||
case int, int64, int32:
|
||||
node.Tag = "!!int"
|
||||
case bool:
|
||||
node.Tag = "!!bool"
|
||||
case string:
|
||||
node.Tag = "!!str"
|
||||
case nil:
|
||||
node.Tag = "!!null"
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
func headAndLineComment(node *yaml.Node) string {
|
||||
func headAndLineComment(node *CandidateNode) string {
|
||||
return headComment(node) + lineComment(node)
|
||||
}
|
||||
|
||||
func headComment(node *yaml.Node) string {
|
||||
func headComment(node *CandidateNode) string {
|
||||
return strings.Replace(node.HeadComment, "#", "", 1)
|
||||
}
|
||||
|
||||
func lineComment(node *yaml.Node) string {
|
||||
func lineComment(node *CandidateNode) string {
|
||||
return strings.Replace(node.LineComment, "#", "", 1)
|
||||
}
|
||||
|
||||
func footComment(node *yaml.Node) string {
|
||||
func footComment(node *CandidateNode) string {
|
||||
return strings.Replace(node.FootComment, "#", "", 1)
|
||||
}
|
||||
|
||||
@ -434,7 +341,7 @@ func createValueOperation(value interface{}, stringValue string) *Operation {
|
||||
OperationType: valueOpType,
|
||||
Value: value,
|
||||
StringValue: stringValue,
|
||||
CandidateNode: &CandidateNode{Node: node},
|
||||
CandidateNode: node,
|
||||
}
|
||||
}
|
||||
|
||||
@ -471,40 +378,46 @@ func NodeToString(node *CandidateNode) string {
|
||||
if !log.IsEnabledFor(logging.DEBUG) {
|
||||
return ""
|
||||
}
|
||||
value := node.Node
|
||||
if value == nil {
|
||||
if node == nil {
|
||||
return "-- nil --"
|
||||
}
|
||||
buf := new(bytes.Buffer)
|
||||
encoder := yaml.NewEncoder(buf)
|
||||
errorEncoding := encoder.Encode(value)
|
||||
if errorEncoding != nil {
|
||||
log.Error("Error debugging node, %v", errorEncoding.Error())
|
||||
}
|
||||
errorClosingEncoder := encoder.Close()
|
||||
if errorClosingEncoder != nil {
|
||||
log.Error("Error closing encoder: ", errorClosingEncoder.Error())
|
||||
}
|
||||
tag := value.Tag
|
||||
if value.Kind == yaml.DocumentNode {
|
||||
tag = "doc"
|
||||
} else if value.Kind == yaml.AliasNode {
|
||||
tag := node.Tag
|
||||
if node.Kind == AliasNode {
|
||||
tag = "alias"
|
||||
}
|
||||
return fmt.Sprintf(`D%v, P%v, (%v)::%v`, node.Document, node.Path, tag, buf.String())
|
||||
valueToUse := node.Value
|
||||
if valueToUse == "" {
|
||||
valueToUse = fmt.Sprintf("%v kids", len(node.Content))
|
||||
}
|
||||
return fmt.Sprintf(`D%v, P%v, %v (%v)::%v`, node.GetDocument(), node.GetNicePath(), KindString(node.Kind), tag, valueToUse)
|
||||
}
|
||||
|
||||
func KindString(kind yaml.Kind) string {
|
||||
func NodeContentToString(node *CandidateNode, depth int) string {
|
||||
if !log.IsEnabledFor(logging.DEBUG) {
|
||||
return ""
|
||||
}
|
||||
var sb strings.Builder
|
||||
for _, child := range node.Content {
|
||||
for i := 0; i < depth; i++ {
|
||||
sb.WriteString(" ")
|
||||
}
|
||||
sb.WriteString("- ")
|
||||
sb.WriteString(NodeToString(child))
|
||||
sb.WriteString("\n")
|
||||
sb.WriteString(NodeContentToString(child, depth+1))
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func KindString(kind Kind) string {
|
||||
switch kind {
|
||||
case yaml.ScalarNode:
|
||||
case ScalarNode:
|
||||
return "ScalarNode"
|
||||
case yaml.SequenceNode:
|
||||
case SequenceNode:
|
||||
return "SequenceNode"
|
||||
case yaml.MappingNode:
|
||||
case MappingNode:
|
||||
return "MappingNode"
|
||||
case yaml.DocumentNode:
|
||||
return "DocumentNode"
|
||||
case yaml.AliasNode:
|
||||
case AliasNode:
|
||||
return "AliasNode"
|
||||
default:
|
||||
return "unknown!"
|
||||
|
@ -1,10 +1,10 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
func TestGetLogger(t *testing.T) {
|
||||
@ -16,7 +16,7 @@ func TestGetLogger(t *testing.T) {
|
||||
|
||||
type parseSnippetScenario struct {
|
||||
snippet string
|
||||
expected *yaml.Node
|
||||
expected *CandidateNode
|
||||
expectedError string
|
||||
}
|
||||
|
||||
@ -27,15 +27,15 @@ var parseSnippetScenarios = []parseSnippetScenario{
|
||||
},
|
||||
{
|
||||
snippet: "",
|
||||
expected: &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
expected: &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!null",
|
||||
},
|
||||
},
|
||||
{
|
||||
snippet: "null",
|
||||
expected: &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
expected: &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!null",
|
||||
Value: "null",
|
||||
Line: 0,
|
||||
@ -44,8 +44,8 @@ var parseSnippetScenarios = []parseSnippetScenario{
|
||||
},
|
||||
{
|
||||
snippet: "3",
|
||||
expected: &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
expected: &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!int",
|
||||
Value: "3",
|
||||
Line: 0,
|
||||
@ -54,8 +54,8 @@ var parseSnippetScenarios = []parseSnippetScenario{
|
||||
},
|
||||
{
|
||||
snippet: "cat",
|
||||
expected: &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
expected: &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: "cat",
|
||||
Line: 0,
|
||||
@ -64,8 +64,8 @@ var parseSnippetScenarios = []parseSnippetScenario{
|
||||
},
|
||||
{
|
||||
snippet: "3.1",
|
||||
expected: &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
expected: &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!float",
|
||||
Value: "3.1",
|
||||
Line: 0,
|
||||
@ -74,8 +74,8 @@ var parseSnippetScenarios = []parseSnippetScenario{
|
||||
},
|
||||
{
|
||||
snippet: "true",
|
||||
expected: &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
expected: &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!bool",
|
||||
Value: "true",
|
||||
Line: 0,
|
||||
@ -93,7 +93,7 @@ func TestParseSnippet(t *testing.T) {
|
||||
} else {
|
||||
test.AssertResultComplexWithContext(t, tt.expectedError, err.Error(), tt.snippet)
|
||||
}
|
||||
return
|
||||
continue
|
||||
}
|
||||
if err != nil {
|
||||
t.Error(tt.snippet)
|
||||
@ -102,3 +102,37 @@ func TestParseSnippet(t *testing.T) {
|
||||
test.AssertResultComplexWithContext(t, tt.expected, actual, tt.snippet)
|
||||
}
|
||||
}
|
||||
|
||||
type parseInt64Scenario struct {
|
||||
numberString string
|
||||
expectedParsedNumber int64
|
||||
}
|
||||
|
||||
var parseInt64Scenarios = []parseInt64Scenario{
|
||||
{
|
||||
numberString: "34",
|
||||
expectedParsedNumber: 34,
|
||||
},
|
||||
{
|
||||
numberString: "0x10",
|
||||
expectedParsedNumber: 16,
|
||||
},
|
||||
{
|
||||
numberString: "0o10",
|
||||
expectedParsedNumber: 8,
|
||||
},
|
||||
}
|
||||
|
||||
func TestParseInt64(t *testing.T) {
|
||||
for _, tt := range parseInt64Scenarios {
|
||||
format, actualNumber, err := parseInt64(tt.numberString)
|
||||
|
||||
if err != nil {
|
||||
t.Error(tt.numberString)
|
||||
t.Error(err)
|
||||
}
|
||||
test.AssertResultComplexWithContext(t, tt.expectedParsedNumber, actualNumber, tt.numberString)
|
||||
|
||||
test.AssertResultComplexWithContext(t, tt.numberString, fmt.Sprintf(format, actualNumber), fmt.Sprintf("Formatting of: %v", tt.numberString))
|
||||
}
|
||||
}
|
||||
|
@ -31,6 +31,38 @@ cities:
|
||||
- Perth
|
||||
`,
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "path",
|
||||
expression: ".cities[2] | path",
|
||||
input: `return {
|
||||
["country"] = "Australia"; -- this place
|
||||
["cities"] = {
|
||||
"Sydney",
|
||||
"Melbourne",
|
||||
"Brisbane",
|
||||
"Perth",
|
||||
};
|
||||
};
|
||||
`,
|
||||
expected: "- cities\n- 2\n",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "path",
|
||||
expression: ".cities[2] | key",
|
||||
input: `return {
|
||||
["country"] = "Australia"; -- this place
|
||||
["cities"] = {
|
||||
"Sydney",
|
||||
"Melbourne",
|
||||
"Brisbane",
|
||||
"Perth",
|
||||
};
|
||||
};
|
||||
`,
|
||||
expected: "2\n",
|
||||
},
|
||||
{
|
||||
description: "Basic output example",
|
||||
scenarioType: "encode",
|
||||
|
@ -4,7 +4,6 @@ func matchKey(name string, pattern string) (matched bool) {
|
||||
if pattern == "" {
|
||||
return name == pattern
|
||||
}
|
||||
log.Debug("pattern: %v", pattern)
|
||||
if pattern == "*" {
|
||||
log.Debug("wild!")
|
||||
return true
|
||||
|
@ -5,8 +5,6 @@ import (
"strconv"
"strings"
"time"

yaml "gopkg.in/yaml.v3"
)

func createAddOp(lhs *ExpressionNode, rhs *ExpressionNode) *ExpressionNode {
@ -19,23 +17,21 @@ func addAssignOperator(d *dataTreeNavigator, context Context, expressionNode *Ex
return compoundAssignFunction(d, context, expressionNode, createAddOp)
}

func toNodes(candidate *CandidateNode, lhs *CandidateNode) ([]*yaml.Node, error) {
if candidate.Node.Tag == "!!null" {
return []*yaml.Node{}, nil
}
clone, err := candidate.Copy()
if err != nil {
return nil, err
func toNodes(candidate *CandidateNode, lhs *CandidateNode) []*CandidateNode {
if candidate.Tag == "!!null" {
return []*CandidateNode{}
}

switch candidate.Node.Kind {
case yaml.SequenceNode:
return clone.Node.Content, nil
clone := candidate.Copy()

switch candidate.Kind {
case SequenceNode:
return clone.Content
default:
if len(lhs.Node.Content) > 0 {
clone.Node.Style = lhs.Node.Content[0].Style
if len(lhs.Content) > 0 {
clone.Style = lhs.Content[0].Style
}
return []*yaml.Node{clone.Node}, nil
return []*CandidateNode{clone}
}

}
|
||||
@ -47,50 +43,42 @@ func addOperator(d *dataTreeNavigator, context Context, expressionNode *Expressi
|
||||
}
|
||||
|
||||
func add(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
|
||||
lhs.Node = unwrapDoc(lhs.Node)
|
||||
rhs.Node = unwrapDoc(rhs.Node)
|
||||
|
||||
lhsNode := lhs.Node
|
||||
lhsNode := lhs
|
||||
|
||||
if lhsNode.Tag == "!!null" {
|
||||
return lhs.CreateReplacement(rhs.Node), nil
|
||||
return lhs.CopyAsReplacement(rhs), nil
|
||||
}
|
||||
|
||||
target := lhs.CreateReplacement(&yaml.Node{
|
||||
Anchor: lhs.Node.Anchor,
|
||||
})
|
||||
target := lhs.CopyWithoutContent()
|
||||
|
||||
switch lhsNode.Kind {
|
||||
case yaml.MappingNode:
|
||||
if rhs.Node.Kind != yaml.MappingNode {
|
||||
return nil, fmt.Errorf("%v (%v) cannot be added to a %v (%v)", rhs.Node.Tag, rhs.GetNicePath(), lhsNode.Tag, lhs.GetNicePath())
|
||||
case MappingNode:
|
||||
if rhs.Kind != MappingNode {
|
||||
return nil, fmt.Errorf("%v (%v) cannot be added to a %v (%v)", rhs.Tag, rhs.GetNicePath(), lhsNode.Tag, lhs.GetNicePath())
|
||||
}
|
||||
addMaps(target, lhs, rhs)
|
||||
case yaml.SequenceNode:
|
||||
if err := addSequences(target, lhs, rhs); err != nil {
|
||||
return nil, err
|
||||
case SequenceNode:
|
||||
addSequences(target, lhs, rhs)
|
||||
case ScalarNode:
|
||||
if rhs.Kind != ScalarNode {
|
||||
return nil, fmt.Errorf("%v (%v) cannot be added to a %v (%v)", rhs.Tag, rhs.GetNicePath(), lhsNode.Tag, lhs.GetNicePath())
|
||||
}
|
||||
|
||||
case yaml.ScalarNode:
|
||||
if rhs.Node.Kind != yaml.ScalarNode {
|
||||
return nil, fmt.Errorf("%v (%v) cannot be added to a %v (%v)", rhs.Node.Tag, rhs.GetNicePath(), lhsNode.Tag, lhs.GetNicePath())
|
||||
}
|
||||
target.Node.Kind = yaml.ScalarNode
|
||||
target.Node.Style = lhsNode.Style
|
||||
if err := addScalars(context, target, lhsNode, rhs.Node); err != nil {
|
||||
target.Kind = ScalarNode
|
||||
target.Style = lhsNode.Style
|
||||
if err := addScalars(context, target, lhsNode, rhs); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return target, nil
|
||||
}
|
||||
|
||||
func addScalars(context Context, target *CandidateNode, lhs *yaml.Node, rhs *yaml.Node) error {
|
||||
func addScalars(context Context, target *CandidateNode, lhs *CandidateNode, rhs *CandidateNode) error {
|
||||
lhsTag := lhs.Tag
|
||||
rhsTag := guessTagFromCustomType(rhs)
|
||||
rhsTag := rhs.guessTagFromCustomType()
|
||||
lhsIsCustom := false
|
||||
if !strings.HasPrefix(lhsTag, "!!") {
|
||||
// custom tag - we have to have a guess
|
||||
lhsTag = guessTagFromCustomType(lhs)
|
||||
lhsTag = lhs.guessTagFromCustomType()
|
||||
lhsIsCustom = true
|
||||
}
|
||||
|
||||
@ -106,15 +94,16 @@ func addScalars(context Context, target *CandidateNode, lhs *yaml.Node, rhs *yam
|
||||
return addDateTimes(context.GetDateTimeLayout(), target, lhs, rhs)
|
||||
|
||||
} else if lhsTag == "!!str" {
|
||||
target.Node.Tag = lhs.Tag
|
||||
target.Tag = lhs.Tag
|
||||
if rhsTag == "!!null" {
|
||||
target.Node.Value = lhs.Value
|
||||
target.Value = lhs.Value
|
||||
} else {
|
||||
target.Node.Value = lhs.Value + rhs.Value
|
||||
target.Value = lhs.Value + rhs.Value
|
||||
}
|
||||
|
||||
} else if rhsTag == "!!str" {
|
||||
target.Node.Tag = rhs.Tag
|
||||
target.Node.Value = lhs.Value + rhs.Value
|
||||
target.Tag = rhs.Tag
|
||||
target.Value = lhs.Value + rhs.Value
|
||||
} else if lhsTag == "!!int" && rhsTag == "!!int" {
|
||||
format, lhsNum, err := parseInt64(lhs.Value)
|
||||
if err != nil {
|
||||
@ -125,8 +114,8 @@ func addScalars(context Context, target *CandidateNode, lhs *yaml.Node, rhs *yam
|
||||
return err
|
||||
}
|
||||
sum := lhsNum + rhsNum
|
||||
target.Node.Tag = lhs.Tag
|
||||
target.Node.Value = fmt.Sprintf(format, sum)
|
||||
target.Tag = lhs.Tag
|
||||
target.Value = fmt.Sprintf(format, sum)
|
||||
} else if (lhsTag == "!!int" || lhsTag == "!!float") && (rhsTag == "!!int" || rhsTag == "!!float") {
|
||||
lhsNum, err := strconv.ParseFloat(lhs.Value, 64)
|
||||
if err != nil {
|
||||
@ -138,18 +127,18 @@ func addScalars(context Context, target *CandidateNode, lhs *yaml.Node, rhs *yam
|
||||
}
|
||||
sum := lhsNum + rhsNum
|
||||
if lhsIsCustom {
|
||||
target.Node.Tag = lhs.Tag
|
||||
target.Tag = lhs.Tag
|
||||
} else {
|
||||
target.Node.Tag = "!!float"
|
||||
target.Tag = "!!float"
|
||||
}
|
||||
target.Node.Value = fmt.Sprintf("%v", sum)
|
||||
target.Value = fmt.Sprintf("%v", sum)
|
||||
} else {
|
||||
return fmt.Errorf("%v cannot be added to %v", lhsTag, rhsTag)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func addDateTimes(layout string, target *CandidateNode, lhs *yaml.Node, rhs *yaml.Node) error {
|
||||
func addDateTimes(layout string, target *CandidateNode, lhs *CandidateNode, rhs *CandidateNode) error {
|
||||
|
||||
duration, err := time.ParseDuration(rhs.Value)
|
||||
if err != nil {
|
||||
@ -162,52 +151,57 @@ func addDateTimes(layout string, target *CandidateNode, lhs *yaml.Node, rhs *yam
|
||||
}
|
||||
|
||||
newTime := currentTime.Add(duration)
|
||||
target.Node.Value = newTime.Format(layout)
|
||||
target.Value = newTime.Format(layout)
|
||||
return nil
|
||||
|
||||
}
|
||||
|
||||
func addSequences(target *CandidateNode, lhs *CandidateNode, rhs *CandidateNode) error {
|
||||
target.Node.Kind = yaml.SequenceNode
|
||||
if len(lhs.Node.Content) > 0 {
|
||||
target.Node.Style = lhs.Node.Style
|
||||
func addSequences(target *CandidateNode, lhs *CandidateNode, rhs *CandidateNode) {
|
||||
log.Debugf("adding sequences! target: %v; lhs %v; rhs: %v", NodeToString(target), NodeToString(lhs), NodeToString(rhs))
|
||||
target.Kind = SequenceNode
|
||||
if len(lhs.Content) == 0 {
|
||||
log.Debugf("dont copy lhs style")
|
||||
target.Style = 0
|
||||
}
|
||||
target.Node.Tag = lhs.Node.Tag
|
||||
target.Tag = lhs.Tag
|
||||
|
||||
extraNodes, err := toNodes(rhs, lhs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
target.Node.Content = append(deepCloneContent(lhs.Node.Content), extraNodes...)
|
||||
return nil
|
||||
extraNodes := toNodes(rhs, lhs)
|
||||
|
||||
target.AddChildren(lhs.Content)
|
||||
target.AddChildren(extraNodes)
|
||||
}
|
||||
|
||||
func addMaps(target *CandidateNode, lhsC *CandidateNode, rhsC *CandidateNode) {
lhs := lhsC.Node
rhs := rhsC.Node
lhs := lhsC
rhs := rhsC

target.Node.Content = make([]*yaml.Node, len(lhs.Content))
copy(target.Node.Content, lhs.Content)
if len(lhs.Content) == 0 {
log.Debugf("dont copy lhs style")
target.Style = 0
}

target.Content = make([]*CandidateNode, 0)
target.AddChildren(lhs.Content)

for index := 0; index < len(rhs.Content); index = index + 2 {
key := rhs.Content[index]
value := rhs.Content[index+1]
log.Debug("finding %v", key.Value)
indexInLHS := findKeyInMap(target.Node, key)
indexInLHS := findKeyInMap(target, key)
log.Debug("indexInLhs %v", indexInLHS)
if indexInLHS < 0 {
// not in there, append it
target.Node.Content = append(target.Node.Content, key, value)
target.AddKeyValueChild(key, value)
} else {
// it's there, replace it
target.Node.Content[indexInLHS+1] = value
oldValue := target.Content[indexInLHS+1]
newValueCopy := oldValue.CopyAsReplacement(value)
target.Content[indexInLHS+1] = newValueCopy
}
}
target.Node.Kind = yaml.MappingNode
target.Kind = MappingNode
if len(lhs.Content) > 0 {
target.Node.Style = lhs.Style
target.Style = lhs.Style
}
target.Node.Tag = lhs.Tag
target.Tag = lhs.Tag
}
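addMaps implements the map half of the add operator: each key/value pair on the right-hand side either replaces the value of a matching key already present on the left (keeping that key's position) or is appended at the end, which is why `.a += {"b": "cat"}` on `a: {c: dog}` yields `{c: dog, b: cat}` in the scenarios that follow. A rough sketch of that merge rule over ordered string pairs (a simplification: the real code operates on CandidateNode content and copies nodes via CopyAsReplacement):

package main

import "fmt"

type pair struct{ key, value string }

// mergePairs applies the addMaps rule to ordered key/value pairs: keys that
// already exist keep their position but take the right-hand value, unknown
// keys are appended at the end.
func mergePairs(lhs, rhs []pair) []pair {
	out := append([]pair(nil), lhs...)
	for _, r := range rhs {
		replaced := false
		for i, l := range out {
			if l.key == r.key {
				out[i].value = r.value
				replaced = true
				break
			}
		}
		if !replaced {
			out = append(out, r)
		}
	}
	return out
}

func main() {
	lhs := []pair{{"c", "dog"}}
	rhs := []pair{{"b", "cat"}}
	fmt.Println(mergePairs(lhs, rhs)) // [{c dog} {b cat}]
}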
|
||||
|
@ -36,7 +36,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `{}`,
|
||||
expression: "(.a + .b) as $x | .",
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{}\n",
|
||||
"D0, P[], (!!map)::{}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -44,7 +44,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `a: 0`,
|
||||
expression: ".a += .b.c",
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: 0\n",
|
||||
"D0, P[], (!!map)::a: 0\n",
|
||||
},
|
||||
},
|
||||
|
||||
@ -63,7 +63,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
dontFormatInputForDoc: true,
|
||||
expression: `.a += .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: [1, 2, 3, 4]\nb:\n - 3\n - 4\n",
|
||||
"D0, P[], (!!map)::a: [1, 2, 3, 4]\nb:\n - 3\n - 4\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -98,7 +98,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `a: ['dog']`,
|
||||
expression: `.a += "cat"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: ['dog', 'cat']\n",
|
||||
"D0, P[], (!!map)::a: ['dog', 'cat']\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -106,7 +106,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `a: [dog]`,
|
||||
expression: `.a = ["cat"] + .a`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: [cat, dog]\n",
|
||||
"D0, P[], (!!map)::a: [cat, dog]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -116,7 +116,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `{a: ['dog'], b: cat}`,
|
||||
expression: `.a = .a + .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: ['dog', 'cat'], b: cat}\n",
|
||||
"D0, P[], (!!map)::{a: ['dog', 'cat'], b: cat}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -125,7 +125,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `a: []`,
|
||||
expression: `.a += "cat"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a:\n - cat\n",
|
||||
"D0, P[], (!!map)::a:\n - cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -134,7 +134,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `a: [dog]`,
|
||||
expression: `.a += "cat"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: [dog, cat]\n",
|
||||
"D0, P[], (!!map)::a: [dog, cat]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -171,7 +171,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `a: {}`,
|
||||
expression: `.a += {"b": "cat"}`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a:\n b: cat\n",
|
||||
"D0, P[], (!!map)::a:\n b: cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -180,7 +180,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `a: {c: dog}`,
|
||||
expression: `.a += {"b": "cat"}`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: {c: dog, b: cat}\n",
|
||||
"D0, P[], (!!map)::a: {c: dog, b: cat}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -196,7 +196,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `a: { a1: {b: [cat]}, a2: {b: [dog]}, a3: {} }`,
|
||||
expression: `.a[].b += ["mouse"]`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: {a1: {b: [cat, mouse]}, a2: {b: [dog, mouse]}, a3: {b: [mouse]}}\n",
|
||||
"D0, P[], (!!map)::a: {a1: {b: [cat, mouse]}, a2: {b: [dog, mouse]}, a3: {b: [mouse]}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -204,7 +204,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `{a: cat, b: meow}`,
|
||||
expression: `.a += .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: catmeow, b: meow}\n",
|
||||
"D0, P[], (!!map)::{a: catmeow, b: meow}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -245,7 +245,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `{a: 3, b: 4.9}`,
|
||||
expression: `.a = .a + .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: 7.9, b: 4.9}\n",
|
||||
"D0, P[], (!!map)::{a: 7.9, b: 4.9}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -254,7 +254,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `{a: 3, b: 4}`,
|
||||
expression: `.a = .a + .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: 7, b: 4}\n",
|
||||
"D0, P[], (!!map)::{a: 7, b: 4}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -262,7 +262,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `{a: 3, b: 5}`,
|
||||
expression: `.[] += 1`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: 4, b: 6}\n",
|
||||
"D0, P[], (!!map)::{a: 4, b: 6}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -271,7 +271,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `a: 2021-01-01T00:00:00Z`,
|
||||
expression: `.a += "3h10m"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: 2021-01-01T03:10:00Z\n",
|
||||
"D0, P[], (!!map)::a: 2021-01-01T03:10:00Z\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -280,7 +280,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `a: 2021-01-01`,
|
||||
expression: `.a += "24h"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: 2021-01-02T00:00:00Z\n",
|
||||
"D0, P[], (!!map)::a: 2021-01-02T00:00:00Z\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -289,7 +289,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `a: Saturday, 15-Dec-01 at 2:59AM GMT`,
|
||||
expression: `with_dtf("Monday, 02-Jan-06 at 3:04PM MST", .a += "3h1m")`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: Saturday, 15-Dec-01 at 6:00AM GMT\n",
|
||||
"D0, P[], (!!map)::a: Saturday, 15-Dec-01 at 6:00AM GMT\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -299,7 +299,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: `a: !cat Saturday, 15-Dec-01 at 2:59AM GMT`,
|
||||
expression: `with_dtf("Monday, 02-Jan-06 at 3:04PM MST", .a += "3h1m")`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: !cat Saturday, 15-Dec-01 at 6:00AM GMT\n",
|
||||
"D0, P[], (!!map)::a: !cat Saturday, 15-Dec-01 at 6:00AM GMT\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -316,7 +316,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: "a: {thing: {name: Astuff, value: x}, a1: cool}\nb: {thing: {name: Bstuff, legs: 3}, b1: neat}",
|
||||
expression: `.a += .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: {thing: {name: Bstuff, legs: 3}, a1: cool, b1: neat}\nb: {thing: {name: Bstuff, legs: 3}, b1: neat}\n",
|
||||
"D0, P[], (!!map)::a: {thing: {name: Bstuff, legs: 3}, a1: cool, b1: neat}\nb: {thing: {name: Bstuff, legs: 3}, b1: neat}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -325,7 +325,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: "a: !horse cat\nb: !goat _meow",
|
||||
expression: `.a += .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: !horse cat_meow\nb: !goat _meow\n",
|
||||
"D0, P[], (!!map)::a: !horse cat_meow\nb: !goat _meow\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -334,7 +334,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: "a: !horse 1.2\nb: !goat 2.3",
|
||||
expression: `.a += .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: !horse 3.5\nb: !goat 2.3\n",
|
||||
"D0, P[], (!!map)::a: !horse 3.5\nb: !goat 2.3\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -342,7 +342,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: "a: !horse 2\nb: !goat 2.3",
|
||||
expression: `.a += .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: !horse 4.3\nb: !goat 2.3\n",
|
||||
"D0, P[], (!!map)::a: !horse 4.3\nb: !goat 2.3\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -350,7 +350,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: "a: 2\nb: !goat 2.3",
|
||||
expression: `.a += .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: 4.3\nb: !goat 2.3\n",
|
||||
"D0, P[], (!!map)::a: 4.3\nb: !goat 2.3\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -359,7 +359,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: "a: !horse 2\nb: !goat 3",
|
||||
expression: `.a += .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: !horse 5\nb: !goat 3\n",
|
||||
"D0, P[], (!!map)::a: !horse 5\nb: !goat 3\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -369,7 +369,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: "a: !horse [a]\nb: !goat [b]",
|
||||
expression: `.a += .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: !horse [a, b]\nb: !goat [b]\n",
|
||||
"D0, P[], (!!map)::a: !horse [a, b]\nb: !goat [b]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -378,7 +378,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
document: "a: &horse [1]",
|
||||
expression: `.a += 2`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: &horse [1, 2]\n",
|
||||
"D0, P[], (!!map)::a: &horse [1, 2]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -9,10 +9,7 @@ func alternativeOperator(d *dataTreeNavigator, context Context, expressionNode *
|
||||
if lhs == nil {
|
||||
return nil, nil
|
||||
}
|
||||
truthy, err := isTruthy(lhs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
truthy := isTruthyNode(lhs)
|
||||
if truthy {
|
||||
return lhs, nil
|
||||
}
|
||||
@ -29,15 +26,9 @@ func alternativeFunc(d *dataTreeNavigator, context Context, lhs *CandidateNode,
|
||||
if rhs == nil {
|
||||
return lhs, nil
|
||||
}
|
||||
lhs.Node = unwrapDoc(lhs.Node)
|
||||
rhs.Node = unwrapDoc(rhs.Node)
|
||||
log.Debugf("Alternative LHS: %v", lhs.Node.Tag)
|
||||
log.Debugf("- RHS: %v", rhs.Node.Tag)
|
||||
|
||||
isTrue, err := isTruthy(lhs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if isTrue {
|
||||
isTrue := isTruthyNode(lhs)
|
||||
if isTrue {
|
||||
return lhs, nil
|
||||
}
|
||||
return rhs, nil
|
||||
|
@ -19,7 +19,7 @@ var alternativeOperatorScenarios = []expressionScenario{
|
||||
expression: `(.b // "hello") as $x | .`,
|
||||
document: `a: bridge`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: bridge\n",
|
||||
"D0, P[], (!!map)::a: bridge\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -91,7 +91,7 @@ var alternativeOperatorScenarios = []expressionScenario{
|
||||
expression: "(.a // (.a = 0)) += 1",
|
||||
document: `a: 1`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: 2\n",
|
||||
"D0, P[], (!!map)::a: 2\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -3,8 +3,6 @@ package yqlib
|
||||
import (
|
||||
"container/list"
|
||||
"fmt"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
func assignAliasOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
@ -18,7 +16,7 @@ func assignAliasOperator(d *dataTreeNavigator, context Context, expressionNode *
|
||||
return Context{}, err
|
||||
}
|
||||
if rhs.MatchingNodes.Front() != nil {
|
||||
aliasName = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node.Value
|
||||
aliasName = rhs.MatchingNodes.Front().Value.(*CandidateNode).Value
|
||||
}
|
||||
}
|
||||
|
||||
@ -38,13 +36,13 @@ func assignAliasOperator(d *dataTreeNavigator, context Context, expressionNode *
|
||||
return Context{}, err
|
||||
}
|
||||
if rhs.MatchingNodes.Front() != nil {
|
||||
aliasName = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node.Value
|
||||
aliasName = rhs.MatchingNodes.Front().Value.(*CandidateNode).Value
|
||||
}
|
||||
}
|
||||
|
||||
if aliasName != "" {
|
||||
candidate.Node.Kind = yaml.AliasNode
|
||||
candidate.Node.Value = aliasName
|
||||
candidate.Kind = AliasNode
|
||||
candidate.Value = aliasName
|
||||
}
|
||||
}
|
||||
return context, nil
|
||||
@ -56,8 +54,7 @@ func getAliasOperator(d *dataTreeNavigator, context Context, expressionNode *Exp
|
||||
|
||||
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
node := &yaml.Node{Kind: yaml.ScalarNode, Value: candidate.Node.Value, Tag: "!!str"}
|
||||
result := candidate.CreateReplacement(node)
|
||||
result := candidate.CreateReplacement(ScalarNode, "!!str", candidate.Value)
|
||||
results.PushBack(result)
|
||||
}
|
||||
return context.ChildContext(results), nil
|
||||
@ -75,7 +72,7 @@ func assignAnchorOperator(d *dataTreeNavigator, context Context, expressionNode
|
||||
}
|
||||
|
||||
if rhs.MatchingNodes.Front() != nil {
|
||||
anchorName = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node.Value
|
||||
anchorName = rhs.MatchingNodes.Front().Value.(*CandidateNode).Value
|
||||
}
|
||||
}
|
||||
|
||||
@ -96,11 +93,11 @@ func assignAnchorOperator(d *dataTreeNavigator, context Context, expressionNode
|
||||
}
|
||||
|
||||
if rhs.MatchingNodes.Front() != nil {
|
||||
anchorName = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node.Value
|
||||
anchorName = rhs.MatchingNodes.Front().Value.(*CandidateNode).Value
|
||||
}
|
||||
}
|
||||
|
||||
candidate.Node.Anchor = anchorName
|
||||
candidate.Anchor = anchorName
|
||||
}
|
||||
return context, nil
|
||||
}
|
||||
@ -111,9 +108,8 @@ func getAnchorOperator(d *dataTreeNavigator, context Context, expressionNode *Ex
|
||||
|
||||
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
anchor := candidate.Node.Anchor
|
||||
node := &yaml.Node{Kind: yaml.ScalarNode, Value: anchor, Tag: "!!str"}
|
||||
result := candidate.CreateReplacement(node)
|
||||
anchor := candidate.Anchor
|
||||
result := candidate.CreateReplacement(ScalarNode, "!!str", anchor)
|
||||
results.PushBack(result)
|
||||
}
|
||||
return context.ChildContext(results), nil
|
||||
@ -131,7 +127,7 @@ func explodeOperator(d *dataTreeNavigator, context Context, expressionNode *Expr
|
||||
return Context{}, err
|
||||
}
|
||||
for childEl := rhs.MatchingNodes.Front(); childEl != nil; childEl = childEl.Next() {
|
||||
err = explodeNode(childEl.Value.(*CandidateNode).Node, context)
|
||||
err = explodeNode(childEl.Value.(*CandidateNode), context)
|
||||
if err != nil {
|
||||
return Context{}, err
|
||||
}
|
||||
@ -142,7 +138,7 @@ func explodeOperator(d *dataTreeNavigator, context Context, expressionNode *Expr
|
||||
return context, nil
|
||||
}
|
||||
|
||||
func reconstructAliasedMap(node *yaml.Node, context Context) error {
|
||||
func reconstructAliasedMap(node *CandidateNode, context Context) error {
|
||||
var newContent = list.New()
|
||||
// can I short cut here by prechecking if there's an anchor in the map?
|
||||
// no it needs to recurse in overrideEntry.
|
||||
@ -157,7 +153,7 @@ func reconstructAliasedMap(node *yaml.Node, context Context) error {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
if valueNode.Kind == yaml.SequenceNode {
|
||||
if valueNode.Kind == SequenceNode {
|
||||
log.Debugf("an alias merge list!")
|
||||
for index := len(valueNode.Content) - 1; index >= 0; index = index - 1 {
|
||||
aliasNode := valueNode.Content[index]
|
||||
@ -175,39 +171,39 @@ func reconstructAliasedMap(node *yaml.Node, context Context) error {
|
||||
}
|
||||
}
|
||||
}
|
||||
node.Content = make([]*yaml.Node, newContent.Len())
|
||||
index := 0
|
||||
node.Content = make([]*CandidateNode, 0)
|
||||
for newEl := newContent.Front(); newEl != nil; newEl = newEl.Next() {
|
||||
node.Content[index] = newEl.Value.(*yaml.Node)
|
||||
index++
|
||||
node.AddChild(newEl.Value.(*CandidateNode))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func explodeNode(node *yaml.Node, context Context) error {
|
||||
func explodeNode(node *CandidateNode, context Context) error {
|
||||
log.Debugf("explodeNode - %v", NodeToString(node))
|
||||
node.Anchor = ""
|
||||
switch node.Kind {
|
||||
case yaml.SequenceNode, yaml.DocumentNode:
|
||||
case SequenceNode:
|
||||
for index, contentNode := range node.Content {
|
||||
log.Debugf("exploding index %v", index)
|
||||
log.Debugf("explodeNode - index %v", index)
|
||||
errorInContent := explodeNode(contentNode, context)
|
||||
if errorInContent != nil {
|
||||
return errorInContent
|
||||
}
|
||||
}
|
||||
return nil
|
||||
case yaml.AliasNode:
|
||||
log.Debugf("its an alias!")
|
||||
case AliasNode:
|
||||
log.Debugf("explodeNode - an alias to %v", NodeToString(node.Alias))
|
||||
if node.Alias != nil {
|
||||
node.Kind = node.Alias.Kind
|
||||
node.Style = node.Alias.Style
|
||||
node.Tag = node.Alias.Tag
|
||||
node.Content = deepCloneContent(node.Alias.Content)
|
||||
node.AddChildren(node.Alias.Content)
|
||||
node.Value = node.Alias.Value
|
||||
node.Alias = nil
|
||||
}
|
||||
log.Debug("now I'm %v", NodeToString(node))
|
||||
return nil
|
||||
case yaml.MappingNode:
|
||||
case MappingNode:
|
||||
// //check the map has an alias in it
|
||||
hasAlias := false
|
||||
for index := 0; index < len(node.Content); index = index + 2 {
|
||||
@ -241,11 +237,13 @@ func explodeNode(node *yaml.Node, context Context) error {
|
||||
}
|
||||
}
|
||||
|
||||
func applyAlias(node *yaml.Node, alias *yaml.Node, aliasIndex int, newContent Context) error {
|
||||
func applyAlias(node *CandidateNode, alias *CandidateNode, aliasIndex int, newContent Context) error {
|
||||
log.Debug("alias is nil ?")
|
||||
if alias == nil {
|
||||
return nil
|
||||
}
|
||||
if alias.Kind != yaml.MappingNode {
|
||||
log.Debug("alias: %v", NodeToString(alias))
|
||||
if alias.Kind != MappingNode {
|
||||
return fmt.Errorf("merge anchor only supports maps, got %v instead", alias.Tag)
|
||||
}
|
||||
for index := 0; index < len(alias.Content); index = index + 2 {
|
||||
@ -260,7 +258,7 @@ func applyAlias(node *yaml.Node, alias *yaml.Node, aliasIndex int, newContent Co
|
||||
return nil
|
||||
}
|
||||
|
||||
func overrideEntry(node *yaml.Node, key *yaml.Node, value *yaml.Node, startIndex int, newContent Context) error {
|
||||
func overrideEntry(node *CandidateNode, key *CandidateNode, value *CandidateNode, startIndex int, newContent Context) error {
|
||||
|
||||
err := explodeNode(value, newContent)
|
||||
|
||||
@ -270,8 +268,8 @@ func overrideEntry(node *yaml.Node, key *yaml.Node, value *yaml.Node, startIndex
|
||||
|
||||
for newEl := newContent.MatchingNodes.Front(); newEl != nil; newEl = newEl.Next() {
|
||||
valueEl := newEl.Next() // move forward twice
|
||||
keyNode := newEl.Value.(*yaml.Node)
|
||||
log.Debugf("checking new content %v:%v", keyNode.Value, valueEl.Value.(*yaml.Node).Value)
|
||||
keyNode := newEl.Value.(*CandidateNode)
|
||||
log.Debugf("checking new content %v:%v", keyNode.Value, valueEl.Value.(*CandidateNode).Value)
|
||||
if keyNode.Value == key.Value && keyNode.Alias == nil && key.Alias == nil {
|
||||
log.Debugf("overridign new content")
|
||||
valueEl.Value = value
|
||||
|
@ -24,7 +24,7 @@ thingTwo:
|
||||
<<: *item_value
|
||||
`
|
||||
|
||||
var expectedUpdatedArrayRef = `D0, P[], (doc)::item_value: &item_value
|
||||
var expectedUpdatedArrayRef = `D0, P[], (!!map)::item_value: &item_value
|
||||
value: true
|
||||
thingOne:
|
||||
name: item_1
|
||||
@ -34,6 +34,25 @@ thingTwo:
|
||||
!!merge <<: *item_value
|
||||
`
|
||||
|
||||
var explodeMergeAnchorsExpected = `D0, P[], (!!map)::foo:
|
||||
a: foo_a
|
||||
thing: foo_thing
|
||||
c: foo_c
|
||||
bar:
|
||||
b: bar_b
|
||||
thing: bar_thing
|
||||
c: bar_c
|
||||
foobarList:
|
||||
b: bar_b
|
||||
thing: foo_thing
|
||||
c: foobarList_c
|
||||
a: foo_a
|
||||
foobar:
|
||||
c: foo_c
|
||||
a: foo_a
|
||||
thing: foobar_thing
|
||||
`
|
||||
|
||||
var anchorOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
@ -76,7 +95,7 @@ var anchorOperatorScenarios = []expressionScenario{
|
||||
document: `a: cat`,
|
||||
expression: `.a anchor = "foobar"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: &foobar cat\n",
|
||||
"D0, P[], (!!map)::a: &foobar cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -84,7 +103,7 @@ var anchorOperatorScenarios = []expressionScenario{
|
||||
document: `a: {b: cat}`,
|
||||
expression: `.a anchor |= .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: &cat {b: cat}\n",
|
||||
"D0, P[], (!!map)::a: &cat {b: cat}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -92,7 +111,7 @@ var anchorOperatorScenarios = []expressionScenario{
|
||||
document: `a: {c: cat}`,
|
||||
expression: `.a anchor |= .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: {c: cat}\n",
|
||||
"D0, P[], (!!map)::a: {c: cat}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -100,7 +119,7 @@ var anchorOperatorScenarios = []expressionScenario{
|
||||
document: `a: {c: cat}`,
|
||||
expression: `.a anchor = .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: {c: cat}\n",
|
||||
"D0, P[], (!!map)::a: {c: cat}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -116,7 +135,7 @@ var anchorOperatorScenarios = []expressionScenario{
|
||||
document: `{b: &meow purr, a: cat}`,
|
||||
expression: `.a alias = "meow"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{b: &meow purr, a: *meow}\n",
|
||||
"D0, P[], (!!map)::{b: &meow purr, a: *meow}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -124,7 +143,7 @@ var anchorOperatorScenarios = []expressionScenario{
|
||||
document: `{b: &meow purr, a: cat}`,
|
||||
expression: `.a alias = ""`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{b: &meow purr, a: cat}\n",
|
||||
"D0, P[], (!!map)::{b: &meow purr, a: cat}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -132,7 +151,7 @@ var anchorOperatorScenarios = []expressionScenario{
|
||||
document: `{b: &meow purr, a: cat}`,
|
||||
expression: `.a alias = .c`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{b: &meow purr, a: cat}\n",
|
||||
"D0, P[], (!!map)::{b: &meow purr, a: cat}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -140,7 +159,7 @@ var anchorOperatorScenarios = []expressionScenario{
|
||||
document: `{b: &meow purr, a: cat}`,
|
||||
expression: `.a alias |= .c`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{b: &meow purr, a: cat}\n",
|
||||
"D0, P[], (!!map)::{b: &meow purr, a: cat}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -148,7 +167,34 @@ var anchorOperatorScenarios = []expressionScenario{
|
||||
document: `{b: &meow purr, a: {f: meow}}`,
|
||||
expression: `.a alias |= .f`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{b: &meow purr, a: *meow}\n",
|
||||
"D0, P[], (!!map)::{b: &meow purr, a: *meow}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Dont explode alias and anchor - check alias parent",
|
||||
skipDoc: true,
|
||||
document: `{a: &a [1], b: *a}`,
|
||||
expression: `.b[]`,
|
||||
expected: []string{
|
||||
"D0, P[a 0], (!!int)::1\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Explode alias and anchor - check alias parent",
|
||||
skipDoc: true,
|
||||
document: `{a: &a cat, b: *a}`,
|
||||
expression: `explode(.) | .b`,
|
||||
expected: []string{
|
||||
"D0, P[b], (!!str)::cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Explode alias and anchor - check original parent",
|
||||
skipDoc: true,
|
||||
document: `{a: &a cat, b: *a}`,
|
||||
expression: `explode(.) | .a`,
|
||||
expected: []string{
|
||||
"D0, P[a], (!!str)::cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -156,7 +202,7 @@ var anchorOperatorScenarios = []expressionScenario{
|
||||
document: `{f : {a: &a cat, b: *a}}`,
|
||||
expression: `explode(.f)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{f: {a: cat, b: cat}}\n",
|
||||
"D0, P[], (!!map)::{f: {a: cat, b: cat}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -164,7 +210,7 @@ var anchorOperatorScenarios = []expressionScenario{
|
||||
document: `a: mike`,
|
||||
expression: `explode(.a)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: mike\n",
|
||||
"D0, P[], (!!map)::a: mike\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -172,31 +218,14 @@ var anchorOperatorScenarios = []expressionScenario{
|
||||
document: `{f : {a: &a cat, *a: b}}`,
|
||||
expression: `explode(.f)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{f: {a: cat, cat: b}}\n",
|
||||
"D0, P[], (!!map)::{f: {a: cat, cat: b}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Explode with merge anchors",
|
||||
document: mergeDocSample,
|
||||
expression: `explode(.)`,
|
||||
expected: []string{`D0, P[], (doc)::foo:
|
||||
a: foo_a
|
||||
thing: foo_thing
|
||||
c: foo_c
|
||||
bar:
|
||||
b: bar_b
|
||||
thing: bar_thing
|
||||
c: bar_c
|
||||
foobarList:
|
||||
b: bar_b
|
||||
thing: foo_thing
|
||||
c: foobarList_c
|
||||
a: foo_a
|
||||
foobar:
|
||||
c: foo_c
|
||||
a: foo_a
|
||||
thing: foobar_thing
|
||||
`},
|
||||
expected: []string{explodeMergeAnchorsExpected},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
@ -220,10 +249,10 @@ foobar:
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `{f : {a: &a cat, b: &b {f: *a}, *a: *b}}`,
|
||||
document: `{f : {a: &a cat, b: &b {foo: *a}, *a: *b}}`,
|
||||
expression: `explode(.f)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{f: {a: cat, b: {f: cat}, cat: {f: cat}}}\n",
|
||||
"D0, P[], (!!map)::{f: {a: cat, b: {foo: cat}, cat: {foo: cat}}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -10,7 +10,7 @@ var arrayToMapScenarios = []expressionScenario{
|
||||
document: `cool: [null, null, hello]`,
|
||||
expression: `.cool |= array_to_map`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::cool:\n 2: hello\n",
|
||||
"D0, P[], (!!map)::cool:\n 2: hello\n",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -8,8 +8,7 @@ type assignPreferences struct {
|
||||
|
||||
func assignUpdateFunc(prefs assignPreferences) crossFunctionCalculation {
|
||||
return func(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
|
||||
rhs.Node = unwrapDoc(rhs.Node)
|
||||
if !prefs.OnlyWriteNull || lhs.Node.Tag == "!!null" {
|
||||
if !prefs.OnlyWriteNull || lhs.Tag == "!!null" {
|
||||
lhs.UpdateFrom(rhs, prefs)
|
||||
}
|
||||
return lhs, nil
|
||||
@ -46,7 +45,11 @@ func assignUpdateOperator(d *dataTreeNavigator, context Context, expressionNode
|
||||
return context, err
|
||||
}
|
||||
|
||||
for el := lhs.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
//traverse backwards through the context -
|
||||
// like delete, we need to run against the children first.
|
||||
// (e.g. consider when running with expression '.. |= [.]' - we need
|
||||
// to wrap the children first
|
||||
for el := lhs.MatchingNodes.Back(); el != nil; el = el.Prev() {
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
|
||||
rhs, err := d.GetMatchingNodes(context.SingleChildContext(candidate), expressionNode.RHS)
|
||||
@ -60,7 +63,6 @@ func assignUpdateOperator(d *dataTreeNavigator, context Context, expressionNode
|
||||
|
||||
if first != nil {
|
||||
rhsCandidate := first.Value.(*CandidateNode)
|
||||
rhsCandidate.Node = unwrapDoc(rhsCandidate.Node)
|
||||
candidate.UpdateFrom(rhsCandidate, prefs)
|
||||
}
|
||||
}
|
||||
@ -92,7 +94,7 @@ func assignAttributesOperator(d *dataTreeNavigator, context Context, expressionN
|
||||
if expressionNode.Operation.Preferences != nil {
|
||||
prefs = expressionNode.Operation.Preferences.(assignPreferences)
|
||||
}
|
||||
if !prefs.OnlyWriteNull || candidate.Node.Tag == "!!null" {
|
||||
if !prefs.OnlyWriteNull || candidate.Tag == "!!null" {
|
||||
candidate.UpdateAttributesFrom(first.Value.(*CandidateNode), prefs)
|
||||
}
|
||||
}
|
||||
|
@ -22,7 +22,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: "{}",
|
||||
expression: `.a |= .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: null\n",
|
||||
"D0, P[], (!!map)::a: null\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -30,7 +30,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: mergeAnchorAssign,
|
||||
expression: `.c = .b | .a.x = "ModifiedValue" | explode(.)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a:\n x: ModifiedValue\nb:\n x: ModifiedValue\nc:\n x: ModifiedValue\n",
|
||||
"D0, P[], (!!map)::a:\n x: ModifiedValue\nb:\n x: ModifiedValue\nc:\n x: ModifiedValue\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -38,7 +38,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: "{}",
|
||||
expression: `.a = .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: null\n",
|
||||
"D0, P[], (!!map)::a: null\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -47,7 +47,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: "a: cat",
|
||||
expression: `.a = [.a]`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a:\n - cat\n",
|
||||
"D0, P[], (!!map)::a:\n - cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -56,7 +56,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `a: "3"`,
|
||||
expression: `.a = 3`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: 3\n",
|
||||
"D0, P[], (!!map)::a: 3\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -65,7 +65,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `a: "true"`,
|
||||
expression: `.a = true`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: true\n",
|
||||
"D0, P[], (!!map)::a: true\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -74,7 +74,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `a: !cat "meow"`,
|
||||
expression: `.a = "woof"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: !cat \"woof\"\n",
|
||||
"D0, P[], (!!map)::a: !cat \"woof\"\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -82,7 +82,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {b: {g: foof}}}`,
|
||||
expression: `.a |= .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: {g: foof}}\n",
|
||||
"D0, P[], (!!map)::{a: {g: foof}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -90,7 +90,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `[1,2,3]`,
|
||||
expression: `.[] |= . * 2`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::[2, 4, 6]\n",
|
||||
"D0, P[], (!!seq)::[2, 4, 6]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -100,7 +100,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document2: "{b: bob}",
|
||||
expression: `select(fileIndex==0).a = select(fileIndex==1) | select(fileIndex==0)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: {b: bob}}\n",
|
||||
"D0, P[], (!!map)::{a: {b: bob}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -108,7 +108,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {b: child}, b: sibling}`,
|
||||
expression: `.a = .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: sibling, b: sibling}\n",
|
||||
"D0, P[], (!!map)::{a: sibling, b: sibling}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -116,7 +116,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `{a: fieldA, b: fieldB, c: fieldC}`,
|
||||
expression: `(.a, .c) = "potato"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: potato, b: fieldB, c: potato}\n",
|
||||
"D0, P[], (!!map)::{a: potato, b: fieldB, c: potato}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -124,7 +124,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {b: apple}}`,
|
||||
expression: `.a.b = "frog"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: {b: frog}}\n",
|
||||
"D0, P[], (!!map)::{a: {b: frog}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -133,7 +133,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {b: apple}}`,
|
||||
expression: `.a.b |= "frog"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: {b: frog}}\n",
|
||||
"D0, P[], (!!map)::{a: {b: frog}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -149,7 +149,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {b: apple}}`,
|
||||
expression: `.a.b |= 5`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: {b: 5}}\n",
|
||||
"D0, P[], (!!map)::{a: {b: 5}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -157,7 +157,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {b: apple}}`,
|
||||
expression: `.a.b |= 3.142`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: {b: 3.142}}\n",
|
||||
"D0, P[], (!!map)::{a: {b: 3.142}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -166,7 +166,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {b: apple, c: cactus}}`,
|
||||
expression: `(.a[] | select(. == "apple")) = "frog"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: {b: frog, c: cactus}}\n",
|
||||
"D0, P[], (!!map)::{a: {b: frog, c: cactus}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -174,7 +174,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {b: apple, c: cactus}}`,
|
||||
expression: `(.a.[] | select(. == "apple")) = "frog"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: {b: frog, c: cactus}}\n",
|
||||
"D0, P[], (!!map)::{a: {b: frog, c: cactus}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -182,7 +182,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `[candy, apple, sandy]`,
|
||||
expression: `(.[] | select(. == "*andy")) = "bogs"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::[bogs, apple, bogs]\n",
|
||||
"D0, P[], (!!seq)::[bogs, apple, bogs]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -191,7 +191,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `{}`,
|
||||
expression: `.a.b |= "bogs"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a:\n b: bogs\n",
|
||||
"D0, P[], (!!map)::a:\n b: bogs\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -201,7 +201,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `a: &cool cat`,
|
||||
expression: `.a = "dog"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: &cool dog\n",
|
||||
"D0, P[], (!!map)::a: &cool dog\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -210,7 +210,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `{}`,
|
||||
expression: `.a.b.[0] |= "bogs"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a:\n b:\n - bogs\n",
|
||||
"D0, P[], (!!map)::a:\n b:\n - bogs\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -218,7 +218,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: `{}`,
|
||||
expression: `.a.b.[1].c |= "bogs"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a:\n b:\n - null\n - c: bogs\n",
|
||||
"D0, P[], (!!map)::a:\n b:\n - null\n - c: bogs\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -226,7 +226,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: "a: !cat meow\nb: !dog woof",
|
||||
expression: `.a = .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: !cat woof\nb: !dog woof\n",
|
||||
"D0, P[], (!!map)::a: !cat woof\nb: !dog woof\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -235,7 +235,7 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
document: "a: !cat meow\nb: !dog woof",
|
||||
expression: `.a =c .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: !dog woof\nb: !dog woof\n",
|
||||
"D0, P[], (!!map)::a: !dog woof\nb: !dog woof\n",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@@ -3,36 +3,25 @@ package yqlib
import (
"container/list"
"fmt"

yaml "gopkg.in/yaml.v3"
"strings"
)

func isTruthyNode(node *yaml.Node) (bool, error) {
value := true
func isTruthyNode(node *CandidateNode) bool {
if node == nil {
return false
}
if node.Tag == "!!null" {
return false, nil
}
if node.Kind == yaml.ScalarNode && node.Tag == "!!bool" {
errDecoding := node.Decode(&value)
if errDecoding != nil {
return false, errDecoding
return false
}
if node.Kind == ScalarNode && node.Tag == "!!bool" {
// yes/y/true/on
return (strings.EqualFold(node.Value, "y") ||
strings.EqualFold(node.Value, "yes") ||
strings.EqualFold(node.Value, "on") ||
strings.EqualFold(node.Value, "true"))

}
return value, nil
}

func isTruthy(c *CandidateNode) (bool, error) {
node := unwrapDoc(c.Node)
return isTruthyNode(node)
}

func getBoolean(candidate *CandidateNode) (bool, error) {
if candidate != nil {
candidate.Node = unwrapDoc(candidate.Node)
return isTruthy(candidate)
}
return false, nil
return true
}

func getOwner(lhs *CandidateNode, rhs *CandidateNode) *CandidateNode {
|
||||
@ -48,10 +37,7 @@ func getOwner(lhs *CandidateNode, rhs *CandidateNode) *CandidateNode {
|
||||
|
||||
func returnRhsTruthy(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
|
||||
owner := getOwner(lhs, rhs)
|
||||
rhsBool, err := getBoolean(rhs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
rhsBool := isTruthyNode(rhs)
|
||||
|
||||
return createBooleanCandidate(owner, rhsBool), nil
|
||||
}
|
||||
@ -61,7 +47,7 @@ func returnLHSWhen(targetBool bool) func(lhs *CandidateNode) (*CandidateNode, er
|
||||
var err error
|
||||
var lhsBool bool
|
||||
|
||||
if lhsBool, err = getBoolean(lhs); err != nil || lhsBool != targetBool {
|
||||
if lhsBool = isTruthyNode(lhs); lhsBool != targetBool {
|
||||
return nil, err
|
||||
}
|
||||
owner := &CandidateNode{}
|
||||
@ -72,29 +58,24 @@ func returnLHSWhen(targetBool bool) func(lhs *CandidateNode) (*CandidateNode, er
|
||||
}
|
||||
}
|
||||
|
||||
func findBoolean(wantBool bool, d *dataTreeNavigator, context Context, expressionNode *ExpressionNode, sequenceNode *yaml.Node) (bool, error) {
|
||||
func findBoolean(wantBool bool, d *dataTreeNavigator, context Context, expressionNode *ExpressionNode, sequenceNode *CandidateNode) (bool, error) {
|
||||
for _, node := range sequenceNode.Content {
|
||||
|
||||
if expressionNode != nil {
|
||||
//need to evaluate the expression against the node
|
||||
candidate := &CandidateNode{Node: node}
|
||||
rhs, err := d.GetMatchingNodes(context.SingleReadonlyChildContext(candidate), expressionNode)
|
||||
rhs, err := d.GetMatchingNodes(context.SingleReadonlyChildContext(node), expressionNode)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
if rhs.MatchingNodes.Len() > 0 {
|
||||
node = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node
|
||||
node = rhs.MatchingNodes.Front().Value.(*CandidateNode)
|
||||
} else {
|
||||
// no results found, ignore this entry
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
truthy, err := isTruthyNode(node)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
if truthy == wantBool {
|
||||
if isTruthyNode(node) == wantBool {
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
@ -106,11 +87,10 @@ func allOperator(d *dataTreeNavigator, context Context, expressionNode *Expressi
|
||||
|
||||
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
candidateNode := unwrapDoc(candidate.Node)
|
||||
if candidateNode.Kind != yaml.SequenceNode {
|
||||
return Context{}, fmt.Errorf("any only supports arrays, was %v", candidateNode.Tag)
|
||||
if candidate.Kind != SequenceNode {
|
||||
return Context{}, fmt.Errorf("any only supports arrays, was %v", candidate.Tag)
|
||||
}
|
||||
booleanResult, err := findBoolean(false, d, context, expressionNode.RHS, candidateNode)
|
||||
booleanResult, err := findBoolean(false, d, context, expressionNode.RHS, candidate)
|
||||
if err != nil {
|
||||
return Context{}, err
|
||||
}
|
||||
@ -125,11 +105,10 @@ func anyOperator(d *dataTreeNavigator, context Context, expressionNode *Expressi
|
||||
|
||||
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
candidateNode := unwrapDoc(candidate.Node)
|
||||
if candidateNode.Kind != yaml.SequenceNode {
|
||||
return Context{}, fmt.Errorf("any only supports arrays, was %v", candidateNode.Tag)
|
||||
if candidate.Kind != SequenceNode {
|
||||
return Context{}, fmt.Errorf("any only supports arrays, was %v", candidate.Tag)
|
||||
}
|
||||
booleanResult, err := findBoolean(true, d, context, expressionNode.RHS, candidateNode)
|
||||
booleanResult, err := findBoolean(true, d, context, expressionNode.RHS, candidate)
|
||||
if err != nil {
|
||||
return Context{}, err
|
||||
}
|
||||
@ -164,10 +143,7 @@ func notOperator(d *dataTreeNavigator, context Context, expressionNode *Expressi
|
||||
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
log.Debug("notOperation checking %v", candidate)
|
||||
truthy, errDecoding := isTruthy(candidate)
|
||||
if errDecoding != nil {
|
||||
return Context{}, errDecoding
|
||||
}
|
||||
truthy := isTruthyNode(candidate)
|
||||
result := createBooleanCandidate(candidate, !truthy)
|
||||
results.PushBack(result)
|
||||
}
|
||||
|
@ -43,7 +43,7 @@ var booleanOperatorScenarios = []expressionScenario{
|
||||
document: "b: hi",
|
||||
expression: `select(.a or .b)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::b: hi\n",
|
||||
"D0, P[], (!!map)::b: hi\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -51,7 +51,7 @@ var booleanOperatorScenarios = []expressionScenario{
|
||||
document: "b: hi",
|
||||
expression: `select((.a and .b) | not)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::b: hi\n",
|
||||
"D0, P[], (!!map)::b: hi\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -106,7 +106,7 @@ var booleanOperatorScenarios = []expressionScenario{
|
||||
document: "a: [rad, awesome]\nb: [meh, whatever]",
|
||||
expression: `.[] |= any_c(. == "awesome")`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: true\nb: false\n",
|
||||
"D0, P[], (!!map)::a: true\nb: false\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -114,7 +114,7 @@ var booleanOperatorScenarios = []expressionScenario{
|
||||
document: `[{pet: cat}]`,
|
||||
expression: `any_c(.name == "harry") as $c | .`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::[{pet: cat}]\n",
|
||||
"D0, P[], (!!seq)::[{pet: cat}]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -170,7 +170,7 @@ var booleanOperatorScenarios = []expressionScenario{
|
||||
document: "a: [rad, awesome]\nb: [meh, 12]",
|
||||
expression: `.[] |= all_c(tag == "!!str")`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: true\nb: false\n",
|
||||
"D0, P[], (!!map)::a: true\nb: false\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -205,7 +205,7 @@ var booleanOperatorScenarios = []expressionScenario{
|
||||
document: `{}`,
|
||||
expression: `(.a.b or .c) as $x | .`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{}\n",
|
||||
"D0, P[], (!!map)::{}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -213,7 +213,7 @@ var booleanOperatorScenarios = []expressionScenario{
|
||||
document: `{}`,
|
||||
expression: `(.a.b and .c) as $x | .`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{}\n",
|
||||
"D0, P[], (!!map)::{}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@@ -2,12 +2,10 @@ package yqlib

import (
"container/list"

yaml "gopkg.in/yaml.v3"
)

func collectTogether(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (*CandidateNode, error) {
collectedNode := &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
collectedNode := &CandidateNode{Kind: SequenceNode, Tag: "!!seq"}
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
candidate := el.Value.(*CandidateNode)
collectExpResults, err := d.GetMatchingNodes(context.SingleReadonlyChildContext(candidate), expressionNode)
@@ -17,10 +15,10 @@ func collectTogether(d *dataTreeNavigator, context Context, expressionNode *Expr
for result := collectExpResults.MatchingNodes.Front(); result != nil; result = result.Next() {
resultC := result.Value.(*CandidateNode)
log.Debugf("found this: %v", NodeToString(resultC))
collectedNode.Content = append(collectedNode.Content, unwrapDoc(resultC.Node))
collectedNode.AddChild(resultC)
}
}
return &CandidateNode{Node: collectedNode}, nil
return collectedNode, nil
}

func collectOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
@ -28,9 +26,8 @@ func collectOperator(d *dataTreeNavigator, context Context, expressionNode *Expr
|
||||
|
||||
if context.MatchingNodes.Len() == 0 {
|
||||
log.Debugf("nothing to collect")
|
||||
node := &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq", Value: "[]"}
|
||||
candidate := &CandidateNode{Node: node}
|
||||
return context.SingleChildContext(candidate), nil
|
||||
node := &CandidateNode{Kind: SequenceNode, Tag: "!!seq", Value: "[]"}
|
||||
return context.SingleChildContext(node), nil
|
||||
}
|
||||
|
||||
var evaluateAllTogether = true
|
||||
@ -55,8 +52,7 @@ func collectOperator(d *dataTreeNavigator, context Context, expressionNode *Expr
|
||||
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
|
||||
collectedNode := &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
|
||||
collectCandidate := candidate.CreateReplacement(collectedNode)
|
||||
collectCandidate := candidate.CreateReplacement(SequenceNode, "!!seq", "")
|
||||
|
||||
log.Debugf("collect rhs: %v", expressionNode.RHS.Operation.toString())
|
||||
|
||||
@ -68,7 +64,7 @@ func collectOperator(d *dataTreeNavigator, context Context, expressionNode *Expr
|
||||
for result := collectExpResults.MatchingNodes.Front(); result != nil; result = result.Next() {
|
||||
resultC := result.Value.(*CandidateNode)
|
||||
log.Debugf("found this: %v", NodeToString(resultC))
|
||||
collectedNode.Content = append(collectedNode.Content, unwrapDoc(resultC.Node))
|
||||
collectCandidate.AddChild(resultC)
|
||||
}
|
||||
log.Debugf("done collect rhs: %v", expressionNode.RHS.Operation.toString())
|
||||
|
||||
|
@ -2,8 +2,6 @@ package yqlib
|
||||
|
||||
import (
|
||||
"container/list"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
/*
|
||||
@ -23,31 +21,47 @@ func collectObjectOperator(d *dataTreeNavigator, originalContext Context, expres
|
||||
context := originalContext.WritableClone()
|
||||
|
||||
if context.MatchingNodes.Len() == 0 {
|
||||
node := &yaml.Node{Kind: yaml.MappingNode, Tag: "!!map", Value: "{}"}
|
||||
candidate := &CandidateNode{Node: node}
|
||||
candidate := &CandidateNode{Kind: MappingNode, Tag: "!!map", Value: "{}"}
|
||||
log.Debugf("-- collectObjectOperation - starting with empty map")
|
||||
return context.SingleChildContext(candidate), nil
|
||||
}
|
||||
first := context.MatchingNodes.Front().Value.(*CandidateNode)
|
||||
var rotated = make([]*list.List, len(first.Node.Content))
|
||||
var rotated = make([]*list.List, len(first.Content))
|
||||
|
||||
for i := 0; i < len(first.Node.Content); i++ {
|
||||
for i := 0; i < len(first.Content); i++ {
|
||||
rotated[i] = list.New()
|
||||
}
|
||||
|
||||
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
candidateNode := el.Value.(*CandidateNode)
|
||||
for i := 0; i < len(first.Node.Content); i++ {
|
||||
rotated[i].PushBack(candidateNode.CreateChildInArray(i, candidateNode.Node.Content[i]))
|
||||
|
||||
for i := 0; i < len(first.Content); i++ {
|
||||
log.Debugf("rotate[%v] = %v", i, NodeToString(candidateNode.Content[i]))
|
||||
log.Debugf("children:\n%v", NodeContentToString(candidateNode.Content[i], 0))
|
||||
rotated[i].PushBack(candidateNode.Content[i])
|
||||
}
|
||||
}
|
||||
log.Debugf("-- collectObjectOperation, length of rotated is %v", len(rotated))
|
||||
|
||||
newObject := list.New()
|
||||
for i := 0; i < len(first.Node.Content); i++ {
|
||||
for i := 0; i < len(first.Content); i++ {
|
||||
additions, err := collect(d, context.ChildContext(list.New()), rotated[i])
|
||||
if err != nil {
|
||||
return Context{}, err
|
||||
}
|
||||
newObject.PushBackList(additions.MatchingNodes)
|
||||
// we should reset the parents and keys of these top level nodes,
|
||||
// as they are new
|
||||
for el := additions.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
addition := el.Value.(*CandidateNode)
|
||||
additionCopy := addition.Copy()
|
||||
|
||||
additionCopy.SetParent(nil)
|
||||
additionCopy.Key = nil
|
||||
|
||||
log.Debugf("-- collectObjectOperation, adding result %v", NodeToString(additionCopy))
|
||||
|
||||
newObject.PushBack(additionCopy)
|
||||
}
|
||||
}
|
||||
|
||||
return context.ChildContext(newObject), nil
|
||||
@ -60,19 +74,17 @@ func collect(d *dataTreeNavigator, context Context, remainingMatches *list.List)
|
||||
}
|
||||
|
||||
candidate := remainingMatches.Remove(remainingMatches.Front()).(*CandidateNode)
|
||||
log.Debugf("-- collectObjectOperation - collect %v", NodeToString(candidate))
|
||||
|
||||
splatted, err := splat(context.SingleChildContext(candidate),
|
||||
traversePreferences{DontFollowAlias: true, IncludeMapKeys: false})
|
||||
|
||||
for splatEl := splatted.MatchingNodes.Front(); splatEl != nil; splatEl = splatEl.Next() {
|
||||
splatEl.Value.(*CandidateNode).Path = nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return Context{}, err
|
||||
}
|
||||
|
||||
if context.MatchingNodes.Len() == 0 {
|
||||
log.Debugf("-- collectObjectOperation - collect context is empty, next")
|
||||
return collect(d, splatted, remainingMatches)
|
||||
}
|
||||
|
||||
@ -82,14 +94,12 @@ func collect(d *dataTreeNavigator, context Context, remainingMatches *list.List)
|
||||
aggCandidate := el.Value.(*CandidateNode)
|
||||
for splatEl := splatted.MatchingNodes.Front(); splatEl != nil; splatEl = splatEl.Next() {
|
||||
splatCandidate := splatEl.Value.(*CandidateNode)
|
||||
newCandidate, err := aggCandidate.Copy()
|
||||
if err != nil {
|
||||
return Context{}, err
|
||||
}
|
||||
|
||||
newCandidate.Path = nil
|
||||
log.Debugf("-- collectObjectOperation; splatCandidate: %v", NodeToString(splatCandidate))
|
||||
newCandidate := aggCandidate.Copy()
|
||||
log.Debugf("-- collectObjectOperation; aggCandidate: %v", NodeToString(aggCandidate))
|
||||
|
||||
newCandidate, err = multiply(multiplyPreferences{AppendArrays: false})(d, context, newCandidate, splatCandidate)
|
||||
|
||||
if err != nil {
|
||||
return Context{}, err
|
||||
}
|
||||
|
@ -5,6 +5,20 @@ import (
|
||||
)
|
||||
|
||||
var collectObjectOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
expression: `{"name": "mike"} | .name`,
|
||||
expected: []string{
|
||||
"D0, P[name], (!!str)::mike\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
expression: `{"person": {"names": ["mike"]}} | .person.names[0]`,
|
||||
expected: []string{
|
||||
"D0, P[person names 0], (!!str)::mike\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `[{name: cat}, {name: dog}]`,
|
||||
@ -26,7 +40,7 @@ var collectObjectOperatorScenarios = []expressionScenario{
|
||||
document: "a: []",
|
||||
expression: `.a += [{"key": "att2", "value": "val2"}]`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a:\n - key: att2\n value: val2\n",
|
||||
"D0, P[], (!!map)::a:\n - key: att2\n value: val2\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -63,6 +77,7 @@ var collectObjectOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "Two documents",
|
||||
document: "{name: Mike}\n",
|
||||
document2: "{name: Bob}\n",
|
||||
expression: `{"wrap": .}`,
|
||||
@ -73,11 +88,12 @@ var collectObjectOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "two embedded documents",
|
||||
document: "{name: Mike}\n---\n{name: Bob}",
|
||||
expression: `{"wrap": .}`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!map)::wrap: {name: Mike}\n",
|
||||
"D0, P[], (!!map)::wrap: {name: Bob}\n",
|
||||
"D1, P[], (!!map)::wrap: {name: Bob}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -105,8 +121,8 @@ var collectObjectOperatorScenarios = []expressionScenario{
|
||||
expected: []string{
|
||||
"D0, P[], (!!map)::Mike: cat\n",
|
||||
"D0, P[], (!!map)::Mike: dog\n",
|
||||
"D0, P[], (!!map)::Rosey: monkey\n",
|
||||
"D0, P[], (!!map)::Rosey: sheep\n",
|
||||
"D1, P[], (!!map)::Rosey: monkey\n",
|
||||
"D1, P[], (!!map)::Rosey: sheep\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -5,6 +5,13 @@ import (
|
||||
)
|
||||
|
||||
var collectOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
expression: `["x", "y"] | .[1]`,
|
||||
expected: []string{
|
||||
"D0, P[1], (!!str)::y\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
document: ``,
|
||||
|
@ -3,8 +3,6 @@ package yqlib
|
||||
import (
|
||||
"container/list"
|
||||
"fmt"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
func columnOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
@ -14,8 +12,7 @@ func columnOperator(d *dataTreeNavigator, context Context, expressionNode *Expre
|
||||
|
||||
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
node := &yaml.Node{Kind: yaml.ScalarNode, Value: fmt.Sprintf("%v", candidate.Node.Column), Tag: "!!int"}
|
||||
result := candidate.CreateReplacement(node)
|
||||
result := candidate.CreateReplacement(ScalarNode, "!!int", fmt.Sprintf("%v", candidate.Column))
|
||||
results.PushBack(result)
|
||||
}
|
||||
|
||||
|
@ -5,8 +5,6 @@ import (
|
||||
"bytes"
|
||||
"container/list"
|
||||
"regexp"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type commentOpPreferences struct {
|
||||
@ -35,13 +33,17 @@ func assignCommentsOperator(d *dataTreeNavigator, context Context, expressionNod
|
||||
}
|
||||
|
||||
if rhs.MatchingNodes.Front() != nil {
|
||||
comment = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node.Value
|
||||
comment = rhs.MatchingNodes.Front().Value.(*CandidateNode).Value
|
||||
}
|
||||
}
|
||||
|
||||
log.Debugf("AssignComments comment is %v", comment)
|
||||
|
||||
for el := lhs.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
|
||||
log.Debugf("AssignComments lhs %v", NodeToString(candidate))
|
||||
|
||||
if expressionNode.Operation.UpdateAssign {
|
||||
rhs, err := d.GetMatchingNodes(context.SingleReadonlyChildContext(candidate), expressionNode.RHS)
|
||||
if err != nil {
|
||||
@ -49,26 +51,21 @@ func assignCommentsOperator(d *dataTreeNavigator, context Context, expressionNod
|
||||
}
|
||||
|
||||
if rhs.MatchingNodes.Front() != nil {
|
||||
comment = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node.Value
|
||||
comment = rhs.MatchingNodes.Front().Value.(*CandidateNode).Value
|
||||
}
|
||||
}
|
||||
|
||||
log.Debugf("Setting comment of : %v", candidate.GetKey())
|
||||
if preferences.LineComment {
candidate.Node.LineComment = comment
log.Debugf("Setting line comment of : %v to %v", candidate.GetKey(), comment)
candidate.LineComment = comment
}
if preferences.HeadComment {
candidate.Node.HeadComment = comment
candidate.HeadComment = comment
candidate.LeadingContent = "" // clobber the leading content, if there was any.
}
if preferences.FootComment && candidate.Node.Kind == yaml.DocumentNode && comment != "" {
candidate.TrailingContent = "# " + comment
} else if preferences.FootComment && candidate.Node.Kind == yaml.DocumentNode {
candidate.TrailingContent = comment

} else if preferences.FootComment && candidate.Node.Kind != yaml.DocumentNode {
candidate.Node.FootComment = comment
candidate.TrailingContent = ""
if preferences.FootComment {
candidate.FootComment = comment
}

}
|
||||
@ -91,7 +88,8 @@ func getCommentsOperator(d *dataTreeNavigator, context Context, expressionNode *
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
comment := ""
|
||||
if preferences.LineComment {
|
||||
comment = candidate.Node.LineComment
|
||||
log.Debugf("Reading line comment of : %v to %v", candidate.GetKey(), candidate.LineComment)
|
||||
comment = candidate.LineComment
|
||||
} else if preferences.HeadComment && candidate.LeadingContent != "" {
|
||||
var chompRegexp = regexp.MustCompile(`\n$`)
|
||||
var output bytes.Buffer
|
||||
@ -106,18 +104,18 @@ func getCommentsOperator(d *dataTreeNavigator, context Context, expressionNode *
|
||||
comment = output.String()
|
||||
comment = chompRegexp.ReplaceAllString(comment, "")
|
||||
} else if preferences.HeadComment {
|
||||
comment = candidate.Node.HeadComment
|
||||
} else if preferences.FootComment && candidate.Node.Kind == yaml.DocumentNode && candidate.TrailingContent != "" {
|
||||
comment = candidate.TrailingContent
|
||||
comment = candidate.HeadComment
|
||||
} else if preferences.FootComment {
|
||||
comment = candidate.Node.FootComment
|
||||
comment = candidate.FootComment
|
||||
}
|
||||
comment = startCommentCharacterRegExp.ReplaceAllString(comment, "")
|
||||
comment = subsequentCommentCharacterRegExp.ReplaceAllString(comment, "\n")
|
||||
|
||||
node := &yaml.Node{Kind: yaml.ScalarNode, Value: comment, Tag: "!!str"}
|
||||
result := candidate.CreateReplacement(node)
|
||||
result.LeadingContent = "" // don't include the leading yaml content when retrieving a comment
|
||||
result := candidate.CreateReplacement(ScalarNode, "!!str", comment)
|
||||
if candidate.IsMapKey {
|
||||
result.IsMapKey = false
|
||||
result.Key = candidate
|
||||
}
|
||||
results.PushBack(result)
|
||||
}
|
||||
return context.ChildContext(results), nil
|
||||
|
@ -60,7 +60,7 @@ var commentOperatorScenarios = []expressionScenario{
|
||||
document: `a: cat`,
|
||||
expression: `.a line_comment="single"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: cat # single\n",
|
||||
"D0, P[], (!!map)::a: cat # single\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -69,7 +69,7 @@ var commentOperatorScenarios = []expressionScenario{
|
||||
document: "a:\n b: things",
|
||||
expression: `(.a | key) line_comment="single"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: # single\n b: things\n",
|
||||
"D0, P[], (!!map)::a: # single\n b: things\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -77,7 +77,7 @@ var commentOperatorScenarios = []expressionScenario{
|
||||
document: "a: cat\nb: dog",
|
||||
expression: `.a line_comment=.b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: cat # dog\nb: dog\n",
|
||||
"D0, P[], (!!map)::a: cat # dog\nb: dog\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -85,8 +85,8 @@ var commentOperatorScenarios = []expressionScenario{
|
||||
document: "a: cat\n---\na: dog",
|
||||
expression: `.a line_comment |= documentIndex`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: cat # 0\n",
|
||||
"D1, P[], (doc)::a: dog # 1\n",
|
||||
"D0, P[], (!!map)::a: cat # 0\n",
|
||||
"D1, P[], (!!map)::a: dog # 1\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -146,7 +146,7 @@ var commentOperatorScenarios = []expressionScenario{
|
||||
document: `a: cat`,
|
||||
expression: `. head_comment="single"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::# single\n\na: cat\n",
|
||||
"D0, P[], (!!map)::# single\na: cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -154,7 +154,7 @@ var commentOperatorScenarios = []expressionScenario{
|
||||
document: "f: foo\na:\n b: cat",
|
||||
expression: `(.a | key) head_comment="single"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::f: foo\n# single\na:\n b: cat\n",
|
||||
"D0, P[], (!!map)::f: foo\n# single\na:\n b: cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -162,7 +162,7 @@ var commentOperatorScenarios = []expressionScenario{
|
||||
document: `a: cat`,
|
||||
expression: `. foot_comment=.a`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: cat\n# cat\n",
|
||||
"D0, P[], (!!map)::a: cat\n# cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -171,7 +171,7 @@ var commentOperatorScenarios = []expressionScenario{
|
||||
document: "a: cat\n\n# hi",
|
||||
expression: `. foot_comment=""`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: cat\n",
|
||||
"D0, P[], (!!map)::a: cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -179,7 +179,7 @@ var commentOperatorScenarios = []expressionScenario{
|
||||
document: `a: cat`,
|
||||
expression: `. foot_comment=.b.d`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: cat\n",
|
||||
"D0, P[], (!!map)::a: cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -187,7 +187,7 @@ var commentOperatorScenarios = []expressionScenario{
|
||||
document: `a: cat`,
|
||||
expression: `. foot_comment|=.b.d`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: cat\n",
|
||||
"D0, P[], (!!map)::a: cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -195,7 +195,7 @@ var commentOperatorScenarios = []expressionScenario{
|
||||
document: "a: cat # comment\nb: dog # leave this",
|
||||
expression: `.a line_comment=""`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: cat\nb: dog # leave this\n",
|
||||
"D0, P[], (!!map)::a: cat\nb: dog # leave this\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -260,6 +260,42 @@ var commentOperatorScenarios = []expressionScenario{
|
||||
"D0, P[], (!!str)::have a great day\nno really\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "leading spaces",
|
||||
skipDoc: true,
|
||||
document: " # hi",
|
||||
expression: `.`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!null):: # hi\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "string spaces",
|
||||
skipDoc: true,
|
||||
document: "# hi\ncat\n",
|
||||
expression: `.`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!str)::# hi\ncat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "leading spaces with new line",
|
||||
skipDoc: true,
|
||||
document: " # hi\n",
|
||||
expression: `.`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!null):: # hi\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "directive",
|
||||
skipDoc: true,
|
||||
document: "%YAML 1.1\n# hi\n",
|
||||
expression: `.`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!null)::%YAML 1.1\n# hi\n",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func TestCommentOperatorScenarios(t *testing.T) {
|
||||
|
@ -3,8 +3,6 @@ package yqlib
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type compareTypePref struct {
|
||||
@ -32,27 +30,24 @@ func compare(prefs compareTypePref) func(d *dataTreeNavigator, context Context,
|
||||
return createBooleanCandidate(lhs, false), nil
|
||||
}
|
||||
|
||||
lhs.Node = unwrapDoc(lhs.Node)
|
||||
rhs.Node = unwrapDoc(rhs.Node)
|
||||
|
||||
switch lhs.Node.Kind {
|
||||
case yaml.MappingNode:
|
||||
switch lhs.Kind {
|
||||
case MappingNode:
|
||||
return nil, fmt.Errorf("maps not yet supported for comparison")
|
||||
case yaml.SequenceNode:
|
||||
case SequenceNode:
|
||||
return nil, fmt.Errorf("arrays not yet supported for comparison")
|
||||
default:
|
||||
if rhs.Node.Kind != yaml.ScalarNode {
|
||||
return nil, fmt.Errorf("%v (%v) cannot be subtracted from %v", rhs.Node.Tag, rhs.Path, lhs.Node.Tag)
|
||||
if rhs.Kind != ScalarNode {
|
||||
return nil, fmt.Errorf("%v (%v) cannot be subtracted from %v", rhs.Tag, rhs.GetNicePath(), lhs.Tag)
|
||||
}
|
||||
target := lhs.CreateReplacement(&yaml.Node{})
|
||||
boolV, err := compareScalars(context, prefs, lhs.Node, rhs.Node)
|
||||
target := lhs.CopyWithoutContent()
|
||||
boolV, err := compareScalars(context, prefs, lhs, rhs)
|
||||
|
||||
return createBooleanCandidate(target, boolV), err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func compareDateTime(layout string, prefs compareTypePref, lhs *yaml.Node, rhs *yaml.Node) (bool, error) {
|
||||
func compareDateTime(layout string, prefs compareTypePref, lhs *CandidateNode, rhs *CandidateNode) (bool, error) {
|
||||
lhsTime, err := parseDateTime(layout, lhs.Value)
|
||||
if err != nil {
|
||||
return false, err
|
||||
@ -73,9 +68,9 @@ func compareDateTime(layout string, prefs compareTypePref, lhs *yaml.Node, rhs *
|
||||
|
||||
}
|
||||
|
||||
func compareScalars(context Context, prefs compareTypePref, lhs *yaml.Node, rhs *yaml.Node) (bool, error) {
|
||||
lhsTag := guessTagFromCustomType(lhs)
|
||||
rhsTag := guessTagFromCustomType(rhs)
|
||||
func compareScalars(context Context, prefs compareTypePref, lhs *CandidateNode, rhs *CandidateNode) (bool, error) {
|
||||
lhsTag := lhs.guessTagFromCustomType()
|
||||
rhsTag := rhs.guessTagFromCustomType()
|
||||
|
||||
isDateTime := lhs.Tag == "!!timestamp"
|
||||
// if the lhs is a string, it might be a timestamp in a custom format.
|
||||
|
@ -3,15 +3,13 @@ package yqlib
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
func containsOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
return crossFunction(d, context.ReadOnlyClone(), expressionNode, containsWithNodes, false)
|
||||
}
|
||||
|
||||
func containsArrayElement(array *yaml.Node, item *yaml.Node) (bool, error) {
|
||||
func containsArrayElement(array *CandidateNode, item *CandidateNode) (bool, error) {
|
||||
for index := 0; index < len(array.Content); index = index + 1 {
|
||||
containedInArray, err := contains(array.Content[index], item)
|
||||
if err != nil {
|
||||
@ -24,8 +22,8 @@ func containsArrayElement(array *yaml.Node, item *yaml.Node) (bool, error) {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func containsArray(lhs *yaml.Node, rhs *yaml.Node) (bool, error) {
|
||||
if rhs.Kind != yaml.SequenceNode {
|
||||
func containsArray(lhs *CandidateNode, rhs *CandidateNode) (bool, error) {
|
||||
if rhs.Kind != SequenceNode {
|
||||
return containsArrayElement(lhs, rhs)
|
||||
}
|
||||
for index := 0; index < len(rhs.Content); index = index + 1 {
|
||||
@ -40,8 +38,8 @@ func containsArray(lhs *yaml.Node, rhs *yaml.Node) (bool, error) {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func containsObject(lhs *yaml.Node, rhs *yaml.Node) (bool, error) {
|
||||
if rhs.Kind != yaml.MappingNode {
|
||||
func containsObject(lhs *CandidateNode, rhs *CandidateNode) (bool, error) {
|
||||
if rhs.Kind != MappingNode {
|
||||
return false, nil
|
||||
}
|
||||
for index := 0; index < len(rhs.Content); index = index + 2 {
|
||||
@ -68,21 +66,21 @@ func containsObject(lhs *yaml.Node, rhs *yaml.Node) (bool, error) {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func containsScalars(lhs *yaml.Node, rhs *yaml.Node) (bool, error) {
|
||||
func containsScalars(lhs *CandidateNode, rhs *CandidateNode) (bool, error) {
|
||||
if lhs.Tag == "!!str" {
|
||||
return strings.Contains(lhs.Value, rhs.Value), nil
|
||||
}
|
||||
return lhs.Value == rhs.Value, nil
|
||||
}
|
||||
|
||||
func contains(lhs *yaml.Node, rhs *yaml.Node) (bool, error) {
|
||||
func contains(lhs *CandidateNode, rhs *CandidateNode) (bool, error) {
|
||||
switch lhs.Kind {
|
||||
case yaml.MappingNode:
|
||||
case MappingNode:
|
||||
return containsObject(lhs, rhs)
|
||||
case yaml.SequenceNode:
|
||||
case SequenceNode:
|
||||
return containsArray(lhs, rhs)
|
||||
case yaml.ScalarNode:
|
||||
if rhs.Kind != yaml.ScalarNode || lhs.Tag != rhs.Tag {
|
||||
case ScalarNode:
|
||||
if rhs.Kind != ScalarNode || lhs.Tag != rhs.Tag {
|
||||
return false, nil
|
||||
}
|
||||
if lhs.Tag == "!!null" {
|
||||
@ -95,14 +93,11 @@ func contains(lhs *yaml.Node, rhs *yaml.Node) (bool, error) {
|
||||
}
|
||||
|
||||
func containsWithNodes(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
|
||||
lhs.Node = unwrapDoc(lhs.Node)
|
||||
rhs.Node = unwrapDoc(rhs.Node)
|
||||
|
||||
if lhs.Node.Kind != rhs.Node.Kind {
|
||||
return nil, fmt.Errorf("%v cannot check contained in %v", rhs.Node.Tag, lhs.Node.Tag)
|
||||
if lhs.Kind != rhs.Kind {
|
||||
return nil, fmt.Errorf("%v cannot check contained in %v", rhs.Tag, lhs.Tag)
|
||||
}
|
||||
|
||||
result, err := contains(lhs.Node, rhs.Node)
|
||||
result, err := contains(lhs, rhs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
@ -2,8 +2,6 @@ package yqlib
|
||||
|
||||
import (
|
||||
"container/list"
|
||||
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
func createMapOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
@ -12,10 +10,6 @@ func createMapOperator(d *dataTreeNavigator, context Context, expressionNode *Ex
|
||||
//each matchingNodes entry should turn into a sequence of keys to create.
|
||||
//then collect object should do a cross function of the same index sequence for all matches.
|
||||
|
||||
var path []interface{}
|
||||
|
||||
var document uint
|
||||
|
||||
sequences := list.New()
|
||||
|
||||
if context.MatchingNodes.Len() > 0 {
|
||||
@ -36,50 +30,62 @@ func createMapOperator(d *dataTreeNavigator, context Context, expressionNode *Ex
|
||||
sequences.PushBack(sequenceNode)
|
||||
}
|
||||
|
||||
return context.SingleChildContext(&CandidateNode{Node: listToNodeSeq(sequences), Document: document, Path: path}), nil
|
||||
node := listToNodeSeq(sequences)
|
||||
|
||||
return context.SingleChildContext(node), nil
|
||||
|
||||
}
|
||||
|
||||
func sequenceFor(d *dataTreeNavigator, context Context, matchingNode *CandidateNode, expressionNode *ExpressionNode) (*CandidateNode, error) {
|
||||
var path []interface{}
|
||||
var document uint
|
||||
var filename string
|
||||
var fileIndex int
|
||||
|
||||
var matches = list.New()
|
||||
|
||||
if matchingNode != nil {
|
||||
path = matchingNode.Path
|
||||
document = matchingNode.Document
|
||||
document = matchingNode.GetDocument()
|
||||
filename = matchingNode.GetFilename()
|
||||
fileIndex = matchingNode.GetFileIndex()
|
||||
matches.PushBack(matchingNode)
|
||||
}
|
||||
|
||||
log.Debugf("**********sequenceFor %v", NodeToString(matchingNode))
|
||||
|
||||
mapPairs, err := crossFunction(d, context.ChildContext(matches), expressionNode,
|
||||
func(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
|
||||
node := yaml.Node{Kind: yaml.MappingNode, Tag: "!!map"}
|
||||
log.Debugf("LHS:", NodeToString(lhs))
|
||||
log.Debugf("RHS:", NodeToString(rhs))
|
||||
node.Content = []*yaml.Node{
|
||||
unwrapDoc(lhs.Node),
|
||||
unwrapDoc(rhs.Node),
|
||||
}
|
||||
node := &CandidateNode{Kind: MappingNode, Tag: "!!map"}
|
||||
|
||||
return &CandidateNode{Node: &node, Document: document, Path: path}, nil
|
||||
log.Debugf("**********adding key %v and value %v", NodeToString(lhs), NodeToString(rhs))
|
||||
|
||||
node.AddKeyValueChild(lhs, rhs)
|
||||
|
||||
node.document = document
|
||||
node.fileIndex = fileIndex
|
||||
node.filename = filename
|
||||
|
||||
return node, nil
|
||||
}, false)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
innerList := listToNodeSeq(mapPairs.MatchingNodes)
|
||||
innerList.Style = yaml.FlowStyle
|
||||
return &CandidateNode{Node: innerList, Document: document, Path: path}, nil
|
||||
innerList.Style = FlowStyle
|
||||
innerList.document = document
|
||||
innerList.fileIndex = fileIndex
|
||||
innerList.filename = filename
|
||||
return innerList, nil
|
||||
}
|
||||
|
||||
// NOTE: here the document index gets dropped so we
|
||||
// no longer know where the node originates from.
|
||||
func listToNodeSeq(list *list.List) *yaml.Node {
|
||||
node := yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
|
||||
func listToNodeSeq(list *list.List) *CandidateNode {
|
||||
node := CandidateNode{Kind: SequenceNode, Tag: "!!seq"}
|
||||
for entry := list.Front(); entry != nil; entry = entry.Next() {
|
||||
entryCandidate := entry.Value.(*CandidateNode)
|
||||
log.Debugf("Collecting %v into sequence", NodeToString(entryCandidate))
|
||||
node.Content = append(node.Content, entryCandidate.Node)
|
||||
node.AddChild(entryCandidate)
|
||||
}
|
||||
return &node
|
||||
}
|
||||
|
@ -12,6 +12,38 @@ var createMapOperatorScenarios = []expressionScenario{
|
||||
"D0, P[], (!!seq)::- [{frog: jumps}]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "sets key properly",
|
||||
expression: `("frog": "jumps") | .[0][0] | .frog`,
|
||||
expected: []string{
|
||||
"D0, P[0 0 frog], (!!str)::jumps\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "sets key properly on map",
|
||||
expression: `{"frog": "jumps"} | .frog`,
|
||||
expected: []string{
|
||||
"D0, P[frog], (!!str)::jumps\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
document: `{name: Mike, pets: [cat, dog]}`,
|
||||
expression: `(.name: .pets.[]) | .[0][0] | ..`,
|
||||
expected: []string{
|
||||
"D0, P[0 0], (!!map)::Mike: cat\n",
|
||||
"D0, P[0 0 Mike], (!!str)::cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "check path of nested child",
|
||||
document: "pets:\n cows: value",
|
||||
expression: `("b":.pets) | .[0][0] | .b.cows`,
|
||||
expected: []string{
|
||||
"D0, P[0 0 b cows], (!!str)::value\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
document: `{name: Mike, age: 32}`,
|
||||
expression: `.name: .age`,
|
||||
|
@ -6,8 +6,6 @@ import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
func getStringParameter(parameterName string, d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (string, error) {
|
||||
@ -19,7 +17,7 @@ func getStringParameter(parameterName string, d *dataTreeNavigator, context Cont
|
||||
return "", fmt.Errorf("could not find %v for format_time", parameterName)
|
||||
}
|
||||
|
||||
return result.MatchingNodes.Front().Value.(*CandidateNode).Node.Value, nil
|
||||
return result.MatchingNodes.Front().Value.(*CandidateNode).Value, nil
|
||||
}
|
||||
|
||||
func withDateTimeFormat(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
@@ -41,13 +39,13 @@ var Now = time.Now

func nowOp(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {

node := &yaml.Node{
node := &CandidateNode{
Tag: "!!timestamp",
Kind: yaml.ScalarNode,
Kind: ScalarNode,
Value: Now().Format(time.RFC3339),
}

return context.SingleChildContext(&CandidateNode{Node: node}), nil
return context.SingleChildContext(node), nil

}

@ -74,7 +72,7 @@ func formatDateTime(d *dataTreeNavigator, context Context, expressionNode *Expre
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
candidate := el.Value.(*CandidateNode)

parsedTime, err := parseDateTime(layout, candidate.Node.Value)
parsedTime, err := parseDateTime(layout, candidate.Value)
if err != nil {
return Context{}, fmt.Errorf("could not parse datetime of [%v]: %w", candidate.GetNicePath(), err)
}
@ -83,14 +81,15 @@ func formatDateTime(d *dataTreeNavigator, context Context, expressionNode *Expre
node, errorReading := parseSnippet(formattedTimeStr)
if errorReading != nil {
log.Debugf("could not parse %v - lets just leave it as a string: %w", formattedTimeStr, errorReading)
node = &yaml.Node{
Kind: yaml.ScalarNode,
node = &CandidateNode{
Kind: ScalarNode,
Tag: "!!str",
Value: formattedTimeStr,
}
}

results.PushBack(candidate.CreateReplacement(node))
node.Parent = candidate.Parent
node.Key = candidate.Key
results.PushBack(node)
}

return context.ChildContext(results), nil
@ -113,19 +112,13 @@ func tzOp(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode)
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
candidate := el.Value.(*CandidateNode)

parsedTime, err := parseDateTime(layout, candidate.Node.Value)
parsedTime, err := parseDateTime(layout, candidate.Value)
if err != nil {
return Context{}, fmt.Errorf("could not parse datetime of [%v] using layout [%v]: %w", candidate.GetNicePath(), layout, err)
}
tzTime := parsedTime.In(timezone)

node := &yaml.Node{
Kind: yaml.ScalarNode,
Tag: candidate.Node.Tag,
Value: tzTime.Format(layout),
}

results.PushBack(candidate.CreateReplacement(node))
results.PushBack(candidate.CreateReplacement(ScalarNode, candidate.Tag, tzTime.Format(layout)))
}

return context.ChildContext(results), nil
@ -148,24 +141,20 @@ func fromUnixOp(d *dataTreeNavigator, context Context, expressionNode *Expressio
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
candidate := el.Value.(*CandidateNode)

actualTag := guessTagFromCustomType(candidate.Node)
actualTag := candidate.guessTagFromCustomType()

if actualTag != "!!int" && guessTagFromCustomType(candidate.Node) != "!!float" {
return Context{}, fmt.Errorf("from_unix only works on numbers, found %v instead", candidate.Node.Tag)
if actualTag != "!!int" && actualTag != "!!float" {
return Context{}, fmt.Errorf("from_unix only works on numbers, found %v instead", candidate.Tag)
}

parsedTime, err := parseUnixTime(candidate.Node.Value)
parsedTime, err := parseUnixTime(candidate.Value)
if err != nil {
return Context{}, err
}

node := &yaml.Node{
Kind: yaml.ScalarNode,
Tag: "!!timestamp",
Value: parsedTime.Format(time.RFC3339),
}
node := candidate.CreateReplacement(ScalarNode, "!!timestamp", parsedTime.Format(time.RFC3339))

results.PushBack(candidate.CreateReplacement(node))
results.PushBack(node)
}

return context.ChildContext(results), nil
@ -180,18 +169,12 @@ func toUnixOp(d *dataTreeNavigator, context Context, expressionNode *ExpressionN
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
candidate := el.Value.(*CandidateNode)

parsedTime, err := parseDateTime(layout, candidate.Node.Value)
parsedTime, err := parseDateTime(layout, candidate.Value)
if err != nil {
return Context{}, fmt.Errorf("could not parse datetime of [%v] using layout [%v]: %w", candidate.GetNicePath(), layout, err)
}

node := &yaml.Node{
Kind: yaml.ScalarNode,
Tag: "!!int",
Value: fmt.Sprintf("%v", parsedTime.Unix()),
}

results.PushBack(candidate.CreateReplacement(node))
results.PushBack(candidate.CreateReplacement(ScalarNode, "!!int", fmt.Sprintf("%v", parsedTime.Unix())))
}

return context.ChildContext(results), nil
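The hunks above swap hand-built yaml.Node values for a three-argument CreateReplacement(kind, tag, value) on CandidateNode. The following standalone sketch uses a deliberately simplified stand-in type, not yq's actual CandidateNode, to illustrate the shape of that helper; the field names and signature here are assumptions for illustration only.

package main

import "fmt"

// Kind is a simplified stand-in for yq's node-kind constants (assumption).
type Kind int

const (
	ScalarNode Kind = iota
	MappingNode
	SequenceNode
)

// CandidateNode is a minimal stand-in for illustration, not the real type.
type CandidateNode struct {
	Kind   Kind
	Tag    string
	Value  string
	Parent *CandidateNode
	Key    *CandidateNode
}

// CreateReplacement builds a new node with the given kind/tag/value while
// keeping the original node's place in the tree (parent and key), so an
// operator can return a result without rebuilding document wrappers.
func (n *CandidateNode) CreateReplacement(kind Kind, tag string, value string) *CandidateNode {
	return &CandidateNode{
		Kind:   kind,
		Tag:    tag,
		Value:  value,
		Parent: n.Parent,
		Key:    n.Key,
	}
}

func main() {
	original := &CandidateNode{Kind: ScalarNode, Tag: "!!timestamp", Value: "2021-05-19T01:02:03Z"}
	formatted := original.CreateReplacement(ScalarNode, "!!str", "Wednesday, 19-May-21 at 1:02AM")
	fmt.Println(formatted.Tag, formatted.Value)
}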
@ -11,7 +11,7 @@ var dateTimeOperatorScenarios = []expressionScenario{
|
||||
document: `a: 2001-12-15T02:59:43.1Z`,
|
||||
expression: `.a |= format_datetime("Monday, 02-Jan-06 at 3:04PM")`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: Saturday, 15-Dec-01 at 2:59AM\n",
|
||||
"D0, P[], (!!map)::a: Saturday, 15-Dec-01 at 2:59AM\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -20,7 +20,7 @@ var dateTimeOperatorScenarios = []expressionScenario{
|
||||
document: `a: Saturday, 15-Dec-01 at 2:59AM`,
|
||||
expression: `.a |= with_dtf("Monday, 02-Jan-06 at 3:04PM"; format_datetime("2006-01-02"))`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: 2001-12-15\n",
|
||||
"D0, P[], (!!map)::a: 2001-12-15\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -36,7 +36,7 @@ var dateTimeOperatorScenarios = []expressionScenario{
|
||||
document: "a: cool",
|
||||
expression: `.updated = now`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: cool\nupdated: 2021-05-19T01:02:03Z\n",
|
||||
"D0, P[], (!!map)::a: cool\nupdated: 2021-05-19T01:02:03Z\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -62,7 +62,7 @@ var dateTimeOperatorScenarios = []expressionScenario{
|
||||
document: "a: cool",
|
||||
expression: `.updated = (now | tz("Australia/Sydney"))`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: cool\nupdated: 2021-05-19T11:02:03+10:00\n",
|
||||
"D0, P[], (!!map)::a: cool\nupdated: 2021-05-19T11:02:03+10:00\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -71,7 +71,7 @@ var dateTimeOperatorScenarios = []expressionScenario{
|
||||
document: "a: Saturday, 15-Dec-01 at 2:59AM GMT",
|
||||
expression: `.a |= with_dtf("Monday, 02-Jan-06 at 3:04PM MST"; tz("Australia/Sydney"))`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: Saturday, 15-Dec-01 at 1:59PM AEDT\n",
|
||||
"D0, P[], (!!map)::a: Saturday, 15-Dec-01 at 1:59PM AEDT\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -80,7 +80,7 @@ var dateTimeOperatorScenarios = []expressionScenario{
|
||||
document: "a: Saturday, 15-Dec-01 at 2:59AM GMT",
|
||||
expression: `.a |= with_dtf("Monday, 02-Jan-06 at 3:04PM MST"; tz("Australia/Sydney"))`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: Saturday, 15-Dec-01 at 1:59PM AEDT\n",
|
||||
"D0, P[], (!!map)::a: Saturday, 15-Dec-01 at 1:59PM AEDT\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -88,7 +88,7 @@ var dateTimeOperatorScenarios = []expressionScenario{
|
||||
document: `a: 2021-01-01T00:00:00Z`,
|
||||
expression: `.a += "3h10m"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: 2021-01-01T03:10:00Z\n",
|
||||
"D0, P[], (!!map)::a: 2021-01-01T03:10:00Z\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -97,7 +97,7 @@ var dateTimeOperatorScenarios = []expressionScenario{
|
||||
document: `a: 2021-01-01T03:10:00Z`,
|
||||
expression: `.a -= "3h10m"`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: 2021-01-01T00:00:00Z\n",
|
||||
"D0, P[], (!!map)::a: 2021-01-01T00:00:00Z\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -105,7 +105,7 @@ var dateTimeOperatorScenarios = []expressionScenario{
|
||||
document: `a: Saturday, 15-Dec-01 at 2:59AM GMT`,
|
||||
expression: `with_dtf("Monday, 02-Jan-06 at 3:04PM MST"; .a += "3h1m")`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: Saturday, 15-Dec-01 at 6:00AM GMT\n",
|
||||
"D0, P[], (!!map)::a: Saturday, 15-Dec-01 at 6:00AM GMT\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -114,7 +114,7 @@ var dateTimeOperatorScenarios = []expressionScenario{
|
||||
document: `a: Saturday, 15-Dec-01 at 2:59AM GMT`,
|
||||
expression: `with_dtf("Monday, 02-Jan-06 at 3:04PM MST"; .a = (.a + "3h1m" | tz("Australia/Perth")))`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: Saturday, 15-Dec-01 at 2:00PM AWST\n",
|
||||
"D0, P[], (!!map)::a: Saturday, 15-Dec-01 at 2:00PM AWST\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -123,7 +123,7 @@ var dateTimeOperatorScenarios = []expressionScenario{
|
||||
document: "a: Saturday, 15-Dec-01 at 2:59AM GMT",
|
||||
expression: `.a |= with_dtf("Monday, 02-Jan-06 at 3:04PM MST", tz("Australia/Sydney"))`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: Saturday, 15-Dec-01 at 1:59PM AEDT\n",
|
||||
"D0, P[], (!!map)::a: Saturday, 15-Dec-01 at 1:59PM AEDT\n",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -3,8 +3,6 @@ package yqlib
import (
"container/list"
"fmt"

yaml "gopkg.in/yaml.v3"
)

func deleteChildOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
@ -17,54 +15,53 @@ func deleteChildOperator(d *dataTreeNavigator, context Context, expressionNode *
for el := nodesToDelete.MatchingNodes.Back(); el != nil; el = el.Prev() {
candidate := el.Value.(*CandidateNode)

if candidate.Node.Kind == yaml.DocumentNode {
//need to delete this node from context.
if candidate.Parent == nil {
// must be a top level thing, delete it
return removeFromContext(context, candidate)
}
log.Debugf("processing deletion of candidate %v", NodeToString(candidate))

parentNode := candidate.Parent

candidatePath := candidate.GetPath()
childPath := candidatePath[len(candidatePath)-1]

if parentNode.Kind == MappingNode {
deleteFromMap(candidate.Parent, childPath)
} else if parentNode.Kind == SequenceNode {
deleteFromArray(candidate.Parent, childPath)
} else {
return Context{}, fmt.Errorf("cannot delete nodes from parent of tag %v", parentNode.Tag)
}
}
return context, nil
}

func removeFromContext(context Context, candidate *CandidateNode) (Context, error) {
newResults := list.New()
for item := context.MatchingNodes.Front(); item != nil; item = item.Next() {
nodeInContext := item.Value.(*CandidateNode)
if nodeInContext.Node != candidate.Node {
if nodeInContext != candidate {
newResults.PushBack(nodeInContext)
} else {
log.Info("Need to delete this %v", NodeToString(nodeInContext))
}
}
return context.ChildContext(newResults), nil
} else if candidate.Parent == nil {
//problem: context may already be '.a' and then I pass in '.a.a2'.
// should pass in .a2.
log.Info("Could not find parent of %v", NodeToString(candidate))
return context, nil
}

parentNode := candidate.Parent.Node
childPath := candidate.Path[len(candidate.Path)-1]

if parentNode.Kind == yaml.MappingNode {
deleteFromMap(candidate.Parent, childPath)
} else if parentNode.Kind == yaml.SequenceNode {
deleteFromArray(candidate.Parent, childPath)
} else {
return Context{}, fmt.Errorf("Cannot delete nodes from parent of tag %v", parentNode.Tag)
}
}
return context, nil
}

func deleteFromMap(candidate *CandidateNode, childPath interface{}) {
func deleteFromMap(node *CandidateNode, childPath interface{}) {
log.Debug("deleteFromMap")
node := unwrapDoc(candidate.Node)
contents := node.Content
newContents := make([]*yaml.Node, 0)
newContents := make([]*CandidateNode, 0)

for index := 0; index < len(contents); index = index + 2 {
key := contents[index]
value := contents[index+1]

childCandidate := candidate.CreateChildInMap(key, value)

shouldDelete := key.Value == childPath

log.Debugf("shouldDelete %v ? %v", childCandidate.GetKey(), shouldDelete)
log.Debugf("shouldDelete %v ? %v", NodeToString(value), shouldDelete)

if !shouldDelete {
newContents = append(newContents, key, value)
@ -73,11 +70,10 @@ func deleteFromMap(candidate *CandidateNode, childPath interface{}) {
node.Content = newContents
}

func deleteFromArray(candidate *CandidateNode, childPath interface{}) {
func deleteFromArray(node *CandidateNode, childPath interface{}) {
log.Debug("deleteFromArray")
node := unwrapDoc(candidate.Node)
contents := node.Content
newContents := make([]*yaml.Node, 0)
newContents := make([]*CandidateNode, 0)

for index := 0; index < len(contents); index = index + 1 {
value := contents[index]
@ -10,7 +10,7 @@ var deleteOperatorScenarios = []expressionScenario{
|
||||
document: `{a: cat, b: dog}`,
|
||||
expression: `del(.b)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: cat}\n",
|
||||
"D0, P[], (!!map)::{a: cat}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -18,7 +18,7 @@ var deleteOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {a1: fred, a2: frood}}`,
|
||||
expression: `del(.a.a1)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: {a2: frood}}\n",
|
||||
"D0, P[], (!!map)::{a: {a2: frood}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -36,7 +36,7 @@ var deleteOperatorScenarios = []expressionScenario{
|
||||
document: `a: fast`,
|
||||
expression: `del(select(.a == "fast"))`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: slow\n",
|
||||
"D0, P[], (!!map)::a: slow\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -108,7 +108,7 @@ var deleteOperatorScenarios = []expressionScenario{
|
||||
document: `[1,2,3]`,
|
||||
expression: `del(.[1])`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::[1, 3]\n",
|
||||
"D0, P[], (!!seq)::[1, 3]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -116,7 +116,7 @@ var deleteOperatorScenarios = []expressionScenario{
|
||||
document: `a: [1,2,3]`,
|
||||
expression: `del(.a[])`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: []\n",
|
||||
"D0, P[], (!!map)::a: []\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -124,16 +124,20 @@ var deleteOperatorScenarios = []expressionScenario{
|
||||
document: `a: [10,x,10, 10, x, 10]`,
|
||||
expression: `del(.a[] | select(. == 10))`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: [x, x]\n",
|
||||
"D0, P[], (!!map)::a: [x, x]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `a: null`,
|
||||
expression: `del(..)`,
|
||||
expected: []string{},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `a: {thing1: yep, thing2: cool, thing3: hi, b: {thing1: cool, great: huh}}`,
|
||||
expression: `del(..)`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!map)::{}\n",
|
||||
},
|
||||
expected: []string{},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
@ -148,7 +152,7 @@ var deleteOperatorScenarios = []expressionScenario{
|
||||
document: `[{a: cat, b: dog}]`,
|
||||
expression: `del(.[0].a)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::[{b: dog}]\n",
|
||||
"D0, P[], (!!seq)::[{b: dog}]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -156,7 +160,7 @@ var deleteOperatorScenarios = []expressionScenario{
|
||||
document: `{a: cat, b: dog}`,
|
||||
expression: `del(.c)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: cat, b: dog}\n",
|
||||
"D0, P[], (!!map)::{a: cat, b: dog}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -164,7 +168,7 @@ var deleteOperatorScenarios = []expressionScenario{
|
||||
document: `{a: cat, b: dog, c: bat}`,
|
||||
expression: `del( .[] | select(. == "*at") )`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{b: dog}\n",
|
||||
"D0, P[], (!!map)::{b: dog}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -4,8 +4,6 @@ import (
"fmt"
"strconv"
"strings"

yaml "gopkg.in/yaml.v3"
)

func divideOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
@ -15,45 +13,40 @@ func divideOperator(d *dataTreeNavigator, context Context, expressionNode *Expre
}

func divide(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs *CandidateNode) (*CandidateNode, error) {
lhs.Node = unwrapDoc(lhs.Node)
rhs.Node = unwrapDoc(rhs.Node)

lhsNode := lhs.Node

if lhsNode.Tag == "!!null" {
return nil, fmt.Errorf("%v (%v) cannot be divided by %v (%v)", lhsNode.Tag, lhs.GetNicePath(), rhs.Node.Tag, rhs.GetNicePath())
if lhs.Tag == "!!null" {
return nil, fmt.Errorf("%v (%v) cannot be divided by %v (%v)", lhs.Tag, lhs.GetNicePath(), rhs.Tag, rhs.GetNicePath())
}

target := &yaml.Node{}
target := lhs.CopyWithoutContent()

if lhsNode.Kind == yaml.ScalarNode && rhs.Node.Kind == yaml.ScalarNode {
if err := divideScalars(target, lhsNode, rhs.Node); err != nil {
if lhs.Kind == ScalarNode && rhs.Kind == ScalarNode {
if err := divideScalars(target, lhs, rhs); err != nil {
return nil, err
}
} else {
return nil, fmt.Errorf("%v (%v) cannot be divided by %v (%v)", lhsNode.Tag, lhs.GetNicePath(), rhs.Node.Tag, rhs.GetNicePath())
return nil, fmt.Errorf("%v (%v) cannot be divided by %v (%v)", lhs.Tag, lhs.GetNicePath(), rhs.Tag, rhs.GetNicePath())
}

return lhs.CreateReplacement(target), nil
return target, nil
}

func divideScalars(target *yaml.Node, lhs *yaml.Node, rhs *yaml.Node) error {
func divideScalars(target *CandidateNode, lhs *CandidateNode, rhs *CandidateNode) error {
lhsTag := lhs.Tag
rhsTag := guessTagFromCustomType(rhs)
rhsTag := rhs.guessTagFromCustomType()
lhsIsCustom := false
if !strings.HasPrefix(lhsTag, "!!") {
// custom tag - we have to have a guess
lhsTag = guessTagFromCustomType(lhs)
lhsTag = lhs.guessTagFromCustomType()
lhsIsCustom = true
}

if lhsTag == "!!str" && rhsTag == "!!str" {
res := split(lhs.Value, rhs.Value)
target.Kind = res.Kind
target.Tag = res.Tag
target.Content = res.Content
tKind, tTag, res := split(lhs.Value, rhs.Value)
target.Kind = tKind
target.Tag = tTag
target.AddChildren(res)
} else if (lhsTag == "!!int" || lhsTag == "!!float") && (rhsTag == "!!int" || rhsTag == "!!float") {
target.Kind = yaml.ScalarNode
target.Kind = ScalarNode
target.Style = lhs.Style

lhsNum, err := strconv.ParseFloat(lhs.Value, 64)
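The divide hunks above dispatch on the operands' tags: string divided by string splits, number divided by number does float division. The sketch below mirrors that dispatch in plain Go with string tags and values; it is illustrative only and does not use yq's CandidateNode, tag guessing, or style preservation.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// divideScalarsSketch mimics the tag-based dispatch shown in the hunks above:
// "!!str" / "!!str" splits the string, numeric tags divide as float64.
func divideScalarsSketch(lhsTag, lhsValue, rhsTag, rhsValue string) (interface{}, error) {
	switch {
	case lhsTag == "!!str" && rhsTag == "!!str":
		return strings.Split(lhsValue, rhsValue), nil
	case (lhsTag == "!!int" || lhsTag == "!!float") && (rhsTag == "!!int" || rhsTag == "!!float"):
		lhs, err := strconv.ParseFloat(lhsValue, 64)
		if err != nil {
			return nil, err
		}
		rhs, err := strconv.ParseFloat(rhsValue, 64)
		if err != nil {
			return nil, err
		}
		return lhs / rhs, nil
	default:
		return nil, fmt.Errorf("%v cannot be divided by %v", lhsTag, rhsTag)
	}
}

func main() {
	parts, _ := divideScalarsSketch("!!str", "cat_meow", "!!str", "_")
	fmt.Println(parts) // [cat meow]
	quotient, _ := divideScalarsSketch("!!int", "12", "!!float", "2.5")
	fmt.Println(quotient) // 4.8
}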
@ -19,7 +19,7 @@ var divideOperatorScenarios = []expressionScenario{
|
||||
document: `{}`,
|
||||
expression: "(.a / .b) as $x | .",
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{}\n",
|
||||
"D0, P[], (!!map)::{}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -27,7 +27,7 @@ var divideOperatorScenarios = []expressionScenario{
|
||||
document: `{a: cat_meow, b: _}`,
|
||||
expression: `.c = .a / .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: cat_meow, b: _, c: [cat, meow]}\n",
|
||||
"D0, P[], (!!map)::{a: cat_meow, b: _, c: [cat, meow]}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -36,7 +36,7 @@ var divideOperatorScenarios = []expressionScenario{
|
||||
document: `{a: 12, b: 2.5}`,
|
||||
expression: `.a = .a / .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: 4.8, b: 2.5}\n",
|
||||
"D0, P[], (!!map)::{a: 4.8, b: 2.5}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -45,7 +45,7 @@ var divideOperatorScenarios = []expressionScenario{
|
||||
document: `{a: 1, b: -1}`,
|
||||
expression: `.a = .a / 0 | .b = .b / 0`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: !!float +Inf, b: !!float -Inf}\n",
|
||||
"D0, P[], (!!map)::{a: !!float +Inf, b: !!float -Inf}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -54,7 +54,7 @@ var divideOperatorScenarios = []expressionScenario{
|
||||
document: "a: !horse cat_meow\nb: !goat _",
|
||||
expression: `.a = .a / .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: !horse\n - cat\n - meow\nb: !goat _\n",
|
||||
"D0, P[], (!!map)::a: !horse\n - cat\n - meow\nb: !goat _\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -63,7 +63,7 @@ var divideOperatorScenarios = []expressionScenario{
|
||||
document: "a: !horse 1.2\nb: !goat 2.3",
|
||||
expression: `.a = .a / .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: !horse 0.5217391304347826\nb: !goat 2.3\n",
|
||||
"D0, P[], (!!map)::a: !horse 0.5217391304347826\nb: !goat 2.3\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -71,7 +71,7 @@ var divideOperatorScenarios = []expressionScenario{
|
||||
document: "a: 2\nb: !goat 2.3",
|
||||
expression: `.a = .a / .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: 0.8695652173913044\nb: !goat 2.3\n",
|
||||
"D0, P[], (!!map)::a: 0.8695652173913044\nb: !goat 2.3\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -80,7 +80,7 @@ var divideOperatorScenarios = []expressionScenario{
|
||||
document: "a: !horse 2\nb: !goat 3",
|
||||
expression: `.a = .a / .b`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: !horse 0.6666666666666666\nb: !goat 3\n",
|
||||
"D0, P[], (!!map)::a: !horse 0.6666666666666666\nb: !goat 3\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -89,7 +89,7 @@ var divideOperatorScenarios = []expressionScenario{
|
||||
document: "a: &horse [1]",
|
||||
expression: `.a[1] = .a[0] / 2`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: &horse [1, 0.5]\n",
|
||||
"D0, P[], (!!map)::a: &horse [1, 0.5]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -3,8 +3,6 @@ package yqlib
import (
"container/list"
"fmt"

"gopkg.in/yaml.v3"
)

func getDocumentIndexOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
@ -12,8 +10,7 @@ func getDocumentIndexOperator(d *dataTreeNavigator, context Context, expressionN

for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
candidate := el.Value.(*CandidateNode)
node := &yaml.Node{Kind: yaml.ScalarNode, Value: fmt.Sprintf("%v", candidate.Document), Tag: "!!int"}
scalar := candidate.CreateReplacement(node)
scalar := candidate.CreateReplacement(ScalarNode, "!!int", fmt.Sprintf("%v", candidate.GetDocument()))
results.PushBack(scalar)
}
return context.ChildContext(results), nil
@ -28,7 +28,7 @@ var documentIndexScenarios = []expressionScenario{
document: "a: cat\n---\na: frog\n",
expression: `select(document_index == 1)`,
expected: []string{
"D1, P[], (doc)::a: frog\n",
"D1, P[], (!!map)::a: frog\n",
},
},
{
@ -36,7 +36,7 @@ var documentIndexScenarios = []expressionScenario{
document: "a: cat\n---\na: frog\n",
expression: `select(di == 1)`,
expected: []string{
"D1, P[], (doc)::a: frog\n",
"D1, P[], (!!map)::a: frog\n",
},
},
{
@ -45,7 +45,7 @@ var documentIndexScenarios = []expressionScenario{
expression: `.a | ({"match": ., "doc": document_index})`,
expected: []string{
"D0, P[], (!!map)::match: cat\ndoc: 0\n",
"D0, P[], (!!map)::match: frog\ndoc: 1\n",
"D1, P[], (!!map)::match: frog\ndoc: 1\n",
},
},
}
@ -7,8 +7,6 @@ import (
"errors"
"regexp"
"strings"

"gopkg.in/yaml.v3"
)

func configureEncoder(format PrinterOutputFormat, indent int) Encoder {
@ -78,9 +76,8 @@ func encodeOperator(d *dataTreeNavigator, context Context, expressionNode *Expre
if originalList != nil && originalList.Len() > 0 && hasOnlyOneNewLine.MatchString(stringValue) {

original := originalList.Front().Value.(*CandidateNode)
originalNode := unwrapDoc(original.Node)
// original block did not have a newline at the end, get rid of this one too
if !endWithNewLine.MatchString(originalNode.Value) {
if !endWithNewLine.MatchString(original.Value) {
stringValue = chomper.ReplaceAllString(stringValue, "")
}
}
@ -92,8 +89,7 @@ func encodeOperator(d *dataTreeNavigator, context Context, expressionNode *Expre
stringValue = chomper.ReplaceAllString(stringValue, "")
}

stringContentNode := &yaml.Node{Kind: yaml.ScalarNode, Tag: "!!str", Value: stringValue}
results.PushBack(candidate.CreateReplacement(stringContentNode))
results.PushBack(candidate.CreateReplacement(ScalarNode, "!!str", stringValue))
}
return context.ChildContext(results), nil
}
@ -141,21 +137,21 @@ func decodeOperator(d *dataTreeNavigator, context Context, expressionNode *Expre

context.SetVariable("decoded: "+candidate.GetKey(), candidate.AsList())

log.Debugf("got: [%v]", candidate.Node.Value)
log.Debugf("got: [%v]", candidate.Value)

err := decoder.Init(strings.NewReader(unwrapDoc(candidate.Node).Value))
err := decoder.Init(strings.NewReader(candidate.Value))
if err != nil {
return Context{}, err
}

decodedNode, errorReading := decoder.Decode()
node, errorReading := decoder.Decode()
if errorReading != nil {
return Context{}, errorReading
}
//first node is a doc
node := unwrapDoc(decodedNode.Node)
node.Key = candidate.Key
node.Parent = candidate.Parent

results.PushBack(candidate.CreateReplacement(node))
results.PushBack(node)
}
return context.ChildContext(results), nil
}
@ -4,7 +4,7 @@ import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
var prefix = "D0, P[], (doc)::a:\n cool:\n bob: dylan\n"
|
||||
var prefix = "D0, P[], (!!map)::a:\n cool:\n bob: dylan\n"
|
||||
|
||||
var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
@ -13,7 +13,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {cool: "thing"}}`,
|
||||
expression: `.b = (.a | to_json)`,
|
||||
expected: []string{
|
||||
`D0, P[], (doc)::{a: {cool: "thing"}, b: "{\n \"cool\": \"thing\"\n}\n"}
|
||||
`D0, P[], (!!map)::{a: {cool: "thing"}, b: "{\n \"cool\": \"thing\"\n}\n"}
|
||||
`,
|
||||
},
|
||||
},
|
||||
@ -24,7 +24,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {cool: "thing"}}`,
|
||||
expression: `.b = (.a | to_json(0))`,
|
||||
expected: []string{
|
||||
`D0, P[], (doc)::{a: {cool: "thing"}, b: '{"cool":"thing"}'}
|
||||
`D0, P[], (!!map)::{a: {cool: "thing"}, b: '{"cool":"thing"}'}
|
||||
`,
|
||||
},
|
||||
},
|
||||
@ -35,7 +35,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {cool: "thing"}}`,
|
||||
expression: `.b = (.a | @json)`,
|
||||
expected: []string{
|
||||
`D0, P[], (doc)::{a: {cool: "thing"}, b: '{"cool":"thing"}'}
|
||||
`D0, P[], (!!map)::{a: {cool: "thing"}, b: '{"cool":"thing"}'}
|
||||
`,
|
||||
},
|
||||
},
|
||||
@ -54,7 +54,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {cool: "thing"}}`,
|
||||
expression: `.b = (.a | to_props)`,
|
||||
expected: []string{
|
||||
`D0, P[], (doc)::{a: {cool: "thing"}, b: "cool = thing\n"}
|
||||
`D0, P[], (!!map)::{a: {cool: "thing"}, b: "cool = thing\n"}
|
||||
`,
|
||||
},
|
||||
},
|
||||
@ -63,7 +63,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {cool: "thing"}}`,
|
||||
expression: `.b = (.a | @props)`,
|
||||
expected: []string{
|
||||
`D0, P[], (doc)::{a: {cool: "thing"}, b: "cool = thing\n"}
|
||||
`D0, P[], (!!map)::{a: {cool: "thing"}, b: "cool = thing\n"}
|
||||
`,
|
||||
},
|
||||
},
|
||||
@ -72,7 +72,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: `a: "cats=great\ndogs=cool as well"`,
|
||||
expression: `.a |= @propsd`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a:\n cats: great\n dogs: cool as well\n",
|
||||
"D0, P[], (!!map)::a:\n cats: great\n dogs: cool as well\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -80,7 +80,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: `a: "cats,dogs\ngreat,cool as well"`,
|
||||
expression: `.a |= @csvd`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a:\n - cats: great\n dogs: cool as well\n",
|
||||
"D0, P[], (!!map)::a:\n - cats: great\n dogs: cool as well\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -88,7 +88,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: `a: "cats dogs\ngreat cool as well"`,
|
||||
expression: `.a |= @tsvd`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a:\n - cats: great\n dogs: cool as well\n",
|
||||
"D0, P[], (!!map)::a:\n - cats: great\n dogs: cool as well\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -122,7 +122,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: `{a: {cool: "thing"}}`,
|
||||
expression: `.b = (.a | to_yaml)`,
|
||||
expected: []string{
|
||||
`D0, P[], (doc)::{a: {cool: "thing"}, b: "{cool: \"thing\"}\n"}
|
||||
`D0, P[], (!!map)::{a: {cool: "thing"}, b: "{cool: \"thing\"}\n"}
|
||||
`,
|
||||
},
|
||||
},
|
||||
@ -131,7 +131,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: `a: "foo: bar"`,
|
||||
expression: `.b = (.a | from_yaml)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: \"foo: bar\"\nb:\n foo: bar\n",
|
||||
"D0, P[], (!!map)::a: \"foo: bar\"\nb:\n foo: bar\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -140,7 +140,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: "a: |\n foo: bar\n baz: dog\n",
|
||||
expression: `.a |= (from_yaml | .foo = "cat" | to_yaml)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: |\n foo: cat\n baz: dog\n",
|
||||
"D0, P[], (!!map)::a: |\n foo: cat\n baz: dog\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -149,7 +149,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: "a: |-\n foo: bar\n baz: dog\n",
|
||||
expression: `.a |= (from_yaml | .foo = "cat" | to_yaml)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: |-\n foo: cat\n baz: dog\n",
|
||||
"D0, P[], (!!map)::a: |-\n foo: cat\n baz: dog\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -158,7 +158,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: "a: 'foo: bar'",
|
||||
expression: `.a |= (from_yaml | .foo = "cat" | to_yaml)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: 'foo: cat'\n",
|
||||
"D0, P[], (!!map)::a: 'foo: cat'\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -193,7 +193,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: "a: \"foo: bar\"",
|
||||
expression: `.a |= (from_yaml | .foo = {"a": "frog"} | to_yaml)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: \"foo:\\n a: frog\"\n",
|
||||
"D0, P[], (!!map)::a: \"foo:\\n a: frog\"\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -229,7 +229,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: `a: "<foo>bar</foo>"`,
|
||||
expression: `.b = (.a | from_xml)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: \"<foo>bar</foo>\"\nb:\n foo: bar\n",
|
||||
"D0, P[], (!!map)::a: \"<foo>bar</foo>\"\nb:\n foo: bar\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -299,7 +299,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
document: "coolData: YTogYXBwbGUK",
|
||||
expression: ".coolData |= (@base64d | from_yaml)",
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::coolData:\n a: apple\n",
|
||||
"D0, P[], (!!map)::coolData:\n a: apple\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -3,64 +3,59 @@ package yqlib
|
||||
import (
|
||||
"container/list"
|
||||
"fmt"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
func entrySeqFor(key *yaml.Node, value *yaml.Node) *yaml.Node {
|
||||
var keyKey = &yaml.Node{Kind: yaml.ScalarNode, Tag: "!!str", Value: "key"}
|
||||
var valueKey = &yaml.Node{Kind: yaml.ScalarNode, Tag: "!!str", Value: "value"}
|
||||
func entrySeqFor(key *CandidateNode, value *CandidateNode) *CandidateNode {
|
||||
var keyKey = &CandidateNode{Kind: ScalarNode, Tag: "!!str", Value: "key"}
|
||||
var valueKey = &CandidateNode{Kind: ScalarNode, Tag: "!!str", Value: "value"}
|
||||
|
||||
return &yaml.Node{
|
||||
Kind: yaml.MappingNode,
|
||||
return &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
Content: []*yaml.Node{keyKey, key, valueKey, value},
|
||||
Content: []*CandidateNode{keyKey, key, valueKey, value},
|
||||
}
|
||||
}
|
||||
|
||||
func toEntriesFromMap(candidateNode *CandidateNode) *CandidateNode {
|
||||
var sequence = &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
|
||||
var entriesNode = candidateNode.CreateReplacementWithDocWrappers(sequence)
|
||||
var sequence = candidateNode.CreateReplacementWithComments(SequenceNode, "!!seq", 0)
|
||||
|
||||
var contents = unwrapDoc(candidateNode.Node).Content
|
||||
var contents = candidateNode.Content
|
||||
for index := 0; index < len(contents); index = index + 2 {
|
||||
key := contents[index]
|
||||
value := contents[index+1]
|
||||
|
||||
sequence.Content = append(sequence.Content, entrySeqFor(key, value))
|
||||
sequence.AddChild(entrySeqFor(key, value))
|
||||
}
|
||||
return entriesNode
|
||||
return sequence
|
||||
}
|
||||
|
||||
func toEntriesfromSeq(candidateNode *CandidateNode) *CandidateNode {
|
||||
var sequence = &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
|
||||
var entriesNode = candidateNode.CreateReplacementWithDocWrappers(sequence)
|
||||
var sequence = candidateNode.CreateReplacementWithComments(SequenceNode, "!!seq", 0)
|
||||
|
||||
var contents = unwrapDoc(candidateNode.Node).Content
|
||||
var contents = candidateNode.Content
|
||||
for index := 0; index < len(contents); index = index + 1 {
|
||||
key := &yaml.Node{Kind: yaml.ScalarNode, Tag: "!!int", Value: fmt.Sprintf("%v", index)}
|
||||
key := &CandidateNode{Kind: ScalarNode, Tag: "!!int", Value: fmt.Sprintf("%v", index)}
|
||||
value := contents[index]
|
||||
|
||||
sequence.Content = append(sequence.Content, entrySeqFor(key, value))
|
||||
sequence.AddChild(entrySeqFor(key, value))
|
||||
}
|
||||
return entriesNode
|
||||
return sequence
|
||||
}
|
||||
|
||||
func toEntriesOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
var results = list.New()
|
||||
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
candidateNode := unwrapDoc(candidate.Node)
|
||||
|
||||
switch candidateNode.Kind {
|
||||
case yaml.MappingNode:
|
||||
switch candidate.Kind {
|
||||
case MappingNode:
|
||||
results.PushBack(toEntriesFromMap(candidate))
|
||||
|
||||
case yaml.SequenceNode:
|
||||
case SequenceNode:
|
||||
results.PushBack(toEntriesfromSeq(candidate))
|
||||
default:
|
||||
if candidateNode.Tag != "!!null" {
|
||||
return Context{}, fmt.Errorf("%v has no keys", candidate.Node.Tag)
|
||||
if candidate.Tag != "!!null" {
|
||||
return Context{}, fmt.Errorf("%v has no keys", candidate.Tag)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -68,9 +63,8 @@ func toEntriesOperator(d *dataTreeNavigator, context Context, expressionNode *Ex
|
||||
return context.ChildContext(results), nil
|
||||
}
|
||||
|
||||
func parseEntry(entry *yaml.Node, position int) (*yaml.Node, *yaml.Node, error) {
|
||||
func parseEntry(candidateNode *CandidateNode, position int) (*CandidateNode, *CandidateNode, error) {
|
||||
prefs := traversePreferences{DontAutoCreate: true}
|
||||
candidateNode := &CandidateNode{Node: entry}
|
||||
|
||||
keyResults, err := traverseMap(Context{}, candidateNode, createStringScalarNode("key"), prefs, false)
|
||||
|
||||
@ -88,15 +82,14 @@ func parseEntry(entry *yaml.Node, position int) (*yaml.Node, *yaml.Node, error)
|
||||
return nil, nil, fmt.Errorf("expected to find one 'value' entry but found %v in position %v", valueResults.Len(), position)
|
||||
}
|
||||
|
||||
return keyResults.Front().Value.(*CandidateNode).Node, valueResults.Front().Value.(*CandidateNode).Node, nil
|
||||
return keyResults.Front().Value.(*CandidateNode), valueResults.Front().Value.(*CandidateNode), nil
|
||||
|
||||
}
|
||||
|
||||
func fromEntries(candidateNode *CandidateNode) (*CandidateNode, error) {
|
||||
var node = &yaml.Node{Kind: yaml.MappingNode, Tag: "!!map"}
|
||||
var mapCandidateNode = candidateNode.CreateReplacementWithDocWrappers(node)
|
||||
var node = candidateNode.CopyWithoutContent()
|
||||
|
||||
var contents = unwrapDoc(candidateNode.Node).Content
|
||||
var contents = candidateNode.Content
|
||||
|
||||
for index := 0; index < len(contents); index = index + 1 {
|
||||
key, value, err := parseEntry(contents[index], index)
|
||||
@ -104,19 +97,20 @@ func fromEntries(candidateNode *CandidateNode) (*CandidateNode, error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
node.Content = append(node.Content, key, value)
|
||||
node.AddKeyValueChild(key, value)
|
||||
}
|
||||
return mapCandidateNode, nil
|
||||
node.Kind = MappingNode
|
||||
node.Tag = "!!map"
|
||||
return node, nil
|
||||
}
|
||||
|
||||
func fromEntriesOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
var results = list.New()
|
||||
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
candidateNode := unwrapDoc(candidate.Node)
|
||||
|
||||
switch candidateNode.Kind {
|
||||
case yaml.SequenceNode:
|
||||
switch candidate.Kind {
|
||||
case SequenceNode:
|
||||
mapResult, err := fromEntries(candidate)
|
||||
if err != nil {
|
||||
return Context{}, err
|
||||
@ -162,8 +156,13 @@ func withEntriesOperator(d *dataTreeNavigator, context Context, expressionNode *
|
||||
if err != nil {
|
||||
return Context{}, err
|
||||
}
|
||||
log.Debug("candidate %v", NodeToString(candidate))
|
||||
log.Debug("candidate leading content: %v", candidate.LeadingContent)
|
||||
collected.LeadingContent = candidate.LeadingContent
|
||||
collected.TrailingContent = candidate.TrailingContent
|
||||
log.Debug("candidate FootComment: [%v]", candidate.FootComment)
|
||||
|
||||
collected.HeadComment = candidate.HeadComment
|
||||
collected.FootComment = candidate.FootComment
|
||||
|
||||
log.Debugf("**** collected %v", collected.LeadingContent)
|
||||
|
||||
|
@ -7,7 +7,6 @@ import (
"strings"

parse "github.com/a8m/envsubst/parse"
yaml "gopkg.in/yaml.v3"
)

type envOpPreferences struct {
@ -18,39 +17,40 @@ type envOpPreferences struct {
}

func envOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
envName := expressionNode.Operation.CandidateNode.Node.Value
envName := expressionNode.Operation.CandidateNode.Value
log.Debug("EnvOperator, env name:", envName)

rawValue := os.Getenv(envName)

preferences := expressionNode.Operation.Preferences.(envOpPreferences)

var node *yaml.Node
var node *CandidateNode
if preferences.StringValue {
node = &yaml.Node{
Kind: yaml.ScalarNode,
node = &CandidateNode{
Kind: ScalarNode,
Tag: "!!str",
Value: rawValue,
}
} else if rawValue == "" {
return Context{}, fmt.Errorf("Value for env variable '%v' not provided in env()", envName)
return Context{}, fmt.Errorf("value for env variable '%v' not provided in env()", envName)
} else {
var dataBucket yaml.Node
decoder := yaml.NewDecoder(strings.NewReader(rawValue))
errorReading := decoder.Decode(&dataBucket)
if errorReading != nil {
return Context{}, errorReading
decoder := NewYamlDecoder(ConfiguredYamlPreferences)
if err := decoder.Init(strings.NewReader(rawValue)); err != nil {
return Context{}, err
}
//first node is a doc
node = unwrapDoc(&dataBucket)
var err error
node, err = decoder.Decode()

if err != nil {
return Context{}, err
}

}
log.Debug("ENV tag", node.Tag)
log.Debug("ENV value", node.Value)
log.Debug("ENV Kind", node.Kind)

target := &CandidateNode{Node: node}

return context.SingleChildContext(target), nil
return context.SingleChildContext(node), nil
}

func envsubstOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
@ -70,8 +70,7 @@ func envsubstOperator(d *dataTreeNavigator, context Context, expressionNode *Exp
}

for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
candidate := el.Value.(*CandidateNode)
node := unwrapDoc(candidate.Node)
node := el.Value.(*CandidateNode)
if node.Tag != "!!str" {
log.Warning("EnvSubstOperator, env name:", node.Tag, node.Value)
return Context{}, fmt.Errorf("cannot substitute with %v, can only substitute strings. Hint: Most often you'll want to use '|=' over '=' for this operation", node.Tag)
@ -81,8 +80,7 @@ func envsubstOperator(d *dataTreeNavigator, context Context, expressionNode *Exp
if err != nil {
return Context{}, err
}
targetNode := &yaml.Node{Kind: yaml.ScalarNode, Value: value, Tag: "!!str"}
result := candidate.CreateReplacement(targetNode)
result := node.CreateReplacement(ScalarNode, "!!str", value)
results.PushBack(result)
}

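The env operator hunks above distinguish two behaviours: keep the raw environment value as a string, or parse it as YAML. The sketch below shows that distinction in self-contained Go using gopkg.in/yaml.v3's Unmarshal; it is a simplified illustration and does not go through yq's decoder interfaces or CandidateNode types.

package main

import (
	"fmt"
	"os"

	yaml "gopkg.in/yaml.v3"
)

// readEnvSketch returns the raw string when asString is set (strenv-style),
// otherwise parses the value as YAML (env-style), erroring on an empty value.
func readEnvSketch(name string, asString bool) (interface{}, error) {
	raw := os.Getenv(name)
	if asString {
		return raw, nil
	}
	if raw == "" {
		return nil, fmt.Errorf("value for env variable '%v' not provided in env()", name)
	}
	var parsed interface{}
	if err := yaml.Unmarshal([]byte(raw), &parsed); err != nil {
		return nil, err
	}
	return parsed, nil
}

func main() {
	os.Setenv("myenv", "{a: apple, b: 3}")
	asString, _ := readEnvSketch("myenv", true)
	parsed, _ := readEnvSketch("myenv", false)
	fmt.Printf("strenv: %v\nenv: %v\n", asString, parsed)
}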
@ -60,7 +60,7 @@ var envOperatorScenarios = []expressionScenario{
|
||||
environmentVariables: map[string]string{"pathEnv": ".a.b[0].name", "valueEnv": "moo"},
|
||||
expression: `eval(strenv(pathEnv)) = strenv(valueEnv)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: {b: [{name: moo}, {name: cat}]}}\n",
|
||||
"D0, P[], (!!map)::{a: {b: [{name: moo}, {name: cat}]}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -136,7 +136,7 @@ var envOperatorScenarios = []expressionScenario{
|
||||
document: "{v: \"${myenv}\"}",
|
||||
expression: `.v |= envsubst`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{v: \"cat meow\"}\n",
|
||||
"D0, P[], (!!map)::{v: \"cat meow\"}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -1,7 +1,5 @@
package yqlib

import "gopkg.in/yaml.v3"

func equalsOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
log.Debugf("-- equalsOperation")
return crossFunction(d, context, expressionNode, isEquals(false), true)
@ -12,33 +10,29 @@ func isEquals(flip bool) func(d *dataTreeNavigator, context Context, lhs *Candid
value := false
log.Debugf("-- isEquals cross function")
if lhs == nil && rhs == nil {
log.Debugf("-- both are nil")
owner := &CandidateNode{}
return createBooleanCandidate(owner, !flip), nil
} else if lhs == nil {
log.Debugf("lhs nil, but rhs is not")
rhsNode := unwrapDoc(rhs.Node)
value := rhsNode.Tag == "!!null"
value := rhs.Tag == "!!null"
if flip {
value = !value
}
return createBooleanCandidate(rhs, value), nil
} else if rhs == nil {
log.Debugf("lhs not nil, but rhs is")
lhsNode := unwrapDoc(lhs.Node)
value := lhsNode.Tag == "!!null"
value := lhs.Tag == "!!null"
if flip {
value = !value
}
return createBooleanCandidate(lhs, value), nil
}

lhsNode := unwrapDoc(lhs.Node)
rhsNode := unwrapDoc(rhs.Node)

if lhsNode.Tag == "!!null" {
value = (rhsNode.Tag == "!!null")
} else if lhsNode.Kind == yaml.ScalarNode && rhsNode.Kind == yaml.ScalarNode {
value = matchKey(lhsNode.Value, rhsNode.Value)
if lhs.Tag == "!!null" {
value = (rhs.Tag == "!!null")
} else if lhs.Kind == ScalarNode && rhs.Kind == ScalarNode {
value = matchKey(lhs.Value, rhs.Value)
}
log.Debugf("%v == %v ? %v", NodeToString(lhs), NodeToString(rhs), value)
if flip {
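The equality hunks above encode a few null-aware rules: two missing operands compare equal, a missing operand only equals an explicit null, and scalars compare by value. A compact sketch of those rules, using plain strings and booleans rather than yq's CandidateNode and wildcard matching, is shown below as an assumption-laden illustration.

package main

import "fmt"

// isEqualsSketch captures the comparison rules visible in the hunk above.
func isEqualsSketch(lhsTag, lhsValue, rhsTag, rhsValue string, lhsMissing, rhsMissing bool) bool {
	switch {
	case lhsMissing && rhsMissing:
		return true
	case lhsMissing:
		return rhsTag == "!!null"
	case rhsMissing:
		return lhsTag == "!!null"
	case lhsTag == "!!null":
		return rhsTag == "!!null"
	default:
		return lhsValue == rhsValue
	}
}

func main() {
	fmt.Println(isEqualsSketch("!!str", "frog", "!!str", "frog", false, false)) // true
	fmt.Println(isEqualsSketch("", "", "!!null", "", true, false))              // true: missing equals null
}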
@ -49,7 +49,7 @@ var equalsOperatorScenarios = []expressionScenario{
|
||||
document: "{}",
|
||||
expression: "(.a == .b) as $x | .",
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{}\n",
|
||||
"D0, P[], (!!map)::{}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -65,7 +65,7 @@ var equalsOperatorScenarios = []expressionScenario{
|
||||
document: "{}",
|
||||
expression: "(.a != .b) as $x | .",
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{}\n",
|
||||
"D0, P[], (!!map)::{}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -87,7 +87,7 @@ var equalsOperatorScenarios = []expressionScenario{
|
||||
document: "{a: {b: 10}}",
|
||||
expression: "select(.d == .c)",
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: {b: 10}}\n",
|
||||
"D0, P[], (!!map)::{a: {b: 10}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -95,7 +95,7 @@ var equalsOperatorScenarios = []expressionScenario{
|
||||
document: "{a: {b: 10}}",
|
||||
expression: "select(null == .c)",
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{a: {b: 10}}\n",
|
||||
"D0, P[], (!!map)::{a: {b: 10}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -176,7 +176,7 @@ var equalsOperatorScenarios = []expressionScenario{
|
||||
document: "a: frog",
|
||||
expression: `select(.b != "thing")`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: frog\n",
|
||||
"D0, P[], (!!map)::a: frog\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -184,7 +184,7 @@ var equalsOperatorScenarios = []expressionScenario{
|
||||
document: "a: frog",
|
||||
expression: `select(.b == .c)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: frog\n",
|
||||
"D0, P[], (!!map)::a: frog\n",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -14,7 +14,7 @@ func errorOperator(d *dataTreeNavigator, context Context, expressionNode *Expres
}
errorMessage := "aborted"
if rhs.MatchingNodes.Len() > 0 {
errorMessage = rhs.MatchingNodes.Front().Value.(*CandidateNode).Node.Value
errorMessage = rhs.MatchingNodes.Front().Value.(*CandidateNode).Value
}
return Context{}, fmt.Errorf(errorMessage)
}

@ -27,7 +27,7 @@ var errorOperatorScenarios = []expressionScenario{
document: "name: Bob\nfavouriteAnimal: cat\n",
expression: validationExpression,
expected: []string{
"D0, P[], (doc)::name: Bob\nfavouriteAnimal: cat\nnumPets: 3\n",
"D0, P[], (!!map)::name: Bob\nfavouriteAnimal: cat\nnumPets: 3\n",
},
},
}

@ -17,7 +17,7 @@ func evalOperator(d *dataTreeNavigator, context Context, expressionNode *Express
for pathExpStrEntry := pathExpStrResults.MatchingNodes.Front(); pathExpStrEntry != nil; pathExpStrEntry = pathExpStrEntry.Next() {
expressionStrCandidate := pathExpStrEntry.Value.(*CandidateNode)

expressions[expIndex], err = ExpressionParser.ParseExpression(expressionStrCandidate.Node.Value)
expressions[expIndex], err = ExpressionParser.ParseExpression(expressionStrCandidate.Value)
if err != nil {
return Context{}, err
}

@ -21,7 +21,7 @@ var evalOperatorScenarios = []expressionScenario{
environmentVariables: map[string]string{"pathEnv": ".a.b[0].name", "valueEnv": "moo"},
expression: `eval(strenv(pathEnv)) = strenv(valueEnv)`,
expected: []string{
"D0, P[], (doc)::{a: {b: [{name: moo}, {name: cat}]}}\n",
"D0, P[], (!!map)::{a: {b: [{name: moo}, {name: cat}]}}\n",
},
},
}
@ -3,8 +3,6 @@ package yqlib
import (
"container/list"
"fmt"

yaml "gopkg.in/yaml.v3"
)

func getFilenameOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
@ -14,8 +12,7 @@ func getFilenameOperator(d *dataTreeNavigator, context Context, expressionNode *

for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
candidate := el.Value.(*CandidateNode)
node := &yaml.Node{Kind: yaml.ScalarNode, Value: candidate.Filename, Tag: "!!str"}
result := candidate.CreateReplacement(node)
result := candidate.CreateReplacement(ScalarNode, "!!str", candidate.GetFilename())
results.PushBack(result)
}

@ -29,8 +26,7 @@ func getFileIndexOperator(d *dataTreeNavigator, context Context, expressionNode

for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
candidate := el.Value.(*CandidateNode)
node := &yaml.Node{Kind: yaml.ScalarNode, Value: fmt.Sprintf("%v", candidate.FileIndex), Tag: "!!int"}
result := candidate.CreateReplacement(node)
result := candidate.CreateReplacement(ScalarNode, "!!int", fmt.Sprintf("%v", candidate.GetFileIndex()))
results.PushBack(result)
}

@ -25,7 +25,7 @@ func filterOperator(d *dataTreeNavigator, context Context, expressionNode *Expre
if err != nil {
return Context{}, err
}
collected.Node.Style = unwrapDoc(candidate.Node).Style
collected.Style = candidate.Style
results.PushBack(collected)
}
return context.ChildContext(results), nil
@ -2,26 +2,24 @@ package yqlib

import (
"fmt"

yaml "gopkg.in/yaml.v3"
)

type flattenPreferences struct {
depth int
}

func flatten(node *yaml.Node, depth int) {
func flatten(node *CandidateNode, depth int) {
if depth == 0 {
return
}
if node.Kind != yaml.SequenceNode {
if node.Kind != SequenceNode {
return
}
content := node.Content
newSeq := make([]*yaml.Node, 0)
newSeq := make([]*CandidateNode, 0)

for i := 0; i < len(content); i++ {
if content[i].Kind == yaml.SequenceNode {
if content[i].Kind == SequenceNode {
flatten(content[i], depth-1)
for j := 0; j < len(content[i].Content); j++ {
newSeq = append(newSeq, content[i].Content[j])
@ -30,7 +28,8 @@ func flatten(node *yaml.Node, depth int) {
newSeq = append(newSeq, content[i])
}
}
node.Content = newSeq
node.Content = make([]*CandidateNode, 0)
node.AddChildren(newSeq)
}

func flattenOp(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
@ -40,12 +39,11 @@ func flattenOp(d *dataTreeNavigator, context Context, expressionNode *Expression

for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
candidate := el.Value.(*CandidateNode)
candidateNode := unwrapDoc(candidate.Node)
if candidateNode.Kind != yaml.SequenceNode {
return Context{}, fmt.Errorf("Only arrays are supported for flatten")
if candidate.Kind != SequenceNode {
return Context{}, fmt.Errorf("only arrays are supported for flatten")
}

flatten(candidateNode, depth)
flatten(candidate, depth)

}

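The flatten hunks above splice nested sequences into their parent until a depth limit is reached. The sketch below is a plain-slice version of that recursion over []interface{}; it is illustrative only and does not operate on yq's node content.

package main

import "fmt"

// flattenSketch splices nested slices into their parent, stopping when depth runs out.
func flattenSketch(seq []interface{}, depth int) []interface{} {
	if depth == 0 {
		return seq
	}
	flattened := make([]interface{}, 0, len(seq))
	for _, item := range seq {
		if child, ok := item.([]interface{}); ok {
			flattened = append(flattened, flattenSketch(child, depth-1)...)
		} else {
			flattened = append(flattened, item)
		}
	}
	return flattened
}

func main() {
	nested := []interface{}{1, []interface{}{2, []interface{}{3, 4}}, 5}
	fmt.Println(flattenSketch(nested, 1)) // [1 2 [3 4] 5]
	fmt.Println(flattenSketch(nested, 2)) // [1 2 3 4 5]
}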
Some files were not shown because too many files have changed in this diff.