Refactored decoder responsibilities (#1402)

- improved comment handling
- yaml decoder is now responsible for the leading content workaround
Mike Farah 2022-10-28 14:16:46 +11:00 committed by GitHub
parent 46c32f4c79
commit 880397d549
42 changed files with 529 additions and 329 deletions
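
For anyone embedding yqlib, the shape of the change is that decoders now own their preferences and hand back candidate nodes themselves: `Init` returns an error and `Decode` returns a `*CandidateNode` instead of filling in a caller-supplied `yaml.Node`. A minimal sketch of driving the new interface directly, assuming the yq v4 module path; the input string is only an illustration:

```go
package main

import (
	"errors"
	"fmt"
	"io"
	"strings"

	"github.com/mikefarah/yq/v4/pkg/yqlib"
)

func main() {
	// The decoder now carries its own preferences instead of the evaluators
	// taking a separate leadingContentPreProcessing flag.
	decoder := yqlib.NewYamlDecoder(yqlib.ConfiguredYamlPreferences)

	// Init now returns an error (it may pre-read header comments).
	if err := decoder.Init(strings.NewReader("a: 1\n---\nb: 2\n")); err != nil {
		panic(err)
	}

	for {
		// Decode now returns a *CandidateNode rather than filling a *yaml.Node.
		candidate, err := decoder.Decode()
		if errors.Is(err, io.EOF) {
			break
		} else if err != nil {
			panic(err)
		}
		fmt.Printf("decoded a document, node kind %v\n", candidate.Node.Kind)
	}
}
```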

View File

@ -9,7 +9,7 @@ EOL
testEmptyEval() {
X=$(./yq e test.yml)
expected=$(cat test.yml)
expected="# comment"
assertEquals 0 $?
assertEquals "$expected" "$X"
}

View File

@ -1,6 +1,5 @@
package cmd
var leadingContentPreProcessing = true
var unwrapScalar = true
var writeInplace = false

View File

@ -75,7 +75,7 @@ func evaluateAll(cmd *cobra.Command, args []string) (cmdError error) {
return err
}
decoder, err := configureDecoder()
decoder, err := configureDecoder(true)
if err != nil {
return err
}
@ -109,13 +109,13 @@ func evaluateAll(cmd *cobra.Command, args []string) (cmdError error) {
switch len(args) {
case 0:
if nullInput {
err = yqlib.NewStreamEvaluator().EvaluateNew(processExpression(expression), printer, "")
err = yqlib.NewStreamEvaluator().EvaluateNew(processExpression(expression), printer)
} else {
cmd.Println(cmd.UsageString())
return nil
}
default:
err = allAtOnceEvaluator.EvaluateFiles(processExpression(expression), args, printer, leadingContentPreProcessing, decoder)
err = allAtOnceEvaluator.EvaluateFiles(processExpression(expression), args, printer, decoder)
}
completedSuccessfully = err == nil

View File

@ -97,7 +97,7 @@ func evaluateSequence(cmd *cobra.Command, args []string) (cmdError error) {
printer := yqlib.NewPrinter(encoder, printerWriter)
decoder, err := configureDecoder()
decoder, err := configureDecoder(false)
if err != nil {
return err
}
@ -123,13 +123,13 @@ func evaluateSequence(cmd *cobra.Command, args []string) (cmdError error) {
switch len(args) {
case 0:
if nullInput {
err = streamEvaluator.EvaluateNew(processExpression(expression), printer, "")
err = streamEvaluator.EvaluateNew(processExpression(expression), printer)
} else {
cmd.Println(cmd.UsageString())
return nil
}
default:
err = streamEvaluator.EvaluateFiles(processExpression(expression), args, printer, leadingContentPreProcessing, decoder)
err = streamEvaluator.EvaluateFiles(processExpression(expression), args, printer, decoder)
}
completedSuccessfully = err == nil

View File

@ -59,6 +59,11 @@ yq -P sample.json
"naming conflicts with the default content name, directive name and proc inst prefix. If you need to keep " +
"`+` please set that value explicityly with --xml-attribute-prefix.")
}
// copy preference from global setting
yqlib.ConfiguredYamlPreferences.UnwrapScalar = unwrapScalar
yqlib.ConfiguredYamlPreferences.PrintDocSeparators = !noDocSeparators
},
}
@ -97,7 +102,7 @@ yq -P sample.json
rootCmd.PersistentFlags().BoolVarP(&forceNoColor, "no-colors", "M", false, "force print with no colors")
rootCmd.PersistentFlags().StringVarP(&frontMatter, "front-matter", "f", "", "(extract|process) first input as yaml front-matter. Extract will pull out the yaml content, process will run the expression against the yaml content, leaving the remaining data intact")
rootCmd.PersistentFlags().StringVarP(&forceExpression, "expression", "", "", "forcibly set the expression argument. Useful when yq argument detection thinks your expression is a file.")
rootCmd.PersistentFlags().BoolVarP(&leadingContentPreProcessing, "header-preprocess", "", true, "Slurp any header comments and separators before processing expression.")
rootCmd.PersistentFlags().BoolVarP(&yqlib.ConfiguredYamlPreferences.LeadingContentPreProcessing, "header-preprocess", "", true, "Slurp any header comments and separators before processing expression.")
rootCmd.PersistentFlags().StringVarP(&splitFileExp, "split-exp", "s", "", "print each result (or doc) into a file named (exp). [exp] argument must return a string. You can use $index in the expression as the result counter.")
rootCmd.PersistentFlags().StringVarP(&splitFileExpFile, "split-exp-file", "", "", "Use a file to specify the split-exp expression.")

View File

@ -56,7 +56,7 @@ func initCommand(cmd *cobra.Command, args []string) (string, []string, error) {
return expression, args, nil
}
func configureDecoder() (yqlib.Decoder, error) {
func configureDecoder(evaluateTogether bool) (yqlib.Decoder, error) {
yqlibInputFormat, err := yqlib.InputFormatFromString(inputFormat)
if err != nil {
return nil, err
@ -73,8 +73,9 @@ func configureDecoder() (yqlib.Decoder, error) {
case yqlib.TSVObjectInputFormat:
return yqlib.NewCSVObjectDecoder('\t'), nil
}
return yqlib.NewYamlDecoder(), nil
prefs := yqlib.ConfiguredYamlPreferences
prefs.EvaluateTogether = evaluateTogether
return yqlib.NewYamlDecoder(prefs), nil
}
func configurePrinterWriter(format yqlib.PrinterOutputFormat, out io.Writer) (yqlib.PrinterWriter, error) {
@ -105,7 +106,7 @@ func configureEncoder(format yqlib.PrinterOutputFormat) yqlib.Encoder {
case yqlib.TSVOutputFormat:
return yqlib.NewCsvEncoder('\t')
case yqlib.YamlOutputFormat:
return yqlib.NewYamlEncoder(indent, colorsEnabled, !noDocSeparators, unwrapScalar)
return yqlib.NewYamlEncoder(indent, colorsEnabled, yqlib.ConfiguredYamlPreferences)
case yqlib.XMLOutputFormat:
return yqlib.NewXMLEncoder(indent, yqlib.ConfiguredXMLPreferences)
}
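
The yaml encoder constructor changes in step: the `printDocSeparators` and `unwrapScalar` booleans move onto `YamlPreferences`. A small sketch of that mapping, assuming `NewDefaultYamlPreferences` returns the struct by value (as its use in the comments operator further down suggests):

```go
package main

import (
	"os"

	"github.com/mikefarah/yq/v4/pkg/yqlib"
)

func main() {
	// Previously: NewYamlEncoder(indent, colors, printDocSeparators, unwrapScalar).
	// Now the last two booleans live on YamlPreferences.
	prefs := yqlib.NewDefaultYamlPreferences()
	prefs.PrintDocSeparators = true
	prefs.UnwrapScalar = true

	encoder := yqlib.NewYamlEncoder(2, false, prefs)
	printer := yqlib.NewPrinter(encoder, yqlib.NewSinglePrinterWriter(os.Stdout))
	_ = printer // a printer would normally be handed to an evaluator
}
```

The decoder side gets the same treatment: `configureDecoder` copies `ConfiguredYamlPreferences` and flips `EvaluateTogether` per invocation, leaving the global default untouched.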

View File

View File

@ -8,7 +8,7 @@ import (
// A yaml expression evaluator that runs the expression once against all files/nodes in memory.
type Evaluator interface {
EvaluateFiles(expression string, filenames []string, printer Printer, leadingContentPreProcessing bool, decoder Decoder) error
EvaluateFiles(expression string, filenames []string, printer Printer, decoder Decoder) error
// EvaluateNodes takes an expression and one or more yaml nodes, returning a list of matching candidate nodes
EvaluateNodes(expression string, nodes ...*yaml.Node) (*list.List, error)
@ -46,21 +46,16 @@ func (e *allAtOnceEvaluator) EvaluateCandidateNodes(expression string, inputCand
return context.MatchingNodes, nil
}
func (e *allAtOnceEvaluator) EvaluateFiles(expression string, filenames []string, printer Printer, leadingContentPreProcessing bool, decoder Decoder) error {
func (e *allAtOnceEvaluator) EvaluateFiles(expression string, filenames []string, printer Printer, decoder Decoder) error {
fileIndex := 0
firstFileLeadingContent := ""
var allDocuments = list.New()
for _, filename := range filenames {
reader, leadingContent, err := readStream(filename, fileIndex == 0 && leadingContentPreProcessing)
reader, err := readStream(filename)
if err != nil {
return err
}
if fileIndex == 0 {
firstFileLeadingContent = leadingContent
}
fileDocuments, err := readDocuments(reader, filename, fileIndex, decoder)
if err != nil {
return err
@ -75,11 +70,9 @@ func (e *allAtOnceEvaluator) EvaluateFiles(expression string, filenames []string
Filename: "",
Node: &yaml.Node{Kind: yaml.DocumentNode, Content: []*yaml.Node{{Tag: "!!null", Kind: yaml.ScalarNode}}},
FileIndex: 0,
LeadingContent: firstFileLeadingContent,
LeadingContent: "",
}
allDocuments.PushBack(candidateNode)
} else {
allDocuments.Front().Value.(*CandidateNode).LeadingContent = firstFileLeadingContent
}
matches, err := e.EvaluateCandidateNodes(expression, allDocuments)
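
With the leading-content slurping moved into the decoder, `EvaluateFiles` drops its `leadingContentPreProcessing` parameter. A hedged sketch of the new call shape; the file names and expression are placeholders, and `NewAllAtOnceEvaluator` is assumed to be the exported constructor the cmd package uses:

```go
package main

import (
	"os"

	"github.com/mikefarah/yq/v4/pkg/yqlib"
)

func main() {
	// Leading-content handling now travels with the decoder's preferences:
	// when evaluating all files together, only the first file's header is slurped.
	prefs := yqlib.ConfiguredYamlPreferences
	prefs.EvaluateTogether = true
	decoder := yqlib.NewYamlDecoder(prefs)

	encoder := yqlib.NewYamlEncoder(2, false, yqlib.ConfiguredYamlPreferences)
	printer := yqlib.NewPrinter(encoder, yqlib.NewSinglePrinterWriter(os.Stdout))

	evaluator := yqlib.NewAllAtOnceEvaluator()
	// Old signature: EvaluateFiles(expression, files, printer, leadingContentPreProcessing, decoder)
	// New signature: the boolean is gone.
	if err := evaluator.EvaluateFiles(".a", []string{"file1.yml", "file2.yml"}, printer, decoder); err != nil {
		panic(err)
	}
}
```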

View File

@ -136,13 +136,13 @@ var csvScenarios = []formatScenario{
func testCSVScenario(t *testing.T, s formatScenario) {
switch s.scenarioType {
case "encode-csv":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewYamlDecoder(), NewCsvEncoder(',')), s.description)
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewCsvEncoder(',')), s.description)
case "encode-tsv":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewYamlDecoder(), NewCsvEncoder('\t')), s.description)
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewCsvEncoder('\t')), s.description)
case "decode-csv-object":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewCSVObjectDecoder(','), NewYamlEncoder(2, false, true, true)), s.description)
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewCSVObjectDecoder(','), NewYamlEncoder(2, false, ConfiguredYamlPreferences)), s.description)
case "decode-tsv-object":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewCSVObjectDecoder('\t'), NewYamlEncoder(2, false, true, true)), s.description)
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewCSVObjectDecoder('\t'), NewYamlEncoder(2, false, ConfiguredYamlPreferences)), s.description)
case "roundtrip-csv":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewCSVObjectDecoder(','), NewCsvEncoder(',')), s.description)
default:
@ -171,7 +171,7 @@ func documentCSVDecodeObjectScenario(w *bufio.Writer, s formatScenario, formatTy
}
writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n",
processFormatScenario(s, NewCSVObjectDecoder(separator), NewYamlEncoder(s.indent, false, true, true))),
processFormatScenario(s, NewCSVObjectDecoder(separator), NewYamlEncoder(s.indent, false, ConfiguredYamlPreferences))),
)
}
@ -203,7 +203,7 @@ func documentCSVEncodeScenario(w *bufio.Writer, s formatScenario, formatType str
}
writeOrPanic(w, fmt.Sprintf("```%v\n%v```\n\n", formatType,
processFormatScenario(s, NewYamlDecoder(), NewCsvEncoder(separator))),
processFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewCsvEncoder(separator))),
)
}

View File

@ -3,8 +3,6 @@ package yqlib
import (
"fmt"
"io"
yaml "gopkg.in/yaml.v3"
)
type InputFormat uint
@ -20,8 +18,8 @@ const (
)
type Decoder interface {
Init(reader io.Reader)
Decode(node *yaml.Node) error
Init(reader io.Reader) error
Decode() (*CandidateNode, error)
}
func InputFormatFromString(format string) (InputFormat, error) {
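
The new `Decoder` contract is small: `Init` may fail, and `Decode` returns a `*CandidateNode` or `io.EOF` once the stream is exhausted. A toy implementation, purely illustrative and not part of this commit, to show the shape implementers now satisfy:

```go
package main

import (
	"bufio"
	"fmt"
	"io"
	"strings"

	"github.com/mikefarah/yq/v4/pkg/yqlib"
	yaml "gopkg.in/yaml.v3"
)

// lineDecoder is a toy Decoder: each input line becomes one string document.
type lineDecoder struct {
	scanner *bufio.Scanner
}

func (d *lineDecoder) Init(reader io.Reader) error {
	d.scanner = bufio.NewScanner(reader)
	return nil
}

func (d *lineDecoder) Decode() (*yqlib.CandidateNode, error) {
	if !d.scanner.Scan() {
		// no more lines: signal end of stream like the built-in decoders
		return nil, io.EOF
	}
	return &yqlib.CandidateNode{
		Node: &yaml.Node{
			Kind:    yaml.DocumentNode,
			Content: []*yaml.Node{{Kind: yaml.ScalarNode, Tag: "!!str", Value: d.scanner.Text()}},
		},
	}, nil
}

func main() {
	var dec yqlib.Decoder = &lineDecoder{}
	if err := dec.Init(strings.NewReader("one\ntwo\n")); err != nil {
		panic(err)
	}
	for {
		candidate, err := dec.Decode()
		if err != nil {
			break
		}
		fmt.Println(candidate.Node.Content[0].Value)
	}
}
```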

View File

@ -19,21 +19,22 @@ func NewBase64Decoder() Decoder {
return &base64Decoder{finished: false, encoding: *base64.StdEncoding}
}
func (dec *base64Decoder) Init(reader io.Reader) {
func (dec *base64Decoder) Init(reader io.Reader) error {
dec.reader = reader
dec.readAnything = false
dec.finished = false
return nil
}
func (dec *base64Decoder) Decode(rootYamlNode *yaml.Node) error {
func (dec *base64Decoder) Decode() (*CandidateNode, error) {
if dec.finished {
return io.EOF
return nil, io.EOF
}
base64Reader := base64.NewDecoder(&dec.encoding, dec.reader)
buf := new(bytes.Buffer)
if _, err := buf.ReadFrom(base64Reader); err != nil {
return err
return nil, err
}
if buf.Len() == 0 {
dec.finished = true
@ -42,12 +43,15 @@ func (dec *base64Decoder) Decode(rootYamlNode *yaml.Node) error {
// otherwise if we've already read some bytes, and now we get
// an empty string, then we are done.
if dec.readAnything {
return io.EOF
return nil, io.EOF
}
}
dec.readAnything = true
rootYamlNode.Kind = yaml.ScalarNode
rootYamlNode.Tag = "!!str"
rootYamlNode.Value = buf.String()
return nil
return &CandidateNode{
Node: &yaml.Node{
Kind: yaml.ScalarNode,
Tag: "!!str",
Value: buf.String(),
},
}, nil
}

View File

@ -19,12 +19,13 @@ func NewCSVObjectDecoder(separator rune) Decoder {
return &csvObjectDecoder{separator: separator}
}
func (dec *csvObjectDecoder) Init(reader io.Reader) {
func (dec *csvObjectDecoder) Init(reader io.Reader) error {
cleanReader, enc := utfbom.Skip(reader)
log.Debugf("Detected encoding: %s\n", enc)
dec.reader = *csv.NewReader(cleanReader)
dec.reader.Comma = dec.separator
dec.finished = false
return nil
}
func (dec *csvObjectDecoder) convertToYamlNode(content string) *yaml.Node {
@ -47,14 +48,14 @@ func (dec *csvObjectDecoder) createObject(headerRow []string, contentRow []strin
return objectNode
}
func (dec *csvObjectDecoder) Decode(rootYamlNode *yaml.Node) error {
func (dec *csvObjectDecoder) Decode() (*CandidateNode, error) {
if dec.finished {
return io.EOF
return nil, io.EOF
}
headerRow, err := dec.reader.Read()
log.Debugf(": headerRow%v", headerRow)
if err != nil {
return err
return nil, err
}
rootArray := &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq"}
@ -68,13 +69,13 @@ func (dec *csvObjectDecoder) Decode(rootYamlNode *yaml.Node) error {
log.Debugf("Read next contentRow: %v, %v", contentRow, err)
}
if !errors.Is(err, io.EOF) {
return err
return nil, err
}
log.Debugf("finished, contentRow%v", contentRow)
log.Debugf("err: %v", err)
rootYamlNode.Kind = yaml.DocumentNode
rootYamlNode.Content = []*yaml.Node{rootArray}
return nil
return &CandidateNode{
Node: &yaml.Node{
Kind: yaml.DocumentNode,
Content: []*yaml.Node{rootArray},
},
}, nil
}

View File

@ -16,26 +16,31 @@ func NewJSONDecoder() Decoder {
return &jsonDecoder{}
}
func (dec *jsonDecoder) Init(reader io.Reader) {
func (dec *jsonDecoder) Init(reader io.Reader) error {
dec.decoder = *json.NewDecoder(reader)
return nil
}
func (dec *jsonDecoder) Decode(rootYamlNode *yaml.Node) error {
func (dec *jsonDecoder) Decode() (*CandidateNode, error) {
var dataBucket orderedMap
log.Debug("going to decode")
err := dec.decoder.Decode(&dataBucket)
if err != nil {
return err
return nil, err
}
node, err := dec.convertToYamlNode(&dataBucket)
if err != nil {
return err
return nil, err
}
rootYamlNode.Kind = yaml.DocumentNode
rootYamlNode.Content = []*yaml.Node{node}
return nil
return &CandidateNode{
Node: &yaml.Node{
Kind: yaml.DocumentNode,
Content: []*yaml.Node{node},
},
}, nil
}
func (dec *jsonDecoder) convertToYamlNode(data *orderedMap) (*yaml.Node, error) {

View File

@ -2,6 +2,7 @@ package yqlib
import (
"bytes"
"fmt"
"io"
"strconv"
"strings"
@ -20,9 +21,10 @@ func NewPropertiesDecoder() Decoder {
return &propertiesDecoder{d: NewDataTreeNavigator(), finished: false}
}
func (dec *propertiesDecoder) Init(reader io.Reader) {
func (dec *propertiesDecoder) Init(reader io.Reader) error {
dec.reader = reader
dec.finished = false
return nil
}
func parsePropKey(key string) []interface{} {
@ -46,15 +48,49 @@ func (dec *propertiesDecoder) processComment(c string) string {
return "# " + c
}
func (dec *propertiesDecoder) applyProperty(properties *properties.Properties, context Context, key string) error {
func (dec *propertiesDecoder) applyPropertyComments(context Context, path []interface{}, comments []string) error {
assignmentOp := &Operation{OperationType: assignOpType, Preferences: assignPreferences{}}
rhsCandidateNode := &CandidateNode{
Path: path,
Node: &yaml.Node{
Tag: "!!str",
Value: fmt.Sprintf("%v", path[len(path)-1]),
HeadComment: dec.processComment(strings.Join(comments, "\n")),
Kind: yaml.ScalarNode,
},
}
rhsCandidateNode.Node.Tag = guessTagFromCustomType(rhsCandidateNode.Node)
rhsOp := &Operation{OperationType: valueOpType, CandidateNode: rhsCandidateNode}
assignmentOpNode := &ExpressionNode{
Operation: assignmentOp,
LHS: createTraversalTree(path, traversePreferences{}, true),
RHS: &ExpressionNode{Operation: rhsOp},
}
_, err := dec.d.GetMatchingNodes(context, assignmentOpNode)
return err
}
func (dec *propertiesDecoder) applyProperty(context Context, properties *properties.Properties, key string) error {
value, _ := properties.Get(key)
path := parsePropKey(key)
propertyComments := properties.GetComments(key)
if len(propertyComments) > 0 {
err := dec.applyPropertyComments(context, path, propertyComments)
if err != nil {
return err
}
}
rhsNode := &yaml.Node{
Value: value,
Tag: "!!str",
Kind: yaml.ScalarNode,
LineComment: dec.processComment(properties.GetComment(key)),
Value: value,
Tag: "!!str",
Kind: yaml.ScalarNode,
}
rhsNode.Tag = guessTagFromCustomType(rhsNode)
@ -78,22 +114,22 @@ func (dec *propertiesDecoder) applyProperty(properties *properties.Properties, c
return err
}
func (dec *propertiesDecoder) Decode(rootYamlNode *yaml.Node) error {
func (dec *propertiesDecoder) Decode() (*CandidateNode, error) {
if dec.finished {
return io.EOF
return nil, io.EOF
}
buf := new(bytes.Buffer)
if _, err := buf.ReadFrom(dec.reader); err != nil {
return err
return nil, err
}
if buf.Len() == 0 {
dec.finished = true
return io.EOF
return nil, io.EOF
}
properties, err := properties.LoadString(buf.String())
if err != nil {
return err
return nil, err
}
properties.DisableExpansion = true
@ -108,15 +144,18 @@ func (dec *propertiesDecoder) Decode(rootYamlNode *yaml.Node) error {
context = context.SingleChildContext(rootMap)
for _, key := range properties.Keys() {
if err := dec.applyProperty(properties, context, key); err != nil {
return err
if err := dec.applyProperty(context, properties, key); err != nil {
return nil, err
}
}
rootYamlNode.Kind = yaml.DocumentNode
rootYamlNode.Content = []*yaml.Node{rootMap.Node}
dec.finished = true
return nil
return &CandidateNode{
Node: &yaml.Node{
Kind: yaml.DocumentNode,
Content: []*yaml.Node{rootMap.Node},
},
}, nil
}

View File

@ -23,7 +23,7 @@ func processFormatScenario(s formatScenario, decoder Decoder, encoder Encoder) s
writer := bufio.NewWriter(&output)
if decoder == nil {
decoder = NewYamlDecoder()
decoder = NewYamlDecoder(ConfiguredYamlPreferences)
}
inputs, err := readDocuments(strings.NewReader(s.input), "sample.yml", 0, decoder)

View File

@ -25,10 +25,11 @@ func NewXMLDecoder(prefs XmlPreferences) Decoder {
}
}
func (dec *xmlDecoder) Init(reader io.Reader) {
func (dec *xmlDecoder) Init(reader io.Reader) error {
dec.reader = reader
dec.readAnything = false
dec.finished = false
return nil
}
func (dec *xmlDecoder) createSequence(nodes []*xmlNode) (*yaml.Node, error) {
@ -118,32 +119,36 @@ func (dec *xmlDecoder) convertToYamlNode(n *xmlNode) (*yaml.Node, error) {
return scalar, nil
}
func (dec *xmlDecoder) Decode(rootYamlNode *yaml.Node) error {
func (dec *xmlDecoder) Decode() (*CandidateNode, error) {
if dec.finished {
return io.EOF
return nil, io.EOF
}
root := &xmlNode{}
// cant use xj - it doesn't keep map order.
err := dec.decodeXML(root)
if err != nil {
return err
return nil, err
}
firstNode, err := dec.convertToYamlNode(root)
if err != nil {
return err
return nil, err
} else if firstNode.Tag == "!!null" {
dec.finished = true
if dec.readAnything {
return io.EOF
return nil, io.EOF
}
}
dec.readAnything = true
rootYamlNode.Kind = yaml.DocumentNode
rootYamlNode.Content = []*yaml.Node{firstNode}
dec.finished = true
return nil
return &CandidateNode{
Node: &yaml.Node{
Kind: yaml.DocumentNode,
Content: []*yaml.Node{firstNode},
},
}, nil
}
type xmlNode struct {

View File

@ -1,23 +1,114 @@
package yqlib
import (
"bufio"
"errors"
"io"
"regexp"
"strings"
yaml "gopkg.in/yaml.v3"
)
type yamlDecoder struct {
decoder yaml.Decoder
// workaround for various parsing issues in yaml.v3 with document headers
prefs YamlPreferences
leadingContent string
readAnything bool
firstFile bool
}
func NewYamlDecoder() Decoder {
return &yamlDecoder{}
func NewYamlDecoder(prefs YamlPreferences) Decoder {
return &yamlDecoder{prefs: prefs, firstFile: true}
}
func (dec *yamlDecoder) Init(reader io.Reader) {
dec.decoder = *yaml.NewDecoder(reader)
func (dec *yamlDecoder) processReadStream(reader *bufio.Reader) (io.Reader, string, error) {
var commentLineRegEx = regexp.MustCompile(`^\s*#`)
var sb strings.Builder
for {
peekBytes, err := reader.Peek(3)
if errors.Is(err, io.EOF) {
// EOF is handled elsewhere..
return reader, sb.String(), nil
} else if err != nil {
return reader, sb.String(), err
} else if string(peekBytes) == "---" {
_, err := reader.ReadString('\n')
sb.WriteString("$yqDocSeperator$\n")
if errors.Is(err, io.EOF) {
return reader, sb.String(), nil
} else if err != nil {
return reader, sb.String(), err
}
} else if commentLineRegEx.MatchString(string(peekBytes)) {
line, err := reader.ReadString('\n')
sb.WriteString(line)
if errors.Is(err, io.EOF) {
return reader, sb.String(), nil
} else if err != nil {
return reader, sb.String(), err
}
} else {
return reader, sb.String(), nil
}
}
}
func (dec *yamlDecoder) Decode(rootYamlNode *yaml.Node) error {
return dec.decoder.Decode(rootYamlNode)
func (dec *yamlDecoder) Init(reader io.Reader) error {
readerToUse := reader
leadingContent := ""
var err error
// if we're 'evaluating together', we only process the leading content
// of the first file - this ensures comments from subsequent files are
// merged together correctly.
if dec.prefs.LeadingContentPreProcessing && (!dec.prefs.EvaluateTogether || dec.firstFile) {
readerToUse, leadingContent, err = dec.processReadStream(bufio.NewReader(reader))
if err != nil {
return err
}
}
dec.leadingContent = leadingContent
dec.readAnything = false
dec.decoder = *yaml.NewDecoder(readerToUse)
dec.firstFile = false
return nil
}
func (dec *yamlDecoder) Decode() (*CandidateNode, error) {
var dataBucket yaml.Node
err := dec.decoder.Decode(&dataBucket)
if errors.Is(err, io.EOF) && dec.leadingContent != "" && !dec.readAnything {
// force returning an empty node with a comment.
dec.readAnything = true
return dec.blankNodeWithComment(), nil
} else if err != nil {
return nil, err
}
candidateNode := &CandidateNode{
Node: &dataBucket,
}
if dec.leadingContent != "" {
candidateNode.LeadingContent = dec.leadingContent
dec.leadingContent = ""
}
// move document comments into candidate node
// otherwise unwrap drops them.
candidateNode.TrailingContent = dataBucket.FootComment
dataBucket.FootComment = ""
return candidateNode, nil
}
func (dec *yamlDecoder) blankNodeWithComment() *CandidateNode {
return &CandidateNode{
Document: 0,
Filename: "",
Node: &yaml.Node{Kind: yaml.DocumentNode, Content: []*yaml.Node{{Tag: "!!null", Kind: yaml.ScalarNode}}},
FileIndex: 0,
LeadingContent: dec.leadingContent,
}
}
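
The leading-content workaround that used to live in `utils.go` now happens inside `Init`: comment lines are slurped into `leadingContent` and a bare `---` is recorded as the `$yqDocSeperator$` marker, which the first decoded candidate then carries. A small sketch of the observable behaviour, with an input chosen purely for illustration:

```go
package main

import (
	"errors"
	"fmt"
	"io"
	"strings"

	"github.com/mikefarah/yq/v4/pkg/yqlib"
)

func main() {
	// A file that starts with a comment and a document separator: exactly the
	// input that trips up yaml.v3, and that the decoder now pre-processes itself.
	input := "# top of file\n---\na: 1\n"

	prefs := yqlib.NewDefaultYamlPreferences()
	prefs.LeadingContentPreProcessing = true

	decoder := yqlib.NewYamlDecoder(prefs)
	if err := decoder.Init(strings.NewReader(input)); err != nil {
		panic(err)
	}

	for {
		candidate, err := decoder.Decode()
		if errors.Is(err, io.EOF) {
			break
		} else if err != nil {
			panic(err)
		}
		// The slurped header lands on the first candidate; "---" is recorded as
		// the $yqDocSeperator$ marker that the yaml printer later restores.
		fmt.Printf("leading content: %q\n", candidate.LeadingContent)
	}
}
```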

View File

@ -246,6 +246,7 @@ Note the use of `...` to ensure key nodes are included.
Given a sample.yml file of:
```yaml
# hi
a: cat # comment
# great
b: # key comment
@ -263,6 +264,7 @@ b:
## Get line comment
Given a sample.yml file of:
```yaml
# welcome!
a: cat # meow
# have a great day
```

View File

@ -9,7 +9,7 @@ Note that empty arrays and maps are not encoded by default.
Given a sample.yml file of:
```yaml
# block comments don't come through
# block comments come through
person: # neither do comments on maps
name: Mike Wazowski # comments on values appear
pets:
@ -25,6 +25,7 @@ yq -o=props sample.yml
```
will output
```properties
# block comments come through
# comments on values appear
person.name = Mike Wazowski
@ -38,7 +39,7 @@ Note that string values with blank characters in them are encapsulated with doub
Given a sample.yml file of:
```yaml
# block comments don't come through
# block comments come through
person: # neither do comments on maps
name: Mike Wazowski # comments on values appear
pets:
@ -54,6 +55,7 @@ yq -o=props --unwrapScalar=false sample.yml
```
will output
```properties
# block comments come through
# comments on values appear
person.name = "Mike Wazowski"
@ -65,7 +67,7 @@ person.food.0 = pizza
## Encode properties: no comments
Given a sample.yml file of:
```yaml
# block comments don't come through
# block comments come through
person: # neither do comments on maps
name: Mike Wazowski # comments on values appear
pets:
@ -91,7 +93,7 @@ Use a yq expression to set the empty maps and sequences to your desired value.
Given a sample.yml file of:
```yaml
# block comments don't come through
# block comments come through
person: # neither do comments on maps
name: Mike Wazowski # comments on values appear
pets:
@ -107,6 +109,7 @@ yq -o=props '(.. | select( (tag == "!!map" or tag =="!!seq") and length == 0)) =
```
will output
```properties
# block comments come through
# comments on values appear
person.name = Mike Wazowski
@ -120,6 +123,7 @@ emptyMap =
## Decode properties
Given a sample.properties file of:
```properties
# block comments come through
# comments on values appear
person.name = Mike Wazowski
@ -135,9 +139,12 @@ yq -p=props sample.properties
will output
```yaml
person:
name: Mike Wazowski # comments on values appear
# block comments come through
# comments on values appear
name: Mike Wazowski
pets:
- cat # comments on array values appear
# comments on array values appear
- cat
food:
- pizza
```
@ -145,6 +152,7 @@ person:
## Roundtrip
Given a sample.properties file of:
```properties
# block comments come through
# comments on values appear
person.name = Mike Wazowski
@ -159,6 +167,7 @@ yq -p=props -o=props '.person.pets.0 = "dog"' sample.properties
```
will output
```properties
# block comments come through
# comments on values appear
person.name = Mike Wazowski

View File

@ -386,6 +386,7 @@ A best attempt is made to copy comments to xml.
Given a sample.yml file of:
```yaml
# header comment
# above_cat
cat: # inline_cat
# above_array
@ -402,7 +403,10 @@ yq -o=xml '.' sample.yml
```
will output
```xml
<!-- above_cat inline_cat --><cat><!-- above_array inline_array -->
<!--
header comment
above_cat
--><!-- inline_cat --><cat><!-- above_array inline_array -->
<array>val1<!-- inline_val1 --></array>
<array><!-- above_val2 -->val2<!-- inline_val2 --></array>
</cat><!-- below_cat -->

View File

@ -65,7 +65,7 @@ func (pe *propertiesEncoder) PrintLeadingContent(writer io.Writer, content strin
func (pe *propertiesEncoder) Encode(writer io.Writer, node *yaml.Node) error {
mapKeysToStrings(node)
p := properties.NewProperties()
err := pe.doEncode(p, node, "")
err := pe.doEncode(p, node, "", nil)
if err != nil {
return err
}
@ -74,8 +74,17 @@ func (pe *propertiesEncoder) Encode(writer io.Writer, node *yaml.Node) error {
return err
}
func (pe *propertiesEncoder) doEncode(p *properties.Properties, node *yaml.Node, path string) error {
p.SetComment(path, headAndLineComment(node))
func (pe *propertiesEncoder) doEncode(p *properties.Properties, node *yaml.Node, path string, keyNode *yaml.Node) error {
comments := ""
if keyNode != nil {
// include the key node comments if present
comments = headAndLineComment(keyNode)
}
comments = comments + headAndLineComment(node)
commentsWithSpaces := strings.ReplaceAll(comments, "\n", "\n ")
p.SetComments(path, strings.Split(commentsWithSpaces, "\n"))
switch node.Kind {
case yaml.ScalarNode:
var nodeValue string
@ -87,13 +96,13 @@ func (pe *propertiesEncoder) doEncode(p *properties.Properties, node *yaml.Node,
_, _, err := p.Set(path, nodeValue)
return err
case yaml.DocumentNode:
return pe.doEncode(p, node.Content[0], path)
return pe.doEncode(p, node.Content[0], path, node)
case yaml.SequenceNode:
return pe.encodeArray(p, node.Content, path)
case yaml.MappingNode:
return pe.encodeMap(p, node.Content, path)
case yaml.AliasNode:
return pe.doEncode(p, node.Alias, path)
return pe.doEncode(p, node.Alias, path, nil)
default:
return fmt.Errorf("Unsupported node %v", node.Tag)
}
@ -108,7 +117,7 @@ func (pe *propertiesEncoder) appendPath(path string, key interface{}) string {
func (pe *propertiesEncoder) encodeArray(p *properties.Properties, kids []*yaml.Node, path string) error {
for index, child := range kids {
err := pe.doEncode(p, child, pe.appendPath(path, index))
err := pe.doEncode(p, child, pe.appendPath(path, index), nil)
if err != nil {
return err
}
@ -120,7 +129,7 @@ func (pe *propertiesEncoder) encodeMap(p *properties.Properties, kids []*yaml.No
for index := 0; index < len(kids); index = index + 2 {
key := kids[index]
value := kids[index+1]
err := pe.doEncode(p, value, pe.appendPath(path, key.Value))
err := pe.doEncode(p, value, pe.appendPath(path, key.Value), key)
if err != nil {
return err
}

View File

@ -14,7 +14,7 @@ func yamlToProps(sampleYaml string, unwrapScalar bool) string {
writer := bufio.NewWriter(&output)
var propsEncoder = NewPropertiesEncoder(unwrapScalar)
inputs, err := readDocuments(strings.NewReader(sampleYaml), "sample.yml", 0, NewYamlDecoder())
inputs, err := readDocuments(strings.NewReader(sampleYaml), "sample.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
if err != nil {
panic(err)
}

View File

@ -14,7 +14,7 @@ func yamlToJSON(sampleYaml string, indent int) string {
writer := bufio.NewWriter(&output)
var jsonEncoder = NewJSONEncoder(indent, false)
inputs, err := readDocuments(strings.NewReader(sampleYaml), "sample.yml", 0, NewYamlDecoder())
inputs, err := readDocuments(strings.NewReader(sampleYaml), "sample.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
if err != nil {
panic(err)
}

View File

@ -4,24 +4,28 @@ import (
"encoding/xml"
"fmt"
"io"
"regexp"
"strings"
yaml "gopkg.in/yaml.v3"
)
type xmlEncoder struct {
indentString string
writer io.Writer
prefs XmlPreferences
indentString string
writer io.Writer
prefs XmlPreferences
leadingContent string
}
var commentPrefix = regexp.MustCompile(`(^|\n)\s*#`)
func NewXMLEncoder(indent int, prefs XmlPreferences) Encoder {
var indentString = ""
for index := 0; index < indent; index++ {
indentString = indentString + " "
}
return &xmlEncoder{indentString, nil, prefs}
return &xmlEncoder{indentString, nil, prefs, ""}
}
func (e *xmlEncoder) CanHandleAliases() bool {
@ -33,6 +37,7 @@ func (e *xmlEncoder) PrintDocumentSeparator(writer io.Writer) error {
}
func (e *xmlEncoder) PrintLeadingContent(writer io.Writer, content string) error {
e.leadingContent = commentPrefix.ReplaceAllString(content, "\n")
return nil
}
@ -42,6 +47,13 @@ func (e *xmlEncoder) Encode(writer io.Writer, node *yaml.Node) error {
e.writer = writer
encoder.Indent("", e.indentString)
if e.leadingContent != "" {
err := e.encodeComment(encoder, e.leadingContent)
if err != nil {
return err
}
}
switch node.Kind {
case yaml.MappingNode:
err := e.encodeTopLevelMap(encoder, node)

View File

@ -11,17 +11,16 @@ import (
)
type yamlEncoder struct {
indent int
colorise bool
printDocSeparators bool
unwrapScalar bool
indent int
colorise bool
prefs YamlPreferences
}
func NewYamlEncoder(indent int, colorise bool, printDocSeparators bool, unwrapScalar bool) Encoder {
func NewYamlEncoder(indent int, colorise bool, prefs YamlPreferences) Encoder {
if indent < 0 {
indent = 0
}
return &yamlEncoder{indent, colorise, printDocSeparators, unwrapScalar}
return &yamlEncoder{indent, colorise, prefs}
}
func (ye *yamlEncoder) CanHandleAliases() bool {
@ -29,7 +28,7 @@ func (ye *yamlEncoder) CanHandleAliases() bool {
}
func (ye *yamlEncoder) PrintDocumentSeparator(writer io.Writer) error {
if ye.printDocSeparators {
if ye.prefs.PrintDocSeparators {
log.Debug("-- writing doc sep")
if err := writeString(writer, "---\n"); err != nil {
return err
@ -76,7 +75,7 @@ func (ye *yamlEncoder) PrintLeadingContent(writer io.Writer, content string) err
func (ye *yamlEncoder) Encode(writer io.Writer, node *yaml.Node) error {
if node.Kind == yaml.ScalarNode && ye.unwrapScalar {
if node.Kind == yaml.ScalarNode && ye.prefs.UnwrapScalar {
return writeString(writer, node.Value+"\n")
}

View File

@ -262,11 +262,11 @@ func documentDecodeNdJsonScenario(w *bufio.Writer, s formatScenario) {
writeOrPanic(w, "will output\n")
writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", processFormatScenario(s, NewJSONDecoder(), NewYamlEncoder(s.indent, false, true, true))))
writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", processFormatScenario(s, NewJSONDecoder(), NewYamlEncoder(s.indent, false, ConfiguredYamlPreferences))))
}
func decodeJSON(t *testing.T, jsonString string) *CandidateNode {
docs, err := readDocumentWithLeadingContent(jsonString, "sample.json", 0)
docs, err := readDocument(jsonString, "sample.json", 0)
if err != nil {
t.Error(err)
@ -293,12 +293,12 @@ func decodeJSON(t *testing.T, jsonString string) *CandidateNode {
func testJSONScenario(t *testing.T, s formatScenario) {
switch s.scenarioType {
case "encode", "decode":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewYamlDecoder(), NewJSONEncoder(s.indent, false)), s.description)
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewJSONEncoder(s.indent, false)), s.description)
case "":
var actual = resultToString(t, decodeJSON(t, s.input))
test.AssertResultWithContext(t, s.expected, actual, s.description)
case "decode-ndjson":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewJSONDecoder(), NewYamlEncoder(2, false, true, true)), s.description)
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewJSONDecoder(), NewYamlEncoder(2, false, ConfiguredYamlPreferences)), s.description)
case "roundtrip-ndjson":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewJSONDecoder(), NewJSONEncoder(0, false)), s.description)
case "roundtrip-multi":
@ -385,7 +385,7 @@ func documentJSONEncodeScenario(w *bufio.Writer, s formatScenario) {
}
writeOrPanic(w, "will output\n")
writeOrPanic(w, fmt.Sprintf("```json\n%v```\n\n", processFormatScenario(s, NewYamlDecoder(), NewJSONEncoder(s.indent, false))))
writeOrPanic(w, fmt.Sprintf("```json\n%v```\n\n", processFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewJSONEncoder(s.indent, false))))
}
func TestJSONScenarios(t *testing.T) {

View File

@ -84,7 +84,7 @@ var participleYqRules = []*participleYqRule{
{"LoadString", `load_?str|str_?load`, loadOp(nil, true), 0},
{"LoadYaml", `load`, loadOp(NewYamlDecoder(), false), 0},
{"LoadYaml", `load`, loadOp(NewYamlDecoder(ConfiguredYamlPreferences), false), 0},
{"SplitDocument", `splitDoc|split_?doc`, opToken(splitDocumentOpType), 0},

View File

@ -248,14 +248,16 @@ func guessTagFromCustomType(node *yaml.Node) string {
}
func parseSnippet(value string) (*yaml.Node, error) {
decoder := NewYamlDecoder()
decoder.Init(strings.NewReader(value))
var dataBucket yaml.Node
err := decoder.Decode(&dataBucket)
if len(dataBucket.Content) == 0 {
decoder := NewYamlDecoder(ConfiguredYamlPreferences)
err := decoder.Init(strings.NewReader(value))
if err != nil {
return nil, err
}
parsedNode, err := decoder.Decode()
if len(parsedNode.Node.Content) == 0 {
return nil, fmt.Errorf("bad data")
}
return dataBucket.Content[0], err
return unwrapDoc(parsedNode.Node), err
}
func recursiveNodeEqual(lhs *yaml.Node, rhs *yaml.Node) bool {

View File

@ -83,6 +83,10 @@ func getCommentsOperator(d *dataTreeNavigator, context Context, expressionNode *
log.Debugf("GetComments operator!")
var results = list.New()
yamlPrefs := NewDefaultYamlPreferences()
yamlPrefs.PrintDocSeparators = false
yamlPrefs.UnwrapScalar = false
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
candidate := el.Value.(*CandidateNode)
comment := ""
@ -92,7 +96,7 @@ func getCommentsOperator(d *dataTreeNavigator, context Context, expressionNode *
var chompRegexp = regexp.MustCompile(`\n$`)
var output bytes.Buffer
var writer = bufio.NewWriter(&output)
var encoder = NewYamlEncoder(2, false, false, false)
var encoder = NewYamlEncoder(2, false, yamlPrefs)
if err := encoder.PrintLeadingContent(writer, candidate.LeadingContent); err != nil {
return Context{}, err
}
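
The comments operator now builds its own `YamlPreferences` so that rendering a comment neither unwraps scalars nor emits `---`. The same idea in isolation, assuming `PrintLeadingContent` is part of the exported `Encoder` interface as its use above suggests:

```go
package main

import (
	"bufio"
	"os"

	"github.com/mikefarah/yq/v4/pkg/yqlib"
)

func main() {
	// Mirror the comments operator: render leading content through a yaml
	// encoder that neither unwraps scalars nor prints "---" separators, so the
	// result is the comment text alone.
	prefs := yqlib.NewDefaultYamlPreferences()
	prefs.PrintDocSeparators = false
	prefs.UnwrapScalar = false

	encoder := yqlib.NewYamlEncoder(2, false, prefs)

	writer := bufio.NewWriter(os.Stdout)
	if err := encoder.PrintLeadingContent(writer, "# hello\n"); err != nil {
		panic(err)
	}
	writer.Flush()
}
```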

View File

@ -4,7 +4,6 @@ import (
"container/list"
"errors"
"fmt"
"strings"
"time"
"gopkg.in/yaml.v3"
@ -54,7 +53,6 @@ func nowOp(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode
func formatDateTime(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
format, err := getStringParamter("format", d, context, expressionNode.RHS)
layout := context.GetDateTimeLayout()
decoder := NewYamlDecoder()
if err != nil {
return Context{}, err
@ -69,19 +67,15 @@ func formatDateTime(d *dataTreeNavigator, context Context, expressionNode *Expre
return Context{}, fmt.Errorf("could not parse datetime of [%v]: %w", candidate.GetNicePath(), err)
}
formattedTimeStr := parsedTime.Format(format)
decoder.Init(strings.NewReader(formattedTimeStr))
var dataBucket yaml.Node
errorReading := decoder.Decode(&dataBucket)
var node *yaml.Node
node, errorReading := parseSnippet(formattedTimeStr)
if errorReading != nil {
log.Debugf("could not parse %v - lets just leave it as a string", formattedTimeStr)
log.Debugf("could not parse %v - lets just leave it as a string: %w", formattedTimeStr, errorReading)
node = &yaml.Node{
Kind: yaml.ScalarNode,
Tag: "!!str",
Value: formattedTimeStr,
}
} else {
node = unwrapDoc(&dataBucket)
}
results.PushBack(candidate.CreateReplacement(node))

View File

@ -21,7 +21,7 @@ func configureEncoder(format PrinterOutputFormat, indent int) Encoder {
case TSVOutputFormat:
return NewCsvEncoder('\t')
case YamlOutputFormat:
return NewYamlEncoder(indent, false, true, true)
return NewYamlEncoder(indent, false, ConfiguredYamlPreferences)
case XMLOutputFormat:
return NewXMLEncoder(indent, ConfiguredXMLPreferences)
case Base64OutputFormat:
@ -102,7 +102,7 @@ func decodeOperator(d *dataTreeNavigator, context Context, expressionNode *Expre
var decoder Decoder
switch preferences.format {
case YamlInputFormat:
decoder = NewYamlDecoder()
decoder = NewYamlDecoder(ConfiguredYamlPreferences)
case XMLInputFormat:
decoder = NewXMLDecoder(ConfiguredXMLPreferences)
case Base64InputFormat:
@ -121,17 +121,19 @@ func decodeOperator(d *dataTreeNavigator, context Context, expressionNode *Expre
context.SetVariable("decoded: "+candidate.GetKey(), candidate.AsList())
var dataBucket yaml.Node
log.Debugf("got: [%v]", candidate.Node.Value)
decoder.Init(strings.NewReader(unwrapDoc(candidate.Node).Value))
err := decoder.Init(strings.NewReader(unwrapDoc(candidate.Node).Value))
if err != nil {
return Context{}, err
}
errorReading := decoder.Decode(&dataBucket)
decodedNode, errorReading := decoder.Decode()
if errorReading != nil {
return Context{}, errorReading
}
//first node is a doc
node := unwrapDoc(&dataBucket)
node := unwrapDoc(decodedNode.Node)
results.PushBack(candidate.CreateReplacement(node))
}

View File

@ -7,8 +7,16 @@ import (
var loadScenarios = []expressionScenario{
{
skipDoc: true,
description: "Load empty file",
description: "Load empty file with a comment",
expression: `load("../../examples/empty.yaml")`,
expected: []string{
"D0, P[], (doc)::# comment\n\n",
},
},
{
skipDoc: true,
description: "Load empty file with no comment",
expression: `load("../../examples/empty-no-comment.yaml")`,
expected: []string{
"D0, P[], (!!null)::\n",
},

View File

@ -40,21 +40,16 @@ func TestMain(m *testing.M) {
}
func NewSimpleYamlPrinter(writer io.Writer, outputFormat PrinterOutputFormat, unwrapScalar bool, colorsEnabled bool, indent int, printDocSeparators bool) Printer {
return NewPrinter(NewYamlEncoder(indent, colorsEnabled, printDocSeparators, unwrapScalar), NewSinglePrinterWriter(writer))
prefs := NewDefaultYamlPreferences()
prefs.PrintDocSeparators = printDocSeparators
prefs.UnwrapScalar = unwrapScalar
return NewPrinter(NewYamlEncoder(indent, colorsEnabled, prefs), NewSinglePrinterWriter(writer))
}
func readDocumentWithLeadingContent(content string, fakefilename string, fakeFileIndex int) (*list.List, error) {
reader, firstFileLeadingContent, err := processReadStream(bufio.NewReader(strings.NewReader(content)))
if err != nil {
return nil, err
}
func readDocument(content string, fakefilename string, fakeFileIndex int) (*list.List, error) {
reader := bufio.NewReader(strings.NewReader(content))
inputs, err := readDocuments(reader, fakefilename, fakeFileIndex, NewYamlDecoder())
if err != nil {
return nil, err
}
inputs.Front().Value.(*CandidateNode).LeadingContent = firstFileLeadingContent
return inputs, nil
return readDocuments(reader, fakefilename, fakeFileIndex, NewYamlDecoder(ConfiguredYamlPreferences))
}
func testScenario(t *testing.T, s *expressionScenario) {
@ -67,7 +62,7 @@ func testScenario(t *testing.T, s *expressionScenario) {
inputs := list.New()
if s.document != "" {
inputs, err = readDocumentWithLeadingContent(s.document, "sample.yml", 0)
inputs, err = readDocument(s.document, "sample.yml", 0)
if err != nil {
t.Error(err, s.document, s.expression)
@ -75,7 +70,7 @@ func testScenario(t *testing.T, s *expressionScenario) {
}
if s.document2 != "" {
moreInputs, err := readDocumentWithLeadingContent(s.document2, "another.yml", 1)
moreInputs, err := readDocument(s.document2, "another.yml", 1)
if err != nil {
t.Error(err, s.document2, s.expression)
return
@ -176,7 +171,7 @@ func formatYaml(yaml string, filename string) string {
panic(err)
}
streamEvaluator := NewStreamEvaluator()
_, err = streamEvaluator.Evaluate(filename, strings.NewReader(yaml), node, printer, "", NewYamlDecoder())
_, err = streamEvaluator.Evaluate(filename, strings.NewReader(yaml), node, printer, NewYamlDecoder(ConfiguredYamlPreferences))
if err != nil {
panic(err)
}
@ -322,13 +317,13 @@ func documentOutput(t *testing.T, w *bufio.Writer, s expressionScenario, formatt
if s.document != "" {
inputs, err = readDocumentWithLeadingContent(formattedDoc, "sample.yml", 0)
inputs, err = readDocument(formattedDoc, "sample.yml", 0)
if err != nil {
t.Error(err, s.document, s.expression)
return
}
if s.document2 != "" {
moreInputs, err := readDocumentWithLeadingContent(formattedDoc2, "another.yml", 1)
moreInputs, err := readDocument(formattedDoc2, "another.yml", 1)
if err != nil {
t.Error(err, s.document, s.expression)
return

View File

@ -111,7 +111,7 @@ func (p *resultsPrinter) PrintResults(matchingNodes *list.List) error {
mappedDoc := el.Value.(*CandidateNode)
log.Debug("-- print sep logic: p.firstTimePrinting: %v, previousDocIndex: %v, mappedDoc.Document: %v", p.firstTimePrinting, p.previousDocIndex, mappedDoc.Document)
log.Debug("%v", NodeToString(mappedDoc))
writer, errorWriting := p.printerWriter.GetWriter(mappedDoc)
if errorWriting != nil {
return errorWriting

View File

@ -38,7 +38,7 @@ func TestPrinterMultipleDocsInSequenceOnly(t *testing.T) {
var writer = bufio.NewWriter(&output)
printer := NewSimpleYamlPrinter(writer, YamlOutputFormat, true, false, 2, true)
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
if err != nil {
panic(err)
}
@ -76,7 +76,7 @@ func TestPrinterMultipleDocsInSequenceWithLeadingContent(t *testing.T) {
var writer = bufio.NewWriter(&output)
printer := NewSimpleYamlPrinter(writer, YamlOutputFormat, true, false, 2, true)
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
if err != nil {
panic(err)
}
@ -118,7 +118,7 @@ func TestPrinterMultipleFilesInSequence(t *testing.T) {
var writer = bufio.NewWriter(&output)
printer := NewSimpleYamlPrinter(writer, YamlOutputFormat, true, false, 2, true)
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
if err != nil {
panic(err)
}
@ -165,7 +165,7 @@ func TestPrinterMultipleFilesInSequenceWithLeadingContent(t *testing.T) {
var writer = bufio.NewWriter(&output)
printer := NewSimpleYamlPrinter(writer, YamlOutputFormat, true, false, 2, true)
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
if err != nil {
panic(err)
}
@ -215,7 +215,7 @@ func TestPrinterMultipleDocsInSinglePrint(t *testing.T) {
var writer = bufio.NewWriter(&output)
printer := NewSimpleYamlPrinter(writer, YamlOutputFormat, true, false, 2, true)
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
if err != nil {
panic(err)
}
@ -234,7 +234,7 @@ func TestPrinterMultipleDocsInSinglePrintWithLeadingDoc(t *testing.T) {
var writer = bufio.NewWriter(&output)
printer := NewSimpleYamlPrinter(writer, YamlOutputFormat, true, false, 2, true)
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
if err != nil {
panic(err)
}
@ -263,7 +263,7 @@ func TestPrinterMultipleDocsInSinglePrintWithLeadingDocTrailing(t *testing.T) {
var writer = bufio.NewWriter(&output)
printer := NewSimpleYamlPrinter(writer, YamlOutputFormat, true, false, 2, true)
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
if err != nil {
panic(err)
}
@ -294,7 +294,7 @@ func TestPrinterScalarWithLeadingCont(t *testing.T) {
panic(err)
}
streamEvaluator := NewStreamEvaluator()
_, err = streamEvaluator.Evaluate("sample", strings.NewReader(multiDocSample), node, printer, "# blah\n", NewYamlDecoder())
_, err = streamEvaluator.Evaluate("sample", strings.NewReader(multiDocSample), node, printer, NewYamlDecoder(ConfiguredYamlPreferences))
if err != nil {
panic(err)
}
@ -316,7 +316,7 @@ func TestPrinterMultipleDocsJson(t *testing.T) {
// when outputing JSON.
printer := NewPrinter(NewJSONEncoder(0, false), NewSinglePrinterWriter(writer))
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder())
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
if err != nil {
panic(err)
}

View File

@ -8,7 +8,49 @@ import (
"github.com/mikefarah/yq/v4/test"
)
const samplePropertiesYaml = `# block comments don't come through
const propertiesWithCommentsOnMap = `this.thing = hi hi
# important notes
# about this value
this.value = cool
`
const expectedPropertiesWithCommentsOnMapProps = `this.thing = hi hi
# important notes
# about this value
this.value = cool
`
const expectedPropertiesWithCommentsOnMapYaml = `this:
thing: hi hi
# important notes
# about this value
value: cool
`
const propertiesWithCommentInArray = `
this.array.0 = cat
# important notes
# about dogs
this.array.1 = dog
`
const expectedPropertiesWithCommentInArrayProps = `this.array.0 = cat
# important notes
# about dogs
this.array.1 = dog
`
const expectedPropertiesWithCommentInArrayYaml = `this:
array:
- cat
# important notes
# about dogs
- dog
`
const samplePropertiesYaml = `# block comments come through
person: # neither do comments on maps
name: Mike Wazowski # comments on values appear
pets:
@ -18,7 +60,8 @@ emptyArray: []
emptyMap: []
`
const expectedPropertiesUnwrapped = `# comments on values appear
const expectedPropertiesUnwrapped = `# block comments come through
# comments on values appear
person.name = Mike Wazowski
# comments on array values appear
@ -26,7 +69,8 @@ person.pets.0 = cat
person.food.0 = pizza
`
const expectedPropertiesWrapped = `# comments on values appear
const expectedPropertiesWrapped = `# block comments come through
# comments on values appear
person.name = "Mike Wazowski"
# comments on array values appear
@ -34,7 +78,8 @@ person.pets.0 = cat
person.food.0 = pizza
`
const expectedUpdatedProperties = `# comments on values appear
const expectedUpdatedProperties = `# block comments come through
# comments on values appear
person.name = Mike Wazowski
# comments on array values appear
@ -43,9 +88,12 @@ person.food.0 = pizza
`
const expectedDecodedYaml = `person:
name: Mike Wazowski # comments on values appear
# block comments come through
# comments on values appear
name: Mike Wazowski
pets:
- cat # comments on array values appear
# comments on array values appear
- cat
food:
- pizza
`
@ -55,7 +103,8 @@ person.pets.0 = cat
person.food.0 = pizza
`
const expectedPropertiesWithEmptyMapsAndArrays = `# comments on values appear
const expectedPropertiesWithEmptyMapsAndArrays = `# block comments come through
# comments on values appear
person.name = Mike Wazowski
# comments on array values appear
@ -112,6 +161,34 @@ var propertyScenarios = []formatScenario{
expected: expectedUpdatedProperties,
scenarioType: "roundtrip",
},
{
skipDoc: true,
description: "comments on arrays roundtrip",
input: propertiesWithCommentInArray,
expected: expectedPropertiesWithCommentInArrayProps,
scenarioType: "roundtrip",
},
{
skipDoc: true,
description: "comments on arrays decode",
input: propertiesWithCommentInArray,
expected: expectedPropertiesWithCommentInArrayYaml,
scenarioType: "decode",
},
{
skipDoc: true,
description: "comments on map roundtrip",
input: propertiesWithCommentsOnMap,
expected: expectedPropertiesWithCommentsOnMapProps,
scenarioType: "roundtrip",
},
{
skipDoc: true,
description: "comments on map decode",
input: propertiesWithCommentsOnMap,
expected: expectedPropertiesWithCommentsOnMapYaml,
scenarioType: "decode",
},
{
description: "Empty doc",
skipDoc: true,
@ -143,7 +220,7 @@ func documentUnwrappedEncodePropertyScenario(w *bufio.Writer, s formatScenario)
}
writeOrPanic(w, "will output\n")
writeOrPanic(w, fmt.Sprintf("```properties\n%v```\n\n", processFormatScenario(s, NewYamlDecoder(), NewPropertiesEncoder(true))))
writeOrPanic(w, fmt.Sprintf("```properties\n%v```\n\n", processFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewPropertiesEncoder(true))))
}
func documentWrappedEncodePropertyScenario(w *bufio.Writer, s formatScenario) {
@ -168,7 +245,7 @@ func documentWrappedEncodePropertyScenario(w *bufio.Writer, s formatScenario) {
}
writeOrPanic(w, "will output\n")
writeOrPanic(w, fmt.Sprintf("```properties\n%v```\n\n", processFormatScenario(s, NewYamlDecoder(), NewPropertiesEncoder(false))))
writeOrPanic(w, fmt.Sprintf("```properties\n%v```\n\n", processFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewPropertiesEncoder(false))))
}
func documentDecodePropertyScenario(w *bufio.Writer, s formatScenario) {
@ -193,7 +270,7 @@ func documentDecodePropertyScenario(w *bufio.Writer, s formatScenario) {
writeOrPanic(w, "will output\n")
writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", processFormatScenario(s, NewPropertiesDecoder(), NewYamlEncoder(s.indent, false, true, true))))
writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", processFormatScenario(s, NewPropertiesDecoder(), NewYamlEncoder(s.indent, false, ConfiguredYamlPreferences))))
}
func documentRoundTripPropertyScenario(w *bufio.Writer, s formatScenario) {
@ -245,11 +322,11 @@ func TestPropertyScenarios(t *testing.T) {
for _, s := range propertyScenarios {
switch s.scenarioType {
case "":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewYamlDecoder(), NewPropertiesEncoder(true)), s.description)
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewPropertiesEncoder(true)), s.description)
case "decode":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewPropertiesDecoder(), NewYamlEncoder(2, false, true, true)), s.description)
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewPropertiesDecoder(), NewYamlEncoder(2, false, ConfiguredYamlPreferences)), s.description)
case "encode-wrapped":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewYamlDecoder(), NewPropertiesEncoder(false)), s.description)
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewPropertiesEncoder(false)), s.description)
case "roundtrip":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewPropertiesDecoder(), NewPropertiesEncoder(true)), s.description)

View File

@ -14,9 +14,9 @@ import (
// Uses less memory than loading all documents and running the expression once, but this cannot process
// cross document expressions.
type StreamEvaluator interface {
Evaluate(filename string, reader io.Reader, node *ExpressionNode, printer Printer, leadingContent string, decoder Decoder) (uint, error)
EvaluateFiles(expression string, filenames []string, printer Printer, leadingContentPreProcessing bool, decoder Decoder) error
EvaluateNew(expression string, printer Printer, leadingContent string) error
Evaluate(filename string, reader io.Reader, node *ExpressionNode, printer Printer, decoder Decoder) (uint, error)
EvaluateFiles(expression string, filenames []string, printer Printer, decoder Decoder) error
EvaluateNew(expression string, printer Printer) error
}
type streamEvaluator struct {
@ -28,17 +28,16 @@ func NewStreamEvaluator() StreamEvaluator {
return &streamEvaluator{treeNavigator: NewDataTreeNavigator()}
}
func (s *streamEvaluator) EvaluateNew(expression string, printer Printer, leadingContent string) error {
func (s *streamEvaluator) EvaluateNew(expression string, printer Printer) error {
node, err := ExpressionParser.ParseExpression(expression)
if err != nil {
return err
}
candidateNode := &CandidateNode{
Document: 0,
Filename: "",
Node: &yaml.Node{Kind: yaml.DocumentNode, Content: []*yaml.Node{{Tag: "!!null", Kind: yaml.ScalarNode}}},
FileIndex: 0,
LeadingContent: leadingContent,
Document: 0,
Filename: "",
Node: &yaml.Node{Kind: yaml.DocumentNode, Content: []*yaml.Node{{Tag: "!!null", Kind: yaml.ScalarNode}}},
FileIndex: 0,
}
inputList := list.New()
inputList.PushBack(candidateNode)
@ -50,27 +49,20 @@ func (s *streamEvaluator) EvaluateNew(expression string, printer Printer, leadin
return printer.PrintResults(result.MatchingNodes)
}
func (s *streamEvaluator) EvaluateFiles(expression string, filenames []string, printer Printer, leadingContentPreProcessing bool, decoder Decoder) error {
func (s *streamEvaluator) EvaluateFiles(expression string, filenames []string, printer Printer, decoder Decoder) error {
var totalProcessDocs uint
node, err := ExpressionParser.ParseExpression(expression)
if err != nil {
return err
}
var firstFileLeadingContent string
for index, filename := range filenames {
reader, leadingContent, err := readStream(filename, leadingContentPreProcessing)
log.Debug("leadingContent: %v", leadingContent)
if index == 0 {
firstFileLeadingContent = leadingContent
}
for _, filename := range filenames {
reader, err := readStream(filename)
if err != nil {
return err
}
processedDocs, err := s.Evaluate(filename, reader, node, printer, leadingContent, decoder)
processedDocs, err := s.Evaluate(filename, reader, node, printer, decoder)
if err != nil {
return err
}
@ -83,19 +75,22 @@ func (s *streamEvaluator) EvaluateFiles(expression string, filenames []string, p
}
if totalProcessDocs == 0 {
return s.EvaluateNew(expression, printer, firstFileLeadingContent)
// problem is I've already slurped the leading content sadface
return s.EvaluateNew(expression, printer)
}
return nil
}
func (s *streamEvaluator) Evaluate(filename string, reader io.Reader, node *ExpressionNode, printer Printer, leadingContent string, decoder Decoder) (uint, error) {
func (s *streamEvaluator) Evaluate(filename string, reader io.Reader, node *ExpressionNode, printer Printer, decoder Decoder) (uint, error) {
var currentIndex uint
decoder.Init(reader)
err := decoder.Init(reader)
if err != nil {
return 0, err
}
for {
var dataBucket yaml.Node
errorReading := decoder.Decode(&dataBucket)
candidateNode, errorReading := decoder.Decode()
if errors.Is(errorReading, io.EOF) {
s.fileIndex = s.fileIndex + 1
@ -103,21 +98,10 @@ func (s *streamEvaluator) Evaluate(filename string, reader io.Reader, node *Expr
} else if errorReading != nil {
return currentIndex, fmt.Errorf("bad file '%v': %w", filename, errorReading)
}
candidateNode.Document = currentIndex
candidateNode.Filename = filename
candidateNode.FileIndex = s.fileIndex
candidateNode := &CandidateNode{
Document: currentIndex,
Filename: filename,
Node: &dataBucket,
FileIndex: s.fileIndex,
}
// move document comments into candidate node
// otherwise unwrap drops them.
candidateNode.TrailingContent = dataBucket.FootComment
dataBucket.FootComment = ""
if currentIndex == 0 {
candidateNode.LeadingContent = leadingContent
}
inputList := list.New()
inputList.PushBack(candidateNode)

View File

@ -1,17 +1,17 @@
package yqlib
import (
"bufio"
"bytes"
"container/list"
"errors"
"fmt"
"io"
yaml "gopkg.in/yaml.v3"
"strings"
)
type StringEvaluator interface {
Evaluate(expression string, input string, encoder Encoder, leadingContentPreProcessing bool, decoder Decoder) (string, error)
Evaluate(expression string, input string, encoder Encoder, decoder Decoder) (string, error)
}
type stringEvaluator struct {
@ -25,7 +25,7 @@ func NewStringEvaluator() StringEvaluator {
}
}
func (s *stringEvaluator) Evaluate(expression string, input string, encoder Encoder, leadingContentPreProcessing bool, decoder Decoder) (string, error) {
func (s *stringEvaluator) Evaluate(expression string, input string, encoder Encoder, decoder Decoder) (string, error) {
// Use bytes.Buffer for output of string
out := new(bytes.Buffer)
@ -37,16 +37,15 @@ func (s *stringEvaluator) Evaluate(expression string, input string, encoder Enco
return "", err
}
reader, leadingContent, err := readString(input, leadingContentPreProcessing)
reader := bufio.NewReader(strings.NewReader(input))
var currentIndex uint
err = decoder.Init(reader)
if err != nil {
return "", err
}
var currentIndex uint
decoder.Init(reader)
for {
var dataBucket yaml.Node
errorReading := decoder.Decode(&dataBucket)
candidateNode, errorReading := decoder.Decode()
if errors.Is(errorReading, io.EOF) {
s.fileIndex = s.fileIndex + 1
@ -54,20 +53,9 @@ func (s *stringEvaluator) Evaluate(expression string, input string, encoder Enco
} else if errorReading != nil {
return "", fmt.Errorf("bad input '%v': %w", input, errorReading)
}
candidateNode.Document = currentIndex
candidateNode.FileIndex = s.fileIndex
candidateNode := &CandidateNode{
Document: currentIndex,
Node: &dataBucket,
FileIndex: s.fileIndex,
}
// move document comments into candidate node
// otherwise unwrap drops them.
candidateNode.TrailingContent = dataBucket.FootComment
dataBucket.FootComment = ""
if currentIndex == 0 {
candidateNode.LeadingContent = leadingContent
}
inputList := list.New()
inputList.PushBack(candidateNode)

View File

@ -18,10 +18,10 @@ func TestStringEvaluator_Evaluate_Nominal(t *testing.T) {
`---` + "\n" +
` - name: jq` + "\n" +
` description: Command-line JSON processor` + "\n"
encoder := NewYamlEncoder(2, true, true, true)
decoder := NewYamlDecoder()
encoder := NewYamlEncoder(2, true, ConfiguredYamlPreferences)
decoder := NewYamlDecoder(ConfiguredYamlPreferences)
result, err := NewStringEvaluator().Evaluate(expression, input, encoder, true, decoder)
result, err := NewStringEvaluator().Evaluate(expression, input, encoder, decoder)
if err != nil {
t.Error(err)
}

View File

@ -7,13 +7,9 @@ import (
"fmt"
"io"
"os"
"regexp"
"strings"
yaml "gopkg.in/yaml.v3"
)
func readStream(filename string, leadingContentPreProcessing bool) (io.Reader, string, error) {
func readStream(filename string) (io.Reader, error) {
var reader *bufio.Reader
if filename == "-" {
reader = bufio.NewReader(os.Stdin)
@ -22,23 +18,12 @@ func readStream(filename string, leadingContentPreProcessing bool) (io.Reader, s
// and ensuring that it's not possible to give a path to a file outside that directory.
file, err := os.Open(filename) // #nosec
if err != nil {
return nil, "", err
return nil, err
}
reader = bufio.NewReader(file)
}
return reader, nil
if !leadingContentPreProcessing {
return reader, "", nil
}
return processReadStream(reader)
}
func readString(input string, leadingContentPreProcessing bool) (io.Reader, string, error) {
reader := bufio.NewReader(strings.NewReader(input))
if !leadingContentPreProcessing {
return reader, "", nil
}
return processReadStream(reader)
}
func writeString(writer io.Writer, txt string) error {
@ -46,46 +31,16 @@ func writeString(writer io.Writer, txt string) error {
return errorWriting
}
func processReadStream(reader *bufio.Reader) (io.Reader, string, error) {
var commentLineRegEx = regexp.MustCompile(`^\s*#`)
var sb strings.Builder
for {
peekBytes, err := reader.Peek(3)
if errors.Is(err, io.EOF) {
// EOF is handled elsewhere.
return reader, sb.String(), nil
} else if err != nil {
return reader, sb.String(), err
} else if string(peekBytes) == "---" {
_, err := reader.ReadString('\n')
sb.WriteString("$yqDocSeperator$\n")
if errors.Is(err, io.EOF) {
return reader, sb.String(), nil
} else if err != nil {
return reader, sb.String(), err
}
} else if commentLineRegEx.MatchString(string(peekBytes)) {
line, err := reader.ReadString('\n')
sb.WriteString(line)
if errors.Is(err, io.EOF) {
return reader, sb.String(), nil
} else if err != nil {
return reader, sb.String(), err
}
} else {
return reader, sb.String(), nil
}
}
}
func readDocuments(reader io.Reader, filename string, fileIndex int, decoder Decoder) (*list.List, error) {
decoder.Init(reader)
err := decoder.Init(reader)
if err != nil {
return nil, err
}
inputList := list.New()
var currentIndex uint
for {
var dataBucket yaml.Node
errorReading := decoder.Decode(&dataBucket)
candidateNode, errorReading := decoder.Decode()
if errors.Is(errorReading, io.EOF) {
switch reader := reader.(type) {
@ -96,18 +51,10 @@ func readDocuments(reader io.Reader, filename string, fileIndex int, decoder Dec
} else if errorReading != nil {
return nil, fmt.Errorf("bad file '%v': %w", filename, errorReading)
}
candidateNode := &CandidateNode{
Document: currentIndex,
Filename: filename,
Node: &dataBucket,
FileIndex: fileIndex,
EvaluateTogether: true,
}
//move document comments into candidate node
// otherwise unwrap drops them.
candidateNode.TrailingContent = dataBucket.FootComment
dataBucket.FootComment = ""
candidateNode.Document = currentIndex
candidateNode.Filename = filename
candidateNode.FileIndex = fileIndex
candidateNode.EvaluateTogether = true
inputList.PushBack(candidateNode)
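The comment-and-separator slurping that readStream and readString used to perform (via the processReadStream helper removed above) is gone from this file; per the commit message, the YAML decoder now owns that workaround. A hypothetical sketch of where it could live, assuming a yamlDecoder type that holds its preferences and a helper equivalent to the removed processReadStream — names here are illustrative, not the exact internals:

package yqlib

import (
	"bufio"
	"io"

	yaml "gopkg.in/yaml.v3"
)

// Illustrative only: the field names and the processReadStream helper are
// assumed to exist alongside the decoder after this refactor.
type yamlDecoder struct {
	decoder        *yaml.Decoder
	prefs          YamlPreferences
	leadingContent string
}

func (dec *yamlDecoder) Init(reader io.Reader) error {
	var readerToUse io.Reader = reader
	leadingContent := ""
	if dec.prefs.LeadingContentPreProcessing {
		var err error
		// slurp leading comments and --- separators, much as the removed
		// processReadStream helper did, and stash them for later output
		readerToUse, leadingContent, err = processReadStream(bufio.NewReader(reader))
		if err != nil {
			return err
		}
	}
	dec.leadingContent = leadingContent
	dec.decoder = yaml.NewDecoder(readerToUse)
	return nil
}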

View File

@ -137,7 +137,8 @@ in d before -->
</cat><!-- after cat -->
`
const yamlWithComments = `# above_cat
const yamlWithComments = `# header comment
# above_cat
cat: # inline_cat
# above_array
array: # inline_array
@ -147,7 +148,10 @@ cat: # inline_cat
# below_cat
`
const expectedXMLWithComments = `<!-- above_cat inline_cat --><cat><!-- above_array inline_array -->
const expectedXMLWithComments = `<!--
header comment
above_cat
--><!-- inline_cat --><cat><!-- above_array inline_array -->
<array>val1<!-- inline_val1 --></array>
<array><!-- above_val2 -->val2<!-- inline_val2 --></array>
</cat><!-- below_cat -->
@ -414,19 +418,19 @@ var xmlScenarios = []formatScenario{
func testXMLScenario(t *testing.T, s formatScenario) {
switch s.scenarioType {
case "", "decode":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewXMLDecoder(ConfiguredXMLPreferences), NewYamlEncoder(4, false, true, true)), s.description)
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewXMLDecoder(ConfiguredXMLPreferences), NewYamlEncoder(4, false, ConfiguredYamlPreferences)), s.description)
case "encode":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewYamlDecoder(), NewXMLEncoder(2, ConfiguredXMLPreferences)), s.description)
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewXMLEncoder(2, ConfiguredXMLPreferences)), s.description)
case "roundtrip":
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewXMLDecoder(ConfiguredXMLPreferences), NewXMLEncoder(2, ConfiguredXMLPreferences)), s.description)
case "decode-keep-ns":
prefs := NewDefaultXmlPreferences()
prefs.KeepNamespace = true
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewXMLDecoder(prefs), NewYamlEncoder(2, false, true, true)), s.description)
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewXMLDecoder(prefs), NewYamlEncoder(2, false, ConfiguredYamlPreferences)), s.description)
case "decode-raw-token":
prefs := NewDefaultXmlPreferences()
prefs.UseRawToken = true
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewXMLDecoder(prefs), NewYamlEncoder(2, false, true, true)), s.description)
test.AssertResultWithContext(t, s.expected, processFormatScenario(s, NewXMLDecoder(prefs), NewYamlEncoder(2, false, ConfiguredYamlPreferences)), s.description)
case "roundtrip-skip-directives":
prefs := NewDefaultXmlPreferences()
prefs.SkipDirectives = true
@ -480,7 +484,7 @@ func documentXMLDecodeScenario(w *bufio.Writer, s formatScenario) {
writeOrPanic(w, fmt.Sprintf("```bash\nyq -p=xml '%v' sample.xml\n```\n", expression))
writeOrPanic(w, "will output\n")
writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", processFormatScenario(s, NewXMLDecoder(ConfiguredXMLPreferences), NewYamlEncoder(2, false, true, true))))
writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", processFormatScenario(s, NewXMLDecoder(ConfiguredXMLPreferences), NewYamlEncoder(2, false, ConfiguredYamlPreferences))))
}
func documentXMLDecodeKeepNsScenario(w *bufio.Writer, s formatScenario) {
@ -549,7 +553,7 @@ func documentXMLEncodeScenario(w *bufio.Writer, s formatScenario) {
writeOrPanic(w, "```bash\nyq -o=xml '.' sample.yml\n```\n")
writeOrPanic(w, "will output\n")
writeOrPanic(w, fmt.Sprintf("```xml\n%v```\n\n", processFormatScenario(s, NewYamlDecoder(), NewXMLEncoder(2, ConfiguredXMLPreferences))))
writeOrPanic(w, fmt.Sprintf("```xml\n%v```\n\n", processFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewXMLEncoder(2, ConfiguredXMLPreferences))))
}
func documentXMLRoundTripScenario(w *bufio.Writer, s formatScenario) {

19
pkg/yqlib/yaml.go Normal file
View File

@ -0,0 +1,19 @@
package yqlib
type YamlPreferences struct {
LeadingContentPreProcessing bool
PrintDocSeparators bool
UnwrapScalar bool
EvaluateTogether bool
}
func NewDefaultYamlPreferences() YamlPreferences {
return YamlPreferences{
LeadingContentPreProcessing: true,
PrintDocSeparators: true,
UnwrapScalar: true,
EvaluateTogether: false,
}
}
var ConfiguredYamlPreferences = NewDefaultYamlPreferences()
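These preferences give library callers one place to tune YAML behaviour instead of a handful of booleans. A minimal sketch of external usage, assuming the v4 module path and that the expression parser needs explicit initialisation via yqlib.InitExpressionParser() before evaluating (both assumptions, not shown in this change):

package main

import (
	"fmt"

	"github.com/mikefarah/yq/v4/pkg/yqlib"
)

func main() {
	// Assumed prerequisite: initialise the expression parser before use.
	yqlib.InitExpressionParser()

	// Start from the defaults and override only what we need.
	prefs := yqlib.NewDefaultYamlPreferences()
	prefs.LeadingContentPreProcessing = false

	// Encoder and decoder now take the shared preferences struct.
	encoder := yqlib.NewYamlEncoder(2, false, prefs)
	decoder := yqlib.NewYamlDecoder(prefs)

	result, err := yqlib.NewStringEvaluator().Evaluate(".a", "a: cat\n", encoder, decoder)
	if err != nil {
		panic(err)
	}
	fmt.Print(result) // expected to print: cat
}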