Merge branch 'master' into toml
Commit 9f50a49cc4
.github/workflows/go.yml (vendored, 4 changes)
@@ -10,10 +10,10 @@ jobs:
runs-on: ubuntu-latest
steps:

- name: Set up Go 1.19
- name: Set up Go
uses: actions/setup-go@v3
with:
go-version: 1.19
go-version: '^1.20'
id: go

- name: Check out code into the Go module directory
.github/workflows/release.yml (vendored, 2 changes)
@@ -12,7 +12,7 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with:
go-version: '^1.19'
go-version: '^1.20'
check-latest: true
- name: Compile man page markup
id: gen-man-page-md
@@ -1,4 +1,4 @@
FROM golang:1.19.4 as builder
FROM golang:1.20.2 as builder

WORKDIR /go/src/mikefarah/yq
@@ -1,4 +1,4 @@
FROM golang:1.19.4
FROM golang:1.20.2

COPY scripts/devtools.sh /opt/devtools.sh
acceptance_tests/inputs-format-auto.sh (new executable file, 211 lines)
@@ -0,0 +1,211 @@
#!/bin/bash

setUp() {
rm test*.yml 2>/dev/null || true
rm test*.json 2>/dev/null || true
rm test*.properties 2>/dev/null || true
rm test*.csv 2>/dev/null || true
rm test*.tsv 2>/dev/null || true
rm test*.xml 2>/dev/null || true
}

testInputJson() {
cat >test.json <<EOL
{ "mike" : { "things": "cool" } }
EOL

read -r -d '' expected << EOM
{
"mike": {
"things": "cool"
}
}
EOM

X=$(./yq test.json)
assertEquals "$expected" "$X"

X=$(./yq ea test.json)
assertEquals "$expected" "$X"
}

testInputJsonOutputYaml() {
cat >test.json <<EOL
{ "mike" : { "things": "cool" } }
EOL

read -r -d '' expected << EOM
mike:
things: cool
EOM

X=$(./yq test.json -oy)
assertEquals "$expected" "$X"

X=$(./yq ea test.json -oy)
assertEquals "$expected" "$X"
}

testInputProperties() {
cat >test.properties <<EOL
mike.things = hello
EOL

read -r -d '' expected << EOM
mike.things = hello
EOM

X=$(./yq e test.properties)
assertEquals "$expected" "$X"

X=$(./yq test.properties)
assertEquals "$expected" "$X"

X=$(./yq ea test.properties)
assertEquals "$expected" "$X"
}

testInputPropertiesGitHubAction() {
cat >test.properties <<EOL
mike.things = hello
EOL

read -r -d '' expected << EOM
mike.things = hello
EOM

X=$(cat /dev/null | ./yq e test.properties)
assertEquals "$expected" "$X"

X=$(cat /dev/null | ./yq ea test.properties)
assertEquals "$expected" "$X"
}

testInputCSV() {
cat >test.csv <<EOL
fruit,yumLevel
apple,5
banana,4
EOL

read -r -d '' expected << EOM
fruit,yumLevel
apple,5
banana,4
EOM

X=$(./yq e test.csv)
assertEquals "$expected" "$X"

X=$(./yq ea test.csv)
assertEquals "$expected" "$X"
}

testInputCSVUTF8() {
read -r -d '' expected << EOM
id,first,last
1,john,smith
1,jane,smith
EOM

X=$(./yq utf8.csv)
assertEquals "$expected" "$X"
}

testInputTSV() {
cat >test.tsv <<EOL
fruit yumLevel
apple 5
banana 4
EOL

read -r -d '' expected << EOM
fruit yumLevel
apple 5
banana 4
EOM

X=$(./yq e test.tsv)
assertEquals "$expected" "$X"

X=$(./yq ea test.tsv)
assertEquals "$expected" "$X"
}

testInputXml() {
cat >test.xml <<EOL
<cat legs="4">BiBi</cat>
EOL

read -r -d '' expected << EOM
<cat legs="4">BiBi</cat>
EOM

X=$(./yq e test.xml)
assertEquals "$expected" "$X"

X=$(./yq ea test.xml)
assertEquals "$expected" "$X"
}

testInputXmlNamespaces() {
cat >test.xml <<EOL
<?xml version="1.0"?>
<map xmlns="some-namespace" xmlns:xsi="some-instance" xsi:schemaLocation="some-url">
</map>
EOL

read -r -d '' expected << EOM
<?xml version="1.0"?>
<map xmlns="some-namespace" xmlns:xsi="some-instance" xsi:schemaLocation="some-url"></map>
EOM

X=$(./yq e test.xml)
assertEquals "$expected" "$X"

X=$(./yq ea test.xml)
assertEquals "$expected" "$X"
}

testInputXmlStrict() {
cat >test.xml <<EOL
<?xml version="1.0"?>
<!DOCTYPE root [
<!ENTITY writer "Catherine.">
<!ENTITY copyright "(r) Great">
]>
<root>
<item>&writer;&copyright;</item>
</root>
EOL

X=$(./yq --xml-strict-mode test.xml 2>&1)
assertEquals 1 $?
assertEquals "Error: bad file 'test.xml': XML syntax error on line 7: invalid character entity &writer;" "$X"

X=$(./yq ea --xml-strict-mode test.xml 2>&1)
assertEquals "Error: bad file 'test.xml': XML syntax error on line 7: invalid character entity &writer;" "$X"
}

testInputXmlGithubAction() {
cat >test.xml <<EOL
<cat legs="4">BiBi</cat>
EOL

read -r -d '' expected << EOM
<cat legs="4">BiBi</cat>
EOM

X=$(cat /dev/null | ./yq e test.xml)
assertEquals "$expected" "$X"

X=$(cat /dev/null | ./yq ea test.xml)
assertEquals "$expected" "$X"
}

source ./scripts/shunit2
@@ -120,7 +120,7 @@ EOM
}

testInputXmlNamespaces() {
cat >test.yml <<EOL
cat >test.xml <<EOL
<?xml version="1.0"?>
<map xmlns="some-namespace" xmlns:xsi="some-instance" xsi:schemaLocation="some-url">
</map>
@@ -134,10 +134,10 @@ map:
+@xsi:schemaLocation: some-url
EOM

X=$(./yq e -p=xml test.yml)
X=$(./yq e -p=xml test.xml)
assertEquals "$expected" "$X"

X=$(./yq ea -p=xml test.yml)
X=$(./yq ea -p=xml test.xml)
assertEquals "$expected" "$X"
}
@@ -6,8 +6,10 @@ var unwrapScalar = false

var writeInplace = false
var outputToJSON = false
var outputFormat = "yaml"
var inputFormat = "yaml"

var outputFormat = ""

var inputFormat = ""

var exitStatus = false
var forceColor = false
@@ -84,7 +84,10 @@ func evaluateAll(cmd *cobra.Command, args []string) (cmdError error) {
if err != nil {
return err
}
encoder := configureEncoder(format)
encoder, err := configureEncoder()
if err != nil {
return err
}

printer := yqlib.NewPrinter(encoder, printerWriter)
@@ -93,7 +93,10 @@ func evaluateSequence(cmd *cobra.Command, args []string) (cmdError error) {
if err != nil {
return err
}
encoder := configureEncoder(format)
encoder, err := configureEncoder()
if err != nil {
return err
}

printer := yqlib.NewPrinter(encoder, printerWriter)
cmd/root.go (23 changes)
@@ -53,25 +53,6 @@ yq -P sample.json
logging.SetBackend(backend)
yqlib.InitExpressionParser()

outputFormatType, err := yqlib.OutputFormatFromString(outputFormat)

if err != nil {
return err
}

if outputFormatType == yqlib.YamlOutputFormat ||
outputFormatType == yqlib.PropsOutputFormat {
unwrapScalar = true
}
if unwrapScalarFlag.IsExplicitySet() {
unwrapScalar = unwrapScalarFlag.IsSet()
}

//copy preference form global setting
yqlib.ConfiguredYamlPreferences.UnwrapScalar = unwrapScalar

yqlib.ConfiguredYamlPreferences.PrintDocSeparators = !noDocSeparators

return nil
},
}
@@ -84,8 +65,8 @@ yq -P sample.json
panic(err)
}

rootCmd.PersistentFlags().StringVarP(&outputFormat, "output-format", "o", "yaml", "[yaml|y|json|j|props|p|xml|x|csv|c|tsv|t] output format type.")
rootCmd.PersistentFlags().StringVarP(&inputFormat, "input-format", "p", "yaml", "[yaml|y|props|p|xml|x|json|j|csv|c|tsv|t|toml] parse format for input.")
rootCmd.PersistentFlags().StringVarP(&outputFormat, "output-format", "o", "auto", "[auto|a|yaml|y|json|j|props|p|xml|x] output format type.")
rootCmd.PersistentFlags().StringVarP(&inputFormat, "input-format", "p", "auto", "[auto|a|yaml|y|props|p|xml|x|toml|t] parse format for input. Note that json is a subset of yaml.")

rootCmd.PersistentFlags().StringVar(&yqlib.ConfiguredXMLPreferences.AttributePrefix, "xml-attribute-prefix", yqlib.ConfiguredXMLPreferences.AttributePrefix, "prefix for xml attributes")
rootCmd.PersistentFlags().StringVar(&yqlib.ConfiguredXMLPreferences.ContentName, "xml-content-name", yqlib.ConfiguredXMLPreferences.ContentName, "name for xml content (if no attribute name is present).")
cmd/utils.go (88 changes)
@@ -53,6 +53,48 @@ func initCommand(cmd *cobra.Command, args []string) (string, []string, error) {
return "", nil, fmt.Errorf("cannot pass files in when using null-input flag")
}

inputFilename := ""
if len(args) > 0 {
inputFilename = args[0]
}
if inputFormat == "" || inputFormat == "auto" || inputFormat == "a" {

inputFormat = yqlib.FormatFromFilename(inputFilename)
if outputFormat == "" || outputFormat == "auto" || outputFormat == "a" {
outputFormat = yqlib.FormatFromFilename(inputFilename)
}
} else if outputFormat == "" || outputFormat == "auto" || outputFormat == "a" {
// backwards compatibility -
// before this was introduced, `yq -pcsv things.csv`
// would produce *yaml* output.
//
outputFormat = yqlib.FormatFromFilename(inputFilename)
if inputFilename != "-" {
yqlib.GetLogger().Warning("yq default output is now 'auto' (based on the filename extension). Normally yq would output '%v', but for backwards compatibility 'yaml' has been set. Please use -oy to specify yaml, or drop the -p flag.", outputFormat)
}
outputFormat = "yaml"
}

outputFormatType, err := yqlib.OutputFormatFromString(outputFormat)

if err != nil {
return "", nil, err
}
yqlib.GetLogger().Debug("Using outputformat %v", outputFormat)

if outputFormatType == yqlib.YamlOutputFormat ||
outputFormatType == yqlib.PropsOutputFormat {
unwrapScalar = true
}
if unwrapScalarFlag.IsExplicitySet() {
unwrapScalar = unwrapScalarFlag.IsSet()
}

//copy preference form global setting
yqlib.ConfiguredYamlPreferences.UnwrapScalar = unwrapScalar

yqlib.ConfiguredYamlPreferences.PrintDocSeparators = !noDocSeparators

return expression, args, nil
}

@@ -61,7 +103,15 @@ func configureDecoder(evaluateTogether bool) (yqlib.Decoder, error) {
if err != nil {
return nil, err
}
switch yqlibInputFormat {
yqlibDecoder, err := createDecoder(yqlibInputFormat, evaluateTogether)
if yqlibDecoder == nil {
return nil, fmt.Errorf("no support for %s input format", inputFormat)
}
return yqlibDecoder, err
}

func createDecoder(format yqlib.InputFormat, evaluateTogether bool) (yqlib.Decoder, error) {
switch format {
case yqlib.XMLInputFormat:
return yqlib.NewXMLDecoder(yqlib.ConfiguredXMLPreferences), nil
case yqlib.PropertiesInputFormat:
@@ -74,10 +124,12 @@ func configureDecoder(evaluateTogether bool) (yqlib.Decoder, error) {
return yqlib.NewCSVObjectDecoder('\t'), nil
case yqlib.TomlInputFormat:
return yqlib.NewTomlDecoder(), nil
case yqlib.YamlInputFormat:
prefs := yqlib.ConfiguredYamlPreferences
prefs.EvaluateTogether = evaluateTogether
return yqlib.NewYamlDecoder(prefs), nil
}
prefs := yqlib.ConfiguredYamlPreferences
prefs.EvaluateTogether = evaluateTogether
return yqlib.NewYamlDecoder(prefs), nil
return nil, fmt.Errorf("invalid decoder: %v", format)
}

func configurePrinterWriter(format yqlib.PrinterOutputFormat, out io.Writer) (yqlib.PrinterWriter, error) {
@@ -97,22 +149,34 @@ func configurePrinterWriter(format yqlib.PrinterOutputFormat, out io.Writer) (yq
return printerWriter, nil
}

func configureEncoder(format yqlib.PrinterOutputFormat) yqlib.Encoder {
func configureEncoder() (yqlib.Encoder, error) {
yqlibOutputFormat, err := yqlib.OutputFormatFromString(outputFormat)
if err != nil {
return nil, err
}
yqlibEncoder, err := createEncoder(yqlibOutputFormat)
if yqlibEncoder == nil {
return nil, fmt.Errorf("no support for %s output format", outputFormat)
}
return yqlibEncoder, err
}

func createEncoder(format yqlib.PrinterOutputFormat) (yqlib.Encoder, error) {
switch format {
case yqlib.JSONOutputFormat:
return yqlib.NewJSONEncoder(indent, colorsEnabled, unwrapScalar)
return yqlib.NewJSONEncoder(indent, colorsEnabled, unwrapScalar), nil
case yqlib.PropsOutputFormat:
return yqlib.NewPropertiesEncoder(unwrapScalar)
return yqlib.NewPropertiesEncoder(unwrapScalar), nil
case yqlib.CSVOutputFormat:
return yqlib.NewCsvEncoder(',')
return yqlib.NewCsvEncoder(','), nil
case yqlib.TSVOutputFormat:
return yqlib.NewCsvEncoder('\t')
return yqlib.NewCsvEncoder('\t'), nil
case yqlib.YamlOutputFormat:
return yqlib.NewYamlEncoder(indent, colorsEnabled, yqlib.ConfiguredYamlPreferences)
return yqlib.NewYamlEncoder(indent, colorsEnabled, yqlib.ConfiguredYamlPreferences), nil
case yqlib.XMLOutputFormat:
return yqlib.NewXMLEncoder(indent, yqlib.ConfiguredXMLPreferences)
return yqlib.NewXMLEncoder(indent, yqlib.ConfiguredXMLPreferences), nil
}
panic("invalid encoder")
return nil, fmt.Errorf("invalid encoder: %v", format)
}

// this is a hack to enable backwards compatibility with githubactions (which pipe /dev/null into everything)
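As a side note on the refactor above: configureEncoder/configureDecoder now return errors instead of panicking, and a nil result from the createEncoder/createDecoder helper is reported as "no support for this format" (individual formats can be compiled out, as the yq_nojson / yq_noxml build tags further down show). A minimal, self-contained sketch of that shape, with made-up names rather than yqlib's real types:

```go
package main

import (
	"errors"
	"fmt"
)

// Encoder and the format names below are illustrative stand-ins.
type Encoder interface{ Name() string }

type jsonEncoder struct{}

func (jsonEncoder) Name() string { return "json" }

// createEncoder returns (nil, nil) for a format that is known but not
// built into this binary, and an error for a format it has never heard of.
func createEncoder(format string) (Encoder, error) {
	switch format {
	case "json":
		return jsonEncoder{}, nil
	case "toml":
		return nil, nil // known format, no encoder compiled in
	}
	return nil, fmt.Errorf("invalid encoder: %v", format)
}

// configureEncoder turns a nil encoder into a user-facing error
// instead of a panic, mirroring the change above.
func configureEncoder(format string) (Encoder, error) {
	enc, err := createEncoder(format)
	if err != nil {
		return nil, err
	}
	if enc == nil {
		return nil, errors.New("no support for " + format + " output format")
	}
	return enc, nil
}

func main() {
	for _, f := range []string{"json", "toml", "bogus"} {
		enc, err := configureEncoder(f)
		fmt.Println(f, enc, err)
	}
}
```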
@@ -11,7 +11,7 @@ var (
GitDescribe string

// Version is main version number that is being run at the moment.
Version = "v4.30.6"
Version = "v4.31.2"

// VersionPrerelease is a pre-release marker for the version. If this is "" (empty string)
// then it means that it is a final release. Otherwise, this is a pre-release
@@ -1,5 +1 @@
date: "2022-11-25"
raw:
id: 1
XML: text_outside_1<Tag1 />text_outside_2<Tag2Kling Klong</Tag2>text_outside_3
time: 09:19:00
<cat>3</cat>
go.mod (20 changes)
@@ -1,32 +1,32 @@
module github.com/mikefarah/yq/v4

require (
github.com/a8m/envsubst v1.3.0
github.com/a8m/envsubst v1.4.2
github.com/alecthomas/participle/v2 v2.0.0-beta.5
github.com/alecthomas/repr v0.1.1
github.com/alecthomas/repr v0.2.0
github.com/dimchansky/utfbom v1.1.1
github.com/elliotchance/orderedmap v1.5.0
github.com/fatih/color v1.13.0
github.com/fatih/color v1.14.1
github.com/goccy/go-json v0.10.0
github.com/goccy/go-yaml v1.9.7
github.com/goccy/go-yaml v1.10.0
github.com/jinzhu/copier v0.3.5
github.com/magiconair/properties v1.8.7
github.com/pelletier/go-toml/v2 v2.0.6
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e
github.com/spf13/cobra v1.6.1
github.com/spf13/pflag v1.0.5
golang.org/x/net v0.0.0-20220906165146-f3363e06e74c
golang.org/x/net v0.8.0
gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473
gopkg.in/yaml.v3 v3.0.1
)

require (
github.com/inconshreveable/mousetrap v1.0.1 // indirect
github.com/mattn/go-colorable v0.1.12 // indirect
github.com/mattn/go-isatty v0.0.14 // indirect
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 // indirect
golang.org/x/text v0.3.8 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.17 // indirect
golang.org/x/sys v0.6.0 // indirect
golang.org/x/text v0.8.0 // indirect
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect
)

go 1.19
go 1.20
go.sum (42 changes)
@ -1,10 +1,10 @@
|
||||
github.com/a8m/envsubst v1.3.0 h1:GmXKmVssap0YtlU3E230W98RWtWCyIZzjtf1apWWyAg=
|
||||
github.com/a8m/envsubst v1.3.0/go.mod h1:MVUTQNGQ3tsjOOtKCNd+fl8RzhsXcDvvAEzkhGtlsbY=
|
||||
github.com/a8m/envsubst v1.4.2 h1:4yWIHXOLEJHQEFd4UjrWDrYeYlV7ncFWJOCBRLOZHQg=
|
||||
github.com/a8m/envsubst v1.4.2/go.mod h1:MVUTQNGQ3tsjOOtKCNd+fl8RzhsXcDvvAEzkhGtlsbY=
|
||||
github.com/alecthomas/assert/v2 v2.0.3 h1:WKqJODfOiQG0nEJKFKzDIG3E29CN2/4zR9XGJzKIkbg=
|
||||
github.com/alecthomas/participle/v2 v2.0.0-beta.5 h1:y6dsSYVb1G5eK6mgmy+BgI3Mw35a3WghArZ/Hbebrjo=
|
||||
github.com/alecthomas/participle/v2 v2.0.0-beta.5/go.mod h1:RC764t6n4L8D8ITAJv0qdokritYSNR3wV5cVwmIEaMM=
|
||||
github.com/alecthomas/repr v0.1.1 h1:87P60cSmareLAxMc4Hro0r2RBY4ROm0dYwkJNpS4pPs=
|
||||
github.com/alecthomas/repr v0.1.1/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
|
||||
github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk=
|
||||
github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
@ -14,16 +14,17 @@ github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/
|
||||
github.com/elliotchance/orderedmap v1.5.0 h1:1IsExUsjv5XNBD3ZdC7jkAAqLWOOKdbPTmkHx63OsBg=
|
||||
github.com/elliotchance/orderedmap v1.5.0/go.mod h1:wsDwEaX5jEoyhbs7x93zk2H/qv0zwuhg4inXhDkYqys=
|
||||
github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM=
|
||||
github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w=
|
||||
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
|
||||
github.com/fatih/color v1.14.1 h1:qfhVLaG5s+nCROl1zJsZRxFeYrHLqWroPOQ8BWiNb4w=
|
||||
github.com/fatih/color v1.14.1/go.mod h1:2oHN61fhTpgcxD3TSWCgKDiH1+x4OiDVVGH8WlgGZGg=
|
||||
github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
|
||||
github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
|
||||
github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=
|
||||
github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=
|
||||
github.com/goccy/go-json v0.10.0 h1:mXKd9Qw4NuzShiRlOXKews24ufknHO7gx30lsDyokKA=
|
||||
github.com/goccy/go-json v0.10.0/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
|
||||
github.com/goccy/go-yaml v1.9.7 h1:D/Vx+JITklB1ugSkncB4BNR67M3X6AKs9+rqVeo3ddw=
|
||||
github.com/goccy/go-yaml v1.9.7/go.mod h1:JubOolP3gh0HpiBc4BLRD4YmjEjHAmIIB2aaXKkTfoE=
|
||||
github.com/goccy/go-yaml v1.10.0 h1:rBi+5HGuznOxx0JZ+60LDY85gc0dyIJCIMvsMJTKSKQ=
|
||||
github.com/goccy/go-yaml v1.10.0/go.mod h1:h/18Lr6oSQ3mvmqFoWmQ47KChOgpfHpTyIHl3yVmpiY=
|
||||
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
|
||||
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
|
||||
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
@ -33,14 +34,14 @@ github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgx
|
||||
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
|
||||
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
|
||||
github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||
github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||
github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40=
|
||||
github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
|
||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||
github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y=
|
||||
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
|
||||
github.com/pelletier/go-toml/v2 v2.0.6 h1:nrzqCb7j9cDFj2coyLNLaZuJTLjWjlaz6nvTvIwycIU=
|
||||
github.com/pelletier/go-toml/v2 v2.0.6/go.mod h1:eumQOmlWiOPt5WriQQqoM5y18pDHwha2N+QD+EUNTek=
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng=
|
||||
github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgcC4zIxODThtZNPirFr42+A=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
@ -62,21 +63,20 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20220906165146-f3363e06e74c h1:yKufUcDwucU5urd+50/Opbt4AYpqthk7wHpHok8f1lo=
|
||||
golang.org/x/net v0.0.0-20220906165146-f3363e06e74c/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
|
||||
golang.org/x/net v0.8.0 h1:Zrh2ngAOFYneWTAIAPethzeaQLuHwhuBkuV6ZiRnUaQ=
|
||||
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220406163625-3f8b81556e12/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 h1:WIoqL4EROvwiPdUtaip4VcDdpZ4kha7wBWZrbVKCIZg=
|
||||
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0 h1:MVltZSvRTcU2ljQOhs94SXPftV6DCNnZViHeQps87pQ=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.8 h1:nAL+RVCQ9uMn3vJZbV+MRnydTJFPf8qqY42YiA6MrqY=
|
||||
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
|
||||
golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68=
|
||||
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f h1:uF6paiQQebLeSXkrTqHqz0MXhXXS1KgF41eUdBNvxK0=
|
||||
|
25
pkg/yqlib/chown_linux.go
Normal file
25
pkg/yqlib/chown_linux.go
Normal file
@ -0,0 +1,25 @@
|
||||
//go:build linux
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
func changeOwner(info fs.FileInfo, file *os.File) error {
|
||||
if stat, ok := info.Sys().(*syscall.Stat_t); ok {
|
||||
uid := int(stat.Uid)
|
||||
gid := int(stat.Gid)
|
||||
|
||||
err := os.Chown(file.Name(), uid, gid)
|
||||
if err != nil {
|
||||
// this happens with snap confinement
|
||||
// not really a big issue as users can chown
|
||||
// the file themselves if required.
|
||||
log.Info("Skipping chown: %v", err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
12
pkg/yqlib/chown_not_linux_os.go
Normal file
12
pkg/yqlib/chown_not_linux_os.go
Normal file
@ -0,0 +1,12 @@
|
||||
//go:build !linux
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
)
|
||||
|
||||
func changeOwner(info fs.FileInfo, file *os.File) error {
|
||||
return nil
|
||||
}
|
@@ -3,6 +3,7 @@ package yqlib
import (
"fmt"
"io"
"strings"
)

type InputFormat uint
@@ -16,6 +17,7 @@ const (
CSVObjectInputFormat
TSVObjectInputFormat
TomlInputFormat
UriInputFormat
)

type Decoder interface {
@@ -25,11 +27,11 @@ type Decoder interface {

func InputFormatFromString(format string) (InputFormat, error) {
switch format {
case "yaml", "y":
case "yaml", "yml", "y":
return YamlInputFormat, nil
case "xml", "x":
return XMLInputFormat, nil
case "props", "p":
case "properties", "props", "p":
return PropertiesInputFormat, nil
case "json", "ndjson", "j":
return JsonInputFormat, nil
@@ -40,6 +42,22 @@ func InputFormatFromString(format string) (InputFormat, error) {
case "toml":
return TomlInputFormat, nil
default:
return 0, fmt.Errorf("unknown format '%v' please use [yaml|xml|props|js]", format)
return 0, fmt.Errorf("unknown format '%v' please use [yaml|json|props|csv|tsv|xml|toml]", format)
}
}

func FormatFromFilename(filename string) string {

if filename != "" {
GetLogger().Debugf("checking file extension '%s' for auto format detection", filename)
nPos := strings.LastIndex(filename, ".")
if nPos > -1 {
format := filename[nPos+1:]
GetLogger().Debugf("detected format '%s'", format)
return format
}
}

GetLogger().Debugf("using default inputFormat 'yaml'")
return "yaml"
}
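For reference, here is the extension-based detection that the new FormatFromFilename performs, as a minimal standalone sketch (illustrative only; the real function also logs, and the actual `auto` behaviour is wired up through the flag handling in cmd/utils.go above):

```go
package main

import (
	"fmt"
	"strings"
)

// formatFromFilename mirrors the idea above: take everything after the
// last '.' as the format name, and fall back to "yaml" when there is no
// usable extension (e.g. stdin, "-", or no filename at all).
func formatFromFilename(filename string) string {
	if filename != "" {
		if nPos := strings.LastIndex(filename, "."); nPos > -1 {
			return filename[nPos+1:]
		}
	}
	return "yaml"
}

func main() {
	for _, f := range []string{"things.csv", "config.xml", "-", ""} {
		fmt.Printf("%q -> %s\n", f, formatFromFilename(f))
	}
	// "things.csv" -> csv, "config.xml" -> xml, "-" -> yaml, "" -> yaml
}
```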
@ -1,3 +1,5 @@
|
||||
//go:build !yq_nojson
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
|
@ -63,7 +63,7 @@ func (dec *propertiesDecoder) applyPropertyComments(context Context, path []inte
|
||||
|
||||
rhsCandidateNode.Node.Tag = guessTagFromCustomType(rhsCandidateNode.Node)
|
||||
|
||||
rhsOp := &Operation{OperationType: valueOpType, CandidateNode: rhsCandidateNode}
|
||||
rhsOp := &Operation{OperationType: referenceOpType, CandidateNode: rhsCandidateNode}
|
||||
|
||||
assignmentOpNode := &ExpressionNode{
|
||||
Operation: assignmentOp,
|
||||
@ -93,7 +93,27 @@ func (dec *propertiesDecoder) applyProperty(context Context, properties *propert
|
||||
Kind: yaml.ScalarNode,
|
||||
}
|
||||
|
||||
return dec.d.DeeplyAssign(context, path, rhsNode)
|
||||
rhsNode.Tag = guessTagFromCustomType(rhsNode)
|
||||
|
||||
rhsCandidateNode := &CandidateNode{
|
||||
Path: path,
|
||||
Node: rhsNode,
|
||||
}
|
||||
|
||||
assignmentOp := &Operation{OperationType: assignOpType, Preferences: assignPreferences{}}
|
||||
|
||||
rhsOp := &Operation{OperationType: referenceOpType, CandidateNode: rhsCandidateNode}
|
||||
|
||||
assignmentOpNode := &ExpressionNode{
|
||||
Operation: assignmentOp,
|
||||
LHS: createTraversalTree(path, traversePreferences{}, false),
|
||||
RHS: &ExpressionNode{Operation: rhsOp},
|
||||
}
|
||||
|
||||
_, err := dec.d.GetMatchingNodes(context, assignmentOpNode)
|
||||
// toml?
|
||||
// return dec.d.DeeplyAssign(context, path, rhsNode)
|
||||
return err
|
||||
}
|
||||
|
||||
func (dec *propertiesDecoder) Decode() (*CandidateNode, error) {
|
||||
|
60
pkg/yqlib/decoder_uri.go
Normal file
60
pkg/yqlib/decoder_uri.go
Normal file
@ -0,0 +1,60 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"net/url"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type uriDecoder struct {
|
||||
reader io.Reader
|
||||
finished bool
|
||||
readAnything bool
|
||||
}
|
||||
|
||||
func NewUriDecoder() Decoder {
|
||||
return &uriDecoder{finished: false}
|
||||
}
|
||||
|
||||
func (dec *uriDecoder) Init(reader io.Reader) error {
|
||||
dec.reader = reader
|
||||
dec.readAnything = false
|
||||
dec.finished = false
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dec *uriDecoder) Decode() (*CandidateNode, error) {
|
||||
if dec.finished {
|
||||
return nil, io.EOF
|
||||
}
|
||||
|
||||
buf := new(bytes.Buffer)
|
||||
|
||||
if _, err := buf.ReadFrom(dec.reader); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if buf.Len() == 0 {
|
||||
dec.finished = true
|
||||
|
||||
// if we've read _only_ an empty string, lets return that
|
||||
// otherwise if we've already read some bytes, and now we get
|
||||
// an empty string, then we are done.
|
||||
if dec.readAnything {
|
||||
return nil, io.EOF
|
||||
}
|
||||
}
|
||||
newValue, err := url.QueryUnescape(buf.String())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
dec.readAnything = true
|
||||
return &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: newValue,
|
||||
},
|
||||
}, nil
|
||||
}
|
@@ -1,3 +1,5 @@
//go:build !yq_noxml

package yqlib

import (
@@ -5,6 +7,7 @@ import (
"errors"
"fmt"
"io"
"regexp"
"strings"
"unicode"

@@ -46,11 +49,19 @@ func (dec *xmlDecoder) createSequence(nodes []*xmlNode) (*yaml.Node, error) {
return yamlNode, nil
}

var decoderCommentPrefix = regexp.MustCompile(`(^|\n)([[:alpha:]])`)

func (dec *xmlDecoder) processComment(c string) string {
if c == "" {
return ""
}
return "#" + strings.TrimRight(c, " ")
//need to replace "cat " with "# cat"
// "\ncat\n" with "\n cat\n"
// ensure non-empty comments starting with newline have a space in front

replacement := decoderCommentPrefix.ReplaceAllString(c, "$1 $2")
replacement = "#" + strings.ReplaceAll(strings.TrimRight(replacement, " "), "\n", "\n#")
return replacement
}

func (dec *xmlDecoder) createMap(n *xmlNode) (*yaml.Node, error) {
@@ -75,6 +86,7 @@ func (dec *xmlDecoder) createMap(n *xmlNode) (*yaml.Node, error) {
var err error

if i == 0 {
log.Debugf("head comment here")
labelNode.HeadComment = dec.processComment(n.HeadComment)

}
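To make the new processComment behaviour concrete, the same two-step transformation as a standalone snippet (the regex and replacements mirror the code above; toYamlComment is an illustrative name, not yq's API):

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Add a space in front of comment text that starts a line, then prefix
// every line with '#', so a multi-line XML comment becomes a multi-line
// YAML comment.
var commentPrefix = regexp.MustCompile(`(^|\n)([[:alpha:]])`)

func toYamlComment(c string) string {
	withSpaces := commentPrefix.ReplaceAllString(c, "$1 $2")
	return "#" + strings.ReplaceAll(strings.TrimRight(withSpaces, " "), "\n", "\n#")
}

func main() {
	fmt.Println(toYamlComment("cat\ndog "))
	// Output:
	// # cat
	// # dog
}
```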
@@ -86,6 +86,23 @@ will output
a: ['dog', 'cat']
```

## Prepend to existing array
Given a sample.yml file of:
```yaml
a:
- dog
```
then
```bash
yq '.a = ["cat"] + .a' sample.yml
```
will output
```yaml
a:
- cat
- dog
```

## Add new object to array
Given a sample.yml file of:
```yaml
@@ -86,6 +86,30 @@ a: cool
updated: 2021-05-19T01:02:03Z
```

## From Unix
Converts from unix time. Note, you don't have to pipe through the tz operator :)

Running
```bash
yq --null-input '1675301929 | from_unix | tz("UTC")'
```
will output
```yaml
2023-02-02T01:38:49Z
```

## To Unix
Converts to unix time

Running
```bash
yq --null-input 'now | to_unix'
```
will output
```yaml
1621386123
```
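For a quick sanity check of the from_unix/to_unix examples above, the same conversions in plain Go (standard library only; an editorial illustration, not part of this change):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// from_unix: seconds since the epoch -> RFC3339, rendered in UTC
	fmt.Println(time.Unix(1675301929, 0).UTC().Format(time.RFC3339))
	// prints: 2023-02-02T01:38:49Z

	// to_unix: parse an RFC3339 timestamp and go back to epoch seconds
	t, err := time.Parse(time.RFC3339, "2023-02-02T01:38:49Z")
	if err != nil {
		panic(err)
	}
	fmt.Println(t.Unix()) // prints: 1675301929
}
```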
## Timezone: from standard RFC3339 format
Returns a new datetime in the specified timezone. Specify standard IANA Time Zone format or 'utc', 'local'. When given a single parameter, this assumes the datetime is in RFC3339 format.
@ -16,6 +16,8 @@ These operators are useful to process yaml documents that have stringified embed
|
||||
| TSV | from_tsv/@tsvd | to_tsv/@tsv |
|
||||
| XML | from_xml/@xmld | to_xml(i)/@xml |
|
||||
| Base64 | @base64d | @base64 |
|
||||
| URI | @urid | @uri |
|
||||
| Shell | | @sh |
|
||||
|
||||
|
||||
See CSV and TSV [documentation](https://mikefarah.gitbook.io/yq/usage/csv-tsv) for accepted formats.
|
||||
@ -435,6 +437,50 @@ will output
|
||||
YTogYXBwbGUK
|
||||
```
|
||||
|
||||
## Encode a string to uri
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
coolData: this has & special () characters *
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.coolData | @uri' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
this+has+%26+special+%28%29+characters+%2A
|
||||
```
|
||||
|
||||
## Decode a URI to a string
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
this+has+%26+special+%28%29+characters+%2A
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '@urid' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
this has & special () characters *
|
||||
```
|
||||
|
||||
## Encode a string to sh
|
||||
Sh/Bash friendly string
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
coolData: strings with spaces and a 'quote'
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.coolData | @sh' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
strings' with spaces and a '\'quote\'
|
||||
```
|
||||
|
||||
## Decode a base64 encoded string
|
||||
Decoded data is assumed to be a string.
|
||||
|
||||
|
18
pkg/yqlib/doc/operators/filter.md
Normal file
18
pkg/yqlib/doc/operators/filter.md
Normal file
@ -0,0 +1,18 @@
|
||||
|
||||
## Filter array
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- 1
|
||||
- 2
|
||||
- 3
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq 'filter(. < 3)' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
- 1
|
||||
- 2
|
||||
```
|
||||
|
@ -16,6 +16,8 @@ These operators are useful to process yaml documents that have stringified embed
|
||||
| TSV | from_tsv/@tsvd | to_tsv/@tsv |
|
||||
| XML | from_xml/@xmld | to_xml(i)/@xml |
|
||||
| Base64 | @base64d | @base64 |
|
||||
| URI | @urid | @uri |
|
||||
| Shell | | @sh |
|
||||
|
||||
|
||||
See CSV and TSV [documentation](https://mikefarah.gitbook.io/yq/usage/csv-tsv) for accepted formats.
|
||||
|
4
pkg/yqlib/doc/operators/headers/shuffle.md
Normal file
4
pkg/yqlib/doc/operators/headers/shuffle.md
Normal file
@ -0,0 +1,4 @@
|
||||
# Shuffle
|
||||
|
||||
Shuffles an array. Note that this command does _not_ use a cryptographically secure random number generator to randomise the array order.
|
||||
|
@ -499,3 +499,43 @@ b: !goat
|
||||
dog: woof
|
||||
```
|
||||
|
||||
## Merging a null with a map
|
||||
Running
|
||||
```bash
|
||||
yq --null-input 'null * {"some": "thing"}'
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
some: thing
|
||||
```
|
||||
|
||||
## Merging a map with null
|
||||
Running
|
||||
```bash
|
||||
yq --null-input '{"some": "thing"} * null'
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
some: thing
|
||||
```
|
||||
|
||||
## Merging an null with an array
|
||||
Running
|
||||
```bash
|
||||
yq --null-input 'null * ["some"]'
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
- some
|
||||
```
|
||||
|
||||
## Merging an array with null
|
||||
Running
|
||||
```bash
|
||||
yq --null-input '["some"] * null'
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
- some
|
||||
```
|
||||
|
||||
|
51
pkg/yqlib/doc/operators/shuffle.md
Normal file
51
pkg/yqlib/doc/operators/shuffle.md
Normal file
@ -0,0 +1,51 @@
|
||||
# Shuffle
|
||||
|
||||
Shuffles an array. Note that this command does _not_ use a cryptographically secure random number generator to randomise the array order.
|
||||
|
||||
|
||||
## Shuffle array
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- 1
|
||||
- 2
|
||||
- 3
|
||||
- 4
|
||||
- 5
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq 'shuffle' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
- 5
|
||||
- 2
|
||||
- 4
|
||||
- 1
|
||||
- 3
|
||||
```
|
||||
|
||||
## Shuffle array in place
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
cool:
|
||||
- 1
|
||||
- 2
|
||||
- 3
|
||||
- 4
|
||||
- 5
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.cool |= shuffle' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
cool:
|
||||
- 5
|
||||
- 2
|
||||
- 4
|
||||
- 1
|
||||
- 3
|
||||
```
|
||||
|
@ -25,6 +25,28 @@ will output
|
||||
- a: cat
|
||||
```
|
||||
|
||||
## Sort by multiple fields
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- a: dog
|
||||
- a: cat
|
||||
b: banana
|
||||
- a: cat
|
||||
b: apple
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq 'sort_by(.a, .b)' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
- a: cat
|
||||
b: apple
|
||||
- a: cat
|
||||
b: banana
|
||||
- a: dog
|
||||
```
|
||||
|
||||
## Sort descending by string field
|
||||
Use sort with reverse to sort in descending order.
|
||||
|
||||
|
@ -407,8 +407,10 @@ A best attempt is made to copy comments to xml.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
#
|
||||
# header comment
|
||||
# above_cat
|
||||
#
|
||||
cat: # inline_cat
|
||||
# above_array
|
||||
array: # inline_array
|
||||
@ -425,9 +427,11 @@ yq -o=xml '.' sample.yml
|
||||
will output
|
||||
```xml
|
||||
<!--
|
||||
header comment
|
||||
above_cat
|
||||
--><!-- inline_cat --><cat><!-- above_array inline_array -->
|
||||
header comment
|
||||
above_cat
|
||||
-->
|
||||
<!-- inline_cat -->
|
||||
<cat><!-- above_array inline_array -->
|
||||
<array>val1<!-- inline_val1 --></array>
|
||||
<array><!-- above_val2 -->val2<!-- inline_val2 --></array>
|
||||
</cat><!-- below_cat -->
|
||||
@ -489,7 +493,8 @@ yq -p=xml -o=xml '.' sample.xml
|
||||
```
|
||||
will output
|
||||
```xml
|
||||
<!-- before cat --><cat><!-- in cat before -->
|
||||
<!-- before cat -->
|
||||
<cat><!-- in cat before -->
|
||||
<x>3<!-- multi
|
||||
line comment
|
||||
for x --></x><!-- before y -->
|
||||
|
@ -1,10 +1,6 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
@ -17,162 +13,17 @@ type Encoder interface {
|
||||
CanHandleAliases() bool
|
||||
}
|
||||
|
||||
// orderedMap allows to marshal and unmarshal JSON and YAML values keeping the
|
||||
// order of keys and values in a map or an object.
|
||||
type orderedMap struct {
|
||||
// if this is an object, kv != nil. If this is not an object, kv == nil.
|
||||
kv []orderedMapKV
|
||||
altVal interface{}
|
||||
}
|
||||
func mapKeysToStrings(node *yaml.Node) {
|
||||
|
||||
type orderedMapKV struct {
|
||||
K string
|
||||
V orderedMap
|
||||
}
|
||||
|
||||
func (o *orderedMap) UnmarshalJSON(data []byte) error {
|
||||
switch data[0] {
|
||||
case '{':
|
||||
// initialise so that even if the object is empty it is not nil
|
||||
o.kv = []orderedMapKV{}
|
||||
|
||||
// create decoder
|
||||
dec := json.NewDecoder(bytes.NewReader(data))
|
||||
_, err := dec.Token() // open object
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// cycle through k/v
|
||||
var tok json.Token
|
||||
for tok, err = dec.Token(); err == nil; tok, err = dec.Token() {
|
||||
// we can expect two types: string or Delim. Delim automatically means
|
||||
// that it is the closing bracket of the object, whereas string means
|
||||
// that there is another key.
|
||||
if _, ok := tok.(json.Delim); ok {
|
||||
break
|
||||
if node.Kind == yaml.MappingNode {
|
||||
for index, child := range node.Content {
|
||||
if index%2 == 0 { // its a map key
|
||||
child.Tag = "!!str"
|
||||
}
|
||||
kv := orderedMapKV{
|
||||
K: tok.(string),
|
||||
}
|
||||
if err := dec.Decode(&kv.V); err != nil {
|
||||
return err
|
||||
}
|
||||
o.kv = append(o.kv, kv)
|
||||
}
|
||||
// unexpected error
|
||||
if err != nil && !errors.Is(err, io.EOF) {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
case '[':
|
||||
var res []*orderedMap
|
||||
if err := json.Unmarshal(data, &res); err != nil {
|
||||
return err
|
||||
}
|
||||
o.altVal = res
|
||||
o.kv = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
return json.Unmarshal(data, &o.altVal)
|
||||
}
|
||||
|
||||
func (o orderedMap) MarshalJSON() ([]byte, error) {
|
||||
buf := new(bytes.Buffer)
|
||||
enc := json.NewEncoder(buf)
|
||||
enc.SetEscapeHTML(false) // do not escape html chars e.g. &, <, >
|
||||
if o.kv == nil {
|
||||
if err := enc.Encode(o.altVal); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
buf.WriteByte('{')
|
||||
for idx, el := range o.kv {
|
||||
if err := enc.Encode(el.K); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
buf.WriteByte(':')
|
||||
if err := enc.Encode(el.V); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if idx != len(o.kv)-1 {
|
||||
buf.WriteByte(',')
|
||||
}
|
||||
}
|
||||
buf.WriteByte('}')
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
func (o *orderedMap) UnmarshalYAML(node *yaml.Node) error {
|
||||
switch node.Kind {
|
||||
case yaml.DocumentNode:
|
||||
if len(node.Content) == 0 {
|
||||
return nil
|
||||
}
|
||||
return o.UnmarshalYAML(node.Content[0])
|
||||
case yaml.AliasNode:
|
||||
return o.UnmarshalYAML(node.Alias)
|
||||
case yaml.ScalarNode:
|
||||
return node.Decode(&o.altVal)
|
||||
case yaml.MappingNode:
|
||||
// set kv to non-nil
|
||||
o.kv = []orderedMapKV{}
|
||||
for i := 0; i < len(node.Content); i += 2 {
|
||||
var key string
|
||||
var val orderedMap
|
||||
if err := node.Content[i].Decode(&key); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := node.Content[i+1].Decode(&val); err != nil {
|
||||
return err
|
||||
}
|
||||
o.kv = append(o.kv, orderedMapKV{
|
||||
K: key,
|
||||
V: val,
|
||||
})
|
||||
}
|
||||
return nil
|
||||
case yaml.SequenceNode:
|
||||
// note that this has to be a pointer, so that nulls can be represented.
|
||||
var res []*orderedMap
|
||||
if err := node.Decode(&res); err != nil {
|
||||
return err
|
||||
}
|
||||
o.altVal = res
|
||||
o.kv = nil
|
||||
return nil
|
||||
case 0:
|
||||
// null
|
||||
o.kv = nil
|
||||
o.altVal = nil
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("orderedMap: invalid yaml node")
|
||||
for _, child := range node.Content {
|
||||
mapKeysToStrings(child)
|
||||
}
|
||||
}
|
||||
|
||||
func (o *orderedMap) MarshalYAML() (interface{}, error) {
|
||||
// fast path: kv is nil, use altVal
|
||||
if o.kv == nil {
|
||||
return o.altVal, nil
|
||||
}
|
||||
content := make([]*yaml.Node, 0, len(o.kv)*2)
|
||||
for _, val := range o.kv {
|
||||
n := new(yaml.Node)
|
||||
if err := n.Encode(val.V); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
content = append(content, &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: val.K,
|
||||
}, n)
|
||||
}
|
||||
return &yaml.Node{
|
||||
Kind: yaml.MappingNode,
|
||||
Tag: "!!map",
|
||||
Content: content,
|
||||
}, nil
|
||||
}
|
||||
|
@ -1,3 +1,5 @@
|
||||
//go:build !yq_nojson
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
@ -14,21 +16,6 @@ type jsonEncoder struct {
|
||||
UnwrapScalar bool
|
||||
}
|
||||
|
||||
func mapKeysToStrings(node *yaml.Node) {
|
||||
|
||||
if node.Kind == yaml.MappingNode {
|
||||
for index, child := range node.Content {
|
||||
if index%2 == 0 { // its a map key
|
||||
child.Tag = "!!str"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, child := range node.Content {
|
||||
mapKeysToStrings(child)
|
||||
}
|
||||
}
|
||||
|
||||
func NewJSONEncoder(indent int, colorise bool, unwrapScalar bool) Encoder {
|
||||
var indentString = ""
|
||||
|
||||
|
79
pkg/yqlib/encoder_sh.go
Normal file
79
pkg/yqlib/encoder_sh.go
Normal file
@ -0,0 +1,79 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
var unsafeChars = regexp.MustCompile(`[^\w@%+=:,./-]`)
|
||||
|
||||
type shEncoder struct {
|
||||
quoteAll bool
|
||||
}
|
||||
|
||||
func NewShEncoder() Encoder {
|
||||
return &shEncoder{false}
|
||||
}
|
||||
|
||||
func (e *shEncoder) CanHandleAliases() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (e *shEncoder) PrintDocumentSeparator(writer io.Writer) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *shEncoder) PrintLeadingContent(writer io.Writer, content string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *shEncoder) Encode(writer io.Writer, originalNode *yaml.Node) error {
|
||||
node := unwrapDoc(originalNode)
|
||||
if guessTagFromCustomType(node) != "!!str" {
|
||||
return fmt.Errorf("cannot encode %v as URI, can only operate on strings. Please first pipe through another encoding operator to convert the value to a string", node.Tag)
|
||||
}
|
||||
|
||||
return writeString(writer, e.encode(originalNode.Value))
|
||||
}
|
||||
|
||||
// put any (shell-unsafe) characters into a single-quoted block, close the block lazily
|
||||
func (e *shEncoder) encode(input string) string {
|
||||
const quote = '\''
|
||||
var inQuoteBlock = false
|
||||
var encoded strings.Builder
|
||||
encoded.Grow(len(input))
|
||||
|
||||
for _, ir := range input {
|
||||
// open or close a single-quote block
|
||||
if ir == quote {
|
||||
if inQuoteBlock {
|
||||
// get out of a quote block for an input quote
|
||||
encoded.WriteRune(quote)
|
||||
inQuoteBlock = !inQuoteBlock
|
||||
}
|
||||
// escape the quote with a backslash
|
||||
encoded.WriteRune('\\')
|
||||
} else {
|
||||
if e.shouldQuote(ir) && !inQuoteBlock {
|
||||
// start a quote block for any (unsafe) characters
|
||||
encoded.WriteRune(quote)
|
||||
inQuoteBlock = !inQuoteBlock
|
||||
}
|
||||
}
|
||||
// pass on the input character
|
||||
encoded.WriteRune(ir)
|
||||
}
|
||||
// close any pending quote block
|
||||
if inQuoteBlock {
|
||||
encoded.WriteRune(quote)
|
||||
}
|
||||
return encoded.String()
|
||||
}
|
||||
|
||||
func (e *shEncoder) shouldQuote(ir rune) bool {
|
||||
return e.quoteAll || unsafeChars.MatchString(string(ir))
|
||||
}
|
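The new sh encoder above quotes only the unsafe runs and escapes single quotes with a backslash, which is why the `@sh` documentation in this change shows `strings' with spaces and a '\'quote\'`. For comparison, the simpler classic approach — wrap the whole value in single quotes and escape embedded quotes as `'\''` — gives the same shell-safety guarantee; a minimal sketch (posixQuote is an illustrative helper, not part of yq):

```go
package main

import (
	"fmt"
	"strings"
)

// posixQuote wraps a string in single quotes and escapes any embedded
// single quote as '\'' - the classic portable way to make an arbitrary
// value safe to paste onto a shell command line.
func posixQuote(s string) string {
	return "'" + strings.ReplaceAll(s, "'", `'\''`) + "'"
}

func main() {
	fmt.Println(posixQuote("strings with spaces and a 'quote'"))
	// prints: 'strings with spaces and a '\''quote'\'''
}
```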
@ -1,3 +1,5 @@
|
||||
//go:build !yq_nojson
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
|
37
pkg/yqlib/encoder_uri.go
Normal file
37
pkg/yqlib/encoder_uri.go
Normal file
@ -0,0 +1,37 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type uriEncoder struct {
|
||||
}
|
||||
|
||||
func NewUriEncoder() Encoder {
|
||||
return &uriEncoder{}
|
||||
}
|
||||
|
||||
func (e *uriEncoder) CanHandleAliases() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (e *uriEncoder) PrintDocumentSeparator(writer io.Writer) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *uriEncoder) PrintLeadingContent(writer io.Writer, content string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *uriEncoder) Encode(writer io.Writer, originalNode *yaml.Node) error {
|
||||
node := unwrapDoc(originalNode)
|
||||
if guessTagFromCustomType(node) != "!!str" {
|
||||
return fmt.Errorf("cannot encode %v as URI, can only operate on strings. Please first pipe through another encoding operator to convert the value to a string", node.Tag)
|
||||
}
|
||||
_, err := writer.Write([]byte(url.QueryEscape(originalNode.Value)))
|
||||
return err
|
||||
}
|
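Both the `@uri` encoder above and the matching `@urid` decoder added in this change are thin wrappers over the standard library; the round trip they rely on looks like this (illustrative standalone program):

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Same escaping the @uri / @urid operators are built on:
	// url.QueryEscape to encode, url.QueryUnescape to decode.
	encoded := url.QueryEscape("this has & special () characters *")
	fmt.Println(encoded) // this+has+%26+special+%28%29+characters+%2A

	decoded, err := url.QueryUnescape(encoded)
	if err != nil {
		panic(err)
	}
	fmt.Println(decoded) // this has & special () characters *
}
```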
@ -1,3 +1,5 @@
|
||||
//go:build !yq_noxml
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
@ -17,8 +19,6 @@ type xmlEncoder struct {
|
||||
leadingContent string
|
||||
}
|
||||
|
||||
var commentPrefix = regexp.MustCompile(`(^|\n)\s*#`)
|
||||
|
||||
func NewXMLEncoder(indent int, prefs XmlPreferences) Encoder {
|
||||
var indentString = ""
|
||||
|
||||
@ -37,7 +37,7 @@ func (e *xmlEncoder) PrintDocumentSeparator(writer io.Writer) error {
|
||||
}
|
||||
|
||||
func (e *xmlEncoder) PrintLeadingContent(writer io.Writer, content string) error {
|
||||
e.leadingContent = commentPrefix.ReplaceAllString(content, "\n")
|
||||
e.leadingContent = content
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -46,12 +46,39 @@ func (e *xmlEncoder) Encode(writer io.Writer, node *yaml.Node) error {
|
||||
// hack so we can manually add newlines to procInst and directives
|
||||
e.writer = writer
|
||||
encoder.Indent("", e.indentString)
|
||||
var newLine xml.CharData = []byte("\n")
|
||||
|
||||
mapNode := unwrapDoc(node)
|
||||
if mapNode.Tag == "!!map" {
|
||||
// make sure <?xml .. ?> processing instructions are encoded first
|
||||
for i := 0; i < len(mapNode.Content); i += 2 {
|
||||
key := mapNode.Content[i]
|
||||
value := mapNode.Content[i+1]
|
||||
|
||||
if key.Value == (e.prefs.ProcInstPrefix + "xml") {
|
||||
name := strings.Replace(key.Value, e.prefs.ProcInstPrefix, "", 1)
|
||||
procInst := xml.ProcInst{Target: name, Inst: []byte(value.Value)}
|
||||
if err := encoder.EncodeToken(procInst); err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := e.writer.Write([]byte("\n")); err != nil {
|
||||
log.Warning("Unable to write newline, skipping: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if e.leadingContent != "" {
|
||||
|
||||
// remove first and last newlines if present
|
||||
err := e.encodeComment(encoder, e.leadingContent)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = encoder.EncodeToken(newLine)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
switch node.Kind {
|
||||
@ -87,29 +114,12 @@ func (e *xmlEncoder) Encode(writer io.Writer, node *yaml.Node) error {
|
||||
default:
|
||||
return fmt.Errorf("unsupported type %v", node.Tag)
|
||||
}
|
||||
var charData xml.CharData = []byte("\n")
|
||||
return encoder.EncodeToken(charData)
|
||||
|
||||
return encoder.EncodeToken(newLine)
|
||||
|
||||
}
|
||||
|
||||
func (e *xmlEncoder) encodeTopLevelMap(encoder *xml.Encoder, node *yaml.Node) error {
|
||||
// make sure <?xml .. ?> processing instructions are encoded first
|
||||
for i := 0; i < len(node.Content); i += 2 {
|
||||
key := node.Content[i]
|
||||
value := node.Content[i+1]
|
||||
|
||||
if key.Value == (e.prefs.ProcInstPrefix + "xml") {
|
||||
name := strings.Replace(key.Value, e.prefs.ProcInstPrefix, "", 1)
|
||||
procInst := xml.ProcInst{Target: name, Inst: []byte(value.Value)}
|
||||
if err := encoder.EncodeToken(procInst); err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := e.writer.Write([]byte("\n")); err != nil {
|
||||
log.Warning("Unable to write newline, skipping: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
err := e.encodeComment(encoder, headAndLineComment(node))
|
||||
if err != nil {
|
||||
return err
|
||||
@ -124,6 +134,13 @@ func (e *xmlEncoder) encodeTopLevelMap(encoder *xml.Encoder, node *yaml.Node) er
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if headAndLineComment(key) != "" {
|
||||
var newLine xml.CharData = []byte("\n")
|
||||
err = encoder.EncodeToken(newLine)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if key.Value == (e.prefs.ProcInstPrefix + "xml") {
|
||||
// dont double process these.
|
||||
@ -204,13 +221,34 @@ func (e *xmlEncoder) doEncode(encoder *xml.Encoder, node *yaml.Node, start xml.S
|
||||
return fmt.Errorf("unsupported type %v", node.Tag)
|
||||
}
|
||||
|
||||
var xmlEncodeMultilineCommentRegex = regexp.MustCompile(`(^|\n) *# ?(.*)`)
|
||||
var xmlEncodeSingleLineCommentRegex = regexp.MustCompile(`^\s*#(.*)\n?`)
|
||||
var chompRegexp = regexp.MustCompile(`\n$`)
|
||||
|
||||
func (e *xmlEncoder) encodeComment(encoder *xml.Encoder, commentStr string) error {
|
||||
if commentStr != "" {
|
||||
log.Debugf("encoding comment %v", commentStr)
|
||||
if !strings.HasSuffix(commentStr, " ") {
|
||||
commentStr = commentStr + " "
|
||||
log.Debugf("got comment [%v]", commentStr)
|
||||
// multi line string
|
||||
if len(commentStr) > 2 && strings.Contains(commentStr[1:len(commentStr)-1], "\n") {
|
||||
commentStr = chompRegexp.ReplaceAllString(commentStr, "")
|
||||
log.Debugf("chompRegexp [%v]", commentStr)
|
||||
commentStr = xmlEncodeMultilineCommentRegex.ReplaceAllString(commentStr, "$1$2")
|
||||
log.Debugf("processed multine [%v]", commentStr)
|
||||
// if the first line is non blank, add a space
|
||||
if commentStr[0] != '\n' && commentStr[0] != ' ' {
|
||||
commentStr = " " + commentStr
|
||||
}
|
||||
|
||||
} else {
|
||||
commentStr = xmlEncodeSingleLineCommentRegex.ReplaceAllString(commentStr, "$1")
|
||||
}
|
||||
|
||||
if !strings.HasSuffix(commentStr, " ") && !strings.HasSuffix(commentStr, "\n") {
|
||||
commentStr = commentStr + " "
|
||||
log.Debugf("added suffix [%v]", commentStr)
|
||||
}
|
||||
log.Debugf("encoding comment [%v]", commentStr)
|
||||
|
||||
var comment xml.Comment = []byte(commentStr)
|
||||
err := encoder.EncodeToken(comment)
|
||||
if err != nil {
|
||||
|
@ -1,3 +1,5 @@
|
||||
//go:build !yq_nojson
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
|
@ -37,6 +37,7 @@ var participleYqRules = []*participleYqRule{
|
||||
|
||||
{"MapValues", `map_?values`, opToken(mapValuesOpType), 0},
|
||||
simpleOp("map", mapOpType),
|
||||
simpleOp("filter", filterOpType),
|
||||
simpleOp("pick", pickOpType),
|
||||
|
||||
{"FlattenWithDepth", `flatten\([0-9]+\)`, flattenWithDepth(), 0},
|
||||
@ -45,8 +46,11 @@ var participleYqRules = []*participleYqRule{
|
||||
simpleOp("format_datetime", formatDateTimeOpType),
|
||||
simpleOp("now", nowOpType),
|
||||
simpleOp("tz", tzOpType),
|
||||
simpleOp("from_?unix", fromUnixOpType),
|
||||
simpleOp("to_?unix", toUnixOpType),
|
||||
simpleOp("with_dtf", withDtFormatOpType),
|
||||
simpleOp("error", errorOpType),
|
||||
simpleOp("shuffle", shuffleOpType),
|
||||
simpleOp("sortKeys", sortKeysOpType),
|
||||
simpleOp("sort_?keys", sortKeysOpType),
|
||||
|
||||
@ -78,6 +82,10 @@ var participleYqRules = []*participleYqRule{
|
||||
{"Base64d", `@base64d`, decodeOp(Base64InputFormat), 0},
|
||||
{"Base64", `@base64`, encodeWithIndent(Base64OutputFormat, 0), 0},
|
||||
|
||||
{"Urid", `@urid`, decodeOp(UriInputFormat), 0},
|
||||
{"Uri", `@uri`, encodeWithIndent(UriOutputFormat, 0), 0},
|
||||
{"SH", `@sh`, encodeWithIndent(ShOutputFormat, 0), 0},
|
||||
|
||||
{"LoadXML", `load_?xml|xml_?load`, loadOp(NewXMLDecoder(ConfiguredXMLPreferences), false), 0},
|
||||
|
||||
{"LoadBase64", `load_?base64`, loadOp(NewBase64Decoder(), false), 0},
|
||||
|
@ -61,7 +61,7 @@ var participleLexerScenarios = []participleLexerScenario{
|
||||
TokenType: operationToken,
|
||||
Operation: &Operation{
|
||||
OperationType: valueOpType,
|
||||
Value: 3,
|
||||
Value: int64(3),
|
||||
StringValue: "3",
|
||||
CandidateNode: &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
@ -103,7 +103,7 @@ var participleLexerScenarios = []participleLexerScenario{
|
||||
TokenType: operationToken,
|
||||
Operation: &Operation{
|
||||
OperationType: valueOpType,
|
||||
Value: -2,
|
||||
Value: int64(-2),
|
||||
StringValue: "-2",
|
||||
CandidateNode: &CandidateNode{
|
||||
Node: &yaml.Node{
|
||||
|
@ -53,7 +53,7 @@ var subtractAssignOpType = &operationType{Type: "SUBTRACT_ASSIGN", NumArgs: 2, P
|
||||
|
||||
var assignAttributesOpType = &operationType{Type: "ASSIGN_ATTRIBUTES", NumArgs: 2, Precedence: 40, Handler: assignAttributesOperator}
|
||||
var assignStyleOpType = &operationType{Type: "ASSIGN_STYLE", NumArgs: 2, Precedence: 40, Handler: assignStyleOperator}
|
||||
var assignVariableOpType = &operationType{Type: "ASSIGN_VARIABLE", NumArgs: 2, Precedence: 40, Handler: assignVariableOperator}
|
||||
var assignVariableOpType = &operationType{Type: "ASSIGN_VARIABLE", NumArgs: 2, Precedence: 40, Handler: useWithPipe}
|
||||
var assignTagOpType = &operationType{Type: "ASSIGN_TAG", NumArgs: 2, Precedence: 40, Handler: assignTagOperator}
|
||||
var assignCommentOpType = &operationType{Type: "ASSIGN_COMMENT", NumArgs: 2, Precedence: 40, Handler: assignCommentsOperator}
|
||||
var assignAnchorOpType = &operationType{Type: "ASSIGN_ANCHOR", NumArgs: 2, Precedence: 40, Handler: assignAnchorOperator}
|
||||
@ -84,6 +84,7 @@ var expressionOpType = &operationType{Type: "EXP", NumArgs: 0, Precedence: 50, H
|
||||
|
||||
var collectOpType = &operationType{Type: "COLLECT", NumArgs: 1, Precedence: 50, Handler: collectOperator}
|
||||
var mapOpType = &operationType{Type: "MAP", NumArgs: 1, Precedence: 50, Handler: mapOperator}
|
||||
var filterOpType = &operationType{Type: "FILTER", NumArgs: 1, Precedence: 50, Handler: filterOperator}
|
||||
var errorOpType = &operationType{Type: "ERROR", NumArgs: 1, Precedence: 50, Handler: errorOperator}
|
||||
var pickOpType = &operationType{Type: "PICK", NumArgs: 1, Precedence: 50, Handler: pickOperator}
|
||||
var evalOpType = &operationType{Type: "EVAL", NumArgs: 1, Precedence: 50, Handler: evalOperator}
|
||||
@ -93,6 +94,8 @@ var formatDateTimeOpType = &operationType{Type: "FORMAT_DATE_TIME", NumArgs: 1,
|
||||
var withDtFormatOpType = &operationType{Type: "WITH_DATE_TIME_FORMAT", NumArgs: 1, Precedence: 50, Handler: withDateTimeFormat}
|
||||
var nowOpType = &operationType{Type: "NOW", NumArgs: 0, Precedence: 50, Handler: nowOp}
|
||||
var tzOpType = &operationType{Type: "TIMEZONE", NumArgs: 1, Precedence: 50, Handler: tzOp}
|
||||
var fromUnixOpType = &operationType{Type: "FROM_UNIX", NumArgs: 0, Precedence: 50, Handler: fromUnixOp}
|
||||
var toUnixOpType = &operationType{Type: "TO_UNIX", NumArgs: 0, Precedence: 50, Handler: toUnixOp}
|
||||
|
||||
var encodeOpType = &operationType{Type: "ENCODE", NumArgs: 0, Precedence: 50, Handler: encodeOperator}
|
||||
var decodeOpType = &operationType{Type: "DECODE", NumArgs: 0, Precedence: 50, Handler: decodeOperator}
|
||||
@ -133,6 +136,7 @@ var explodeOpType = &operationType{Type: "EXPLODE", NumArgs: 1, Precedence: 50,
|
||||
var sortByOpType = &operationType{Type: "SORT_BY", NumArgs: 1, Precedence: 50, Handler: sortByOperator}
|
||||
var reverseOpType = &operationType{Type: "REVERSE", NumArgs: 0, Precedence: 50, Handler: reverseOperator}
|
||||
var sortOpType = &operationType{Type: "SORT", NumArgs: 0, Precedence: 50, Handler: sortOperator}
|
||||
var shuffleOpType = &operationType{Type: "SHUFFLE", NumArgs: 0, Precedence: 50, Handler: shuffleOperator}
|
||||
|
||||
var sortKeysOpType = &operationType{Type: "SORT_KEYS", NumArgs: 1, Precedence: 50, Handler: sortKeysOperator}
|
||||
|
||||
@ -155,6 +159,7 @@ var traverseArrayOpType = &operationType{Type: "TRAVERSE_ARRAY", NumArgs: 2, Pre
|
||||
|
||||
var selfReferenceOpType = &operationType{Type: "SELF", NumArgs: 0, Precedence: 55, Handler: selfOperator}
|
||||
var valueOpType = &operationType{Type: "VALUE", NumArgs: 0, Precedence: 50, Handler: valueOperator}
|
||||
var referenceOpType = &operationType{Type: "REF", NumArgs: 0, Precedence: 50, Handler: referenceOperator}
|
||||
var envOpType = &operationType{Type: "ENV", NumArgs: 0, Precedence: 50, Handler: envOperator}
|
||||
var notOpType = &operationType{Type: "NOT", NumArgs: 0, Precedence: 50, Handler: notOperator}
|
||||
var emptyOpType = &operationType{Type: "EMPTY", Precedence: 50, Handler: emptyOperator}
|
||||
@ -334,7 +339,7 @@ func deepCloneWithOptions(node *yaml.Node, cloneContent bool) *yaml.Node {
|
||||
Tag: node.Tag,
|
||||
Value: node.Value,
|
||||
Anchor: node.Anchor,
|
||||
Alias: deepClone(node.Alias),
|
||||
Alias: node.Alias,
|
||||
HeadComment: node.HeadComment,
|
||||
LineComment: node.LineComment,
|
||||
FootComment: node.FootComment,
|
||||
@ -416,6 +421,7 @@ func footComment(node *yaml.Node) string {
|
||||
}
|
||||
|
||||
func createValueOperation(value interface{}, stringValue string) *Operation {
|
||||
log.Debug("creating value op for string %v", stringValue)
|
||||
var node = createScalarNode(value, stringValue)
|
||||
|
||||
return &Operation{
|
||||
|
11  pkg/yqlib/no_json.go  Normal file
@ -0,0 +1,11 @@
//go:build yq_nojson

package yqlib

func NewJSONDecoder() Decoder {
	return nil
}

func NewJSONEncoder(indent int, colorise bool, unwrapScalar bool) Encoder {
	return nil
}
11  pkg/yqlib/no_xml.go  Normal file
@ -0,0 +1,11 @@
//go:build yq_noxml

package yqlib

func NewXMLDecoder(prefs XmlPreferences) Decoder {
	return nil
}

func NewXMLEncoder(indent int, prefs XmlPreferences) Encoder {
	return nil
}
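Both stubs are guarded by build tags, so JSON and XML support are compiled out only when the matching tag is supplied; the nil constructors are what callers later check for (see the "no support for input format" handling further down). As a rough illustration only, assuming the standard Go toolchain flags rather than any documented yq build target:

go build -tags yq_nojson,yq_noxml ./...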
@ -34,7 +34,7 @@ var addOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `{}`,
|
||||
expression: "(.a + .b) as $x",
|
||||
expression: "(.a + .b) as $x | .",
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{}\n",
|
||||
},
|
||||
@ -101,6 +101,14 @@ var addOperatorScenarios = []expressionScenario{
|
||||
"D0, P[], (doc)::a: ['dog', 'cat']\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Prepend to existing array",
|
||||
document: `a: [dog]`,
|
||||
expression: `.a = ["cat"] + .a`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: [cat, dog]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "Concatenate to existing array",
|
||||
|
@ -16,7 +16,7 @@ var alternativeOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
expression: `(.b // "hello") as $x`,
|
||||
expression: `(.b // "hello") as $x | .`,
|
||||
document: `a: bridge`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: bridge\n",
|
||||
|
@ -4,6 +4,11 @@ import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
var mergeAnchorAssign = `a: &a
|
||||
x: OriginalValue
|
||||
b:
|
||||
<<: *a`
|
||||
|
||||
var assignOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
description: "Create yaml file",
|
||||
@ -20,6 +25,14 @@ var assignOperatorScenarios = []expressionScenario{
|
||||
"D0, P[], (doc)::a: null\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
document: mergeAnchorAssign,
|
||||
expression: `.c = .b | .a.x = "ModifiedValue" | explode(.)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a:\n x: ModifiedValue\nb:\n x: ModifiedValue\nc:\n x: ModifiedValue\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
document: "{}",
|
||||
|
@ -102,7 +102,7 @@ var booleanOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `[{pet: cat}]`,
|
||||
expression: `any_c(.name == "harry") as $c`,
|
||||
expression: `any_c(.name == "harry") as $c | .`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::[{pet: cat}]\n",
|
||||
},
|
||||
@ -110,9 +110,17 @@ var booleanOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `[{pet: cat}]`,
|
||||
expression: `all_c(.name == "harry") as $c`,
|
||||
expression: `any_c(.name == "harry") as $c | $c`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::[{pet: cat}]\n",
|
||||
"D0, P[], (!!bool)::false\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `[{pet: cat}]`,
|
||||
expression: `all_c(.name == "harry") as $c | $c`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!bool)::false\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -185,7 +193,7 @@ var booleanOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `{}`,
|
||||
expression: `(.a.b or .c) as $x`,
|
||||
expression: `(.a.b or .c) as $x | .`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{}\n",
|
||||
},
|
||||
@ -193,7 +201,7 @@ var booleanOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `{}`,
|
||||
expression: `(.a.b and .c) as $x`,
|
||||
expression: `(.a.b and .c) as $x | .`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{}\n",
|
||||
},
|
||||
|
@ -3,7 +3,6 @@ package yqlib
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
@ -80,7 +79,7 @@ func compareScalars(context Context, prefs compareTypePref, lhs *yaml.Node, rhs
|
||||
|
||||
isDateTime := lhs.Tag == "!!timestamp"
|
||||
// if the lhs is a string, it might be a timestamp in a custom format.
|
||||
if lhsTag == "!!str" && context.GetDateTimeLayout() != time.RFC3339 {
|
||||
if lhsTag == "!!str" {
|
||||
_, err := parseDateTime(context.GetDateTimeLayout(), lhs.Value)
|
||||
isDateTime = err == nil
|
||||
}
|
@ -4,6 +4,7 @@ import (
	"container/list"
	"errors"
	"fmt"
	"strconv"
	"time"

	"gopkg.in/yaml.v3"
@ -129,3 +130,69 @@ func tzOp(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode)

	return context.ChildContext(results), nil
}

func parseUnixTime(unixTime string) (time.Time, error) {
	seconds, err := strconv.ParseFloat(unixTime, 64)

	if err != nil {
		return time.Now(), err
	}

	return time.UnixMilli(int64(seconds * 1000)), nil
}

func fromUnixOp(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {

	var results = list.New()

	for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
		candidate := el.Value.(*CandidateNode)

		actualTag := guessTagFromCustomType(candidate.Node)

		if actualTag != "!!int" && guessTagFromCustomType(candidate.Node) != "!!float" {
			return Context{}, fmt.Errorf("from_unix only works on numbers, found %v instead", candidate.Node.Tag)
		}

		parsedTime, err := parseUnixTime(candidate.Node.Value)
		if err != nil {
			return Context{}, err
		}

		node := &yaml.Node{
			Kind: yaml.ScalarNode,
			Tag: "!!timestamp",
			Value: parsedTime.Format(time.RFC3339),
		}

		results.PushBack(candidate.CreateReplacement(node))
	}

	return context.ChildContext(results), nil
}

func toUnixOp(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {

	layout := context.GetDateTimeLayout()

	var results = list.New()

	for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
		candidate := el.Value.(*CandidateNode)

		parsedTime, err := parseDateTime(layout, candidate.Node.Value)
		if err != nil {
			return Context{}, fmt.Errorf("could not parse datetime of [%v] using layout [%v]: %w", candidate.GetNicePath(), layout, err)
		}

		node := &yaml.Node{
			Kind: yaml.ScalarNode,
			Tag: "!!int",
			Value: fmt.Sprintf("%v", parsedTime.Unix()),
		}

		results.PushBack(candidate.CreateReplacement(node))
	}

	return context.ChildContext(results), nil
}
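As a standalone sketch of how the fractional-second handling in parseUnixTime above behaves (the helper body is copied from the hunk; the sample epoch value is the one used in the test scenarios below, everything else is illustrative):

package main

import (
	"fmt"
	"strconv"
	"time"
)

// same logic as the parseUnixTime helper above: fractional epoch seconds
// survive because the input is parsed as a float and converted via UnixMilli.
func parseUnixTime(unixTime string) (time.Time, error) {
	seconds, err := strconv.ParseFloat(unixTime, 64)
	if err != nil {
		return time.Now(), err
	}
	return time.UnixMilli(int64(seconds * 1000)), nil
}

func main() {
	t, _ := parseUnixTime("1675301929.5")
	fmt.Println(t.UTC().Format(time.RFC3339Nano)) // 2023-02-02T01:38:49.5Z
}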
@ -39,6 +39,22 @@ var dateTimeOperatorScenarios = []expressionScenario{
			"D0, P[], (doc)::a: cool\nupdated: 2021-05-19T01:02:03Z\n",
		},
	},
	{
		description: "From Unix",
		subdescription: "Converts from unix time. Note, you don't have to pipe through the tz operator :)",
		expression: `1675301929 | from_unix | tz("UTC")`,
		expected: []string{
			"D0, P[], (!!timestamp)::2023-02-02T01:38:49Z\n",
		},
	},
	{
		description: "To Unix",
		subdescription: "Converts to unix time",
		expression: `now | to_unix`,
		expected: []string{
			"D0, P[], (!!int)::1621386123\n",
		},
	},
	{
		description: "Timezone: from standard RFC3339 format",
		subdescription: "Returns a new datetime in the specified timezone. Specify standard IANA Time Zone format or 'utc', 'local'. When given a single parameter, this assumes the datetime is in RFC3339 format.",
@ -4,6 +4,7 @@ import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"container/list"
|
||||
"errors"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
@ -26,6 +27,10 @@ func configureEncoder(format PrinterOutputFormat, indent int) Encoder {
|
||||
return NewXMLEncoder(indent, ConfiguredXMLPreferences)
|
||||
case Base64OutputFormat:
|
||||
return NewBase64Encoder()
|
||||
case UriOutputFormat:
|
||||
return NewUriEncoder()
|
||||
case ShOutputFormat:
|
||||
return NewShEncoder()
|
||||
}
|
||||
panic("invalid encoder")
|
||||
}
|
||||
@ -35,6 +40,9 @@ func encodeToString(candidate *CandidateNode, prefs encoderPreferences) (string,
|
||||
log.Debug("printing with indent: %v", prefs.indent)
|
||||
|
||||
encoder := configureEncoder(prefs.format, prefs.indent)
|
||||
if encoder == nil {
|
||||
return "", errors.New("no support for output format")
|
||||
}
|
||||
|
||||
printer := NewPrinter(encoder, NewSinglePrinterWriter(bufio.NewWriter(&output)))
|
||||
err := printer.PrintResults(candidate.AsList())
|
||||
@ -94,13 +102,11 @@ type decoderPreferences struct {
|
||||
format InputFormat
|
||||
}
|
||||
|
||||
/* takes a string and decodes it back into an object */
|
||||
func decodeOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
|
||||
preferences := expressionNode.Operation.Preferences.(decoderPreferences)
|
||||
|
||||
func createDecoder(format InputFormat) Decoder {
|
||||
var decoder Decoder
|
||||
switch preferences.format {
|
||||
switch format {
|
||||
case JsonInputFormat:
|
||||
decoder = NewJSONDecoder()
|
||||
case YamlInputFormat:
|
||||
decoder = NewYamlDecoder(ConfiguredYamlPreferences)
|
||||
case XMLInputFormat:
|
||||
@ -113,6 +119,20 @@ func decodeOperator(d *dataTreeNavigator, context Context, expressionNode *Expre
|
||||
decoder = NewCSVObjectDecoder(',')
|
||||
case TSVObjectInputFormat:
|
||||
decoder = NewCSVObjectDecoder('\t')
|
||||
case UriInputFormat:
|
||||
decoder = NewUriDecoder()
|
||||
}
|
||||
return decoder
|
||||
}
|
||||
|
||||
/* takes a string and decodes it back into an object */
|
||||
func decodeOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
|
||||
preferences := expressionNode.Operation.Preferences.(decoderPreferences)
|
||||
|
||||
decoder := createDecoder(preferences.format)
|
||||
if decoder == nil {
|
||||
return Context{}, errors.New("no support for input format")
|
||||
}
|
||||
|
||||
var results = list.New()
|
||||
|
@ -8,15 +8,17 @@ var prefix = "D0, P[], (doc)::a:\n cool:\n bob: dylan\n"
|
||||
|
||||
var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
description: "Encode value as json string",
|
||||
document: `{a: {cool: "thing"}}`,
|
||||
expression: `.b = (.a | to_json)`,
|
||||
requiresFormat: "json",
|
||||
description: "Encode value as json string",
|
||||
document: `{a: {cool: "thing"}}`,
|
||||
expression: `.b = (.a | to_json)`,
|
||||
expected: []string{
|
||||
`D0, P[], (doc)::{a: {cool: "thing"}, b: "{\n \"cool\": \"thing\"\n}\n"}
|
||||
`,
|
||||
},
|
||||
},
|
||||
{
|
||||
requiresFormat: "json",
|
||||
description: "Encode value as json string, on one line",
|
||||
subdescription: "Pass in a 0 indent to print json on a single line.",
|
||||
document: `{a: {cool: "thing"}}`,
|
||||
@ -27,6 +29,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
},
|
||||
{
|
||||
requiresFormat: "json",
|
||||
description: "Encode value as json string, on one line shorthand",
|
||||
subdescription: "Pass in a 0 indent to print json on a single line.",
|
||||
document: `{a: {cool: "thing"}}`,
|
||||
@ -37,6 +40,7 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
},
|
||||
{
|
||||
requiresFormat: "json",
|
||||
description: "Decode a json encoded string",
|
||||
subdescription: "Keep in mind JSON is a subset of YAML. If you want idiomatic yaml, pipe through the style operator to clear out the JSON styling.",
|
||||
document: `a: '{"cool":"thing"}'`,
|
||||
@ -193,33 +197,37 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Encode value as xml string",
|
||||
document: `{a: {cool: {foo: "bar", +@id: hi}}}`,
|
||||
expression: `.a | to_xml`,
|
||||
requiresFormat: "xml",
|
||||
description: "Encode value as xml string",
|
||||
document: `{a: {cool: {foo: "bar", +@id: hi}}}`,
|
||||
expression: `.a | to_xml`,
|
||||
expected: []string{
|
||||
"D0, P[a], (!!str)::<cool id=\"hi\">\n <foo>bar</foo>\n</cool>\n\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Encode value as xml string on a single line",
|
||||
document: `{a: {cool: {foo: "bar", +@id: hi}}}`,
|
||||
expression: `.a | @xml`,
|
||||
requiresFormat: "xml",
|
||||
description: "Encode value as xml string on a single line",
|
||||
document: `{a: {cool: {foo: "bar", +@id: hi}}}`,
|
||||
expression: `.a | @xml`,
|
||||
expected: []string{
|
||||
"D0, P[a], (!!str)::<cool id=\"hi\"><foo>bar</foo></cool>\n\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Encode value as xml string with custom indentation",
|
||||
document: `{a: {cool: {foo: "bar", +@id: hi}}}`,
|
||||
expression: `{"cat": .a | to_xml(1)}`,
|
||||
requiresFormat: "xml",
|
||||
description: "Encode value as xml string with custom indentation",
|
||||
document: `{a: {cool: {foo: "bar", +@id: hi}}}`,
|
||||
expression: `{"cat": .a | to_xml(1)}`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!map)::cat: |\n <cool id=\"hi\">\n <foo>bar</foo>\n </cool>\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Decode a xml encoded string",
|
||||
document: `a: "<foo>bar</foo>"`,
|
||||
expression: `.b = (.a | from_xml)`,
|
||||
requiresFormat: "xml",
|
||||
description: "Decode a xml encoded string",
|
||||
document: `a: "<foo>bar</foo>"`,
|
||||
expression: `.b = (.a | from_xml)`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: \"<foo>bar</foo>\"\nb:\n foo: bar\n",
|
||||
},
|
||||
@ -241,6 +249,41 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
"D0, P[], (!!str)::YTogYXBwbGUK\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Encode a string to uri",
|
||||
document: "coolData: this has & special () characters *",
|
||||
expression: ".coolData | @uri",
|
||||
expected: []string{
|
||||
"D0, P[coolData], (!!str)::this+has+%26+special+%28%29+characters+%2A\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Decode a URI to a string",
|
||||
document: "this+has+%26+special+%28%29+characters+%2A",
|
||||
expression: "@urid",
|
||||
expected: []string{
|
||||
"D0, P[], (!!str)::this has & special () characters *\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Encode a string to sh",
|
||||
subdescription: "Sh/Bash friendly string",
|
||||
document: "coolData: strings with spaces and a 'quote'",
|
||||
expression: ".coolData | @sh",
|
||||
expected: []string{
|
||||
"D0, P[coolData], (!!str)::strings' with spaces and a '\\'quote\\'\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Encode a string to sh",
|
||||
subdescription: "Watch out for stray '' (empty strings)",
|
||||
document: "coolData: \"'starts, contains more '' and ends with a quote'\"",
|
||||
expression: ".coolData | @sh",
|
||||
expected: []string{
|
||||
"D0, P[coolData], (!!str)::\\'starts,' contains more '\\'\\'' and ends with a quote'\\'\n",
|
||||
},
|
||||
skipDoc: true,
|
||||
},
|
||||
{
|
||||
description: "Decode a base64 encoded string",
|
||||
subdescription: "Decoded data is assumed to be a string.",
|
||||
@ -268,9 +311,10 @@ var encoderDecoderOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "empty xml decode",
|
||||
skipDoc: true,
|
||||
expression: `"" | @xmld`,
|
||||
requiresFormat: "xml",
|
||||
description: "empty xml decode",
|
||||
skipDoc: true,
|
||||
expression: `"" | @xmld`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!null)::\n",
|
||||
},
|
||||
|
@ -47,7 +47,7 @@ var equalsOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
document: "{}",
|
||||
expression: "(.a == .b) as $x",
|
||||
expression: "(.a == .b) as $x | .",
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{}\n",
|
||||
},
|
||||
@ -63,7 +63,7 @@ var equalsOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
document: "{}",
|
||||
expression: "(.a != .b) as $x",
|
||||
expression: "(.a != .b) as $x | .",
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{}\n",
|
||||
},
|
||||
|
32  pkg/yqlib/operator_filter.go  Normal file
@ -0,0 +1,32 @@
package yqlib

import (
	"container/list"
)

func filterOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
	log.Debugf("-- filterOperation")
	var results = list.New()

	for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
		candidate := el.Value.(*CandidateNode)
		children := context.SingleChildContext(candidate)
		splatted, err := splat(children, traversePreferences{})
		if err != nil {
			return Context{}, err
		}
		filtered, err := selectOperator(d, splatted, expressionNode)
		if err != nil {
			return Context{}, err
		}

		selfExpression := &ExpressionNode{Operation: &Operation{OperationType: selfReferenceOpType}}
		collected, err := collectTogether(d, filtered, selfExpression)
		if err != nil {
			return Context{}, err
		}
		collected.Node.Style = unwrapDoc(candidate.Node).Style
		results.PushBack(collected)
	}
	return context.ChildContext(results), nil
}
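In effect this is the splat / select / collect pipeline that map(select(...)) already performs; a rough equivalence for readers, written by hand rather than captured from a run:

// filter(. < 3)        given [1,2,3]  =>  [1, 2]
// map(select(. < 3))   same result, spelled with the pre-existing operators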
49  pkg/yqlib/operator_filter_test.go  Normal file
@ -0,0 +1,49 @@
package yqlib

import (
	"testing"
)

var filterOperatorScenarios = []expressionScenario{
	{
		description: "Filter array",
		document: `[1,2,3]`,
		expression: `filter(. < 3)`,
		expected: []string{
			"D0, P[], (!!seq)::[1, 2]\n",
		},
	},
	{
		skipDoc: true,
		document: `[1,2,3]`,
		expression: `filter(. > 1)`,
		expected: []string{
			"D0, P[], (!!seq)::[2, 3]\n",
		},
	},
	{
		skipDoc: true,
		description: "Filter array to empty",
		document: `[1,2,3]`,
		expression: `filter(. > 4)`,
		expected: []string{
			"D0, P[], (!!seq)::[]\n",
		},
	},
	{
		skipDoc: true,
		description: "Filter empty array",
		document: `[]`,
		expression: `filter(. > 1)`,
		expected: []string{
			"D0, P[], (!!seq)::[]\n",
		},
	},
}

func TestFilterOperatorScenarios(t *testing.T) {
	for _, tt := range filterOperatorScenarios {
		testScenario(t, &tt)
	}
	documentOperatorScenarios(t, "filter", filterOperatorScenarios)
}
@ -16,7 +16,7 @@ var hasOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `a: hello`,
|
||||
expression: `has(.b) as $c`,
|
||||
expression: `has(.b) as $c | .`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::a: hello\n",
|
||||
},
|
||||
|
@ -34,6 +34,9 @@ func loadString(filename string) (*CandidateNode, error) {
|
||||
}
|
||||
|
||||
func loadYaml(filename string, decoder Decoder) (*CandidateNode, error) {
|
||||
if decoder == nil {
|
||||
return nil, fmt.Errorf("could not load %s", filename)
|
||||
}
|
||||
|
||||
file, err := os.Open(filename) // #nosec
|
||||
if err != nil {
|
||||
|
@ -74,9 +74,10 @@ var loadScenarios = []expressionScenario{
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Load from XML",
|
||||
document: "cool: things",
|
||||
expression: `.more_stuff = load_xml("../../examples/small.xml")`,
|
||||
requiresFormat: "xml",
|
||||
description: "Load from XML",
|
||||
document: "cool: things",
|
||||
expression: `.more_stuff = load_xml("../../examples/small.xml")`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::cool: things\nmore_stuff:\n this: is some xml\n",
|
||||
},
|
||||
|
@ -60,8 +60,14 @@ func multiply(preferences multiplyPreferences) func(d *dataTreeNavigator, contex
		log.Debugf("Multiplying LHS: %v", lhs.Node.Tag)
		log.Debugf("- RHS: %v", rhs.Node.Tag)

		if lhs.Node.Kind == yaml.MappingNode && rhs.Node.Kind == yaml.MappingNode ||
			(lhs.Node.Kind == yaml.SequenceNode && rhs.Node.Kind == yaml.SequenceNode) {
		if rhs.Node.Tag == "!!null" {
			return lhs.Copy()
		}

		if (lhs.Node.Kind == yaml.MappingNode && rhs.Node.Kind == yaml.MappingNode) ||
			(lhs.Node.Tag == "!!null" && rhs.Node.Kind == yaml.MappingNode) ||
			(lhs.Node.Kind == yaml.SequenceNode && rhs.Node.Kind == yaml.SequenceNode) ||
			(lhs.Node.Tag == "!!null" && rhs.Node.Kind == yaml.SequenceNode) {
			var newBlank = CandidateNode{}
			err := copier.CopyWithOption(&newBlank, lhs, copier.Option{IgnoreEmpty: true, DeepCopy: true})
			if err != nil {
@ -189,7 +195,7 @@ func applyAssignment(d *dataTreeNavigator, context Context, pathIndexToStartFrom
	} else {
		log.Debugf("merge - assignmentOp := &Operation{OperationType: assignAttributesOpType}")
	}
	rhsOp := &Operation{OperationType: valueOpType, CandidateNode: rhs}
	rhsOp := &Operation{OperationType: referenceOpType, CandidateNode: rhs}

	assignmentOpNode := &ExpressionNode{
		Operation: assignmentOp,
@ -579,6 +579,41 @@ var multiplyOperatorScenarios = []expressionScenario{
			"D0, P[], (doc)::a: {a: apple is included, b: cool.}\n",
		},
	},
	{
		description: "Merging a null with a map",
		expression: `null * {"some": "thing"}`,
		expected: []string{
			"D0, P[], (!!map)::some: thing\n",
		},
	},
	{
		description: "Merging a map with null",
		expression: `{"some": "thing"} * null`,
		expected: []string{
			"D0, P[], (!!map)::some: thing\n",
		},
	},
	{
		description: "Merging an null with an array",
		expression: `null * ["some"]`,
		expected: []string{
			"D0, P[], (!!seq)::- some\n",
		},
	},
	{
		description: "Merging an array with null",
		expression: `["some"] * null`,
		expected: []string{
			"D0, P[], (!!seq)::- some\n",
		},
	},
	{
		skipDoc: true,
		expression: `null * null`,
		expected: []string{
			"D0, P[], (!!null)::null\n",
		},
	},
}

func TestMultiplyOperatorScenarios(t *testing.T) {
@ -81,7 +81,7 @@ func setPathOperator(d *dataTreeNavigator, context Context, expressionNode *Expr
|
||||
return Context{}, fmt.Errorf("SETPATH: expected single value on RHS but found %v", targetContextValue.MatchingNodes.Len())
|
||||
}
|
||||
|
||||
rhsOp := &Operation{OperationType: valueOpType, CandidateNode: targetContextValue.MatchingNodes.Front().Value.(*CandidateNode)}
|
||||
rhsOp := &Operation{OperationType: referenceOpType, CandidateNode: targetContextValue.MatchingNodes.Front().Value.(*CandidateNode)}
|
||||
|
||||
assignmentOpNode := &ExpressionNode{
|
||||
Operation: assignmentOp,
|
||||
|
@ -2,9 +2,9 @@ package yqlib
|
||||
|
||||
func pipeOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
|
||||
//lhs may update the variable context, we should pass that into the RHS
|
||||
// BUT we still return the original context back (see jq)
|
||||
// https://stedolan.github.io/jq/manual/#Variable/SymbolicBindingOperator:...as$identifier|...
|
||||
if expressionNode.LHS.Operation.OperationType == assignVariableOpType {
|
||||
return variableLoop(d, context, expressionNode)
|
||||
}
|
||||
|
||||
lhs, err := d.GetMatchingNodes(context, expressionNode.LHS)
|
||||
if err != nil {
|
||||
|
37  pkg/yqlib/operator_shuffle.go  Normal file
@ -0,0 +1,37 @@
package yqlib

import (
	"container/list"
	"fmt"
	"math/rand"

	yaml "gopkg.in/yaml.v3"
)

func shuffleOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {

	// ignore CWE-338 gosec issue of not using crypto/rand
	// this is just to shuffle an array rather generating a
	// secret or something that needs proper rand.
	myRand := rand.New(rand.NewSource(Now().UnixNano())) // #nosec

	results := list.New()

	for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
		candidate := el.Value.(*CandidateNode)

		candidateNode := unwrapDoc(candidate.Node)

		if candidateNode.Kind != yaml.SequenceNode {
			return context, fmt.Errorf("node at path [%v] is not an array (it's a %v)", candidate.GetNicePath(), candidate.GetNiceTag())
		}

		result := deepClone(candidateNode)

		a := result.Content

		myRand.Shuffle(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
		results.PushBack(candidate.CreateReplacement(result))
	}
	return context.ChildContext(results), nil
}
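The heavy lifting is the standard library's Fisher-Yates shuffle; a minimal standalone illustration of the same call outside yqlib (time.Now is used here purely for the sketch, the operator above goes through its own Now() hook):

package main

import (
	"fmt"
	"math/rand"
	"time"
)

func main() {
	a := []string{"1", "2", "3", "4", "5"}
	// not crypto-grade randomness, and it does not need to be: we only reorder an array
	r := rand.New(rand.NewSource(time.Now().UnixNano())) // #nosec
	r.Shuffle(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	fmt.Println(a) // e.g. [5 2 4 1 3]
}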
30  pkg/yqlib/operator_shuffle_test.go  Normal file
@ -0,0 +1,30 @@
package yqlib

import "testing"

var shuffleOperatorScenarios = []expressionScenario{
	{
		description: "Shuffle array",
		document: "[1, 2, 3, 4, 5]",
		expression: `shuffle`,
		expected: []string{
			"D0, P[], (!!seq)::[5, 2, 4, 1, 3]\n",
		},
	},

	{
		description: "Shuffle array in place",
		document: "cool: [1, 2, 3, 4, 5]",
		expression: `.cool |= shuffle`,
		expected: []string{
			"D0, P[], (doc)::cool: [5, 2, 4, 1, 3]\n",
		},
	},
}

func TestShuffleByOperatorScenarios(t *testing.T) {
	for _, tt := range shuffleOperatorScenarios {
		testScenario(t, &tt)
	}
	documentOperatorScenarios(t, "shuffle", shuffleOperatorScenarios)
}
@ -48,6 +48,8 @@ func sliceArrayOperator(d *dataTreeNavigator, context Context, expressionNode *E
|
||||
relativeSecondNumber := secondNumber
|
||||
if relativeSecondNumber < 0 {
|
||||
relativeSecondNumber = len(original.Content) + secondNumber
|
||||
} else if relativeSecondNumber > len(original.Content) {
|
||||
relativeSecondNumber = len(original.Content)
|
||||
}
|
||||
|
||||
log.Debug("calculateIndicesToTraverse: slice from %v to %v", relativeFirstNumber, relativeSecondNumber)
|
||||
|
@ -55,6 +55,24 @@ var sliceArrayScenarios = []expressionScenario{
|
||||
"D0, P[1], (!!seq)::- banana\n- grape\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "second index beyond array clamps",
|
||||
document: `[cat]`,
|
||||
expression: `.[:3]`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!seq)::- cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "first index beyond array returns nothing",
|
||||
document: `[cat]`,
|
||||
expression: `.[3:]`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!seq)::[]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `[[cat, dog, frog, cow], [apple, banana, grape, mango]]`,
|
||||
|
@ -42,18 +42,7 @@ func sortByOperator(d *dataTreeNavigator, context Context, expressionNode *Expre
|
||||
return Context{}, err
|
||||
}
|
||||
|
||||
nodeToCompare := &yaml.Node{Kind: yaml.ScalarNode, Tag: "!!null"}
|
||||
if compareContext.MatchingNodes.Len() > 0 {
|
||||
nodeToCompare = compareContext.MatchingNodes.Front().Value.(*CandidateNode).Node
|
||||
}
|
||||
|
||||
log.Debug("going to compare %v by %v", NodeToString(candidate.CreateReplacement(originalNode)), NodeToString(candidate.CreateReplacement(nodeToCompare)))
|
||||
|
||||
sortableArray[i] = sortableNode{Node: originalNode, NodeToCompare: nodeToCompare, dateTimeLayout: context.GetDateTimeLayout()}
|
||||
|
||||
if nodeToCompare.Kind != yaml.ScalarNode {
|
||||
return Context{}, fmt.Errorf("sort only works for scalars, got %v", nodeToCompare.Tag)
|
||||
}
|
||||
sortableArray[i] = sortableNode{Node: originalNode, CompareContext: compareContext, dateTimeLayout: context.GetDateTimeLayout()}
|
||||
|
||||
}
|
||||
|
||||
@ -72,7 +61,7 @@ func sortByOperator(d *dataTreeNavigator, context Context, expressionNode *Expre
|
||||
|
||||
type sortableNode struct {
|
||||
Node *yaml.Node
|
||||
NodeToCompare *yaml.Node
|
||||
CompareContext Context
|
||||
dateTimeLayout string
|
||||
}
|
||||
|
||||
@ -82,9 +71,28 @@ func (a sortableNodeArray) Len() int { return len(a) }
|
||||
func (a sortableNodeArray) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
||||
|
||||
func (a sortableNodeArray) Less(i, j int) bool {
|
||||
lhs := a[i].NodeToCompare
|
||||
rhs := a[j].NodeToCompare
|
||||
lhsContext := a[i].CompareContext
|
||||
rhsContext := a[j].CompareContext
|
||||
|
||||
rhsEl := rhsContext.MatchingNodes.Front()
|
||||
for lhsEl := lhsContext.MatchingNodes.Front(); lhsEl != nil && rhsEl != nil; lhsEl = lhsEl.Next() {
|
||||
lhs := lhsEl.Value.(*CandidateNode)
|
||||
rhs := rhsEl.Value.(*CandidateNode)
|
||||
|
||||
result := a.compare(lhs.Node, rhs.Node, a[i].dateTimeLayout)
|
||||
|
||||
if result < 0 {
|
||||
return true
|
||||
} else if result > 0 {
|
||||
return false
|
||||
}
|
||||
|
||||
rhsEl = rhsEl.Next()
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (a sortableNodeArray) compare(lhs *yaml.Node, rhs *yaml.Node, dateTimeLayout string) int {
|
||||
lhsTag := lhs.Tag
|
||||
rhsTag := rhs.Tag
|
||||
|
||||
@ -99,7 +107,7 @@ func (a sortableNodeArray) Less(i, j int) bool {
|
||||
}
|
||||
|
||||
isDateTime := lhsTag == "!!timestamp" && rhsTag == "!!timestamp"
|
||||
layout := a[i].dateTimeLayout
|
||||
layout := dateTimeLayout
|
||||
// if the lhs is a string, it might be a timestamp in a custom format.
|
||||
if lhsTag == "!!str" && layout != time.RFC3339 {
|
||||
_, errLhs := parseDateTime(layout, lhs.Value)
|
||||
@ -108,13 +116,13 @@ func (a sortableNodeArray) Less(i, j int) bool {
|
||||
}
|
||||
|
||||
if lhsTag == "!!null" && rhsTag != "!!null" {
|
||||
return true
|
||||
return -1
|
||||
} else if lhsTag != "!!null" && rhsTag == "!!null" {
|
||||
return false
|
||||
return 1
|
||||
} else if lhsTag == "!!bool" && rhsTag != "!!bool" {
|
||||
return true
|
||||
return -1
|
||||
} else if lhsTag != "!!bool" && rhsTag == "!!bool" {
|
||||
return false
|
||||
return 1
|
||||
} else if lhsTag == "!!bool" && rhsTag == "!!bool" {
|
||||
lhsTruthy, err := isTruthyNode(lhs)
|
||||
if err != nil {
|
||||
@ -125,20 +133,30 @@ func (a sortableNodeArray) Less(i, j int) bool {
|
||||
if err != nil {
|
||||
panic(fmt.Errorf("could not parse %v as boolean: %w", rhs.Value, err))
|
||||
}
|
||||
|
||||
return !lhsTruthy && rhsTruthy
|
||||
if lhsTruthy == rhsTruthy {
|
||||
return 0
|
||||
} else if lhsTruthy {
|
||||
return 1
|
||||
}
|
||||
return -1
|
||||
} else if isDateTime {
|
||||
lhsTime, err := parseDateTime(layout, lhs.Value)
|
||||
if err != nil {
|
||||
log.Warningf("Could not parse time %v with layout %v for sort, sorting by string instead: %w", lhs.Value, layout, err)
|
||||
return strings.Compare(lhs.Value, rhs.Value) < 0
|
||||
return strings.Compare(lhs.Value, rhs.Value)
|
||||
}
|
||||
rhsTime, err := parseDateTime(layout, rhs.Value)
|
||||
if err != nil {
|
||||
log.Warningf("Could not parse time %v with layout %v for sort, sorting by string instead: %w", rhs.Value, layout, err)
|
||||
return strings.Compare(lhs.Value, rhs.Value) < 0
|
||||
return strings.Compare(lhs.Value, rhs.Value)
|
||||
}
|
||||
return lhsTime.Before(rhsTime)
|
||||
if lhsTime.Equal(rhsTime) {
|
||||
return 0
|
||||
} else if lhsTime.Before(rhsTime) {
|
||||
return -1
|
||||
}
|
||||
|
||||
return 1
|
||||
} else if lhsTag == "!!int" && rhsTag == "!!int" {
|
||||
_, lhsNum, err := parseInt64(lhs.Value)
|
||||
if err != nil {
|
||||
@ -148,7 +166,7 @@ func (a sortableNodeArray) Less(i, j int) bool {
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return lhsNum < rhsNum
|
||||
return int(lhsNum - rhsNum)
|
||||
} else if (lhsTag == "!!int" || lhsTag == "!!float") && (rhsTag == "!!int" || rhsTag == "!!float") {
|
||||
lhsNum, err := strconv.ParseFloat(lhs.Value, 64)
|
||||
if err != nil {
|
||||
@ -158,8 +176,14 @@ func (a sortableNodeArray) Less(i, j int) bool {
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return lhsNum < rhsNum
|
||||
if lhsNum == rhsNum {
|
||||
return 0
|
||||
} else if lhsNum < rhsNum {
|
||||
return -1
|
||||
}
|
||||
|
||||
return 1
|
||||
}
|
||||
|
||||
return strings.Compare(lhs.Value, rhs.Value) < 0
|
||||
return strings.Compare(lhs.Value, rhs.Value)
|
||||
}
|
||||
|
@ -11,6 +11,32 @@ var sortByOperatorScenarios = []expressionScenario{
|
||||
"D0, P[], (!!seq)::[{a: apple}, {a: banana}, {a: cat}]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Sort by multiple fields",
|
||||
document: "[{a: dog},{a: cat, b: banana},{a: cat, b: apple}]",
|
||||
expression: `sort_by(.a, .b)`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!seq)::[{a: cat, b: apple}, {a: cat, b: banana}, {a: dog}]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Sort by multiple fields",
|
||||
skipDoc: true,
|
||||
document: "[{a: dog, b: good},{a: cat, c: things},{a: cat, b: apple}]",
|
||||
expression: `sort_by(.a, .b)`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!seq)::[{a: cat, c: things}, {a: cat, b: apple}, {a: dog, b: good}]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Sort by multiple fields",
|
||||
skipDoc: true,
|
||||
document: "[{a: dog, b: 0.1},{a: cat, b: 0.01},{a: cat, b: 0.001}]",
|
||||
expression: `sort_by(.a, .b)`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!seq)::[{a: cat, b: 0.001}, {a: cat, b: 0.01}, {a: dog, b: 0.1}]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Sort descending by string field",
|
||||
subdescription: "Use sort with reverse to sort in descending order.",
|
||||
|
@ -8,7 +8,7 @@ var subtractOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `{}`,
|
||||
expression: "(.a - .b) as $x",
|
||||
expression: "(.a - .b) as $x | .",
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{}\n",
|
||||
},
|
||||
|
@ -151,7 +151,7 @@ var traversePathOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `c: dog`,
|
||||
expression: `.[.a.b] as $x`,
|
||||
expression: `.[.a.b] as $x | .`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::c: dog\n",
|
||||
},
|
||||
|
@ -8,7 +8,7 @@ var unionOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
document: "{}",
|
||||
expression: `(.a, .b.c) as $x`,
|
||||
expression: `(.a, .b.c) as $x | .`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{}\n",
|
||||
},
|
||||
|
@ -1,6 +1,30 @@
package yqlib

import "container/list"

func referenceOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
	return context.SingleChildContext(expressionNode.Operation.CandidateNode), nil
}

func valueOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
	log.Debug("value = %v", expressionNode.Operation.CandidateNode.Node.Value)
	return context.SingleChildContext(expressionNode.Operation.CandidateNode), nil
	if context.MatchingNodes.Len() == 0 {
		clone, err := expressionNode.Operation.CandidateNode.Copy()
		if err != nil {
			return Context{}, err
		}
		return context.SingleChildContext(clone), nil
	}

	var results = list.New()

	for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
		clone, err := expressionNode.Operation.CandidateNode.Copy()
		if err != nil {
			return Context{}, err
		}
		results.PushBack(clone)
	}

	return context.ChildContext(results), nil
}
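A short note on what this change means in practice (behaviour inferred from the hunk above and the value-operator test additions that follow):

// A literal now produces one copied result per matching input instead of a
// single shared node, so given [1,2,3]:
//   .[] | "foo"   =>  "foo", "foo", "foo"   (three independent nodes)
// which is what lets [.[] | "foo"] | .[0] = "cat" modify only the first entry.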
@ -12,6 +12,28 @@ var valueOperatorScenarios = []expressionScenario{
|
||||
"D0, P[], (!!int)::1\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
document: `[1,2,3]`,
|
||||
expression: `.[] | "foo"`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!str)::foo\n",
|
||||
"D0, P[], (!!str)::foo\n",
|
||||
"D0, P[], (!!str)::foo\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
document: `[1,2,3]`,
|
||||
expression: `[.[] | "foo"] | .[0] = "cat"`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!seq)::- cat\n- foo\n- foo\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
expression: `"foo"`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!str)::foo\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
document: ``,
|
||||
expression: `0x9f`,
|
||||
|
@ -19,24 +19,82 @@ type assignVarPreferences struct {
|
||||
IsReference bool
|
||||
}
|
||||
|
||||
func assignVariableOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
lhs, err := d.GetMatchingNodes(context.ReadOnlyClone(), expressionNode.LHS)
|
||||
func useWithPipe(d *dataTreeNavigator, context Context, originalExp *ExpressionNode) (Context, error) {
|
||||
return Context{}, fmt.Errorf("must use variable with a pipe, e.g. `exp as $x | ...`")
|
||||
}
|
||||
|
||||
// variables are like loops in jq
|
||||
// https://stedolan.github.io/jq/manual/#Variable
|
||||
func variableLoop(d *dataTreeNavigator, context Context, originalExp *ExpressionNode) (Context, error) {
|
||||
log.Debug("variable loop!")
|
||||
results := list.New()
|
||||
var evaluateAllTogether = true
|
||||
for matchEl := context.MatchingNodes.Front(); matchEl != nil; matchEl = matchEl.Next() {
|
||||
evaluateAllTogether = evaluateAllTogether && matchEl.Value.(*CandidateNode).EvaluateTogether
|
||||
if !evaluateAllTogether {
|
||||
break
|
||||
}
|
||||
}
|
||||
if evaluateAllTogether {
|
||||
return variableLoopSingleChild(d, context, originalExp)
|
||||
}
|
||||
|
||||
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
result, err := variableLoopSingleChild(d, context.SingleChildContext(el.Value.(*CandidateNode)), originalExp)
|
||||
if err != nil {
|
||||
return Context{}, err
|
||||
}
|
||||
results.PushBackList(result.MatchingNodes)
|
||||
}
|
||||
return context.ChildContext(results), nil
|
||||
}
|
||||
|
||||
func variableLoopSingleChild(d *dataTreeNavigator, context Context, originalExp *ExpressionNode) (Context, error) {
|
||||
|
||||
variableExp := originalExp.LHS
|
||||
lhs, err := d.GetMatchingNodes(context.ReadOnlyClone(), variableExp.LHS)
|
||||
if err != nil {
|
||||
return Context{}, err
|
||||
}
|
||||
if expressionNode.RHS.Operation.OperationType.Type != "GET_VARIABLE" {
|
||||
if variableExp.RHS.Operation.OperationType.Type != "GET_VARIABLE" {
|
||||
return Context{}, fmt.Errorf("RHS of 'as' operator must be a variable name e.g. $foo")
|
||||
}
|
||||
variableName := expressionNode.RHS.Operation.StringValue
|
||||
variableName := variableExp.RHS.Operation.StringValue
|
||||
|
||||
prefs := expressionNode.Operation.Preferences.(assignVarPreferences)
|
||||
prefs := variableExp.Operation.Preferences.(assignVarPreferences)
|
||||
|
||||
var variableValue *list.List
|
||||
if prefs.IsReference {
|
||||
variableValue = lhs.MatchingNodes
|
||||
} else {
|
||||
variableValue = lhs.DeepClone().MatchingNodes
|
||||
results := list.New()
|
||||
|
||||
// now we loop over lhs, set variable to each result and calculate originalExp.Rhs
|
||||
for el := lhs.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
log.Debug("PROCESSING VARIABLE: ", NodeToString(el.Value.(*CandidateNode)))
|
||||
var variableValue = list.New()
|
||||
if prefs.IsReference {
|
||||
variableValue.PushBack(el.Value)
|
||||
} else {
|
||||
candidateCopy, err := el.Value.(*CandidateNode).Copy()
|
||||
if err != nil {
|
||||
return Context{}, err
|
||||
}
|
||||
variableValue.PushBack(candidateCopy)
|
||||
}
|
||||
newContext := context.ChildContext(context.MatchingNodes)
|
||||
newContext.SetVariable(variableName, variableValue)
|
||||
|
||||
rhs, err := d.GetMatchingNodes(newContext, originalExp.RHS)
|
||||
|
||||
if err != nil {
|
||||
return Context{}, err
|
||||
}
|
||||
log.Debug("PROCESSING VARIABLE DONE, got back: ", rhs.MatchingNodes.Len())
|
||||
results.PushBackList(rhs.MatchingNodes)
|
||||
}
|
||||
context.SetVariable(variableName, variableValue)
|
||||
return context, nil
|
||||
|
||||
// if there is no LHS - then I guess we just calculate originalExp.Rhs
|
||||
if lhs.MatchingNodes.Len() == 0 {
|
||||
return d.GetMatchingNodes(context, originalExp.RHS)
|
||||
}
|
||||
|
||||
return context.ChildContext(results), nil
|
||||
|
||||
}
|
||||
|
@ -8,15 +8,21 @@ var variableOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `{}`,
|
||||
expression: `.a.b as $foo`,
|
||||
expression: `.a.b as $foo | .`,
|
||||
expected: []string{
|
||||
"D0, P[], (doc)::{}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `{}`,
|
||||
expression: `.a.b as $foo`,
|
||||
expectedError: "must use variable with a pipe, e.g. `exp as $x | ...`",
|
||||
},
|
||||
{
|
||||
document: "a: [cat]",
|
||||
skipDoc: true,
|
||||
expression: "(.[] | {.name: .}) as $item",
|
||||
expression: "(.[] | {.name: .}) as $item | .",
|
||||
expectedError: `cannot index array with 'name' (strconv.ParseInt: parsing "name": invalid syntax)`,
|
||||
},
|
||||
{
|
||||
@ -36,6 +42,22 @@ var variableOperatorScenarios = []expressionScenario{
|
||||
"D0, P[1], (!!str)::dog\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `[1, 2]`,
|
||||
expression: `.[] | . as $f | select($f == 2)`,
|
||||
expected: []string{
|
||||
"D0, P[1], (!!int)::2\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
document: `[1, 2]`,
|
||||
expression: `[.[] | . as $f | $f + 1]`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!seq)::- 2\n- 3\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Using variables as a lookup",
|
||||
subdescription: "Example taken from [jq](https://stedolan.github.io/jq/manual/#Variable/SymbolicBindingOperator:...as$identifier|...)",
|
||||
|
@ -41,9 +41,9 @@ func compoundAssignFunction(d *dataTreeNavigator, context Context, expressionNod
|
||||
if err != nil {
|
||||
return Context{}, err
|
||||
}
|
||||
valueCopyExp := &ExpressionNode{Operation: &Operation{OperationType: valueOpType, CandidateNode: clone}}
|
||||
valueCopyExp := &ExpressionNode{Operation: &Operation{OperationType: referenceOpType, CandidateNode: clone}}
|
||||
|
||||
valueExpression := &ExpressionNode{Operation: &Operation{OperationType: valueOpType, CandidateNode: candidate}}
|
||||
valueExpression := &ExpressionNode{Operation: &Operation{OperationType: referenceOpType, CandidateNode: candidate}}
|
||||
|
||||
assignmentOpNode := &ExpressionNode{Operation: assignmentOp, LHS: valueExpression, RHS: calculation(valueCopyExp, expressionNode.RHS)}
|
||||
|
||||
|
@ -21,6 +21,13 @@ var compareOperatorScenarios = []expressionScenario{
|
||||
"D0, P[k], (!!bool)::true\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
expression: `"2022-01-30T15:53:09Z" > "2020-01-30T15:53:09Z"`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!bool)::true\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
document: "a: 5\nb: 4",
|
@ -28,6 +28,7 @@ type expressionScenario struct {
|
||||
skipDoc bool
|
||||
expectedError string
|
||||
dontFormatInputForDoc bool // dont format input doc for documentation generation
|
||||
requiresFormat string
|
||||
}
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
@ -103,6 +104,23 @@ func testScenario(t *testing.T, s *expressionScenario) {
|
||||
return
|
||||
}
|
||||
|
||||
if s.requiresFormat != "" {
|
||||
format := s.requiresFormat
|
||||
inputFormat, err := InputFormatFromString(format)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if decoder := createDecoder(inputFormat); decoder == nil {
|
||||
t.Skipf("no support for %s input format", format)
|
||||
}
|
||||
outputFormat, err := OutputFormatFromString(format)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if encoder := configureEncoder(outputFormat, 4); encoder == nil {
|
||||
t.Skipf("no support for %s output format", format)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
t.Error(fmt.Errorf("%w: %v: %v", err, s.description, s.expression))
|
||||
return
|
||||
|
14  pkg/yqlib/ordered_map.go  Normal file
@ -0,0 +1,14 @@
package yqlib

// orderedMap allows to marshal and unmarshal JSON and YAML values keeping the
// order of keys and values in a map or an object.
type orderedMap struct {
	// if this is an object, kv != nil. If this is not an object, kv == nil.
	kv []orderedMapKV
	altVal interface{}
}

type orderedMapKV struct {
	K string
	V orderedMap
}
83  pkg/yqlib/ordered_map_json.go  Normal file
@ -0,0 +1,83 @@
package yqlib

import (
	"bytes"
	"encoding/json"
	"errors"
	"io"
)

func (o *orderedMap) UnmarshalJSON(data []byte) error {
	switch data[0] {
	case '{':
		// initialise so that even if the object is empty it is not nil
		o.kv = []orderedMapKV{}

		// create decoder
		dec := json.NewDecoder(bytes.NewReader(data))
		_, err := dec.Token() // open object
		if err != nil {
			return err
		}

		// cycle through k/v
		var tok json.Token
		for tok, err = dec.Token(); err == nil; tok, err = dec.Token() {
			// we can expect two types: string or Delim. Delim automatically means
			// that it is the closing bracket of the object, whereas string means
			// that there is another key.
			if _, ok := tok.(json.Delim); ok {
				break
			}
			kv := orderedMapKV{
				K: tok.(string),
			}
			if err := dec.Decode(&kv.V); err != nil {
				return err
			}
			o.kv = append(o.kv, kv)
		}
		// unexpected error
		if err != nil && !errors.Is(err, io.EOF) {
			return err
		}
		return nil
	case '[':
		var res []*orderedMap
		if err := json.Unmarshal(data, &res); err != nil {
			return err
		}
		o.altVal = res
		o.kv = nil
		return nil
	}

	return json.Unmarshal(data, &o.altVal)
}

func (o orderedMap) MarshalJSON() ([]byte, error) {
	buf := new(bytes.Buffer)
	enc := json.NewEncoder(buf)
	enc.SetEscapeHTML(false) // do not escape html chars e.g. &, <, >
	if o.kv == nil {
		if err := enc.Encode(o.altVal); err != nil {
			return nil, err
		}
		return buf.Bytes(), nil
	}
	buf.WriteByte('{')
	for idx, el := range o.kv {
		if err := enc.Encode(el.K); err != nil {
			return nil, err
		}
		buf.WriteByte(':')
		if err := enc.Encode(el.V); err != nil {
			return nil, err
		}
		if idx != len(o.kv)-1 {
			buf.WriteByte(',')
		}
	}
	buf.WriteByte('}')
	return buf.Bytes(), nil
}
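A hedged sketch of what the UnmarshalJSON above produces for a small object (values written out by hand, not captured from a run; numbers land in altVal as float64 because they go through encoding/json):

// {"b": 1, "a": 2} decodes into roughly:
om := orderedMap{
	kv: []orderedMapKV{
		{K: "b", V: orderedMap{altVal: float64(1)}},
		{K: "a", V: orderedMap{altVal: float64(2)}},
	},
}
// key order "b" then "a" is preserved, which is the whole point of the type.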
79  pkg/yqlib/ordered_map_yaml.go  Normal file
@ -0,0 +1,79 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
func (o *orderedMap) UnmarshalYAML(node *yaml.Node) error {
|
||||
switch node.Kind {
|
||||
case yaml.DocumentNode:
|
||||
if len(node.Content) == 0 {
|
||||
return nil
|
||||
}
|
||||
return o.UnmarshalYAML(node.Content[0])
|
||||
case yaml.AliasNode:
|
||||
return o.UnmarshalYAML(node.Alias)
|
||||
case yaml.ScalarNode:
|
||||
return node.Decode(&o.altVal)
|
||||
case yaml.MappingNode:
|
||||
// set kv to non-nil
|
||||
o.kv = []orderedMapKV{}
|
||||
for i := 0; i < len(node.Content); i += 2 {
|
||||
var key string
|
||||
var val orderedMap
|
||||
if err := node.Content[i].Decode(&key); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := node.Content[i+1].Decode(&val); err != nil {
|
||||
return err
|
||||
}
|
||||
o.kv = append(o.kv, orderedMapKV{
|
||||
K: key,
|
||||
V: val,
|
||||
})
|
||||
}
|
||||
return nil
|
||||
case yaml.SequenceNode:
|
||||
// note that this has to be a pointer, so that nulls can be represented.
|
||||
var res []*orderedMap
|
||||
if err := node.Decode(&res); err != nil {
|
||||
return err
|
||||
}
|
||||
o.altVal = res
|
||||
o.kv = nil
|
||||
return nil
|
||||
case 0:
|
||||
// null
|
||||
o.kv = nil
|
||||
o.altVal = nil
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("orderedMap: invalid yaml node")
|
||||
}
|
||||
}
|
||||
|
||||
func (o *orderedMap) MarshalYAML() (interface{}, error) {
|
||||
// fast path: kv is nil, use altVal
|
||||
if o.kv == nil {
|
||||
return o.altVal, nil
|
||||
}
|
||||
content := make([]*yaml.Node, 0, len(o.kv)*2)
|
||||
for _, val := range o.kv {
|
||||
n := new(yaml.Node)
|
||||
if err := n.Encode(val.V); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
content = append(content, &yaml.Node{
|
||||
Kind: yaml.ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: val.K,
|
||||
}, n)
|
||||
}
|
||||
return &yaml.Node{
|
||||
Kind: yaml.MappingNode,
|
||||
Tag: "!!map",
|
||||
Content: content,
|
||||
}, nil
|
||||
}
|
@ -27,15 +27,17 @@ const (
	TSVOutputFormat
	XMLOutputFormat
	Base64OutputFormat
	UriOutputFormat
	ShOutputFormat
)

func OutputFormatFromString(format string) (PrinterOutputFormat, error) {
	switch format {
	case "yaml", "y":
	case "yaml", "y", "yml":
		return YamlOutputFormat, nil
	case "json", "j":
		return JSONOutputFormat, nil
	case "props", "p":
	case "props", "p", "properties":
		return PropsOutputFormat, nil
	case "csv", "c":
		return CSVOutputFormat, nil
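Illustrative fragment only (variables deliberately unused), showing the aliases the switch above now accepts:

yamlFormat, _ := OutputFormatFromString("yml")         // same as "yaml" / "y"
propsFormat, _ := OutputFormatFromString("properties") // same as "props" / "p"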
@ -314,7 +314,11 @@ func TestPrinterMultipleDocsJson(t *testing.T) {
|
||||
var writer = bufio.NewWriter(&output)
|
||||
// note printDocSeparators is true, it should still not print document separators
|
||||
// when outputing JSON.
|
||||
printer := NewPrinter(NewJSONEncoder(0, false, false), NewSinglePrinterWriter(writer))
|
||||
encoder := NewJSONEncoder(0, false, false)
|
||||
if encoder == nil {
|
||||
t.Skipf("no support for %s output format", "json")
|
||||
}
|
||||
printer := NewPrinter(encoder, NewSinglePrinterWriter(writer))
|
||||
|
||||
inputs, err := readDocuments(strings.NewReader(multiDocSample), "sample.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
|
||||
if err != nil {
|
||||
|
@ -34,6 +34,10 @@ func (w *writeInPlaceHandlerImpl) CreateTempFile() (*os.File, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err = changeOwner(info, file); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
log.Debug("WriteInPlaceHandler: writing to tempfile: %v", file.Name())
|
||||
w.tempFile = file
|
||||
return file, err
|
||||
|
@ -1,3 +1,5 @@
//go:build !yq_noxml

package yqlib

import (
@ -8,6 +10,29 @@ import (
	"github.com/mikefarah/yq/v4/test"
)

const yamlInputWithProcInstAndHeadComment = `# cats
+p_xml: version="1.0"
this: is some xml`

const expectedXmlProcInstAndHeadComment = `<?xml version="1.0"?>
<!-- cats -->
<this>is some xml</this>
`

const xmlProcInstAndHeadCommentBlock = `<?xml version="1.0"?>
<!--
cats
-->
<this>is some xml</this>
`

const expectedYamlProcInstAndHeadCommentBlock = `#
# cats
#
+p_xml: version="1.0"
this: is some xml
`

const inputXMLWithComments = `
<!-- before cat -->
<cat>
@ -126,7 +151,8 @@ cat:
# after cat
`

const expectedRoundtripXMLWithComments = `<!-- before cat --><cat><!-- in cat before -->
const expectedRoundtripXMLWithComments = `<!-- before cat -->
<cat><!-- in cat before -->
<x>3<!-- multi
line comment
for x --></x><!-- before y -->
@ -137,8 +163,10 @@ in d before -->
</cat><!-- after cat -->
`

const yamlWithComments = `# header comment
const yamlWithComments = `#
# header comment
# above_cat
#
cat: # inline_cat
# above_array
array: # inline_array
@ -149,9 +177,11 @@ cat: # inline_cat
`

const expectedXMLWithComments = `<!--
header comment
above_cat
--><!-- inline_cat --><cat><!-- above_array inline_array -->
header comment
above_cat
-->
<!-- inline_cat -->
<cat><!-- above_array inline_array -->
<array>val1<!-- inline_val1 --></array>
<array><!-- above_val2 -->val2<!-- inline_val2 --></array>
</cat><!-- below_cat -->
@ -264,6 +294,41 @@ var xmlScenarios = []formatScenario{
		input: "<root><cats><cat>quick</cat><cat>soft</cat><!-- kitty_comment--><cat>squishy</cat></cats></root>",
		expected: "root:\n cats:\n cat:\n - quick\n - soft\n # kitty_comment\n\n - squishy\n",
	},
	{
		description: "ProcInst with head comment",
		skipDoc: true,
		input: yamlInputWithProcInstAndHeadComment,
		expected: expectedXmlProcInstAndHeadComment,
		scenarioType: "encode",
	},
	{
		description: "ProcInst with head comment round trip",
		skipDoc: true,
		input: expectedXmlProcInstAndHeadComment,
		expected: expectedXmlProcInstAndHeadComment,
		scenarioType: "roundtrip",
	},
	{
		description: "ProcInst with block head comment to yaml",
		skipDoc: true,
		input: xmlProcInstAndHeadCommentBlock,
		expected: expectedYamlProcInstAndHeadCommentBlock,
		scenarioType: "decode",
	},
	{
		description: "ProcInst with block head comment from yaml",
		skipDoc: true,
		input: expectedYamlProcInstAndHeadCommentBlock,
		expected: xmlProcInstAndHeadCommentBlock,
		scenarioType: "encode",
	},
	{
		description: "ProcInst with head comment round trip block",
		skipDoc: true,
		input: xmlProcInstAndHeadCommentBlock,
		expected: xmlProcInstAndHeadCommentBlock,
		scenarioType: "roundtrip",
	},
	{
		description: "Parse xml: simple",
		subdescription: "Notice how all the values are strings, see the next example on how you can fix that.",
@ -464,6 +529,41 @@ var xmlScenarios = []formatScenario{
		expected: "<cat name=\"tiger\">cool</cat>\n",
		scenarioType: "encode",
	},
	{
		description: "round trip multiline 1",
		skipDoc: true,
		input: "<x><!-- cats --></x>\n",
		expected: "<x><!-- cats --></x>\n",
		scenarioType: "roundtrip",
	},
	{
		description: "round trip multiline 2",
		skipDoc: true,
		input: "<x><!--\n cats\n --></x>\n",
		expected: "<x><!--\ncats\n--></x>\n",
		scenarioType: "roundtrip",
	},
	{
		description: "round trip multiline 3",
		skipDoc: true,
		input: "<x><!--\n\tcats\n --></x>\n",
		expected: "<x><!--\n\tcats\n--></x>\n",
		scenarioType: "roundtrip",
	},
	{
		description: "round trip multiline 4",
		skipDoc: true,
		input: "<x><!--\n\tcats\n\tdogs\n--></x>\n",
		expected: "<x><!--\n\tcats\n\tdogs\n--></x>\n",
		scenarioType: "roundtrip",
	},
	{
		description: "round trip multiline 5",
		skipDoc: true, // pity spaces aren't kept atm.
		input: "<x><!--\ncats\ndogs\n--></x>\n",
		expected: "<x><!--\ncats\ndogs\n--></x>\n",
		scenarioType: "roundtrip",
	},
	{
		description: "Encode xml: comments",
		subdescription: "A best attempt is made to copy comments to xml.",
@ -1,3 +1,30 @@
4.31.2:
- Fixed variable handling #1458, #1566
- Fixed merged anchor reference problem #1482
- Fixed xml encoding of ProcInst #1563, improved XML comment handling
- Allow build without json and xml support (#1556) Thanks @afbjorklund
- Bumped dependencies

4.31.1:
- Added shuffle command #1503
- Added ability to sort by multiple fields #1541
- Added @sh encoder #1526
- Added @uri/@urid encoder/decoder #1529
- Fixed date comparison with string date #1537
- Added from_unix/to_unix Operators
- Bumped dependency versions

4.30.8:
- Log info message when unable to chown file in linux (e.g. snap confinement) #1521


4.30.7:
- Fixed bug in splice operator #1511
- Fixed value operator bug #1515
- Fixed handling of merging null #1501
- Ownership of file now maintained in linux (thanks @vaguecoder) #1473
- Bumped dependency versions

4.30.6:
- Fixed xml comment in array of scalars #1465
- Include blank new lines in leading header preprocessing #1462
@ -1,5 +1,5 @@
#!/bin/sh
set -ex
go mod download golang.org/x/tools@latest
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.49.0
wget -O- -nv https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s v2.13.1
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.51.1
wget -O- -nv https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s
@ -1,5 +1,5 @@
name: yq
version: 'v4.30.6'
version: 'v4.31.2'
summary: A lightweight and portable command-line YAML processor
description: |
  The aim of the project is to be the jq or sed of yaml files.
@ -13,7 +13,7 @@ apps:
parts:
  yq:
    plugin: go
    go-channel: 1.19/stable
    go-channel: 1.20/stable
    source: .
    source-type: git
    go-importpath: github.com/mikefarah/yq