Mirror of https://github.com/mikefarah/yq.git (synced 2025-01-27 08:55:37 +00:00)

Commit b2a3cb47a7: Merging from master

.github/workflows/codeql.yml (vendored): 2 changes

@@ -38,7 +38,7 @@ jobs:
 steps:
 - name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4

 # Initializes the CodeQL tools for scanning.
 - name: Initialize CodeQL

.github/workflows/docker-release.yml (vendored): 6 changes

@@ -14,16 +14,16 @@ jobs:
 IMAGE_NAME: mikefarah/yq
 runs-on: ubuntu-latest
 steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4

 - name: Set up QEMU
- uses: docker/setup-qemu-action@v2
+ uses: docker/setup-qemu-action@v3
 with:
 platforms: all

 - name: Set up Docker Buildx
 id: buildx
- uses: docker/setup-buildx-action@v2
+ uses: docker/setup-buildx-action@v3
 with:
 version: latest

.github/workflows/go.yml (vendored): 2 changes

@@ -17,7 +17,7 @@ jobs:
 id: go

 - name: Check out code into the Go module directory
- uses: actions/checkout@v3
+ uses: actions/checkout@v4

 - name: Get dependencies
 run: |

.github/workflows/release.yml (vendored): 2 changes

@@ -9,7 +9,7 @@ jobs:
 publishGitRelease:
 runs-on: ubuntu-latest
 steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
 - uses: actions/setup-go@v4
 with:
 go-version: '^1.20'

.github/workflows/snap-release.yml (vendored): 2 changes

@@ -12,7 +12,7 @@ jobs:
 environment: snap
 runs-on: ubuntu-latest
 steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
 - uses: snapcore/action-build@v1
 id: build
 - uses: snapcore/action-publish@v1

.github/workflows/test-yq.yml (vendored): 2 changes

@@ -13,7 +13,7 @@ jobs:
 runs-on: ubuntu-latest

 steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
 - name: Get test
 id: get_value
 uses: mikefarah/yq@master

@@ -1,4 +1,4 @@
- FROM golang:1.20.5 as builder
+ FROM golang:1.21.1 as builder

 WORKDIR /go/src/mikefarah/yq

@@ -1,4 +1,4 @@
- FROM golang:1.20.5
+ FROM golang:1.21.1

 COPY scripts/devtools.sh /opt/devtools.sh

Makefile: 6 changes

@@ -84,8 +84,12 @@ format: vendor
 ${ENGINERUN} bash ./scripts/format.sh

+ .PHONY: spelling
+ spelling: format
+ ${ENGINERUN} bash ./scripts/spelling.sh
+
 .PHONY: secure
- secure: format
+ secure: spelling
 ${ENGINERUN} bash ./scripts/secure.sh

 .PHONY: check

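The new `spelling` target runs `scripts/spelling.sh` through the same `${ENGINERUN}` wrapper as the other checks, and `secure` now depends on it. A minimal invocation sketch, assuming the repository's usual containerised make workflow and that `scripts/spelling.sh` is added elsewhere in this commit (it is not shown here):

```bash
# Sketch only: run the new cspell-backed spell check target added above.
# "make spelling" runs its "format" prerequisite first, then executes
# scripts/spelling.sh via the ${ENGINERUN} wrapper.
make spelling
```
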
README.md: 31 changes

@@ -19,7 +19,7 @@ Pipe from STDIN:
 yq '.a.b[0].c' < file.yaml
 ```

- Update a yaml file, inplace
+ Update a yaml file, in place
 ```bash
 yq -i '.a.b[0].c = "cool"' file.yaml
 ```
@@ -45,12 +45,17 @@ yq -i '
 ' file.yaml
 ```

 Find and update an item in an array:
 ```bash
 yq '(.[] | select(.name == "foo") | .address) = "12 cat st"'
 ```

 Convert JSON to YAML
 ```bash
 yq -Poy sample.json
 ```

- See the [documentation](https://mikefarah.gitbook.io/yq/) for more examples.
+ See [recipes](https://mikefarah.gitbook.io/yq/recipes) for more examples and the [documentation](https://mikefarah.gitbook.io/yq/) for more information.

 Take a look at the discussions for [common questions](https://github.com/mikefarah/yq/discussions/categories/q-a), and [cool ideas](https://github.com/mikefarah/yq/discussions/categories/show-and-tell)

@@ -257,6 +262,9 @@ pacman -S go-yq
 ```

 ### Windows:

 Using [Chocolatey](https://chocolatey.org)

 [![Chocolatey](https://img.shields.io/chocolatey/v/yq.svg)](https://chocolatey.org/packages/yq)
 [![Chocolatey](https://img.shields.io/chocolatey/dt/yq.svg)](https://chocolatey.org/packages/yq)
 ```
@@ -264,12 +272,15 @@ choco install yq
 ```
 Supported by @chillum (https://chocolatey.org/packages/yq)

- and
+ Using [scoop](https://scoop.sh/)
 ```
 scoop install main/yq
 ```

- ### Winget
- winget install yq
-
- https://winget.run/pkg/MikeFarah/yq
+ Using [winget](https://learn.microsoft.com/en-us/windows/package-manager/)
+ ```
+ winget install --id MikeFarah.yq
+ ```

 ### Mac:
 Using [MacPorts](https://www.macports.org/)
@@ -299,7 +310,7 @@ https://pkgs.alpinelinux.org/package/edge/community/x86/yq
 - [Deeply data structures](https://mikefarah.gitbook.io/yq/operators/traverse-read)
 - [Sort keys](https://mikefarah.gitbook.io/yq/operators/sort-keys)
 - Manipulate yaml [comments](https://mikefarah.gitbook.io/yq/operators/comment-operators), [styling](https://mikefarah.gitbook.io/yq/operators/style), [tags](https://mikefarah.gitbook.io/yq/operators/tag) and [anchors and aliases](https://mikefarah.gitbook.io/yq/operators/anchor-and-alias-operators).
- - [Update inplace](https://mikefarah.gitbook.io/yq/v/v4.x/commands/evaluate#flags)
+ - [Update in place](https://mikefarah.gitbook.io/yq/v/v4.x/commands/evaluate#flags)
 - [Complex expressions to select and update](https://mikefarah.gitbook.io/yq/operators/select#select-and-update-matching-values-in-map)
 - Keeps yaml formatting and comments when updating (though there are issues with whitespace)
 - [Decode/Encode base64 data](https://mikefarah.gitbook.io/yq/operators/encode-decode)
@@ -326,7 +337,7 @@ Examples:
 # yq defaults to 'eval' command if no command is specified. See "yq eval --help" for more examples.
 yq '.stuff' < myfile.yml # outputs the data at the "stuff" node from "myfile.yml"

- yq -i '.stuff = "foo"' myfile.yml # update myfile.yml inplace
+ yq -i '.stuff = "foo"' myfile.yml # update myfile.yml in place


 Available Commands:
@@ -343,7 +354,7 @@ Flags:
 --header-preprocess Slurp any header comments and separators before processing expression. (default true)
 -h, --help help for yq
 -I, --indent int sets indent level for output (default 2)
- -i, --inplace update the file inplace of first file given.
+ -i, --inplace update the file in place of first file given.
 -p, --input-format string [yaml|y|xml|x] parse format for input. Note that json is a subset of yaml. (default "yaml")
 -M, --no-colors force print with no colors
 -N, --no-doc Don't print document separators (---)

@@ -3,25 +3,25 @@
 testWriteInPlacePipeIn() {
 result=$(./yq e -i -n '.a' 2>&1)
 assertEquals 1 $?
- assertEquals "Error: write inplace flag only applicable when giving an expression and at least one file" "$result"
+ assertEquals "Error: write in place flag only applicable when giving an expression and at least one file" "$result"
 }

 testWriteInPlacePipeInEvalall() {
 result=$(./yq ea -i -n '.a' 2>&1)
 assertEquals 1 $?
- assertEquals "Error: write inplace flag only applicable when giving an expression and at least one file" "$result"
+ assertEquals "Error: write in place flag only applicable when giving an expression and at least one file" "$result"
 }

 testWriteInPlaceWithSplit() {
 result=$(./yq e -s "cat" -i '.a = "thing"' test.yml 2>&1)
 assertEquals 1 $?
- assertEquals "Error: write inplace cannot be used with split file" "$result"
+ assertEquals "Error: write in place cannot be used with split file" "$result"
 }

 testWriteInPlaceWithSplitEvalAll() {
 result=$(./yq ea -s "cat" -i '.a = "thing"' test.yml 2>&1)
 assertEquals 1 $?
- assertEquals "Error: write inplace cannot be used with split file" "$result"
+ assertEquals "Error: write in place cannot be used with split file" "$result"
 }

 testNullWithFiles() {
@@ -143,7 +143,7 @@ testBasicCatWithFilesNoDash() {
 }

 # when the nullinput flag is used
- # dont automatically read STDIN (this breaks github actions)
+ # don't automatically read STDIN (this breaks github actions)
 testBasicCreateFileGithubAction() {
 cat /dev/null | ./yq -n ".a = 123" > test.yml
 }
@@ -302,7 +302,7 @@ testBasicExitStatusNoEval() {
 assertEquals 1 "$?"
 }

- testBasicExtractFieldWithSeperator() {
+ testBasicExtractFieldWithSeparator() {
 cat >test.yml <<EOL
 ---
 name: chart-name
@@ -312,7 +312,7 @@ EOL
 assertEquals "chart-name" "$X"
 }

- testBasicExtractMultipleFieldWithSeperator() {
+ testBasicExtractMultipleFieldWithSeparator() {
 cat >test.yml <<EOL
 ---
 name: chart-name

@ -11,7 +11,7 @@ a: test
|
||||
EOL
|
||||
}
|
||||
|
||||
testLeadingSeperatorWithDoc() {
|
||||
testLeadingSeparatorWithDoc() {
|
||||
cat >test.yml <<EOL
|
||||
# hi peeps
|
||||
# cool
|
||||
@ -35,7 +35,7 @@ EOM
|
||||
}
|
||||
|
||||
|
||||
testLeadingSeperatorWithNewlinesNewDoc() {
|
||||
testLeadingSeparatorWithNewlinesNewDoc() {
|
||||
cat >test.yml <<EOL
|
||||
# hi peeps
|
||||
# cool
|
||||
@ -62,7 +62,7 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorWithNewlinesMoreComments() {
|
||||
testLeadingSeparatorWithNewlinesMoreComments() {
|
||||
cat >test.yml <<EOL
|
||||
# hi peeps
|
||||
# cool
|
||||
@ -92,7 +92,7 @@ EOM
|
||||
}
|
||||
|
||||
|
||||
testLeadingSeperatorWithDirective() {
|
||||
testLeadingSeparatorWithDirective() {
|
||||
cat >test.yml <<EOL
|
||||
%YAML 1.1
|
||||
---
|
||||
@ -110,18 +110,18 @@ EOM
|
||||
}
|
||||
|
||||
|
||||
testLeadingSeperatorPipeIntoEvalSeq() {
|
||||
testLeadingSeparatorPipeIntoEvalSeq() {
|
||||
X=$(./yq e - < test.yml)
|
||||
expected=$(cat test.yml)
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorExtractField() {
|
||||
testLeadingSeparatorExtractField() {
|
||||
X=$(./yq e '.a' - < test.yml)
|
||||
assertEquals "test" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorExtractFieldWithCommentsAfterSep() {
|
||||
testLeadingSeparatorExtractFieldWithCommentsAfterSep() {
|
||||
cat >test.yml <<EOL
|
||||
---
|
||||
# hi peeps
|
||||
@ -132,7 +132,7 @@ EOL
|
||||
assertEquals "test" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorExtractFieldWithCommentsBeforeSep() {
|
||||
testLeadingSeparatorExtractFieldWithCommentsBeforeSep() {
|
||||
cat >test.yml <<EOL
|
||||
# hi peeps
|
||||
# cool
|
||||
@ -144,7 +144,7 @@ EOL
|
||||
}
|
||||
|
||||
|
||||
testLeadingSeperatorExtractFieldMultiDoc() {
|
||||
testLeadingSeparatorExtractFieldMultiDoc() {
|
||||
cat >test.yml <<EOL
|
||||
---
|
||||
a: test
|
||||
@ -161,13 +161,13 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorExtractFieldMultiDocWithComments() {
|
||||
testLeadingSeparatorExtractFieldMultiDocWithComments() {
|
||||
cat >test.yml <<EOL
|
||||
# here
|
||||
---
|
||||
# there
|
||||
a: test
|
||||
# whereever
|
||||
# wherever
|
||||
---
|
||||
# you are
|
||||
a: test2
|
||||
@ -184,26 +184,26 @@ EOM
|
||||
}
|
||||
|
||||
|
||||
testLeadingSeperatorEvalSeq() {
|
||||
testLeadingSeparatorEvalSeq() {
|
||||
X=$(./yq e test.yml)
|
||||
expected=$(cat test.yml)
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorPipeIntoEvalAll() {
|
||||
testLeadingSeparatorPipeIntoEvalAll() {
|
||||
X=$(./yq ea - < test.yml)
|
||||
expected=$(cat test.yml)
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
|
||||
testLeadingSeperatorEvalAll() {
|
||||
testLeadingSeparatorEvalAll() {
|
||||
X=$(./yq ea test.yml)
|
||||
expected=$(cat test.yml)
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorMultiDocEvalSimple() {
|
||||
testLeadingSeparatorMultiDocEvalSimple() {
|
||||
read -r -d '' expected << EOM
|
||||
---
|
||||
a: test
|
||||
@ -217,7 +217,7 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorMultiDocInOneFile() {
|
||||
testLeadingSeparatorMultiDocInOneFile() {
|
||||
cat >test.yml <<EOL
|
||||
---
|
||||
# hi peeps
|
||||
@ -231,7 +231,7 @@ EOL
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorMultiDocInOneFileEvalAll() {
|
||||
testLeadingSeparatorMultiDocInOneFileEvalAll() {
|
||||
cat >test.yml <<EOL
|
||||
---
|
||||
# hi peeps
|
||||
@ -245,7 +245,7 @@ EOL
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorMultiDocEvalComments() {
|
||||
testLeadingSeparatorMultiDocEvalComments() {
|
||||
cat >test.yml <<EOL
|
||||
# hi peeps
|
||||
# cool
|
||||
@ -273,7 +273,7 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorMultiDocEvalCommentsTrailingSep() {
|
||||
testLeadingSeparatorMultiDocEvalCommentsTrailingSep() {
|
||||
cat >test.yml <<EOL
|
||||
# hi peeps
|
||||
# cool
|
||||
@ -305,7 +305,7 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorMultiMultiDocEvalCommentsTrailingSep() {
|
||||
testLeadingSeparatorMultiMultiDocEvalCommentsTrailingSep() {
|
||||
cat >test.yml <<EOL
|
||||
# hi peeps
|
||||
# cool
|
||||
@ -345,7 +345,7 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorMultiDocEvalCommentsLeadingSep() {
|
||||
testLeadingSeparatorMultiDocEvalCommentsLeadingSep() {
|
||||
cat >test.yml <<EOL
|
||||
---
|
||||
# hi peeps
|
||||
@ -405,7 +405,7 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorMultiDocEvalCommentsStripComments() {
|
||||
testLeadingSeparatorMultiDocEvalCommentsStripComments() {
|
||||
cat >test.yml <<EOL
|
||||
---
|
||||
# hi peeps
|
||||
@ -428,7 +428,7 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorMultiDocEvalCommentsLeadingSepNoDocFlag() {
|
||||
testLeadingSeparatorMultiDocEvalCommentsLeadingSepNoDocFlag() {
|
||||
cat >test.yml <<EOL
|
||||
---
|
||||
# hi peeps
|
||||
@ -454,7 +454,7 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorMultiDocEvalJsonFlag() {
|
||||
testLeadingSeparatorMultiDocEvalJsonFlag() {
|
||||
cat >test.yml <<EOL
|
||||
---
|
||||
# hi peeps
|
||||
@ -483,7 +483,7 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorMultiDocEvalAllJsonFlag() {
|
||||
testLeadingSeparatorMultiDocEvalAllJsonFlag() {
|
||||
cat >test.yml <<EOL
|
||||
---
|
||||
# hi peeps
|
||||
@ -512,7 +512,7 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testLeadingSeperatorMultiDocEvalAll() {
|
||||
testLeadingSeparatorMultiDocEvalAll() {
|
||||
read -r -d '' expected << EOM
|
||||
---
|
||||
a: test
|
@@ -13,7 +13,7 @@ func createEvaluateAllCommand() *cobra.Command {
 Aliases: []string{"ea"},
 Short: "Loads _all_ yaml documents of _all_ yaml files and runs expression once",
 Example: `
- # Merge f2.yml into f1.yml (inplace)
+ # Merge f2.yml into f1.yml (in place)
 yq eval-all --inplace 'select(fileIndex == 0) * select(fileIndex == 1)' f1.yml f2.yml
 ## the same command and expression using shortened names:
 yq ea -i 'select(fi == 0) * select(fi == 1)' f1.yml f2.yml
@@ -28,7 +28,7 @@ cat file2.yml | yq e '.a.b' file1.yml - file3.yml
 ## Note that editing an empty file does not work.
 yq e -n '.a.b.c = "cat"'

- # Update a file inplace
+ # Update a file in place
 yq e '.a.b = "cool"' -i file.yaml
 `,
 Long: `yq is a portable command-line YAML processor (https://github.com/mikefarah/yq/)
@@ -78,12 +78,17 @@ yq -P sample.json
 rootCmd.PersistentFlags().BoolVar(&yqlib.ConfiguredXMLPreferences.SkipProcInst, "xml-skip-proc-inst", yqlib.ConfiguredXMLPreferences.SkipProcInst, "skip over process instructions (e.g. <?xml version=\"1\"?>)")
 rootCmd.PersistentFlags().BoolVar(&yqlib.ConfiguredXMLPreferences.SkipDirectives, "xml-skip-directives", yqlib.ConfiguredXMLPreferences.SkipDirectives, "skip over directives (e.g. <!DOCTYPE thing cat>)")

+ rootCmd.PersistentFlags().StringVar(&yqlib.ConfiguredLuaPreferences.DocPrefix, "lua-prefix", yqlib.ConfiguredLuaPreferences.DocPrefix, "prefix")
+ rootCmd.PersistentFlags().StringVar(&yqlib.ConfiguredLuaPreferences.DocSuffix, "lua-suffix", yqlib.ConfiguredLuaPreferences.DocSuffix, "suffix")
+ rootCmd.PersistentFlags().BoolVar(&yqlib.ConfiguredLuaPreferences.UnquotedKeys, "lua-unquoted", yqlib.ConfiguredLuaPreferences.UnquotedKeys, "output unquoted string keys (e.g. {foo=\"bar\"})")
+ rootCmd.PersistentFlags().BoolVar(&yqlib.ConfiguredLuaPreferences.Globals, "lua-globals", yqlib.ConfiguredLuaPreferences.Globals, "output keys as top-level global variables")
+
 rootCmd.PersistentFlags().BoolVarP(&nullInput, "null-input", "n", false, "Don't read input, simply evaluate the expression given. Useful for creating docs from scratch.")
 rootCmd.PersistentFlags().BoolVarP(&noDocSeparators, "no-doc", "N", false, "Don't print document separators (---)")

 rootCmd.PersistentFlags().IntVarP(&indent, "indent", "I", 2, "sets indent level for output")
 rootCmd.Flags().BoolVarP(&version, "version", "V", false, "Print version information and quit")
- rootCmd.PersistentFlags().BoolVarP(&writeInplace, "inplace", "i", false, "update the file inplace of first file given.")
+ rootCmd.PersistentFlags().BoolVarP(&writeInplace, "inplace", "i", false, "update the file in place of first file given.")
 rootCmd.PersistentFlags().VarP(unwrapScalarFlag, "unwrapScalar", "r", "unwrap scalar, print the value with no quotes, colors or comments. Defaults to true for yaml")
 rootCmd.PersistentFlags().Lookup("unwrapScalar").NoOptDefVal = "true"
 rootCmd.PersistentFlags().BoolVarP(&nulSepOutput, "nul-output", "0", false, "Use NUL char to separate values. If unwrap scalar is also set, fail if unwrapped scalar contains NUL char.")

@@ -8,21 +8,21 @@ import (

 type boolFlag interface {
 pflag.Value
- IsExplicitySet() bool
+ IsExplicitlySet() bool
 IsSet() bool
 }

 type unwrapScalarFlagStrc struct {
- explicitySet bool
- value bool
+ explicitlySet bool
+ value bool
 }

 func newUnwrapFlag() boolFlag {
 return &unwrapScalarFlagStrc{value: true}
 }

- func (f *unwrapScalarFlagStrc) IsExplicitySet() bool {
- return f.explicitySet
+ func (f *unwrapScalarFlagStrc) IsExplicitlySet() bool {
+ return f.explicitlySet
 }

 func (f *unwrapScalarFlagStrc) IsSet() bool {
@@ -37,7 +37,7 @@ func (f *unwrapScalarFlagStrc) Set(value string) error {

 v, err := strconv.ParseBool(value)
 f.value = v
- f.explicitySet = true
+ f.explicitlySet = true
 return err
 }

cmd/utils.go: 10 changes

@@ -42,7 +42,7 @@ func initCommand(cmd *cobra.Command, args []string) (string, []string, error) {
 }

 if writeInplace && (len(args) == 0 || args[0] == "-") {
- return "", nil, fmt.Errorf("write inplace flag only applicable when giving an expression and at least one file")
+ return "", nil, fmt.Errorf("write in place flag only applicable when giving an expression and at least one file")
 }

 if frontMatter != "" && len(args) == 0 {
@@ -50,7 +50,7 @@ func initCommand(cmd *cobra.Command, args []string) (string, []string, error) {
 }

 if writeInplace && splitFileExp != "" {
- return "", nil, fmt.Errorf("write inplace cannot be used with split file")
+ return "", nil, fmt.Errorf("write in place cannot be used with split file")
 }

 if nullInput && len(args) > 0 {
@@ -104,7 +104,7 @@ func initCommand(cmd *cobra.Command, args []string) (string, []string, error) {
 outputFormatType == yqlib.PropsOutputFormat {
 unwrapScalar = true
 }
- if unwrapScalarFlag.IsExplicitySet() {
+ if unwrapScalarFlag.IsExplicitlySet() {
 unwrapScalar = unwrapScalarFlag.IsSet()
 }

@@ -130,6 +130,8 @@ func configureDecoder(evaluateTogether bool) (yqlib.Decoder, error) {

 func createDecoder(format yqlib.InputFormat, evaluateTogether bool) (yqlib.Decoder, error) {
 switch format {
+ case yqlib.LuaInputFormat:
+ return yqlib.NewLuaDecoder(yqlib.ConfiguredLuaPreferences), nil
 case yqlib.XMLInputFormat:
 return yqlib.NewXMLDecoder(yqlib.ConfiguredXMLPreferences), nil
 case yqlib.PropertiesInputFormat:
@@ -197,6 +199,8 @@ func createEncoder(format yqlib.PrinterOutputFormat) (yqlib.Encoder, error) {
 return yqlib.NewTomlEncoder(), nil
 case yqlib.ShellVariablesOutputFormat:
 return yqlib.NewShellVariablesEncoder(), nil
+ case yqlib.LuaOutputFormat:
+ return yqlib.NewLuaEncoder(yqlib.ConfiguredLuaPreferences), nil
 }
 return nil, fmt.Errorf("invalid encoder: %v", format)
 }

@@ -11,7 +11,7 @@ var (
 GitDescribe string

 // Version is main version number that is being run at the moment.
- Version = "v4.34.1"
+ Version = "v4.35.2"

 // VersionPrerelease is a pre-release marker for the version. If this is "" (empty string)
 // then it means that it is a final release. Otherwise, this is a pre-release

cspell.config.yaml (new file): 14 lines

@@ -0,0 +1,14 @@
---
$schema: https://raw.githubusercontent.com/streetsidesoftware/cspell/main/cspell.schema.json
version: '0.2'
language: en-GB
dictionaryDefinitions:
  - name: project-words
    path: './project-words.txt'
    addWords: true
dictionaries:
  - project-words
ignorePaths:
  - 'vendor'
  - 'bin'
  - '/project-words.txt'

debian/changelog (vendored): 2 changes

@@ -127,7 +127,7 @@ yq (4.9.6) focal; urgency=medium

 * Added darwin/arm64 build, thanks @alecthomas
 * Incremented docker alpine base version, thanks @da6d6i7-bronga
- * Bug fix: multine expression
+ * Bug fix: multiline expression
 * Bug fix: special character

 -- Roberto Mier Escandon <rmescandon@gmail.com> Tue, 29 Jun 2021 21:32:14 +0200

examples/data.lua (new file): 9 lines

@@ -0,0 +1,9 @@
return {
  ["country"] = "Australia"; -- this place
  ["cities"] = {
    "Sydney",
    "Melbourne",
    "Brisbane",
    "Perth",
  };
};

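This example file pairs with the Lua input format added later in this commit. A hedged sketch of reading it once that decoder is available; treating `lua` as a valid `-p` value and `yaml` as the `-o` output is an assumption based on the `InputFormatFromString` and flag changes shown further down:

```bash
# Sketch only: decode the new Lua example into YAML.
# Assumes -p (input format) accepts "lua" per this commit's decoder changes,
# and that -o selects the output format as documented elsewhere in the diff.
yq -p=lua -o=yaml examples/data.lua
```
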
go.mod: 15 changes

@@ -2,21 +2,22 @@ module github.com/mikefarah/yq/v4

 require (
 github.com/a8m/envsubst v1.4.2
- github.com/alecthomas/participle/v2 v2.0.0
+ github.com/alecthomas/participle/v2 v2.1.0
 github.com/alecthomas/repr v0.2.0
 github.com/dimchansky/utfbom v1.1.1
 github.com/elliotchance/orderedmap v1.5.0
 github.com/fatih/color v1.15.0
 github.com/goccy/go-json v0.10.2
- github.com/goccy/go-yaml v1.11.0
- github.com/jinzhu/copier v0.3.5
+ github.com/goccy/go-yaml v1.11.2
+ github.com/jinzhu/copier v0.4.0
 github.com/magiconair/properties v1.8.7
- github.com/pelletier/go-toml/v2 v2.0.8
+ github.com/pelletier/go-toml/v2 v2.1.0
 github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e
 github.com/spf13/cobra v1.7.0
 github.com/spf13/pflag v1.0.5
- golang.org/x/net v0.11.0
- golang.org/x/text v0.10.0
+ github.com/yuin/gopher-lua v1.1.0
+ golang.org/x/net v0.15.0
+ golang.org/x/text v0.13.0
 gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473
 gopkg.in/yaml.v3 v3.0.1
 )
@@ -25,7 +26,7 @@ require (
 github.com/inconshreveable/mousetrap v1.1.0 // indirect
 github.com/mattn/go-colorable v0.1.13 // indirect
 github.com/mattn/go-isatty v0.0.17 // indirect
- golang.org/x/sys v0.9.0 // indirect
+ golang.org/x/sys v0.12.0 // indirect
 golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect
 )

36
go.sum
36
go.sum
@ -1,8 +1,8 @@
|
||||
github.com/a8m/envsubst v1.4.2 h1:4yWIHXOLEJHQEFd4UjrWDrYeYlV7ncFWJOCBRLOZHQg=
|
||||
github.com/a8m/envsubst v1.4.2/go.mod h1:MVUTQNGQ3tsjOOtKCNd+fl8RzhsXcDvvAEzkhGtlsbY=
|
||||
github.com/alecthomas/assert/v2 v2.2.2 h1:Z/iVC0xZfWTaFNE6bA3z07T86hd45Xe2eLt6WVy2bbk=
|
||||
github.com/alecthomas/participle/v2 v2.0.0 h1:Fgrq+MbuSsJwIkw3fEj9h75vDP0Er5JzepJ0/HNHv0g=
|
||||
github.com/alecthomas/participle/v2 v2.0.0/go.mod h1:rAKZdJldHu8084ojcWevWAL8KmEU+AT+Olodb+WoN2Y=
|
||||
github.com/alecthomas/assert/v2 v2.3.0 h1:mAsH2wmvjsuvyBvAmCtm7zFsBlb8mIHx5ySLVdDZXL0=
|
||||
github.com/alecthomas/participle/v2 v2.1.0 h1:z7dElHRrOEEq45F2TG5cbQihMtNTv8vwldytDj7Wrz4=
|
||||
github.com/alecthomas/participle/v2 v2.1.0/go.mod h1:Y1+hAs8DHPmc3YUFzqllV+eSQ9ljPTk0ZkPMtEdAx2c=
|
||||
github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk=
|
||||
github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
@ -17,13 +17,13 @@ github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs=
|
||||
github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw=
|
||||
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
|
||||
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
|
||||
github.com/goccy/go-yaml v1.11.0 h1:n7Z+zx8S9f9KgzG6KtQKf+kwqXZlLNR2F6018Dgau54=
|
||||
github.com/goccy/go-yaml v1.11.0/go.mod h1:H+mJrWtjPTJAHvRbV09MCK9xYwODM+wRTVFFTWckfng=
|
||||
github.com/goccy/go-yaml v1.11.2 h1:joq77SxuyIs9zzxEjgyLBugMQ9NEgTWxXfz2wVqwAaQ=
|
||||
github.com/goccy/go-yaml v1.11.2/go.mod h1:wKnAMd44+9JAAnGQpWVEgBzGt3YuTaQ4uXoHvE4m7WU=
|
||||
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jinzhu/copier v0.3.5 h1:GlvfUwHk62RokgqVNvYsku0TATCF7bAHVwEXoBh3iJg=
|
||||
github.com/jinzhu/copier v0.3.5/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
|
||||
github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
|
||||
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
|
||||
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
|
||||
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
|
||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||
@ -31,8 +31,8 @@ github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovk
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng=
|
||||
github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ=
|
||||
github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4=
|
||||
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
|
||||
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgcC4zIxODThtZNPirFr42+A=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
@ -48,15 +48,17 @@ github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpE
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY=
|
||||
github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
golang.org/x/net v0.11.0 h1:Gi2tvZIJyBtO9SDr1q9h5hEQCp/4L2RQ+ar0qjx2oNU=
|
||||
golang.org/x/net v0.11.0/go.mod h1:2L/ixqYpgIVXmeoSA/4Lu7BzTG4KIyPIryS4IsOd1oQ=
|
||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/yuin/gopher-lua v1.1.0 h1:BojcDhfyDWgU2f2TOzYK/g5p2gxMrku8oupLDqlnSqE=
|
||||
github.com/yuin/gopher-lua v1.1.0/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
|
||||
golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8=
|
||||
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s=
|
||||
golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/text v0.10.0 h1:UpjohKhiEgNc0CSauXmwYftY1+LlaC75SJwh0SgCX58=
|
||||
golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o=
|
||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
|
||||
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f h1:uF6paiQQebLeSXkrTqHqz0MXhXXS1KgF41eUdBNvxK0=
|
||||
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
|
@ -29,7 +29,7 @@ It pipes the current, lets call it 'root' context through the `lhs` expression o
|
||||
cat
|
||||
```
|
||||
|
||||
Sidenote: this node holds not only its value 'cat', but comments and metadata too, including path and parent information.
|
||||
Side note: this node holds not only its value 'cat', but comments and metadata too, including path and parent information.
|
||||
|
||||
The `=` operator then pipes the 'root' context through the `rhs` expression of `.b` to return the node
|
||||
|
||||
|
@ -60,7 +60,7 @@ func TestCandidateNodeChildWhenParentUpdated(t *testing.T) {
|
||||
parent.SetFileIndex(2)
|
||||
parent.SetFilename("meow")
|
||||
test.AssertResultWithContext(t, "meow", child.GetFilename(), "filename")
|
||||
test.AssertResultWithContext(t, 2, child.GetFileIndex(), "fileindex")
|
||||
test.AssertResultWithContext(t, 2, child.GetFileIndex(), "file index")
|
||||
test.AssertResultWithContext(t, uint(1), child.GetDocument(), "document index")
|
||||
}
|
||||
|
||||
|
@ -187,7 +187,7 @@ func (o *CandidateNode) UnmarshalYAML(node *yaml.Node, anchorMap map[string]*Can
|
||||
case 0:
|
||||
// not sure when this happens
|
||||
o.copyFromYamlNode(node, anchorMap)
|
||||
log.Debugf("UnmarshalYAML - errr.. %v", NodeToString(o))
|
||||
log.Debugf("UnmarshalYAML - err.. %v", NodeToString(o))
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("orderedMap: invalid yaml node")
|
||||
|
@ -19,9 +19,10 @@ func (o *CandidateNode) setScalarFromJson(value interface{}) error {
|
||||
o.Value = fmt.Sprintf("%v", value)
|
||||
o.Tag = "!!float"
|
||||
// json decoder returns ints as float.
|
||||
if value == float64(int(rawData.(float64))) {
|
||||
if value == float64(int64(rawData.(float64))) {
|
||||
// aha it's an int disguised as a float
|
||||
o.Tag = "!!int"
|
||||
o.Value = fmt.Sprintf("%v", int64(value.(float64)))
|
||||
}
|
||||
case int, int64, int32:
|
||||
o.Value = fmt.Sprintf("%v", value)
|
||||
|
@ -18,6 +18,7 @@ const (
|
||||
TSVObjectInputFormat
|
||||
TomlInputFormat
|
||||
UriInputFormat
|
||||
LuaInputFormat
|
||||
)
|
||||
|
||||
type Decoder interface {
|
||||
@ -41,6 +42,8 @@ func InputFormatFromString(format string) (InputFormat, error) {
|
||||
return TSVObjectInputFormat, nil
|
||||
case "toml":
|
||||
return TomlInputFormat, nil
|
||||
case "lua", "l":
|
||||
return LuaInputFormat, nil
|
||||
default:
|
||||
return 0, fmt.Errorf("unknown format '%v' please use [yaml|json|props|csv|tsv|xml|toml]", format)
|
||||
}
|
||||
|
163
pkg/yqlib/decoder_lua.go
Normal file
163
pkg/yqlib/decoder_lua.go
Normal file
@ -0,0 +1,163 @@
|
||||
//go:build !yq_nolua
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
)
|
||||
|
||||
type luaDecoder struct {
|
||||
reader io.Reader
|
||||
finished bool
|
||||
prefs LuaPreferences
|
||||
}
|
||||
|
||||
func NewLuaDecoder(prefs LuaPreferences) Decoder {
|
||||
return &luaDecoder{
|
||||
prefs: prefs,
|
||||
}
|
||||
}
|
||||
|
||||
func (dec *luaDecoder) Init(reader io.Reader) error {
|
||||
dec.reader = reader
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dec *luaDecoder) convertToYamlNode(ls *lua.LState, lv lua.LValue) *CandidateNode {
|
||||
switch lv.Type() {
|
||||
case lua.LTNil:
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!null",
|
||||
Value: "",
|
||||
}
|
||||
case lua.LTBool:
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!bool",
|
||||
Value: lv.String(),
|
||||
}
|
||||
case lua.LTNumber:
|
||||
n := float64(lua.LVAsNumber(lv))
|
||||
// various special case floats
|
||||
if math.IsNaN(n) {
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!float",
|
||||
Value: ".nan",
|
||||
}
|
||||
}
|
||||
if math.IsInf(n, 1) {
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!float",
|
||||
Value: ".inf",
|
||||
}
|
||||
}
|
||||
if math.IsInf(n, -1) {
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!float",
|
||||
Value: "-.inf",
|
||||
}
|
||||
}
|
||||
|
||||
// does it look like an integer?
|
||||
if n == float64(int(n)) {
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!int",
|
||||
Value: lv.String(),
|
||||
}
|
||||
}
|
||||
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!float",
|
||||
Value: lv.String(),
|
||||
}
|
||||
case lua.LTString:
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: lv.String(),
|
||||
}
|
||||
case lua.LTFunction:
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "tag:lua.org,2006,function",
|
||||
Value: lv.String(),
|
||||
}
|
||||
case lua.LTTable:
|
||||
// Simultaneously create a sequence and a map, pick which one to return
|
||||
// based on whether all keys were consecutive integers
|
||||
i := 1
|
||||
yaml_sequence := &CandidateNode{
|
||||
Kind: SequenceNode,
|
||||
Tag: "!!seq",
|
||||
}
|
||||
yaml_map := &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
}
|
||||
t := lv.(*lua.LTable)
|
||||
k, v := ls.Next(t, lua.LNil)
|
||||
for k != lua.LNil {
|
||||
if ki, ok := k.(lua.LNumber); i != 0 && ok && math.Mod(float64(ki), 1) == 0 && int(ki) == i {
|
||||
i++
|
||||
} else {
|
||||
i = 0
|
||||
}
|
||||
yaml_map.Content = append(yaml_map.Content, dec.convertToYamlNode(ls, k))
|
||||
yv := dec.convertToYamlNode(ls, v)
|
||||
yaml_map.Content = append(yaml_map.Content, yv)
|
||||
if i != 0 {
|
||||
yaml_sequence.Content = append(yaml_sequence.Content, yv)
|
||||
}
|
||||
k, v = ls.Next(t, k)
|
||||
}
|
||||
if i != 0 {
|
||||
return yaml_sequence
|
||||
}
|
||||
return yaml_map
|
||||
default:
|
||||
return &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
LineComment: fmt.Sprintf("Unhandled Lua type: %s", lv.Type().String()),
|
||||
Tag: "!!null",
|
||||
Value: lv.String(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (dec *luaDecoder) decideTopLevelNode(ls *lua.LState) *CandidateNode {
|
||||
if ls.GetTop() == 0 {
|
||||
// no items were explicitly returned, encode the globals table instead
|
||||
return dec.convertToYamlNode(ls, ls.Get(lua.GlobalsIndex))
|
||||
}
|
||||
return dec.convertToYamlNode(ls, ls.Get(1))
|
||||
}
|
||||
|
||||
func (dec *luaDecoder) Decode() (*CandidateNode, error) {
|
||||
if dec.finished {
|
||||
return nil, io.EOF
|
||||
}
|
||||
ls := lua.NewState(lua.Options{SkipOpenLibs: true})
|
||||
defer ls.Close()
|
||||
fn, err := ls.Load(dec.reader, "@input")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ls.Push(fn)
|
||||
err = ls.PCall(0, lua.MultRet, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
firstNode := dec.decideTopLevelNode(ls)
|
||||
dec.finished = true
|
||||
return firstNode, nil
|
||||
}
|
@@ -50,7 +50,7 @@ func (dec *yamlDecoder) processReadStream(reader *bufio.Reader) (io.Reader, stri
 }
 } else if string(peekBytes) == "---" {
 _, err := reader.ReadString('\n')
- sb.WriteString("$yqDocSeperator$\n")
+ sb.WriteString("$yqDocSeparator$\n")
 if errors.Is(err, io.EOF) {
 return reader, sb.String(), nil
 } else if err != nil {

@@ -26,7 +26,7 @@ will output
 bar: 100
 ```

- ## Group by field, with nuls
+ ## Group by field, with nulls
 Given a sample.yml file of:
 ```yaml
 - cat: dog

@@ -26,7 +26,7 @@ yq '.a.b[0].c' file.yaml
 cat file.yaml | yq '.a.b[0].c'
 ```

- ## Update a yaml file, inplace
+ ## Update a yaml file, in place
 ```bash
 yq -i '.a.b[0].c = "cool"' file.yaml
 ```

pkg/yqlib/doc/operators/headers/to_number.md (new file): 2 lines

@@ -0,0 +1,2 @@
# To Number
Parses the input as a number. yq will try to parse values as an int first, failing that it will try float. Values that already ints or floats will be left alone.

77
pkg/yqlib/doc/operators/kind.md
Normal file
77
pkg/yqlib/doc/operators/kind.md
Normal file
@ -0,0 +1,77 @@
|
||||
|
||||
## Get kind
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a: cat
|
||||
b: 5
|
||||
c: 3.2
|
||||
e: true
|
||||
f: []
|
||||
g: {}
|
||||
h: null
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.. | kind' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
map
|
||||
scalar
|
||||
scalar
|
||||
scalar
|
||||
scalar
|
||||
seq
|
||||
map
|
||||
scalar
|
||||
```
|
||||
|
||||
## Get kind, ignores custom tags
|
||||
Unlike tag, kind is not affected by custom tags.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a: !!thing cat
|
||||
b: !!foo {}
|
||||
c: !!bar []
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.. | kind' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
map
|
||||
scalar
|
||||
map
|
||||
seq
|
||||
```
|
||||
|
||||
## Add comments only to scalars
|
||||
An example of how you can use kind
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a:
|
||||
b: 5
|
||||
c: 3.2
|
||||
e: true
|
||||
f: []
|
||||
g: {}
|
||||
h: null
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '(.. | select(kind == "scalar")) line_comment = "this is a scalar"' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
a:
|
||||
b: 5 # this is a scalar
|
||||
c: 3.2 # this is a scalar
|
||||
e: true # this is a scalar
|
||||
f: []
|
||||
g: {}
|
||||
h: null # this is a scalar
|
||||
```
|
||||
|
@@ -13,7 +13,7 @@ myMap:
 cat: meow
 dog: bark
 thing: hamster
- hamster: squeek
+ hamster: squeak
 ```
 then
 ```bash
@@ -22,7 +22,7 @@ yq '.myMap |= pick(["hamster", "cat", "goat"])' sample.yml
 will output
 ```yaml
 myMap:
- hamster: squeek
+ hamster: squeak
 cat: meow
 ```

49
pkg/yqlib/doc/operators/to_number.md
Normal file
49
pkg/yqlib/doc/operators/to_number.md
Normal file
@ -0,0 +1,49 @@
|
||||
# To Number
|
||||
Parses the input as a number. yq will try to parse values as an int first, failing that it will try float. Values that already ints or floats will be left alone.
|
||||
|
||||
## Converts strings to numbers
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- "3"
|
||||
- "3.1"
|
||||
- "-1e3"
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.[] | to_number' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
3
|
||||
3.1
|
||||
-1e3
|
||||
```
|
||||
|
||||
## Doesn't change numbers
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- 3
|
||||
- 3.1
|
||||
- -1e3
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.[] | to_number' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
3
|
||||
3.1
|
||||
-1e3
|
||||
```
|
||||
|
||||
## Cannot convert null
|
||||
Running
|
||||
```bash
|
||||
yq --null-input '.a.b | to_number'
|
||||
```
|
||||
will output
|
||||
```bash
|
||||
Error: cannot convert node value [null] at path a.b of tag !!null to number
|
||||
```
|
||||
|
@@ -2,7 +2,7 @@
 Encode/Decode/Roundtrip CSV and TSV files.

 ## Encode
- Currently supports arrays of homogenous flat objects, that is: no nesting and it assumes the _first_ object has all the keys required:
+ Currently supports arrays of homogeneous flat objects, that is: no nesting and it assumes the _first_ object has all the keys required:

 ```yaml
 - name: Bobo
@@ -2,7 +2,7 @@
 Encode/Decode/Roundtrip CSV and TSV files.

 ## Encode
- Currently supports arrays of homogenous flat objects, that is: no nesting and it assumes the _first_ object has all the keys required:
+ Currently supports arrays of homogeneous flat objects, that is: no nesting and it assumes the _first_ object has all the keys required:

 ```yaml
 - name: Bobo

pkg/yqlib/doc/usage/headers/recipes.md (new file): 5 lines

@@ -0,0 +1,5 @@
# Recipes

These examples are intended to show how you can use multiple operators together so you get an idea of how you can perform complex data manipulation.

Please see the details [operator docs](https://mikefarah.gitbook.io/yq/operators) for details on each individual operator.

pkg/yqlib/doc/usage/lua.md (new file): 1 line

@@ -0,0 +1 @@

155
pkg/yqlib/doc/usage/recipes.md
Normal file
155
pkg/yqlib/doc/usage/recipes.md
Normal file
@ -0,0 +1,155 @@
|
||||
# Recipes
|
||||
|
||||
These examples are intended to show how you can use multiple operators together so you get an idea of how you can perform complex data manipulation.
|
||||
|
||||
Please see the details [operator docs](https://mikefarah.gitbook.io/yq/operators) for details on each individual operator.
|
||||
|
||||
## Find items in an array
|
||||
We have an array and we want to find the elements with a particular name.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- name: Foo
|
||||
numBuckets: 0
|
||||
- name: Bar
|
||||
numBuckets: 0
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.[] | select(.name == "Foo")' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
name: Foo
|
||||
numBuckets: 0
|
||||
```
|
||||
|
||||
### Explanation:
|
||||
- `.[]` splats the array, and puts all the items in the context.
|
||||
- These items are then piped (`|`) into `select(.name == "Foo")` which will select all the nodes that have a name property set to 'Foo'.
|
||||
- See the [select](https://mikefarah.gitbook.io/yq/operators/select) operator for more information.
|
||||
|
||||
## Find and update items in an array
|
||||
We have an array and we want to _update_ the elements with a particular name.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- name: Foo
|
||||
numBuckets: 0
|
||||
- name: Bar
|
||||
numBuckets: 0
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '(.[] | select(.name == "Foo") | .numBuckets) |= . + 1' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
- name: Foo
|
||||
numBuckets: 1
|
||||
- name: Bar
|
||||
numBuckets: 0
|
||||
```
|
||||
|
||||
### Explanation:
|
||||
- Following from the example above`.[]` splats the array, selects filters the items.
|
||||
- We then pipe (`|`) that into `.numBuckets`, which will select that field from all the matching items
|
||||
- Splat, select and the field are all in brackets, that whole expression is passed to the `|=` operator as the left hand side expression, with `. + 1` as the right hand side expression.
|
||||
- `|=` is the operator that updates fields relative to their own value, which is referenced as dot (`.`).
|
||||
- The expression `. + 1` increments the numBuckets counter.
|
||||
- See the [assign](https://mikefarah.gitbook.io/yq/operators/assign-update) and [add](https://mikefarah.gitbook.io/yq/operators/add) operators for more information.
|
||||
|
||||
## Multiple or complex updates to items in an array
|
||||
We have an array and we want to _update_ the elements with a particular name in reference to its type.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
myArray:
|
||||
- name: Foo
|
||||
type: cat
|
||||
- name: Bar
|
||||
type: dog
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq 'with(.myArray[]; .name = .name + " - " + .type)' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
myArray:
|
||||
- name: Foo - cat
|
||||
type: cat
|
||||
- name: Bar - dog
|
||||
type: dog
|
||||
```
|
||||
|
||||
### Explanation:
|
||||
- The with operator will effectively loop through each given item in the first given expression, and run the second expression against it.
|
||||
- `.myArray[]` splats the array in `myArray`. So `with` will run against each item in that array
|
||||
- `.name = .name + " - " + .type` this expression is run against every item, updating the name to be a concatenation of the original name as well as the type.
|
||||
- See the [with](https://mikefarah.gitbook.io/yq/operators/with) operator for more information and examples.
|
||||
|
||||
## Sort an array by a field
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
myArray:
|
||||
- name: Foo
|
||||
numBuckets: 1
|
||||
- name: Bar
|
||||
numBuckets: 0
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.myArray |= sort_by(.numBuckets)' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
myArray:
|
||||
- name: Bar
|
||||
numBuckets: 0
|
||||
- name: Foo
|
||||
numBuckets: 1
|
||||
```
|
||||
|
||||
### Explanation:
|
||||
- We want to resort `.myArray`.
|
||||
- `sort_by` works by piping an array into it, and it pipes out a sorted array.
|
||||
- So, we use `|=` to update `.myArray`. This is the same as doing `.myArray = (.myArray | sort_by(.numBuckets))`
|
||||
|
||||
## Filter, flatten, sort and unique
|
||||
Lets
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- type: foo
|
||||
names:
|
||||
- Fred
|
||||
- Catherine
|
||||
- type: bar
|
||||
names:
|
||||
- Zelda
|
||||
- type: foo
|
||||
names: Fred
|
||||
- type: foo
|
||||
names: Ava
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '[.[] | select(.type == "foo") | .names] | flatten | sort | unique' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
- Ava
|
||||
- Catherine
|
||||
- Fred
|
||||
```
|
||||
|
||||
### Explanation:
|
||||
- `.[] | select(.type == "foo") | .names` will select the array elements of type "foo"
|
||||
- Splat `.[]` will unwrap the array and match all the items. We need to do this so we can work on the child items, for instance, filter items out using the `select` operator.
|
||||
- But we still want the final results back into an array. So after we're doing working on the children, we wrap everything back into an array using square brackets around the expression. `[.[] | select(.type == "foo") | .names]`
|
||||
- Now have have an array of all the 'names' values. Which includes arrays of strings as well as strings on their own.
|
||||
- Pipe `|` this array through `flatten`. This will flatten nested arrays. So now we have a flat list of all the name value strings
|
||||
- Next we pipe `|` that through `sort` and then `unique` to get a sorted, unique list of the names!
|
||||
- See the [flatten](https://mikefarah.gitbook.io/yq/operators/flatten), [sort](https://mikefarah.gitbook.io/yq/operators/sort) and [unique](https://mikefarah.gitbook.io/yq/operators/unique) for more information and examples.
|
||||
|
@@ -34,7 +34,7 @@ Given a sample.yml file of:
 ascii_=_symbols: replaced with _
 "ascii_ _controls": dropped (this example uses \t)
 nonascii_א_characters: dropped
- effrot_expeñded_tò_preserve_accented_latin_letters: moderate (via unicode NFKD)
+ effort_expeñded_tò_preserve_accented_latin_letters: moderate (via unicode NFKD)

 ```
 then
@@ -46,7 +46,7 @@ will output
 ascii___symbols='replaced with _'
 ascii__controls='dropped (this example uses \t)'
 nonascii__characters=dropped
- effrot_expended_to_preserve_accented_latin_letters='moderate (via unicode NFKD)'
+ effort_expended_to_preserve_accented_latin_letters='moderate (via unicode NFKD)'
 ```

 ## Encode shell variables: empty values, arrays and maps

@ -53,7 +53,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=xml '.' sample.xml
|
||||
yq -oy '.' sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
@ -78,7 +78,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=xml ' (.. | select(tag == "!!str")) |= from_yaml' sample.xml
|
||||
yq -oy ' (.. | select(tag == "!!str")) |= from_yaml' sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
@ -100,7 +100,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=xml '.' sample.xml
|
||||
yq -oy '.' sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
@ -110,6 +110,42 @@ animal:
|
||||
- goat
|
||||
```
|
||||
|
||||
## Parse xml: force as an array
|
||||
In XML, if your array has a single item, then yq doesn't know its an array. This is how you can consistently force it to be an array. This handles the 3 scenarios of having nothing in the array, having a single item and having multiple.
|
||||
|
||||
Given a sample.xml file of:
|
||||
```xml
|
||||
<zoo><animal>cat</animal></zoo>
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -oy '.zoo.animal |= ([] + .)' sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
zoo:
|
||||
animal:
|
||||
- cat
|
||||
```
|
||||
|
||||
## Parse xml: force all as an array
|
||||
Because of the way yq works, when updating everything you need to update the children before the parents. By default `..` will match parents first, so we reverse that before updating.
|
||||
|
||||
Given a sample.xml file of:
|
||||
```xml
|
||||
<zoo><thing><frog>boing</frog></thing></zoo>
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -oy '([..] | reverse | .[]) |= [] + .' sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
zoo:
|
||||
thing:
|
||||
frog: boing
|
||||
```
|
||||
|
||||
## Parse xml: attributes
|
||||
Attributes are converted to fields, with the default attribute prefix '+'. Use '--xml-attribute-prefix` to set your own.
|
||||
|
||||
@ -122,7 +158,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=xml '.' sample.xml
|
||||
yq -oy '.' sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
@ -142,7 +178,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=xml '.' sample.xml
|
||||
yq -oy '.' sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
@ -161,7 +197,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=xml '.' sample.xml
|
||||
yq -oy '.' sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
@ -190,7 +226,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=xml -o=xml '.' sample.xml
|
||||
yq '.' sample.xml
|
||||
```
|
||||
will output
|
||||
```xml
|
||||
@ -221,7 +257,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=xml -o=xml --xml-skip-directives '.' sample.xml
|
||||
yq --xml-skip-directives '.' sample.xml
|
||||
```
|
||||
will output
|
||||
```xml
|
||||
@ -257,7 +293,7 @@ for x --></x>
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=xml '.' sample.xml
|
||||
yq -oy '.' sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
@ -289,7 +325,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=xml -o=xml --xml-keep-namespace=false '.' sample.xml
|
||||
yq --xml-keep-namespace=false '.' sample.xml
|
||||
```
|
||||
will output
|
||||
```xml
|
||||
@ -314,7 +350,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=xml -o=xml --xml-raw-token=false '.' sample.xml
|
||||
yq --xml-raw-token=false '.' sample.xml
|
||||
```
|
||||
will output
|
||||
```xml
|
||||
@ -489,7 +525,7 @@ for x --></x>
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=xml -o=xml '.' sample.xml
|
||||
yq '.' sample.xml
|
||||
```
|
||||
will output
|
||||
```xml
|
||||
@ -522,7 +558,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=xml -o=xml '.' sample.xml
|
||||
yq '.' sample.xml
|
||||
```
|
||||
will output
|
||||
```xml
|
||||
|
327
pkg/yqlib/encoder_lua.go
Normal file
327
pkg/yqlib/encoder_lua.go
Normal file
@ -0,0 +1,327 @@
|
||||
//go:build !yq_nolua
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type luaEncoder struct {
|
||||
docPrefix string
|
||||
docSuffix string
|
||||
indent int
|
||||
indentStr string
|
||||
unquoted bool
|
||||
globals bool
|
||||
escape *strings.Replacer
|
||||
}
|
||||
|
||||
func (le *luaEncoder) CanHandleAliases() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func NewLuaEncoder(prefs LuaPreferences) Encoder {
|
||||
escape := strings.NewReplacer(
|
||||
"\000", "\\000",
|
||||
"\001", "\\001",
|
||||
"\002", "\\002",
|
||||
"\003", "\\003",
|
||||
"\004", "\\004",
|
||||
"\005", "\\005",
|
||||
"\006", "\\006",
|
||||
"\007", "\\a",
|
||||
"\010", "\\b",
|
||||
"\011", "\\t",
|
||||
"\012", "\\n",
|
||||
"\013", "\\v",
|
||||
"\014", "\\f",
|
||||
"\015", "\\r",
|
||||
"\016", "\\014",
|
||||
"\017", "\\015",
|
||||
"\020", "\\016",
|
||||
"\021", "\\017",
|
||||
"\022", "\\018",
|
||||
"\023", "\\019",
|
||||
"\024", "\\020",
|
||||
"\025", "\\021",
|
||||
"\026", "\\022",
|
||||
"\027", "\\023",
|
||||
"\030", "\\024",
|
||||
"\031", "\\025",
|
||||
"\032", "\\026",
|
||||
"\033", "\\027",
|
||||
"\034", "\\028",
|
||||
"\035", "\\029",
|
||||
"\036", "\\030",
|
||||
"\037", "\\031",
|
||||
"\"", "\\\"",
|
||||
"'", "\\'",
|
||||
"\\", "\\\\",
|
||||
"\177", "\\127",
|
||||
)
|
||||
unescape := strings.NewReplacer(
|
||||
"\\'", "'",
|
||||
"\\\"", "\"",
|
||||
"\\n", "\n",
|
||||
"\\r", "\r",
|
||||
"\\t", "\t",
|
||||
"\\\\", "\\",
|
||||
)
|
||||
return &luaEncoder{unescape.Replace(prefs.DocPrefix), unescape.Replace(prefs.DocSuffix), 0, "\t", prefs.UnquotedKeys, prefs.Globals, escape}
|
||||
}
|
||||
|
||||
func (le *luaEncoder) PrintDocumentSeparator(writer io.Writer) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (le *luaEncoder) PrintLeadingContent(writer io.Writer, content string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (le *luaEncoder) encodeString(writer io.Writer, node *CandidateNode) error {
|
||||
quote := "\""
|
||||
switch node.Style {
|
||||
case LiteralStyle, FoldedStyle, FlowStyle:
|
||||
for i := 0; i < 10; i++ {
|
||||
if !strings.Contains(node.Value, "]"+strings.Repeat("=", i)+"]") {
|
||||
err := writeString(writer, "["+strings.Repeat("=", i)+"[\n")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = writeString(writer, node.Value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return writeString(writer, "]"+strings.Repeat("=", i)+"]")
|
||||
}
|
||||
}
|
||||
case SingleQuotedStyle:
|
||||
quote = "'"
|
||||
|
||||
// fallthrough to regular ol' string
|
||||
}
|
||||
return writeString(writer, quote+le.escape.Replace(node.Value)+quote)
|
||||
}
|
||||
|
||||
func (le *luaEncoder) writeIndent(writer io.Writer) error {
|
||||
if le.indentStr == "" {
|
||||
return nil
|
||||
}
|
||||
err := writeString(writer, "\n")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return writeString(writer, strings.Repeat(le.indentStr, le.indent))
|
||||
}
|
||||
|
||||
func (le *luaEncoder) encodeArray(writer io.Writer, node *CandidateNode) error {
|
||||
err := writeString(writer, "{")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
le.indent++
|
||||
for _, child := range node.Content {
|
||||
err = le.writeIndent(writer)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err := le.encodeAny(writer, child)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = writeString(writer, ",")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if child.LineComment != "" {
|
||||
sansPrefix, _ := strings.CutPrefix(child.LineComment, "#")
|
||||
err = writeString(writer, " --"+sansPrefix)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
le.indent--
|
||||
if len(node.Content) != 0 {
|
||||
err = le.writeIndent(writer)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return writeString(writer, "}")
|
||||
}
|
||||
|
||||
func needsQuoting(s string) bool {
|
||||
// known keywords as of Lua 5.4
|
||||
switch s {
|
||||
case "do", "and", "else", "break",
|
||||
"if", "end", "goto", "false",
|
||||
"in", "for", "then", "local",
|
||||
"or", "nil", "true", "until",
|
||||
"elseif", "function", "not",
|
||||
"repeat", "return", "while":
|
||||
return true
|
||||
}
|
||||
// [%a_][%w_]*
|
||||
for i, c := range s {
|
||||
if i == 0 {
|
||||
if !((c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || c == '_') {
|
||||
return true
|
||||
}
|
||||
} else {
|
||||
if !((c >= '0' && c <= '9') || (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || c == '_') {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (le *luaEncoder) encodeMap(writer io.Writer, node *CandidateNode, global bool) error {
|
||||
if !global {
|
||||
err := writeString(writer, "{")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
le.indent++
|
||||
}
|
||||
for i, child := range node.Content {
|
||||
if (i % 2) == 1 {
|
||||
// value
|
||||
err := le.encodeAny(writer, child)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = writeString(writer, ";")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
// key
|
||||
if !global || i > 0 {
|
||||
err := le.writeIndent(writer)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if (le.unquoted || global) && child.Tag == "!!str" && !needsQuoting(child.Value) {
|
||||
err := writeString(writer, child.Value+" = ")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
if global {
|
||||
// This only works in Lua 5.2+
|
||||
err := writeString(writer, "_ENV")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
err := writeString(writer, "[")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = le.encodeAny(writer, child)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = writeString(writer, "] = ")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
if child.LineComment != "" {
|
||||
sansPrefix, _ := strings.CutPrefix(child.LineComment, "#")
|
||||
err := writeString(writer, strings.Repeat(" ", i%2)+"--"+sansPrefix)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if (i % 2) == 0 {
|
||||
// newline and indent after comments on keys
|
||||
err = le.writeIndent(writer)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if global {
|
||||
return writeString(writer, "\n")
|
||||
}
|
||||
le.indent--
|
||||
if len(node.Content) != 0 {
|
||||
err := le.writeIndent(writer)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return writeString(writer, "}")
|
||||
}
|
||||
|
||||
func (le *luaEncoder) encodeAny(writer io.Writer, node *CandidateNode) error {
|
||||
switch node.Kind {
|
||||
case SequenceNode:
|
||||
return le.encodeArray(writer, node)
|
||||
case MappingNode:
|
||||
return le.encodeMap(writer, node, false)
|
||||
case ScalarNode:
|
||||
switch node.Tag {
|
||||
case "!!str":
|
||||
return le.encodeString(writer, node)
|
||||
case "!!null":
|
||||
// TODO reject invalid use as a table key
|
||||
return writeString(writer, "nil")
|
||||
case "!!bool":
|
||||
// Yaml 1.2 has case variation e.g. True, FALSE etc but Lua only has
|
||||
// lower case
|
||||
return writeString(writer, strings.ToLower(node.Value))
|
||||
case "!!int":
|
||||
if strings.HasPrefix(node.Value, "0o") {
|
||||
_, octalValue, err := parseInt64(node.Value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return writeString(writer, fmt.Sprintf("%d", octalValue))
|
||||
}
|
||||
return writeString(writer, strings.ToLower(node.Value))
|
||||
case "!!float":
|
||||
switch strings.ToLower(node.Value) {
|
||||
case ".inf":
|
||||
return writeString(writer, "(1/0)")
|
||||
case "-.inf":
|
||||
return writeString(writer, "(-1/0)")
|
||||
case ".nan":
|
||||
return writeString(writer, "(0/0)")
|
||||
default:
|
||||
return writeString(writer, node.Value)
|
||||
}
|
||||
default:
|
||||
return fmt.Errorf("Lua encoder NYI -- %s", node.Tag)
|
||||
}
|
||||
default:
|
||||
return fmt.Errorf("Lua encoder NYI -- %s", node.Tag)
|
||||
}
|
||||
}
|
||||
|
||||
func (le *luaEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
if !le.globals && node.Parent == nil {
|
||||
err := writeString(writer, le.docPrefix)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err := le.encodeAny(writer, node); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !le.globals && node.Parent == nil {
|
||||
err := writeString(writer, le.docSuffix)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
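As a rough illustration of the `encodeString` logic above: literal and folded YAML scalars are emitted as Lua long-bracket strings, widening the `[==[` delimiters until they no longer collide with the content. The shape below is based on the commented-out `Scalar str` scenario in the test file further down, so treat it as a sketch rather than documented behaviour (the real output is tab-indented):

```bash
yq -o=lua '.' <<'EOF'
str: |
  foo
  bar
EOF
# return {
#         ["str"] = [[
# foo
# bar
# ]];
# };
```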
@ -36,7 +36,7 @@ func (pe *propertiesEncoder) PrintLeadingContent(writer io.Writer, content strin
|
||||
if errReading != nil && !errors.Is(errReading, io.EOF) {
|
||||
return errReading
|
||||
}
|
||||
if strings.Contains(readline, "$yqDocSeperator$") {
|
||||
if strings.Contains(readline, "$yqDocSeparator$") {
|
||||
|
||||
if err := pe.PrintDocumentSeparator(writer); err != nil {
|
||||
return err
|
||||
|
@ -213,7 +213,7 @@ func (e *xmlEncoder) encodeComment(encoder *xml.Encoder, commentStr string) erro
|
||||
commentStr = chompRegexp.ReplaceAllString(commentStr, "")
|
||||
log.Debugf("chompRegexp [%v]", commentStr)
|
||||
commentStr = xmlEncodeMultilineCommentRegex.ReplaceAllString(commentStr, "$1$2")
|
||||
log.Debugf("processed multine [%v]", commentStr)
|
||||
log.Debugf("processed multiline [%v]", commentStr)
|
||||
// if the first line is non blank, add a space
|
||||
if commentStr[0] != '\n' && commentStr[0] != ' ' {
|
||||
commentStr = " " + commentStr
|
||||
|
@ -39,7 +39,6 @@ func (ye *yamlEncoder) PrintDocumentSeparator(writer io.Writer) error {
|
||||
}
|
||||
|
||||
func (ye *yamlEncoder) PrintLeadingContent(writer io.Writer, content string) error {
|
||||
// log.Debug("headcommentwas [%v]", content)
|
||||
reader := bufio.NewReader(strings.NewReader(content))
|
||||
|
||||
var commentLineRegEx = regexp.MustCompile(`^\s*#`)
|
||||
@ -50,7 +49,7 @@ func (ye *yamlEncoder) PrintLeadingContent(writer io.Writer, content string) err
|
||||
if errReading != nil && !errors.Is(errReading, io.EOF) {
|
||||
return errReading
|
||||
}
|
||||
if strings.Contains(readline, "$yqDocSeperator$") {
|
||||
if strings.Contains(readline, "$yqDocSeparator$") {
|
||||
|
||||
if err := ye.PrintDocumentSeparator(writer); err != nil {
|
||||
return err
|
||||
|
@ -224,8 +224,8 @@ var jsonScenarios = []formatScenario{
|
||||
{
|
||||
description: "numbers",
|
||||
skipDoc: true,
|
||||
input: "[3, 3.0, 3.1, -1]",
|
||||
expected: "- 3\n- 3\n- 3.1\n- -1\n",
|
||||
input: "[3, 3.0, 3.1, -1, 999999, 1000000, 1000001, 1.1]",
|
||||
expected: "- 3\n- 3\n- 3.1\n- -1\n- 999999\n- 1000000\n- 1000001\n- 1.1\n",
|
||||
scenarioType: "decode-ndjson",
|
||||
},
|
||||
{
|
||||
|
@ -175,7 +175,7 @@ func handleToken(tokens []*token, index int, postProcessedTokens []*token) (toke
|
||||
if index != len(tokens)-1 && currentToken.CheckForPostTraverse &&
|
||||
tokens[index+1].TokenType == openCollect {
|
||||
|
||||
log.Debug(" adding traverArray because next is opencollect")
|
||||
log.Debug(" adding traverseArray because next is opencollect")
|
||||
op := &Operation{OperationType: traverseArrayOpType}
|
||||
postProcessedTokens = append(postProcessedTokens, &token{TokenType: operationToken, Operation: op})
|
||||
}
|
||||
|
@ -26,14 +26,15 @@ var participleYqRules = []*participleYqRule{
|
||||
{"RecursiveDecent", `\.\.`, recursiveDecentOpToken(false), 0},
|
||||
|
||||
{"GetVariable", `\$[a-zA-Z_\-0-9]+`, getVariableOpToken(), 0},
|
||||
{"AsignAsVariable", `as`, opTokenWithPrefs(assignVariableOpType, nil, assignVarPreferences{}), 0},
|
||||
{"AsignRefVariable", `ref`, opTokenWithPrefs(assignVariableOpType, nil, assignVarPreferences{IsReference: true}), 0},
|
||||
{"AssignAsVariable", `as`, opTokenWithPrefs(assignVariableOpType, nil, assignVarPreferences{}), 0},
|
||||
{"AssignRefVariable", `ref`, opTokenWithPrefs(assignVariableOpType, nil, assignVarPreferences{IsReference: true}), 0},
|
||||
|
||||
{"CreateMap", `:\s*`, opToken(createMapOpType), 0},
|
||||
simpleOp("length", lengthOpType),
|
||||
simpleOp("line", lineOpType),
|
||||
simpleOp("column", columnOpType),
|
||||
simpleOp("eval", evalOpType),
|
||||
simpleOp("to_?number", toNumberOpType),
|
||||
|
||||
{"MapValues", `map_?values`, opToken(mapValuesOpType), 0},
|
||||
simpleOp("map", mapOpType),
|
||||
@ -152,6 +153,7 @@ var participleYqRules = []*participleYqRule{
|
||||
|
||||
assignableOp("style", getStyleOpType, assignStyleOpType),
|
||||
assignableOp("tag|type", getTagOpType, assignTagOpType),
|
||||
simpleOp("kind", getKindOpType),
|
||||
assignableOp("anchor", getAnchorOpType, assignAnchorOpType),
|
||||
assignableOp("alias", getAliasOpType, assignAliasOpType),
|
||||
|
||||
@ -201,7 +203,7 @@ var participleYqRules = []*participleYqRule{
|
||||
{`whitespace`, `[ \t\n]+`, nil, 0},
|
||||
|
||||
{"WrappedPathElement", `\."[^ "]+"\??`, pathToken(true), 0},
|
||||
{"PathElement", `\.[^ ;\}\{\:\[\],\|\.\[\(\)=\n]+\??`, pathToken(false), 0},
|
||||
{"PathElement", `\.[^ ;\}\{\:\[\],\|\.\[\(\)=\n!]+\??`, pathToken(false), 0},
|
||||
{"Pipe", `\|`, opToken(pipeOpType), 0},
|
||||
{"Self", `\.`, opToken(selfReferenceOpType), 0},
|
||||
|
||||
|
@ -13,6 +13,29 @@ type participleLexerScenario struct {
|
||||
}
|
||||
|
||||
var participleLexerScenarios = []participleLexerScenario{
|
||||
{
|
||||
expression: ".a!=",
|
||||
tokens: []*token{
|
||||
{
|
||||
TokenType: operationToken,
|
||||
Operation: &Operation{
|
||||
OperationType: traversePathOpType,
|
||||
Value: "a",
|
||||
StringValue: "a",
|
||||
Preferences: traversePreferences{},
|
||||
},
|
||||
CheckForPostTraverse: true,
|
||||
},
|
||||
{
|
||||
TokenType: operationToken,
|
||||
Operation: &Operation{
|
||||
OperationType: notEqualsOpType,
|
||||
Value: "NOT_EQUALS",
|
||||
StringValue: "!=",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expression: ".[:3]",
|
||||
tokens: []*token{
|
||||
|
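The `PathElement` tweak earlier in this diff (adding `!` to the excluded characters) is what lets an expression such as `.a!=` split into a traversal followed by the not-equals operator, as the new lexer scenario above asserts. In practice that means comparisons no longer need a space before `!=`, for example (with a placeholder `data.yml`):

```bash
yq '.[] | select(.name!="Foo")' data.yml
```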
@ -118,6 +118,7 @@ var splitDocumentOpType = &operationType{Type: "SPLIT_DOC", NumArgs: 0, Preceden
|
||||
var getVariableOpType = &operationType{Type: "GET_VARIABLE", NumArgs: 0, Precedence: 55, Handler: getVariableOperator}
|
||||
var getStyleOpType = &operationType{Type: "GET_STYLE", NumArgs: 0, Precedence: 50, Handler: getStyleOperator}
|
||||
var getTagOpType = &operationType{Type: "GET_TAG", NumArgs: 0, Precedence: 50, Handler: getTagOperator}
|
||||
var getKindOpType = &operationType{Type: "GET_KIND", NumArgs: 0, Precedence: 50, Handler: getKindOperator}
|
||||
|
||||
var getKeyOpType = &operationType{Type: "GET_KEY", NumArgs: 0, Precedence: 50, Handler: getKeyOperator}
|
||||
var isKeyOpType = &operationType{Type: "IS_KEY", NumArgs: 0, Precedence: 50, Handler: isKeyOperator}
|
||||
@ -164,6 +165,7 @@ var valueOpType = &operationType{Type: "VALUE", NumArgs: 0, Precedence: 50, Hand
|
||||
var referenceOpType = &operationType{Type: "REF", NumArgs: 0, Precedence: 50, Handler: referenceOperator}
|
||||
var envOpType = &operationType{Type: "ENV", NumArgs: 0, Precedence: 50, Handler: envOperator}
|
||||
var notOpType = &operationType{Type: "NOT", NumArgs: 0, Precedence: 50, Handler: notOperator}
|
||||
var toNumberOpType = &operationType{Type: "TO_NUMBER", NumArgs: 0, Precedence: 50, Handler: toNumberOperator}
|
||||
var emptyOpType = &operationType{Type: "EMPTY", Precedence: 50, Handler: emptyOperator}
|
||||
|
||||
var envsubstOpType = &operationType{Type: "ENVSUBST", NumArgs: 0, Precedence: 50, Handler: envsubstOperator}
|
||||
|
19
pkg/yqlib/lua.go
Normal file
@ -0,0 +1,19 @@
|
||||
package yqlib
|
||||
|
||||
type LuaPreferences struct {
|
||||
DocPrefix string
|
||||
DocSuffix string
|
||||
UnquotedKeys bool
|
||||
Globals bool
|
||||
}
|
||||
|
||||
func NewDefaultLuaPreferences() LuaPreferences {
|
||||
return LuaPreferences{
|
||||
DocPrefix: "return ",
|
||||
DocSuffix: ";\n",
|
||||
UnquotedKeys: false,
|
||||
Globals: false,
|
||||
}
|
||||
}
|
||||
|
||||
var ConfiguredLuaPreferences = NewDefaultLuaPreferences()
|
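These preferences appear to correspond to the CLI flags used by the documentation generator later in this diff: `--lua-unquoted` toggles `UnquotedKeys` and `--lua-globals` toggles `Globals`. Illustrative invocations (a sketch, assuming a `sample.yml` input):

```bash
# default: bracketed, quoted keys wrapped in "return { ... };"
yq -o=lua '.' sample.yml

# bare keys where they are valid Lua identifiers
yq -o=lua --lua-unquoted '.' sample.yml

# assign into the global scope instead of returning a table
yq -o=lua --lua-globals '.' sample.yml
```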
367
pkg/yqlib/lua_test.go
Normal file
@ -0,0 +1,367 @@
|
||||
//go:build !yq_nolua
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
)
|
||||
|
||||
var luaScenarios = []formatScenario{
|
||||
// {
|
||||
// description: "Basic input example",
|
||||
// input: `return {
|
||||
// ["country"] = "Australia"; -- this place
|
||||
// ["cities"] = {
|
||||
// "Sydney",
|
||||
// "Melbourne",
|
||||
// "Brisbane",
|
||||
// "Perth",
|
||||
// };
|
||||
// };
|
||||
// `,
|
||||
// expected: `country: Australia
|
||||
// cities:
|
||||
// - Sydney
|
||||
// - Melbourne
|
||||
// - Brisbane
|
||||
// - Perth
|
||||
// `,
|
||||
// },
|
||||
// {
|
||||
// description: "Basic output example",
|
||||
// scenarioType: "encode",
|
||||
// input: `---
|
||||
// country: Australia # this place
|
||||
// cities:
|
||||
// - Sydney
|
||||
// - Melbourne
|
||||
// - Brisbane
|
||||
// - Perth`,
|
||||
// expected: `return {
|
||||
// ["country"] = "Australia"; -- this place
|
||||
// ["cities"] = {
|
||||
// "Sydney",
|
||||
// "Melbourne",
|
||||
// "Brisbane",
|
||||
// "Perth",
|
||||
// };
|
||||
// };
|
||||
// `,
|
||||
// },
|
||||
{
|
||||
description: "Basic roundtrip",
|
||||
skipDoc: true,
|
||||
scenarioType: "roundtrip",
|
||||
expression: `.cities[0] = "Adelaide"`,
|
||||
input: `return {
|
||||
["country"] = "Australia"; -- this place
|
||||
["cities"] = {
|
||||
"Sydney",
|
||||
"Melbourne",
|
||||
"Brisbane",
|
||||
"Perth",
|
||||
};
|
||||
};
|
||||
`,
|
||||
expected: `return {
|
||||
["country"] = "Australia";
|
||||
["cities"] = {
|
||||
"Adelaide",
|
||||
"Melbourne",
|
||||
"Brisbane",
|
||||
"Perth",
|
||||
};
|
||||
};
|
||||
`,
|
||||
},
|
||||
// {
|
||||
// description: "Unquoted keys",
|
||||
// subdescription: "Uses the `--lua-unquoted` option to produce a nicer-looking output.",
|
||||
// scenarioType: "unquoted-encode",
|
||||
// input: `---
|
||||
//
|
||||
// country: Australia # this place
|
||||
// cities:
|
||||
// - Sydney
|
||||
// - Melbourne
|
||||
// - Brisbane
|
||||
// - Perth`,
|
||||
// expected: `return {
|
||||
// country = "Australia"; -- this place
|
||||
// cities = {
|
||||
// "Sydney",
|
||||
// "Melbourne",
|
||||
// "Brisbane",
|
||||
// "Perth",
|
||||
// };
|
||||
// };
|
||||
//
|
||||
// `,
|
||||
//
|
||||
// },
|
||||
// {
|
||||
// description: "Globals",
|
||||
// subdescription: "Uses the `--lua-globals` option to export the values into the global scope.",
|
||||
// scenarioType: "globals-encode",
|
||||
// input: `---
|
||||
//
|
||||
// country: Australia # this place
|
||||
// cities:
|
||||
// - Sydney
|
||||
// - Melbourne
|
||||
// - Brisbane
|
||||
// - Perth`,
|
||||
// expected: `country = "Australia"; -- this place
|
||||
//
|
||||
// cities = {
|
||||
// "Sydney",
|
||||
// "Melbourne",
|
||||
// "Brisbane",
|
||||
// "Perth",
|
||||
// };
|
||||
//
|
||||
// `,
|
||||
//
|
||||
// },
|
||||
// {
|
||||
// description: "Elaborate example",
|
||||
// input: `---
|
||||
//
|
||||
// hello: world
|
||||
// tables:
|
||||
//
|
||||
// like: this
|
||||
// keys: values
|
||||
// ? look: non-string keys
|
||||
// : True
|
||||
//
|
||||
// numbers:
|
||||
// - decimal: 12345
|
||||
// - hex: 0x7fabc123
|
||||
// - octal: 0o30
|
||||
// - float: 123.45
|
||||
// - infinity: .inf
|
||||
// - not: .nan
|
||||
//
|
||||
// `,
|
||||
//
|
||||
// expected: `return {
|
||||
// ["hello"] = "world";
|
||||
// ["tables"] = {
|
||||
// ["like"] = "this";
|
||||
// ["keys"] = "values";
|
||||
// [{
|
||||
// ["look"] = "non-string keys";
|
||||
// }] = true;
|
||||
// };
|
||||
// ["numbers"] = {
|
||||
// {
|
||||
// ["decimal"] = 12345;
|
||||
// },
|
||||
// {
|
||||
// ["hex"] = 0x7fabc123;
|
||||
// },
|
||||
// {
|
||||
// ["octal"] = 24;
|
||||
// },
|
||||
// {
|
||||
// ["float"] = 123.45;
|
||||
// },
|
||||
// {
|
||||
// ["infinity"] = (1/0);
|
||||
// },
|
||||
// {
|
||||
// ["not"] = (0/0);
|
||||
// },
|
||||
// };
|
||||
// };
|
||||
//
|
||||
// `,
|
||||
//
|
||||
// scenarioType: "encode",
|
||||
// },
|
||||
// {
|
||||
// skipDoc: true,
|
||||
// description: "Sequence",
|
||||
// input: "- a\n- b\n- c\n",
|
||||
// expected: "return {\n\t\"a\",\n\t\"b\",\n\t\"c\",\n};\n",
|
||||
// scenarioType: "encode",
|
||||
// },
|
||||
// {
|
||||
// skipDoc: true,
|
||||
// description: "Mapping",
|
||||
// input: "a: b\nc:\n d: e\nf: 0\n",
|
||||
// expected: "return {\n\t[\"a\"] = \"b\";\n\t[\"c\"] = {\n\t\t[\"d\"] = \"e\";\n\t};\n\t[\"f\"] = 0;\n};\n",
|
||||
// scenarioType: "encode",
|
||||
// },
|
||||
// {
|
||||
// skipDoc: true,
|
||||
// description: "Scalar str",
|
||||
// input: "str: |\n foo\n bar\nanother: 'single'\nand: \"double\"",
|
||||
// expected: "return {\n\t[\"str\"] = [[\nfoo\nbar\n]];\n\t[\"another\"] = 'single';\n\t[\"and\"] = \"double\";\n};\n",
|
||||
// scenarioType: "encode",
|
||||
// },
|
||||
// {
|
||||
// skipDoc: true,
|
||||
// description: "Scalar null",
|
||||
// input: "x: null\n",
|
||||
// expected: "return {\n\t[\"x\"] = nil;\n};\n",
|
||||
// scenarioType: "encode",
|
||||
// },
|
||||
// {
|
||||
// skipDoc: true,
|
||||
// description: "Scalar int",
|
||||
// input: "- 1\n- 2\n- 0x10\n- 0o30\n- -999\n",
|
||||
// expected: "return {\n\t1,\n\t2,\n\t0x10,\n\t24,\n\t-999,\n};\n",
|
||||
// scenarioType: "encode",
|
||||
// },
|
||||
// {
|
||||
// skipDoc: true,
|
||||
// description: "Scalar float",
|
||||
// input: "- 1.0\n- 3.14\n- 1e100\n- .Inf\n- .NAN\n",
|
||||
// expected: "return {\n\t1.0,\n\t3.14,\n\t1e100,\n\t(1/0),\n\t(0/0),\n};\n",
|
||||
// scenarioType: "encode",
|
||||
// },
|
||||
}
|
||||
|
||||
func testLuaScenario(t *testing.T, s formatScenario) {
|
||||
switch s.scenarioType {
|
||||
case "", "decode":
|
||||
test.AssertResultWithContext(t, s.expected, mustProcessFormatScenario(s, NewLuaDecoder(ConfiguredLuaPreferences), NewYamlEncoder(4, false, ConfiguredYamlPreferences)), s.description)
|
||||
case "encode":
|
||||
test.AssertResultWithContext(t, s.expected, mustProcessFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewLuaEncoder(ConfiguredLuaPreferences)), s.description)
|
||||
case "roundtrip":
|
||||
test.AssertResultWithContext(t, s.expected, mustProcessFormatScenario(s, NewLuaDecoder(ConfiguredLuaPreferences), NewLuaEncoder(ConfiguredLuaPreferences)), s.description)
|
||||
case "unquoted-encode":
|
||||
test.AssertResultWithContext(t, s.expected, mustProcessFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewLuaEncoder(LuaPreferences{
|
||||
DocPrefix: "return ",
|
||||
DocSuffix: ";\n",
|
||||
UnquotedKeys: true,
|
||||
Globals: false,
|
||||
})), s.description)
|
||||
case "globals-encode":
|
||||
test.AssertResultWithContext(t, s.expected, mustProcessFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewLuaEncoder(LuaPreferences{
|
||||
DocPrefix: "return ",
|
||||
DocSuffix: ";\n",
|
||||
UnquotedKeys: false,
|
||||
Globals: true,
|
||||
})), s.description)
|
||||
default:
|
||||
panic(fmt.Sprintf("unhandled scenario type %q", s.scenarioType))
|
||||
}
|
||||
}
|
||||
|
||||
func documentLuaScenario(t *testing.T, w *bufio.Writer, i interface{}) {
|
||||
s := i.(formatScenario)
|
||||
|
||||
if s.skipDoc {
|
||||
return
|
||||
}
|
||||
switch s.scenarioType {
|
||||
case "", "decode":
|
||||
documentLuaDecodeScenario(w, s)
|
||||
case "encode", "unquoted-encode", "globals-encode":
|
||||
documentLuaEncodeScenario(w, s)
|
||||
case "roundtrip":
|
||||
documentLuaRoundTripScenario(w, s)
|
||||
default:
|
||||
panic(fmt.Sprintf("unhandled scenario type %q", s.scenarioType))
|
||||
}
|
||||
}
|
||||
|
||||
func documentLuaDecodeScenario(w *bufio.Writer, s formatScenario) {
|
||||
writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))
|
||||
|
||||
if s.subdescription != "" {
|
||||
writeOrPanic(w, s.subdescription)
|
||||
writeOrPanic(w, "\n\n")
|
||||
}
|
||||
|
||||
writeOrPanic(w, "Given a sample.lua file of:\n")
|
||||
writeOrPanic(w, fmt.Sprintf("```lua\n%v\n```\n", s.input))
|
||||
|
||||
writeOrPanic(w, "then\n")
|
||||
expression := s.expression
|
||||
if expression == "" {
|
||||
expression = "."
|
||||
}
|
||||
writeOrPanic(w, fmt.Sprintf("```bash\nyq -oy '%v' sample.lua\n```\n", expression))
|
||||
writeOrPanic(w, "will output\n")
|
||||
|
||||
writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", mustProcessFormatScenario(s, NewLuaDecoder(ConfiguredLuaPreferences), NewYamlEncoder(2, false, ConfiguredYamlPreferences))))
|
||||
}
|
||||
|
||||
func documentLuaEncodeScenario(w *bufio.Writer, s formatScenario) {
|
||||
writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))
|
||||
|
||||
if s.subdescription != "" {
|
||||
writeOrPanic(w, s.subdescription)
|
||||
writeOrPanic(w, "\n\n")
|
||||
}
|
||||
|
||||
prefs := ConfiguredLuaPreferences
|
||||
switch s.scenarioType {
|
||||
case "unquoted-encode":
|
||||
prefs = LuaPreferences{
|
||||
DocPrefix: "return ",
|
||||
DocSuffix: ";\n",
|
||||
UnquotedKeys: true,
|
||||
Globals: false,
|
||||
}
|
||||
case "globals-encode":
|
||||
prefs = LuaPreferences{
|
||||
DocPrefix: "return ",
|
||||
DocSuffix: ";\n",
|
||||
UnquotedKeys: false,
|
||||
Globals: true,
|
||||
}
|
||||
}
|
||||
writeOrPanic(w, "Given a sample.yml file of:\n")
|
||||
writeOrPanic(w, fmt.Sprintf("```yaml\n%v\n```\n", s.input))
|
||||
|
||||
writeOrPanic(w, "then\n")
|
||||
switch s.scenarioType {
|
||||
case "unquoted-encode":
|
||||
writeOrPanic(w, "```bash\nyq -o=lua --lua-unquoted '.' sample.yml\n```\n")
|
||||
case "globals-encode":
|
||||
writeOrPanic(w, "```bash\nyq -o=lua --lua-globals '.' sample.yml\n```\n")
|
||||
default:
|
||||
writeOrPanic(w, "```bash\nyq -o=lua '.' sample.yml\n```\n")
|
||||
}
|
||||
writeOrPanic(w, "will output\n")
|
||||
|
||||
writeOrPanic(w, fmt.Sprintf("```lua\n%v```\n\n", mustProcessFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewLuaEncoder(prefs))))
|
||||
}
|
||||
|
||||
func documentLuaRoundTripScenario(w *bufio.Writer, s formatScenario) {
|
||||
writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))
|
||||
|
||||
if s.subdescription != "" {
|
||||
writeOrPanic(w, s.subdescription)
|
||||
writeOrPanic(w, "\n\n")
|
||||
}
|
||||
|
||||
writeOrPanic(w, "Given a sample.lua file of:\n")
|
||||
writeOrPanic(w, fmt.Sprintf("```lua\n%v\n```\n", s.input))
|
||||
|
||||
writeOrPanic(w, "then\n")
|
||||
writeOrPanic(w, "```bash\nyq '.' sample.lua\n```\n")
|
||||
writeOrPanic(w, "will output\n")
|
||||
|
||||
writeOrPanic(w, fmt.Sprintf("```lua\n%v```\n\n", mustProcessFormatScenario(s, NewLuaDecoder(ConfiguredLuaPreferences), NewLuaEncoder(ConfiguredLuaPreferences))))
|
||||
}
|
||||
|
||||
func TestLuaScenarios(t *testing.T) {
|
||||
for _, tt := range luaScenarios {
|
||||
testLuaScenario(t, tt)
|
||||
}
|
||||
genericScenarios := make([]interface{}, len(luaScenarios))
|
||||
for i, s := range luaScenarios {
|
||||
genericScenarios[i] = s
|
||||
}
|
||||
documentScenarios(t, "usage", "lua", genericScenarios, documentLuaScenario)
|
||||
}
|
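The uncommented `Basic roundtrip` scenario above corresponds to roughly this command line, assuming `sample.lua` holds the `return { ... }` table from the scenario input and that yq infers the format from the `.lua` extension (the real output is tab-indented):

```bash
yq '.cities[0] = "Adelaide"' sample.lua
# return {
#         ["country"] = "Australia";
#         ["cities"] = {
#                 "Adelaide",
#                 "Melbourne",
#                 "Brisbane",
#                 "Perth",
#         };
# };
```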
11
pkg/yqlib/no_lua.go
Normal file
@ -0,0 +1,11 @@
|
||||
//go:build yq_nolua
|
||||
|
||||
package yqlib
|
||||
|
||||
func NewLuaEncoder(prefs LuaPreferences) Encoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewLuaDecoder(prefs LuaPreferences) Decoder {
|
||||
return nil
|
||||
}
|
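The `yq_nolua` build tag above swaps in these nil stubs, so a build without Lua support would presumably look something like this (an assumption about intended use, not something stated in the diff):

```bash
go build -tags yq_nolua .
```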
@ -95,7 +95,12 @@ func addScalars(context Context, target *CandidateNode, lhs *CandidateNode, rhs
|
||||
|
||||
} else if lhsTag == "!!str" {
|
||||
target.Tag = lhs.Tag
|
||||
target.Value = lhs.Value + rhs.Value
|
||||
if rhsTag == "!!null" {
|
||||
target.Value = lhs.Value
|
||||
} else {
|
||||
target.Value = lhs.Value + rhs.Value
|
||||
}
|
||||
|
||||
} else if rhsTag == "!!str" {
|
||||
target.Tag = rhs.Tag
|
||||
target.Value = lhs.Value + rhs.Value
|
||||
|
@ -225,6 +225,20 @@ var addOperatorScenarios = []expressionScenario{
|
||||
"D0, P[], (!cool)::3cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
expression: `null + "cat"`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!str)::cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
expression: `"cat" + null`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!str)::cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Number addition - float",
|
||||
subdescription: "If the lhs or rhs are floats then the expression will be calculated with floats.",
|
||||
|
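The `addScalars` change above makes concatenation with null keep the string operand, which is exactly what the two new test cases assert. In CLI terms that is roughly:

```bash
yq --null-input 'null + "cat"'   # cat
yq --null-input '"cat" + null'   # cat
```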
@ -74,8 +74,8 @@ func assignCommentsOperator(d *dataTreeNavigator, context Context, expressionNod
|
||||
|
||||
func getCommentsOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
preferences := expressionNode.Operation.Preferences.(commentOpPreferences)
|
||||
var startCommentCharaterRegExp = regexp.MustCompile(`^# `)
|
||||
var subsequentCommentCharaterRegExp = regexp.MustCompile(`\n# `)
|
||||
var startCommentCharacterRegExp = regexp.MustCompile(`^# `)
|
||||
var subsequentCommentCharacterRegExp = regexp.MustCompile(`\n# `)
|
||||
|
||||
log.Debugf("GetComments operator!")
|
||||
var results = list.New()
|
||||
@ -108,8 +108,8 @@ func getCommentsOperator(d *dataTreeNavigator, context Context, expressionNode *
|
||||
} else if preferences.FootComment {
|
||||
comment = candidate.FootComment
|
||||
}
|
||||
comment = startCommentCharaterRegExp.ReplaceAllString(comment, "")
|
||||
comment = subsequentCommentCharaterRegExp.ReplaceAllString(comment, "\n")
|
||||
comment = startCommentCharacterRegExp.ReplaceAllString(comment, "")
|
||||
comment = subsequentCommentCharacterRegExp.ReplaceAllString(comment, "\n")
|
||||
|
||||
result := candidate.CreateReplacement(ScalarNode, "!!str", comment)
|
||||
if candidate.IsMapKey {
|
||||
|
@ -8,7 +8,7 @@ import (
|
||||
"time"
|
||||
)
|
||||
|
||||
func getStringParamter(parameterName string, d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (string, error) {
|
||||
func getStringParameter(parameterName string, d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (string, error) {
|
||||
result, err := d.GetMatchingNodes(context.ReadOnlyClone(), expressionNode)
|
||||
|
||||
if err != nil {
|
||||
@ -22,7 +22,7 @@ func getStringParamter(parameterName string, d *dataTreeNavigator, context Conte
|
||||
|
||||
func withDateTimeFormat(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
if expressionNode.RHS.Operation.OperationType == blockOpType || expressionNode.RHS.Operation.OperationType == unionOpType {
|
||||
layout, err := getStringParamter("layout", d, context, expressionNode.RHS.LHS)
|
||||
layout, err := getStringParameter("layout", d, context, expressionNode.RHS.LHS)
|
||||
if err != nil {
|
||||
return Context{}, fmt.Errorf("could not get date time format: %w", err)
|
||||
}
|
||||
@ -61,7 +61,7 @@ func parseDateTime(layout string, datestring string) (time.Time, error) {
|
||||
}
|
||||
|
||||
func formatDateTime(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
format, err := getStringParamter("format", d, context, expressionNode.RHS)
|
||||
format, err := getStringParameter("format", d, context, expressionNode.RHS)
|
||||
layout := context.GetDateTimeLayout()
|
||||
|
||||
if err != nil {
|
||||
@ -96,7 +96,7 @@ func formatDateTime(d *dataTreeNavigator, context Context, expressionNode *Expre
|
||||
}
|
||||
|
||||
func tzOp(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
timezoneStr, err := getStringParamter("timezone", d, context, expressionNode.RHS)
|
||||
timezoneStr, err := getStringParameter("timezone", d, context, expressionNode.RHS)
|
||||
layout := context.GetDateTimeLayout()
|
||||
|
||||
if err != nil {
|
||||
|
@ -14,7 +14,7 @@ var groupByOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Group by field, with nuls",
|
||||
description: "Group by field, with nulls",
|
||||
document: `[{cat: dog}, {foo: 1, bar: 10}, {foo: 3, bar: 100}, {no: foo for you}, {foo: 1, bar: 1}]`,
|
||||
expression: `group_by(.foo)`,
|
||||
expected: []string{
|
||||
|
@ -48,7 +48,7 @@ func keysOperator(d *dataTreeNavigator, context Context, expressionNode *Express
|
||||
if candidate.Kind == MappingNode {
|
||||
targetNode = getMapKeys(candidate)
|
||||
} else if candidate.Kind == SequenceNode {
|
||||
targetNode = getIndicies(candidate)
|
||||
targetNode = getIndices(candidate)
|
||||
} else {
|
||||
return Context{}, fmt.Errorf("Cannot get keys of %v, keys only works for maps and arrays", candidate.Tag)
|
||||
}
|
||||
@ -68,7 +68,7 @@ func getMapKeys(node *CandidateNode) *CandidateNode {
|
||||
return &CandidateNode{Kind: SequenceNode, Tag: "!!seq", Content: contents}
|
||||
}
|
||||
|
||||
func getIndicies(node *CandidateNode) *CandidateNode {
|
||||
func getIndices(node *CandidateNode) *CandidateNode {
|
||||
var contents = make([]*CandidateNode, len(node.Content))
|
||||
|
||||
for index := range node.Content {
|
||||
|
34
pkg/yqlib/operator_kind.go
Normal file
@ -0,0 +1,34 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"container/list"
|
||||
)
|
||||
|
||||
func kindToText(kind Kind) string {
|
||||
switch kind {
|
||||
case MappingNode:
|
||||
return "map"
|
||||
case SequenceNode:
|
||||
return "seq"
|
||||
case ScalarNode:
|
||||
return "scalar"
|
||||
case AliasNode:
|
||||
return "alias"
|
||||
default:
|
||||
return "unknown"
|
||||
}
|
||||
}
|
||||
|
||||
func getKindOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
log.Debugf("GetKindOperator")
|
||||
|
||||
var results = list.New()
|
||||
|
||||
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
result := candidate.CreateReplacement(ScalarNode, "!!str", kindToText(candidate.Kind))
|
||||
results.PushBack(result)
|
||||
}
|
||||
|
||||
return context.ChildContext(results), nil
|
||||
}
|
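A short illustration of the `kind` operator defined above, matching the first scenario in the test file that follows (sketch only):

```bash
yq '.. | kind' <<'EOF'
a: cat
f: []
g: {}
EOF
# map
# scalar
# seq
# map
```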
49
pkg/yqlib/operator_kind_test.go
Normal file
@ -0,0 +1,49 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
var kindOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
description: "Get kind",
|
||||
document: `{a: cat, b: 5, c: 3.2, e: true, f: [], g: {}, h: null}`,
|
||||
expression: `.. | kind`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!str)::map\n",
|
||||
"D0, P[a], (!!str)::scalar\n",
|
||||
"D0, P[b], (!!str)::scalar\n",
|
||||
"D0, P[c], (!!str)::scalar\n",
|
||||
"D0, P[e], (!!str)::scalar\n",
|
||||
"D0, P[f], (!!str)::seq\n",
|
||||
"D0, P[g], (!!str)::map\n",
|
||||
"D0, P[h], (!!str)::scalar\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Get kind, ignores custom tags",
|
||||
subdescription: "Unlike tag, kind is not affected by custom tags.",
|
||||
document: `{a: !!thing cat, b: !!foo {}, c: !!bar []}`,
|
||||
expression: `.. | kind`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!str)::map\n",
|
||||
"D0, P[a], (!!str)::scalar\n",
|
||||
"D0, P[b], (!!str)::map\n",
|
||||
"D0, P[c], (!!str)::seq\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Add comments only to scalars",
|
||||
subdescription: "An example of how you can use kind",
|
||||
document: "a:\n b: 5\n c: 3.2\ne: true\nf: []\ng: {}\nh: null",
|
||||
expression: `(.. | select(kind == "scalar")) line_comment = "this is a scalar"`,
|
||||
expected: []string{"D0, P[], (!!map)::a:\n b: 5 # this is a scalar\n c: 3.2 # this is a scalar\ne: true # this is a scalar\nf: []\ng: {}\nh: null # this is a scalar\n"},
|
||||
},
|
||||
}
|
||||
|
||||
func TestKindOperatorScenarios(t *testing.T) {
|
||||
for _, tt := range kindOperatorScenarios {
|
||||
testScenario(t, &tt)
|
||||
}
|
||||
documentOperatorScenarios(t, "kind", kindOperatorScenarios)
|
||||
}
|
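The last scenario above doubles as a handy recipe; as a command line it would look like this (`file.yml` is a placeholder filename):

```bash
yq '(.. | select(kind == "scalar")) line_comment = "this is a scalar"' file.yml
```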
@ -8,28 +8,28 @@ var pickOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
description: "Pick keys from map",
|
||||
subdescription: "Note that the order of the keys matches the pick order and non existent keys are skipped.",
|
||||
document: "myMap: {cat: meow, dog: bark, thing: hamster, hamster: squeek}\n",
|
||||
document: "myMap: {cat: meow, dog: bark, thing: hamster, hamster: squeak}\n",
|
||||
expression: `.myMap |= pick(["hamster", "cat", "goat"])`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!map)::myMap: {hamster: squeek, cat: meow}\n",
|
||||
"D0, P[], (!!map)::myMap: {hamster: squeak, cat: meow}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Pick keys from map",
|
||||
skipDoc: true,
|
||||
document: "!things myMap: {cat: meow, dog: bark, thing: hamster, hamster: squeek}\n",
|
||||
document: "!things myMap: {cat: meow, dog: bark, thing: hamster, hamster: squeak}\n",
|
||||
expression: `.myMap |= pick(["hamster", "cat", "goat"])`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!map)::!things myMap: {hamster: squeek, cat: meow}\n",
|
||||
"D0, P[], (!!map)::!things myMap: {hamster: squeak, cat: meow}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Pick keys from map with comments",
|
||||
skipDoc: true,
|
||||
document: "# abc\nmyMap: {cat: meow, dog: bark, thing: hamster, hamster: squeek}\n# xyz\n",
|
||||
document: "# abc\nmyMap: {cat: meow, dog: bark, thing: hamster, hamster: squeak}\n# xyz\n",
|
||||
expression: `.myMap |= pick(["hamster", "cat", "goat"])`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!map)::# abc\nmyMap: {hamster: squeek, cat: meow}\n# xyz\n",
|
||||
"D0, P[], (!!map)::# abc\nmyMap: {hamster: squeak, cat: meow}\n# xyz\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
|
52
pkg/yqlib/operator_to_number.go
Normal file
@ -0,0 +1,52 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"container/list"
|
||||
"fmt"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
func tryConvertToNumber(value string) (string, bool) {
|
||||
// try an int first
|
||||
_, _, err := parseInt64(value)
|
||||
if err == nil {
|
||||
return "!!int", true
|
||||
}
|
||||
// try float
|
||||
_, floatErr := strconv.ParseFloat(value, 64)
|
||||
|
||||
if floatErr == nil {
|
||||
return "!!float", true
|
||||
}
|
||||
return "", false
|
||||
|
||||
}
|
||||
|
||||
func toNumberOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
log.Debugf("ToNumberOperator")
|
||||
|
||||
var results = list.New()
|
||||
|
||||
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
if candidate.Kind != ScalarNode {
|
||||
return Context{}, fmt.Errorf("cannot convert node at path %v of tag %v to number", candidate.GetNicePath(), candidate.Tag)
|
||||
}
|
||||
|
||||
if candidate.Tag == "!!int" || candidate.Tag == "!!float" {
|
||||
// it already is a number!
|
||||
results.PushBack(candidate)
|
||||
} else {
|
||||
tag, converted := tryConvertToNumber(candidate.Value)
|
||||
if converted {
|
||||
result := candidate.CreateReplacement(ScalarNode, tag, candidate.Value)
|
||||
results.PushBack(result)
|
||||
} else {
|
||||
return Context{}, fmt.Errorf("cannot convert node value [%v] at path %v of tag %v to number", candidate.Value, candidate.GetNicePath(), candidate.Tag)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
return context.ChildContext(results), nil
|
||||
}
|
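Per the tests that follow, the operator converts string scalars to `!!int` or `!!float` tags (and `tonumber` is accepted as a jq-style alias thanks to the `to_?number` lexer rule earlier in this diff). A quick sketch:

```bash
yq '.[] | to_number' <<'EOF'
- "3"
- "3.1"
- "-1e3"
EOF
# 3
# 3.1
# -1e3
```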
51
pkg/yqlib/operator_to_number_test.go
Normal file
@ -0,0 +1,51 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
var toNumberScenarios = []expressionScenario{
|
||||
{
|
||||
description: "Converts strings to numbers",
|
||||
document: `["3", "3.1", "-1e3"]`,
|
||||
expression: `.[] | to_number`,
|
||||
expected: []string{
|
||||
"D0, P[0], (!!int)::3\n",
|
||||
"D0, P[1], (!!float)::3.1\n",
|
||||
"D0, P[2], (!!float)::-1e3\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "Converts strings to numbers, with tonumber because jq",
|
||||
document: `["3", "3.1", "-1e3"]`,
|
||||
expression: `.[] | tonumber`,
|
||||
expected: []string{
|
||||
"D0, P[0], (!!int)::3\n",
|
||||
"D0, P[1], (!!float)::3.1\n",
|
||||
"D0, P[2], (!!float)::-1e3\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Doesn't change numbers",
|
||||
document: `[3, 3.1, -1e3]`,
|
||||
expression: `.[] | to_number`,
|
||||
expected: []string{
|
||||
"D0, P[0], (!!int)::3\n",
|
||||
"D0, P[1], (!!float)::3.1\n",
|
||||
"D0, P[2], (!!float)::-1e3\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Cannot convert null",
|
||||
expression: `.a.b | to_number`,
|
||||
expectedError: "cannot convert node value [null] at path a.b of tag !!null to number",
|
||||
},
|
||||
}
|
||||
|
||||
func TestToNumberOperatorScenarios(t *testing.T) {
|
||||
for _, tt := range toNumberScenarios {
|
||||
testScenario(t, &tt)
|
||||
}
|
||||
documentOperatorScenarios(t, "to_number", toNumberScenarios)
|
||||
}
|
@ -192,7 +192,7 @@ func traverseArrayWithIndices(node *CandidateNode, indices []*CandidateNode, pre
|
||||
contentLength := len(node.Content)
|
||||
for contentLength <= index {
|
||||
if contentLength == 0 {
|
||||
// default to nice yaml formating
|
||||
// default to nice yaml formatting
|
||||
node.Style = 0
|
||||
}
|
||||
|
||||
|
@ -19,6 +19,7 @@ import (
|
||||
type expressionScenario struct {
|
||||
description string
|
||||
subdescription string
|
||||
explanation []string
|
||||
environmentVariables map[string]string
|
||||
document string
|
||||
document2 string
|
||||
@ -31,7 +32,7 @@ type expressionScenario struct {
|
||||
}
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
logging.SetLevel(logging.DEBUG, "")
|
||||
logging.SetLevel(logging.ERROR, "")
|
||||
Now = func() time.Time {
|
||||
return time.Date(2021, time.May, 19, 1, 2, 3, 4, time.UTC)
|
||||
}
|
||||
@ -255,6 +256,14 @@ func documentOperatorScenario(t *testing.T, w *bufio.Writer, i interface{}) {
|
||||
writeOrPanic(w, "will output\n")
|
||||
|
||||
documentOutput(t, w, s, formattedDoc, formattedDoc2)
|
||||
|
||||
if len(s.explanation) > 0 {
|
||||
writeOrPanic(w, "### Explanation:\n")
|
||||
for _, text := range s.explanation {
|
||||
writeOrPanic(w, fmt.Sprintf("- %v\n", text))
|
||||
}
|
||||
writeOrPanic(w, "\n")
|
||||
}
|
||||
}
|
||||
|
||||
func documentInput(w *bufio.Writer, s expressionScenario) (string, string) {
|
||||
|
@ -31,6 +31,7 @@ const (
|
||||
ShOutputFormat
|
||||
TomlOutputFormat
|
||||
ShellVariablesOutputFormat
|
||||
LuaOutputFormat
|
||||
)
|
||||
|
||||
func OutputFormatFromString(format string) (PrinterOutputFormat, error) {
|
||||
@ -51,8 +52,10 @@ func OutputFormatFromString(format string) (PrinterOutputFormat, error) {
|
||||
return TomlOutputFormat, nil
|
||||
case "shell", "s", "sh":
|
||||
return ShellVariablesOutputFormat, nil
|
||||
case "lua", "l":
|
||||
return LuaOutputFormat, nil
|
||||
default:
|
||||
return 0, fmt.Errorf("unknown format '%v' please use [yaml|json|props|csv|tsv|xml|toml|shell]", format)
|
||||
return 0, fmt.Errorf("unknown format '%v' please use [yaml|json|props|csv|tsv|xml|toml|shell|lua]", format)
|
||||
}
|
||||
}
|
||||
|
||||
@ -143,7 +146,7 @@ func (p *resultsPrinter) PrintResults(matchingNodes *list.List) error {
|
||||
return errorWriting
|
||||
}
|
||||
|
||||
commentsStartWithSepExp := regexp.MustCompile(`^\$yqDocSeperator\$`)
|
||||
commentsStartWithSepExp := regexp.MustCompile(`^\$yqDocSeparator\$`)
|
||||
commentStartsWithSeparator := commentsStartWithSepExp.MatchString(mappedDoc.LeadingContent)
|
||||
|
||||
if (p.previousDocIndex != mappedDoc.GetDocument() || p.previousFileIndex != mappedDoc.GetFileIndex()) && !commentStartsWithSeparator {
|
||||
|
@ -82,15 +82,15 @@ func TestPrinterMultipleDocsInSequenceWithLeadingContent(t *testing.T) {
|
||||
}
|
||||
|
||||
el := inputs.Front()
|
||||
el.Value.(*CandidateNode).LeadingContent = "# go cats\n$yqDocSeperator$\n"
|
||||
el.Value.(*CandidateNode).LeadingContent = "# go cats\n$yqDocSeparator$\n"
|
||||
sample1 := nodeToList(el.Value.(*CandidateNode))
|
||||
|
||||
el = el.Next()
|
||||
el.Value.(*CandidateNode).LeadingContent = "$yqDocSeperator$\n"
|
||||
el.Value.(*CandidateNode).LeadingContent = "$yqDocSeparator$\n"
|
||||
sample2 := nodeToList(el.Value.(*CandidateNode))
|
||||
|
||||
el = el.Next()
|
||||
el.Value.(*CandidateNode).LeadingContent = "$yqDocSeperator$\n# cool\n"
|
||||
el.Value.(*CandidateNode).LeadingContent = "$yqDocSeparator$\n# cool\n"
|
||||
sample3 := nodeToList(el.Value.(*CandidateNode))
|
||||
|
||||
err = printer.PrintResults(sample1)
|
||||
@ -174,21 +174,21 @@ func TestPrinterMultipleFilesInSequenceWithLeadingContent(t *testing.T) {
|
||||
elNode := el.Value.(*CandidateNode)
|
||||
elNode.document = 0
|
||||
elNode.fileIndex = 0
|
||||
elNode.LeadingContent = "# go cats\n$yqDocSeperator$\n"
|
||||
elNode.LeadingContent = "# go cats\n$yqDocSeparator$\n"
|
||||
sample1 := nodeToList(elNode)
|
||||
|
||||
el = el.Next()
|
||||
elNode = el.Value.(*CandidateNode)
|
||||
elNode.document = 0
|
||||
elNode.fileIndex = 1
|
||||
elNode.LeadingContent = "$yqDocSeperator$\n"
|
||||
elNode.LeadingContent = "$yqDocSeparator$\n"
|
||||
sample2 := nodeToList(elNode)
|
||||
|
||||
el = el.Next()
|
||||
elNode = el.Value.(*CandidateNode)
|
||||
elNode.document = 0
|
||||
elNode.fileIndex = 2
|
||||
elNode.LeadingContent = "$yqDocSeperator$\n# cool\n"
|
||||
elNode.LeadingContent = "$yqDocSeparator$\n# cool\n"
|
||||
sample3 := nodeToList(elNode)
|
||||
|
||||
err = printer.PrintResults(sample1)
|
||||
@ -239,7 +239,7 @@ func TestPrinterMultipleDocsInSinglePrintWithLeadingDoc(t *testing.T) {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
inputs.Front().Value.(*CandidateNode).LeadingContent = "# go cats\n$yqDocSeperator$\n"
|
||||
inputs.Front().Value.(*CandidateNode).LeadingContent = "# go cats\n$yqDocSeparator$\n"
|
||||
|
||||
err = printer.PrintResults(inputs)
|
||||
if err != nil {
|
||||
@ -267,7 +267,7 @@ func TestPrinterMultipleDocsInSinglePrintWithLeadingDocTrailing(t *testing.T) {
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
inputs.Front().Value.(*CandidateNode).LeadingContent = "$yqDocSeperator$\n"
|
||||
inputs.Front().Value.(*CandidateNode).LeadingContent = "$yqDocSeparator$\n"
|
||||
err = printer.PrintResults(inputs)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
@ -313,7 +313,7 @@ func TestPrinterMultipleDocsJson(t *testing.T) {
|
||||
var output bytes.Buffer
|
||||
var writer = bufio.NewWriter(&output)
|
||||
// note printDocSeparators is true, it should still not print document separators
|
||||
// when outputing JSON.
|
||||
// when outputting JSON.
|
||||
encoder := NewJSONEncoder(0, false, false)
|
||||
if encoder == nil {
|
||||
t.Skipf("no support for %s output format", "json")
|
||||
@ -365,7 +365,7 @@ func TestPrinterNulSeparatorWithJson(t *testing.T) {
|
||||
var output bytes.Buffer
|
||||
var writer = bufio.NewWriter(&output)
|
||||
// note printDocSeparators is true, it should still not print document separators
|
||||
// when outputing JSON.
|
||||
// when outputting JSON.
|
||||
encoder := NewJSONEncoder(0, false, false)
|
||||
if encoder == nil {
|
||||
t.Skipf("no support for %s output format", "json")
|
||||
|
96
pkg/yqlib/recipes_test.go
Normal file
@ -0,0 +1,96 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
var recipes = []expressionScenario{
|
||||
{
|
||||
description: "Find items in an array",
|
||||
subdescription: "We have an array and we want to find the elements with a particular name.",
|
||||
explanation: []string{
|
||||
"`.[]` splats the array, and puts all the items in the context.",
|
||||
"These items are then piped (`|`) into `select(.name == \"Foo\")` which will select all the nodes that have a name property set to 'Foo'.",
|
||||
"See the [select](https://mikefarah.gitbook.io/yq/operators/select) operator for more information.",
|
||||
},
|
||||
document: `[{name: Foo, numBuckets: 0}, {name: Bar, numBuckets: 0}]`,
|
||||
expression: `.[] | select(.name == "Foo")`,
|
||||
expected: []string{
|
||||
"D0, P[0], (!!map)::{name: Foo, numBuckets: 0}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Find and update items in an array",
|
||||
subdescription: "We have an array and we want to _update_ the elements with a particular name.",
|
||||
document: `[{name: Foo, numBuckets: 0}, {name: Bar, numBuckets: 0}]`,
|
||||
expression: `(.[] | select(.name == "Foo") | .numBuckets) |= . + 1`,
|
||||
explanation: []string{
|
||||
"Following from the example above`.[]` splats the array, selects filters the items.",
|
||||
"We then pipe (`|`) that into `.numBuckets`, which will select that field from all the matching items",
|
||||
"Splat, select and the field are all in brackets, that whole expression is passed to the `|=` operator as the left hand side expression, with `. + 1` as the right hand side expression.",
|
||||
"`|=` is the operator that updates fields relative to their own value, which is referenced as dot (`.`).",
|
||||
"The expression `. + 1` increments the numBuckets counter.",
|
||||
"See the [assign](https://mikefarah.gitbook.io/yq/operators/assign-update) and [add](https://mikefarah.gitbook.io/yq/operators/add) operators for more information.",
|
||||
},
|
||||
expected: []string{
|
||||
"D0, P[], (!!seq)::[{name: Foo, numBuckets: 1}, {name: Bar, numBuckets: 0}]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Multiple or complex updates to items in an array",
|
||||
subdescription: "We have an array and we want to _update_ the elements with a particular name in reference to its type.",
|
||||
document: `myArray: [{name: Foo, type: cat}, {name: Bar, type: dog}]`,
|
||||
expression: `with(.myArray[]; .name = .name + " - " + .type)`,
|
||||
explanation: []string{
|
||||
"The with operator will effectively loop through each given item in the first given expression, and run the second expression against it.",
|
||||
"`.myArray[]` splats the array in `myArray`. So `with` will run against each item in that array",
|
||||
"`.name = .name + \" - \" + .type` this expression is run against every item, updating the name to be a concatenation of the original name as well as the type.",
|
||||
"See the [with](https://mikefarah.gitbook.io/yq/operators/with) operator for more information and examples.",
|
||||
},
|
||||
expected: []string{
|
||||
"D0, P[], (!!map)::myArray: [{name: Foo - cat, type: cat}, {name: Bar - dog, type: dog}]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Sort an array by a field",
|
||||
document: `myArray: [{name: Foo, numBuckets: 1}, {name: Bar, numBuckets: 0}]`,
|
||||
expression: `.myArray |= sort_by(.numBuckets)`,
|
||||
explanation: []string{
|
||||
"We want to resort `.myArray`.",
|
||||
"`sort_by` works by piping an array into it, and it pipes out a sorted array.",
|
||||
"So, we use `|=` to update `.myArray`. This is the same as doing `.myArray = (.myArray | sort_by(.numBuckets))`",
|
||||
},
|
||||
expected: []string{
|
||||
"D0, P[], (!!map)::myArray: [{name: Bar, numBuckets: 0}, {name: Foo, numBuckets: 1}]\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Filter, flatten, sort and unique",
|
||||
subdescription: "Lets",
|
||||
document: `[{type: foo, names: [Fred, Catherine]}, {type: bar, names: [Zelda]}, {type: foo, names: Fred}, {type: foo, names: Ava}]`,
|
||||
expression: `[.[] | select(.type == "foo") | .names] | flatten | sort | unique`,
|
||||
explanation: []string{
|
||||
"`.[] | select(.type == \"foo\") | .names` will select the array elements of type \"foo\"",
|
||||
"Splat `.[]` will unwrap the array and match all the items. We need to do this so we can work on the child items, for instance, filter items out using the `select` operator.",
|
||||
"But we still want the final results back into an array. So after we're doing working on the children, we wrap everything back into an array using square brackets around the expression. `[.[] | select(.type == \"foo\") | .names]`",
|
||||
"Now have have an array of all the 'names' values. Which includes arrays of strings as well as strings on their own.",
|
||||
"Pipe `|` this array through `flatten`. This will flatten nested arrays. So now we have a flat list of all the name value strings",
|
||||
"Next we pipe `|` that through `sort` and then `unique` to get a sorted, unique list of the names!",
|
||||
"See the [flatten](https://mikefarah.gitbook.io/yq/operators/flatten), [sort](https://mikefarah.gitbook.io/yq/operators/sort) and [unique](https://mikefarah.gitbook.io/yq/operators/unique) for more information and examples.",
|
||||
},
|
||||
expected: []string{
|
||||
"D0, P[], (!!seq)::- Ava\n- Catherine\n- Fred\n",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func TestRecipes(t *testing.T) {
|
||||
for _, tt := range recipes {
|
||||
testScenario(t, &tt)
|
||||
}
|
||||
genericScenarios := make([]interface{}, len(recipes))
|
||||
for i, s := range recipes {
|
||||
genericScenarios[i] = s
|
||||
}
|
||||
documentScenarios(t, "usage", "recipes", genericScenarios, documentOperatorScenario)
|
||||
}
|
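For reference, the `Find and update items in an array` recipe above would be run from the shell roughly as follows (`data.yml` is a placeholder filename):

```bash
yq '(.[] | select(.name == "Foo") | .numBuckets) |= . + 1' data.yml
# [{name: Foo, numBuckets: 1}, {name: Bar, numBuckets: 0}]
```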
@ -35,12 +35,12 @@ var shellVariablesScenarios = []formatScenario{
|
||||
"ascii_=_symbols: replaced with _" + "\n" +
|
||||
"\"ascii_\t_controls\": dropped (this example uses \\t)" + "\n" +
|
||||
"nonascii_\u05d0_characters: dropped" + "\n" +
|
||||
"effrot_expe\u00f1ded_t\u00f2_preserve_accented_latin_letters: moderate (via unicode NFKD)" + "\n",
|
||||
"effort_expe\u00f1ded_t\u00f2_preserve_accented_latin_letters: moderate (via unicode NFKD)" + "\n",
|
||||
expected: "" +
|
||||
"ascii___symbols='replaced with _'" + "\n" +
|
||||
"ascii__controls='dropped (this example uses \\t)'" + "\n" +
|
||||
"nonascii__characters=dropped" + "\n" +
|
||||
"effrot_expended_to_preserve_accented_latin_letters='moderate (via unicode NFKD)'" + "\n",
|
||||
"effort_expended_to_preserve_accented_latin_letters='moderate (via unicode NFKD)'" + "\n",
|
||||
},
|
||||
{
|
||||
description: "Encode shell variables: empty values, arrays and maps",
|
||||
@ -65,10 +65,10 @@ func TestShellVariableScenarios(t *testing.T) {
|
||||
for i, s := range shellVariablesScenarios {
|
||||
genericScenarios[i] = s
|
||||
}
|
||||
documentScenarios(t, "usage", "shellvariables", genericScenarios, documentShellVaraibleScenario)
|
||||
documentScenarios(t, "usage", "shellvariables", genericScenarios, documentShellVariableScenario)
|
||||
}
|
||||
|
||||
func documentShellVaraibleScenario(_ *testing.T, w *bufio.Writer, i interface{}) {
|
||||
func documentShellVariableScenario(_ *testing.T, w *bufio.Writer, i interface{}) {
|
||||
s := i.(formatScenario)
|
||||
if s.skipDoc {
|
||||
return
|
||||
|
@ -44,7 +44,7 @@ func (w *writeInPlaceHandlerImpl) CreateTempFile() (*os.File, error) {
|
||||
}
|
||||
|
||||
func (w *writeInPlaceHandlerImpl) FinishWriteInPlace(evaluatedSuccessfully bool) error {
|
||||
log.Debug("Going to write-inplace, evaluatedSuccessfully=%v, target=%v", evaluatedSuccessfully, w.inputFilename)
|
||||
log.Debug("Going to write in place, evaluatedSuccessfully=%v, target=%v", evaluatedSuccessfully, w.inputFilename)
|
||||
safelyCloseFile(w.tempFile)
|
||||
if evaluatedSuccessfully {
|
||||
log.Debug("Moving temp file to target")
|
||||
|
@ -356,6 +356,20 @@ var xmlScenarios = []formatScenario{
|
||||
input: "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<animal>cat</animal>\n<animal>goat</animal>",
|
||||
expected: "+p_xml: version=\"1.0\" encoding=\"UTF-8\"\nanimal:\n - cat\n - goat\n",
|
||||
},
|
||||
{
|
||||
description: "Parse xml: force as an array",
|
||||
subdescription: "In XML, if your array has a single item, then yq doesn't know its an array. This is how you can consistently force it to be an array. This handles the 3 scenarios of having nothing in the array, having a single item and having multiple.",
|
||||
input: "<zoo><animal>cat</animal></zoo>",
|
||||
expression: ".zoo.animal |= ([] + .)",
|
||||
expected: "zoo:\n animal:\n - cat\n",
|
||||
},
|
||||
{
|
||||
description: "Parse xml: force all as an array",
|
||||
subdescription: "Because of the way yq works, when updating everything you need to update the children before the parents. By default `..` will match parents first, so we reverse that before updating.",
|
||||
input: "<zoo><thing><frog>boing</frog></thing></zoo>",
|
||||
expression: "([..] | reverse | .[]) |= [] + .",
|
||||
expected: "- zoo:\n - thing:\n - frog:\n - boing\n",
|
||||
},
|
||||
{
|
||||
description: "Parse xml: attributes",
|
||||
subdescription: "Attributes are converted to fields, with the default attribute prefix '+'. Use '--xml-attribute-prefix` to set your own.",
|
||||
@ -657,7 +671,7 @@ func documentXMLScenario(t *testing.T, w *bufio.Writer, i interface{}) {
|
||||
case "decode-raw-token-off":
|
||||
documentXMLDecodeKeepNsRawTokenScenario(w, s)
|
||||
case "roundtrip-skip-directives":
|
||||
documentXMLSkipDirectrivesScenario(w, s)
|
||||
documentXMLSkipDirectivesScenario(w, s)
|
||||
|
||||
default:
|
||||
panic(fmt.Sprintf("unhandled scenario type %q", s.scenarioType))
|
||||
@ -680,7 +694,7 @@ func documentXMLDecodeScenario(w *bufio.Writer, s formatScenario) {
|
||||
if expression == "" {
|
||||
expression = "."
|
||||
}
|
||||
writeOrPanic(w, fmt.Sprintf("```bash\nyq -p=xml '%v' sample.xml\n```\n", expression))
|
||||
writeOrPanic(w, fmt.Sprintf("```bash\nyq -oy '%v' sample.xml\n```\n", expression))
|
||||
writeOrPanic(w, "will output\n")
|
||||
|
||||
writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", mustProcessFormatScenario(s, NewXMLDecoder(ConfiguredXMLPreferences), NewYamlEncoder(2, false, ConfiguredYamlPreferences))))
|
||||
@ -698,7 +712,7 @@ func documentXMLDecodeKeepNsScenario(w *bufio.Writer, s formatScenario) {
|
||||
writeOrPanic(w, fmt.Sprintf("```xml\n%v\n```\n", s.input))
|
||||
|
||||
writeOrPanic(w, "then\n")
|
||||
writeOrPanic(w, "```bash\nyq -p=xml -o=xml --xml-keep-namespace=false '.' sample.xml\n```\n")
|
||||
writeOrPanic(w, "```bash\nyq --xml-keep-namespace=false '.' sample.xml\n```\n")
|
||||
writeOrPanic(w, "will output\n")
|
||||
prefs := NewDefaultXmlPreferences()
|
||||
prefs.KeepNamespace = false
|
||||
@ -722,7 +736,7 @@ func documentXMLDecodeKeepNsRawTokenScenario(w *bufio.Writer, s formatScenario)
|
||||
writeOrPanic(w, fmt.Sprintf("```xml\n%v\n```\n", s.input))
|
||||
|
||||
writeOrPanic(w, "then\n")
|
||||
writeOrPanic(w, "```bash\nyq -p=xml -o=xml --xml-raw-token=false '.' sample.xml\n```\n")
|
||||
writeOrPanic(w, "```bash\nyq --xml-raw-token=false '.' sample.xml\n```\n")
|
||||
writeOrPanic(w, "will output\n")
|
||||
|
||||
prefs := NewDefaultXmlPreferences()
|
||||
@ -767,13 +781,13 @@ func documentXMLRoundTripScenario(w *bufio.Writer, s formatScenario) {
writeOrPanic(w, fmt.Sprintf("```xml\n%v\n```\n", s.input))

writeOrPanic(w, "then\n")
writeOrPanic(w, "```bash\nyq -p=xml -o=xml '.' sample.xml\n```\n")
writeOrPanic(w, "```bash\nyq '.' sample.xml\n```\n")
writeOrPanic(w, "will output\n")

writeOrPanic(w, fmt.Sprintf("```xml\n%v```\n\n", mustProcessFormatScenario(s, NewXMLDecoder(ConfiguredXMLPreferences), NewXMLEncoder(2, ConfiguredXMLPreferences))))
}

func documentXMLSkipDirectrivesScenario(w *bufio.Writer, s formatScenario) {
func documentXMLSkipDirectivesScenario(w *bufio.Writer, s formatScenario) {
writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))

if s.subdescription != "" {
@ -785,7 +799,7 @@ func documentXMLSkipDirectrivesScenario(w *bufio.Writer, s formatScenario) {
writeOrPanic(w, fmt.Sprintf("```xml\n%v\n```\n", s.input))

writeOrPanic(w, "then\n")
writeOrPanic(w, "```bash\nyq -p=xml -o=xml --xml-skip-directives '.' sample.xml\n```\n")
writeOrPanic(w, "```bash\nyq --xml-skip-directives '.' sample.xml\n```\n")
writeOrPanic(w, "will output\n")
prefs := NewDefaultXmlPreferences()
prefs.SkipDirectives = true
252
project-words.txt
Normal file
@ -0,0 +1,252 @@
abxbbxdbxebxczzx
abxbbxdbxebxczzy
accum
Accum
adithyasunil
AEDT
água
ÁGUA
alecthomas
appleapple
Astuff
autocreating
autoparse
AWST
axbxcxdxe
axbxcxdxexxx
bananabanana
barp
bitnami
blarp
blddir
Bobo
BODMAS
bonapite
Brien
Bstuff
BUILDKIT
buildpackage
catmeow
CATYPE
CBVVE
chardata
chillum
choco
chomper
cleanup
cmlu
colorise
colors
compinit
coolioo
coverprofile
createmap
csvd
CSVUTF
currentlabel
cygpath
czvf
datestring
datetime
Datetime
datetimes
DEBEMAIL
debhelper
Debugf
debuild
delish
delpaths
DELPATHS
devorbitus
devscripts
dimchansky
Dont
dput
elliotchance
endhint
endofname
Entriesfrom
envsubst
errorlevel
Escandón
Evalall
fakefilename
fakeroot
Farah
fatih
Fifi
filebytes
Fileish
foobar
foobaz
foof
frood
fullpath
gitbook
githubactions
gnupg
goccy
gofmt
gogo
golangci
GOMODCACHE
GOPATH
gosec
gota
goversion
GOVERSION
haha
hellno
herbygillot
hexdump
Hoang
hostpath
hotdog
howdy
incase
inlinetables
inplace
ints
ireduce
iwatch
jinzhu
jq's
jsond
keygrip
Keygrip
KEYGRIP
KEYID
keyvalue
kwak
lalilu
ldflags
LDFLAGS
lexer
Lexer
libdistro
lindex
linecomment
LVAs
magiconair
mapvalues
Mier
mikefarah
minideb
minishift
mipsle
mitchellh
mktemp
Mult
multidoc
multimaint
myenv
myenvnonexisting
myfile
myformat
ndjson
NDJSON
NFKD
nixpkgs
nojson
nonascii
nonempty
noninteractive
Nonquoting
nosec
notoml
noxml
nolua
nullinput
onea
Oneshot
opencollect
opstack
orderedmap
osarch
overridign
pacman
Padder
pandoc
parsechangelog
pcsv
pelletier
pflag
prechecking
Prerelease
proc
propsd
qylib
readline
realnames
realpath
repr
rhash
rindex
risentveber
rmescandon
Rosey
roundtrip
Roundtrip
roundtripping
runningvms
sadface
selfupdate
setpath
sharedfolder
Sharedfolder
shellvariables
shellvars
shortfunc
shortpipe
shunit
snapcraft
somevalue
splt
srcdir
stackoverflow
stiched
Strc
strenv
strload
stylig
subarray
subchild
subdescription
submatch
submatches
SUBSTR
tempfile
tfstate
Tfstate
thar
timezone
Timezone
timezones
Timezones
tojson
Tokenvalue
tsvd
Tuan
tzdata
Uhoh
updateassign
urid
utfbom
Warningf
Wazowski
webi
Webi
wherever
winget
withdots
wizz
woop
workdir
Writable
xmld
xyzzy
yamld
yqlib
yuin
zabbix
tonumber
@ -1,3 +1,19 @@
4.35.2:
- Fix various typos #1798
- Fixed number parsing as float bug in JSON #1756
- Fixed string, null concatenation consistency #1712
- Fixed expression parsing issue #1711
- Bumped dependencies

4.35.1:
- Added Lua output support (Thanks @Zash)!
- Added BSD checksum format (Thanks @viq)!
- Bumped dependencies

4.34.2:
- Bumped dependencies


4.34.1:
- Added shell output format thanks @giorgiga
- Fixed nil pointer dereference (#1649) thanks @ArthurFritz
@ -95,7 +111,7 @@
- Fixed sorting by date #1412
- Added check to ensure only maps can be encoded to XML #1408
- Check merge alias is a map #1425
- Explicity setting unwrap flag works for json output #437, #1409
- Explicitly setting unwrap flag works for json output #437, #1409
- Bumped go version
@ -1,2 +1,2 @@
#!/bin/bash
go build -tags yq_notoml -tags yq_noxml -tags yq_nojson -ldflags "-s -w" .
go build -tags "yq_nolua yq_notoml yq_noxml yq_nojson" -ldflags "-s -w" .
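A small sketch of exercising the updated minimal-build command locally; the version check at the end is illustrative only.

```bash
# build yq without Lua, TOML, XML and JSON support, stripping debug info
go build -tags "yq_nolua yq_notoml yq_noxml yq_nojson" -ldflags "-s -w" .

# sanity-check the resulting binary
./yq --version
```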
@ -26,7 +26,7 @@ if [ "$1" == "" ]; then
fi

if [ "$2" != "" ]; then
# so we dont match x.tar.gz when 'x' is given
# so we don't match x.tar.gz when 'x' is given
file="$2\s"
else
file=""
@ -47,7 +47,7 @@ fi

grepMatch=$(grep -m 1 -n "$1" checksums_hashes_order)
if [ "$grepMatch" == "" ]; then
echo "Could not find hash algorith '$1' in checksums_hashes_order"
echo "Could not find hash algorithm '$1' in checksums_hashes_order"
exit 1
fi
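For readers unfamiliar with the script being touched here, a hedged usage sketch: the first argument is a hash algorithm name as listed in checksums_hashes_order (written by rhash --list-hashes, see the release script further down), the second an optional artifact name; the exact labels below are assumptions.

```bash
# print the SHA-256 checksum line for a single release artifact (names assumed)
./extract-checksum.sh SHA-256 yq_linux_amd64
```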
@ -1,7 +1,7 @@
#! /bin/bash
set -e

# note that this reqires pandoc to be installed.
# note that this requires pandoc to be installed.

cat ./pkg/yqlib/doc/operators/headers/Main.md > man.md
printf "\n# HOW IT WORKS\n" >> man.md
@ -1,7 +1,7 @@
#! /bin/bash
set -e

# note that this reqires pandoc to be installed.
# note that this requires pandoc to be installed.

pandoc \
--variable=title:"YQ" \
@ -30,7 +30,7 @@ show_help() {
echo " distribution is considered"
echo " --goversion VERSION The version of Golang to use. Default to $GOVERSION"
echo " -k, --sign-key KEYID Sign the package sources with the provided gpg key id (long format). When not provided this"
echo " paramater, the generated sources are not signed"
echo " parameter, the generated sources are not signed"
echo " -s, --sign Sign the package sources with a gpg key of the maintainer"
echo " -m, --maintainer WHO The maintainer used as author of the changelog. git.name and git.email (see git config) is"
echo " the considered format"
@ -1306,7 +1306,7 @@ if command [ "$#" -ge 2 ]; then
# Argument $1 is either the filename of tests or '--'; either way, skip it.
shift
# Remaining arguments ($2 .. $#) are assumed to be test function names.
# Interate through all remaining args in "$@" in a POSIX (likely portable) way.
# Iterate through all remaining args in "$@" in a POSIX (likely portable) way.
# Helpful tip: https://unix.stackexchange.com/questions/314032/how-to-use-arguments-like-1-2-in-a-for-loop
for _shunit_arg_ do
suite_addTest "${_shunit_arg_}"
3
scripts/spelling.sh
Executable file
@ -0,0 +1,3 @@
#!/bin/bash

npx cspell --no-progress "**/*.{sh,go}"
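A sketch of the intended local workflow, assuming the repository's cspell configuration (not shown in this diff) registers project-words.txt as a custom dictionary.

```bash
# run the same spell check scripts/spelling.sh performs
npx cspell --no-progress "**/*.{sh,go}"

# if cspell flags a legitimate project-specific term, add it to the dictionary
# ("somenewterm" is a placeholder)
echo "somenewterm" >> project-words.txt
```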
@ -19,6 +19,8 @@ rm yq.1

rhash -r -a . -o checksums

rhash -r -a --bsd . -o checksums-bsd

rhash --list-hashes > checksums_hashes_order

cp ../scripts/extract-checksum.sh .
@ -1,5 +1,5 @@
name: yq
version: 'v4.34.1'
version: 'v4.35.2'
summary: A lightweight and portable command-line YAML processor
description: |
The aim of the project is to be the jq or sed of yaml files.