mirror of https://github.com/mikefarah/yq.git
synced 2025-01-12 19:25:37 +00:00

Spelling with cspell

parent 980b0e6daa
commit cdb9b56066

Makefile (6 lines changed)
@@ -84,8 +84,12 @@ format: vendor
 	${ENGINERUN} bash ./scripts/format.sh
 
 
+.PHONY: spelling
+spelling: format
+	${ENGINERUN} bash ./scripts/spelling.sh
+
 .PHONY: secure
-secure: format
+secure: spelling
 	${ENGINERUN} bash ./scripts/secure.sh
 
 .PHONY: check
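The new target slots into the existing chain: spelling depends on format, and secure now depends on spelling, so the spell check runs before the security scan. A minimal sketch of invoking it locally, assuming the rest of the Makefile (including the ENGINERUN wrapper these recipes already use, which is defined outside this hunk) is unchanged:

```bash
# Run the spell check on its own; format runs first via the dependency above.
make spelling

# secure now lists spelling as a prerequisite, so this exercises the new check too.
make secure
```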
@@ -19,7 +19,7 @@ Pipe from STDIN:
 yq '.a.b[0].c' < file.yaml
 ```
 
-Update a yaml file, inplace
+Update a yaml file, in place
 ```bash
 yq -i '.a.b[0].c = "cool"' file.yaml
 ```
@@ -310,7 +310,7 @@ https://pkgs.alpinelinux.org/package/edge/community/x86/yq
 - [Deeply data structures](https://mikefarah.gitbook.io/yq/operators/traverse-read)
 - [Sort keys](https://mikefarah.gitbook.io/yq/operators/sort-keys)
 - Manipulate yaml [comments](https://mikefarah.gitbook.io/yq/operators/comment-operators), [styling](https://mikefarah.gitbook.io/yq/operators/style), [tags](https://mikefarah.gitbook.io/yq/operators/tag) and [anchors and aliases](https://mikefarah.gitbook.io/yq/operators/anchor-and-alias-operators).
-- [Update inplace](https://mikefarah.gitbook.io/yq/v/v4.x/commands/evaluate#flags)
+- [Update in place](https://mikefarah.gitbook.io/yq/v/v4.x/commands/evaluate#flags)
 - [Complex expressions to select and update](https://mikefarah.gitbook.io/yq/operators/select#select-and-update-matching-values-in-map)
 - Keeps yaml formatting and comments when updating (though there are issues with whitespace)
 - [Decode/Encode base64 data](https://mikefarah.gitbook.io/yq/operators/encode-decode)
@@ -337,7 +337,7 @@ Examples:
 # yq defaults to 'eval' command if no command is specified. See "yq eval --help" for more examples.
 yq '.stuff' < myfile.yml # outputs the data at the "stuff" node from "myfile.yml"
 
-yq -i '.stuff = "foo"' myfile.yml # update myfile.yml inplace
+yq -i '.stuff = "foo"' myfile.yml # update myfile.yml in place
 
 
 Available Commands:
@@ -354,7 +354,7 @@ Flags:
       --header-preprocess        Slurp any header comments and separators before processing expression. (default true)
   -h, --help                     help for yq
   -I, --indent int               sets indent level for output (default 2)
-  -i, --inplace                  update the file inplace of first file given.
+  -i, --inplace                  update the file in place of first file given.
   -p, --input-format string      [yaml|y|xml|x]  parse format for input. Note that json is a subset of yaml. (default "yaml")
   -M, --no-colors                force print with no colors
   -N, --no-doc                   Don't print document separators (---)
@@ -3,25 +3,25 @@
 testWriteInPlacePipeIn() {
   result=$(./yq e -i -n '.a' 2>&1)
   assertEquals 1 $?
-  assertEquals "Error: write inplace flag only applicable when giving an expression and at least one file" "$result"
+  assertEquals "Error: write in place flag only applicable when giving an expression and at least one file" "$result"
 }
 
 testWriteInPlacePipeInEvalall() {
   result=$(./yq ea -i -n '.a' 2>&1)
   assertEquals 1 $?
-  assertEquals "Error: write inplace flag only applicable when giving an expression and at least one file" "$result"
+  assertEquals "Error: write in place flag only applicable when giving an expression and at least one file" "$result"
 }
 
 testWriteInPlaceWithSplit() {
   result=$(./yq e -s "cat" -i '.a = "thing"' test.yml 2>&1)
   assertEquals 1 $?
-  assertEquals "Error: write inplace cannot be used with split file" "$result"
+  assertEquals "Error: write in place cannot be used with split file" "$result"
 }
 
 testWriteInPlaceWithSplitEvalAll() {
   result=$(./yq ea -s "cat" -i '.a = "thing"' test.yml 2>&1)
   assertEquals 1 $?
-  assertEquals "Error: write inplace cannot be used with split file" "$result"
+  assertEquals "Error: write in place cannot be used with split file" "$result"
 }
 
 testNullWithFiles() {
@@ -143,7 +143,7 @@ testBasicCatWithFilesNoDash() {
 }
 
 # when the nullinput flag is used
-# dont automatically read STDIN (this breaks github actions)
+# don't automatically read STDIN (this breaks github actions)
 testBasicCreateFileGithubAction() {
   cat /dev/null | ./yq -n ".a = 123" > test.yml
 }
@@ -302,7 +302,7 @@ testBasicExitStatusNoEval() {
   assertEquals 1 "$?"
 }
 
-testBasicExtractFieldWithSeperator() {
+testBasicExtractFieldWithSeparator() {
   cat >test.yml <<EOL
 ---
 name: chart-name
@@ -312,7 +312,7 @@ EOL
   assertEquals "chart-name" "$X"
 }
 
-testBasicExtractMultipleFieldWithSeperator() {
+testBasicExtractMultipleFieldWithSeparator() {
   cat >test.yml <<EOL
 ---
 name: chart-name
@@ -11,7 +11,7 @@ a: test
 EOL
 }
 
-testLeadingSeperatorWithDoc() {
+testLeadingSeparatorWithDoc() {
   cat >test.yml <<EOL
 # hi peeps
 # cool
@@ -35,7 +35,7 @@ EOM
 }
 
 
-testLeadingSeperatorWithNewlinesNewDoc() {
+testLeadingSeparatorWithNewlinesNewDoc() {
   cat >test.yml <<EOL
 # hi peeps
 # cool
@@ -62,7 +62,7 @@ EOM
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorWithNewlinesMoreComments() {
+testLeadingSeparatorWithNewlinesMoreComments() {
   cat >test.yml <<EOL
 # hi peeps
 # cool
@@ -92,7 +92,7 @@ EOM
 }
 
 
-testLeadingSeperatorWithDirective() {
+testLeadingSeparatorWithDirective() {
   cat >test.yml <<EOL
 %YAML 1.1
 ---
@@ -110,18 +110,18 @@ EOM
 }
 
 
-testLeadingSeperatorPipeIntoEvalSeq() {
+testLeadingSeparatorPipeIntoEvalSeq() {
   X=$(./yq e - < test.yml)
   expected=$(cat test.yml)
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorExtractField() {
+testLeadingSeparatorExtractField() {
   X=$(./yq e '.a' - < test.yml)
   assertEquals "test" "$X"
 }
 
-testLeadingSeperatorExtractFieldWithCommentsAfterSep() {
+testLeadingSeparatorExtractFieldWithCommentsAfterSep() {
   cat >test.yml <<EOL
 ---
 # hi peeps
@@ -132,7 +132,7 @@ EOL
   assertEquals "test" "$X"
 }
 
-testLeadingSeperatorExtractFieldWithCommentsBeforeSep() {
+testLeadingSeparatorExtractFieldWithCommentsBeforeSep() {
   cat >test.yml <<EOL
 # hi peeps
 # cool
@@ -144,7 +144,7 @@ EOL
 }
 
 
-testLeadingSeperatorExtractFieldMultiDoc() {
+testLeadingSeparatorExtractFieldMultiDoc() {
   cat >test.yml <<EOL
 ---
 a: test
@@ -161,7 +161,7 @@ EOM
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorExtractFieldMultiDocWithComments() {
+testLeadingSeparatorExtractFieldMultiDocWithComments() {
   cat >test.yml <<EOL
 # here
 ---
@@ -184,26 +184,26 @@ EOM
 }
 
 
-testLeadingSeperatorEvalSeq() {
+testLeadingSeparatorEvalSeq() {
   X=$(./yq e test.yml)
   expected=$(cat test.yml)
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorPipeIntoEvalAll() {
+testLeadingSeparatorPipeIntoEvalAll() {
   X=$(./yq ea - < test.yml)
   expected=$(cat test.yml)
   assertEquals "$expected" "$X"
 }
 
 
-testLeadingSeperatorEvalAll() {
+testLeadingSeparatorEvalAll() {
   X=$(./yq ea test.yml)
   expected=$(cat test.yml)
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorMultiDocEvalSimple() {
+testLeadingSeparatorMultiDocEvalSimple() {
   read -r -d '' expected << EOM
 ---
 a: test
@@ -217,7 +217,7 @@ EOM
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorMultiDocInOneFile() {
+testLeadingSeparatorMultiDocInOneFile() {
   cat >test.yml <<EOL
 ---
 # hi peeps
@@ -231,7 +231,7 @@ EOL
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorMultiDocInOneFileEvalAll() {
+testLeadingSeparatorMultiDocInOneFileEvalAll() {
   cat >test.yml <<EOL
 ---
 # hi peeps
@@ -245,7 +245,7 @@ EOL
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorMultiDocEvalComments() {
+testLeadingSeparatorMultiDocEvalComments() {
   cat >test.yml <<EOL
 # hi peeps
 # cool
@@ -273,7 +273,7 @@ EOM
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorMultiDocEvalCommentsTrailingSep() {
+testLeadingSeparatorMultiDocEvalCommentsTrailingSep() {
   cat >test.yml <<EOL
 # hi peeps
 # cool
@@ -305,7 +305,7 @@ EOM
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorMultiMultiDocEvalCommentsTrailingSep() {
+testLeadingSeparatorMultiMultiDocEvalCommentsTrailingSep() {
   cat >test.yml <<EOL
 # hi peeps
 # cool
@@ -345,7 +345,7 @@ EOM
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorMultiDocEvalCommentsLeadingSep() {
+testLeadingSeparatorMultiDocEvalCommentsLeadingSep() {
   cat >test.yml <<EOL
 ---
 # hi peeps
@@ -405,7 +405,7 @@ EOM
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorMultiDocEvalCommentsStripComments() {
+testLeadingSeparatorMultiDocEvalCommentsStripComments() {
   cat >test.yml <<EOL
 ---
 # hi peeps
@@ -428,7 +428,7 @@ EOM
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorMultiDocEvalCommentsLeadingSepNoDocFlag() {
+testLeadingSeparatorMultiDocEvalCommentsLeadingSepNoDocFlag() {
   cat >test.yml <<EOL
 ---
 # hi peeps
@@ -454,7 +454,7 @@ EOM
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorMultiDocEvalJsonFlag() {
+testLeadingSeparatorMultiDocEvalJsonFlag() {
   cat >test.yml <<EOL
 ---
 # hi peeps
@@ -483,7 +483,7 @@ EOM
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorMultiDocEvalAllJsonFlag() {
+testLeadingSeparatorMultiDocEvalAllJsonFlag() {
   cat >test.yml <<EOL
 ---
 # hi peeps
@@ -512,7 +512,7 @@ EOM
   assertEquals "$expected" "$X"
 }
 
-testLeadingSeperatorMultiDocEvalAll() {
+testLeadingSeparatorMultiDocEvalAll() {
   read -r -d '' expected << EOM
 ---
 a: test
@@ -13,7 +13,7 @@ func createEvaluateAllCommand() *cobra.Command {
 		Aliases: []string{"ea"},
 		Short:   "Loads _all_ yaml documents of _all_ yaml files and runs expression once",
 		Example: `
-# Merge f2.yml into f1.yml (inplace)
+# Merge f2.yml into f1.yml (in place)
 yq eval-all --inplace 'select(fileIndex == 0) * select(fileIndex == 1)' f1.yml f2.yml
 ## the same command and expression using shortened names:
 yq ea -i 'select(fi == 0) * select(fi == 1)' f1.yml f2.yml
@@ -28,7 +28,7 @@ cat file2.yml | yq e '.a.b' file1.yml - file3.yml
 ## Note that editing an empty file does not work.
 yq e -n '.a.b.c = "cat"'
 
-# Update a file inplace
+# Update a file in place
 yq e '.a.b = "cool"' -i file.yaml
 `,
 		Long: `yq is a portable command-line YAML processor (https://github.com/mikefarah/yq/)
@@ -88,7 +88,7 @@ yq -P sample.json
 
 	rootCmd.PersistentFlags().IntVarP(&indent, "indent", "I", 2, "sets indent level for output")
 	rootCmd.Flags().BoolVarP(&version, "version", "V", false, "Print version information and quit")
-	rootCmd.PersistentFlags().BoolVarP(&writeInplace, "inplace", "i", false, "update the file inplace of first file given.")
+	rootCmd.PersistentFlags().BoolVarP(&writeInplace, "inplace", "i", false, "update the file in place of first file given.")
 	rootCmd.PersistentFlags().VarP(unwrapScalarFlag, "unwrapScalar", "r", "unwrap scalar, print the value with no quotes, colors or comments. Defaults to true for yaml")
 	rootCmd.PersistentFlags().Lookup("unwrapScalar").NoOptDefVal = "true"
 	rootCmd.PersistentFlags().BoolVarP(&nulSepOutput, "nul-output", "0", false, "Use NUL char to separate values. If unwrap scalar is also set, fail if unwrapped scalar contains NUL char.")
@@ -8,12 +8,12 @@ import (
 
 type boolFlag interface {
 	pflag.Value
-	IsExplicitySet() bool
+	IsExplicitlySet() bool
 	IsSet() bool
 }
 
 type unwrapScalarFlagStrc struct {
-	explicitySet bool
+	explicitlySet bool
 	value bool
 }
 
@@ -21,8 +21,8 @@ func newUnwrapFlag() boolFlag {
 	return &unwrapScalarFlagStrc{value: true}
 }
 
-func (f *unwrapScalarFlagStrc) IsExplicitySet() bool {
-	return f.explicitySet
+func (f *unwrapScalarFlagStrc) IsExplicitlySet() bool {
+	return f.explicitlySet
 }
 
 func (f *unwrapScalarFlagStrc) IsSet() bool {
@@ -37,7 +37,7 @@ func (f *unwrapScalarFlagStrc) Set(value string) error {
 
 	v, err := strconv.ParseBool(value)
 	f.value = v
-	f.explicitySet = true
+	f.explicitlySet = true
 	return err
 }
 
@@ -42,7 +42,7 @@ func initCommand(cmd *cobra.Command, args []string) (string, []string, error) {
 	}
 
 	if writeInplace && (len(args) == 0 || args[0] == "-") {
-		return "", nil, fmt.Errorf("write inplace flag only applicable when giving an expression and at least one file")
+		return "", nil, fmt.Errorf("write in place flag only applicable when giving an expression and at least one file")
 	}
 
 	if frontMatter != "" && len(args) == 0 {
@@ -50,7 +50,7 @@ func initCommand(cmd *cobra.Command, args []string) (string, []string, error) {
 	}
 
 	if writeInplace && splitFileExp != "" {
-		return "", nil, fmt.Errorf("write inplace cannot be used with split file")
+		return "", nil, fmt.Errorf("write in place cannot be used with split file")
 	}
 
 	if nullInput && len(args) > 0 {
@@ -104,7 +104,7 @@ func initCommand(cmd *cobra.Command, args []string) (string, []string, error) {
 		outputFormatType == yqlib.PropsOutputFormat {
 		unwrapScalar = true
 	}
-	if unwrapScalarFlag.IsExplicitySet() {
+	if unwrapScalarFlag.IsExplicitlySet() {
 		unwrapScalar = unwrapScalarFlag.IsSet()
 	}
 
cspell.config.yaml (new file, 14 lines)
@@ -0,0 +1,14 @@
+---
+$schema: https://raw.githubusercontent.com/streetsidesoftware/cspell/main/cspell.schema.json
+version: '0.2'
+language: en-GB
+dictionaryDefinitions:
+  - name: project-words
+    path: './project-words.txt'
+    addWords: true
+dictionaries:
+  - project-words
+ignorePaths:
+  - 'vendor'
+  - 'bin'
+  - '/project-words.txt'
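This configuration layers a project-local dictionary (project-words.txt, added later in this commit) on top of cspell's defaults and skips vendored and generated paths; addWords: true marks that file as the place where newly accepted words should be recorded. A minimal sketch of exercising the config directly, mirroring the invocation in the new scripts/spelling.sh; the second command's --words-only/--unique flags are standard cspell options shown for illustration only and are not used by this commit:

```bash
# Same invocation as scripts/spelling.sh: check shell and Go sources using
# cspell.config.yaml and the project-words dictionary it declares.
npx cspell --no-progress "**/*.{sh,go}"

# Illustrative only: list each distinct unknown word so it can be reviewed
# and, if legitimate, appended to project-words.txt.
npx cspell --no-progress --words-only --unique "**/*.{sh,go}"
```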
@@ -49,7 +49,7 @@ func (dec *yamlDecoder) processReadStream(reader *bufio.Reader) (io.Reader, stri
 		}
 	} else if string(peekBytes) == "---" {
 		_, err := reader.ReadString('\n')
-		sb.WriteString("$yqDocSeperator$\n")
+		sb.WriteString("$yqDocSeparator$\n")
 		if errors.Is(err, io.EOF) {
 			return reader, sb.String(), nil
 		} else if err != nil {
@@ -26,7 +26,7 @@ will output
 bar: 100
 ```
 
-## Group by field, with nuls
+## Group by field, with nulls
 Given a sample.yml file of:
 ```yaml
 - cat: dog
@@ -26,7 +26,7 @@ yq '.a.b[0].c' file.yaml
 cat file.yaml | yq '.a.b[0].c'
 ```
 
-## Update a yaml file, inplace
+## Update a yaml file, in place
 ```bash
 yq -i '.a.b[0].c = "cool"' file.yaml
 ```
@@ -34,7 +34,7 @@ Given a sample.yml file of:
 ascii_=_symbols: replaced with _
 "ascii_ _controls": dropped (this example uses \t)
 nonascii_א_characters: dropped
-effrot_expeñded_tò_preserve_accented_latin_letters: moderate (via unicode NFKD)
+effort_expeñded_tò_preserve_accented_latin_letters: moderate (via unicode NFKD)
 
 ```
 then
@@ -46,7 +46,7 @@ will output
 ascii___symbols='replaced with _'
 ascii__controls='dropped (this example uses \t)'
 nonascii__characters=dropped
-effrot_expended_to_preserve_accented_latin_letters='moderate (via unicode NFKD)'
+effort_expended_to_preserve_accented_latin_letters='moderate (via unicode NFKD)'
 ```
 
 ## Encode shell variables: empty values, arrays and maps
@@ -100,7 +100,7 @@ func (le *luaEncoder) encodeString(writer io.Writer, node *yaml.Node) error {
 	case yaml.SingleQuotedStyle:
 		quote = "'"
 
-		// falltrough to regular ol' string
+		// fallthrough to regular ol' string
 	}
 	return writeString(writer, quote+le.escape.Replace(node.Value)+quote)
 }
@@ -37,7 +37,7 @@ func (pe *propertiesEncoder) PrintLeadingContent(writer io.Writer, content strin
 		if errReading != nil && !errors.Is(errReading, io.EOF) {
 			return errReading
 		}
-		if strings.Contains(readline, "$yqDocSeperator$") {
+		if strings.Contains(readline, "$yqDocSeparator$") {
 
 			if err := pe.PrintDocumentSeparator(writer); err != nil {
 				return err
@@ -47,7 +47,7 @@ func (ye *yamlEncoder) PrintLeadingContent(writer io.Writer, content string) err
 		if errReading != nil && !errors.Is(errReading, io.EOF) {
 			return errReading
 		}
-		if strings.Contains(readline, "$yqDocSeperator$") {
+		if strings.Contains(readline, "$yqDocSeparator$") {
 
 			if err := ye.PrintDocumentSeparator(writer); err != nil {
 				return err
@@ -26,8 +26,8 @@ var participleYqRules = []*participleYqRule{
 	{"RecursiveDecent", `\.\.`, recursiveDecentOpToken(false), 0},
 
 	{"GetVariable", `\$[a-zA-Z_\-0-9]+`, getVariableOpToken(), 0},
-	{"AsignAsVariable", `as`, opTokenWithPrefs(assignVariableOpType, nil, assignVarPreferences{}), 0},
-	{"AsignRefVariable", `ref`, opTokenWithPrefs(assignVariableOpType, nil, assignVarPreferences{IsReference: true}), 0},
+	{"AssignAsVariable", `as`, opTokenWithPrefs(assignVariableOpType, nil, assignVarPreferences{}), 0},
+	{"AssignRefVariable", `ref`, opTokenWithPrefs(assignVariableOpType, nil, assignVarPreferences{IsReference: true}), 0},
 
 	{"CreateMap", `:\s*`, opToken(createMapOpType), 0},
 	simpleOp("length", lengthOpType),
@@ -77,8 +77,8 @@ func assignCommentsOperator(d *dataTreeNavigator, context Context, expressionNod
 
 func getCommentsOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
 	preferences := expressionNode.Operation.Preferences.(commentOpPreferences)
-	var startCommentCharaterRegExp = regexp.MustCompile(`^# `)
-	var subsequentCommentCharaterRegExp = regexp.MustCompile(`\n# `)
+	var startCommentCharacterRegExp = regexp.MustCompile(`^# `)
+	var subsequentCommentCharacterRegExp = regexp.MustCompile(`\n# `)
 
 	log.Debugf("GetComments operator!")
 	var results = list.New()
@@ -112,8 +112,8 @@ func getCommentsOperator(d *dataTreeNavigator, context Context, expressionNode *
 		} else if preferences.FootComment {
 			comment = candidate.Node.FootComment
 		}
-		comment = startCommentCharaterRegExp.ReplaceAllString(comment, "")
-		comment = subsequentCommentCharaterRegExp.ReplaceAllString(comment, "\n")
+		comment = startCommentCharacterRegExp.ReplaceAllString(comment, "")
+		comment = subsequentCommentCharacterRegExp.ReplaceAllString(comment, "\n")
 
 		node := &yaml.Node{Kind: yaml.ScalarNode, Value: comment, Tag: "!!str"}
 		result := candidate.CreateReplacement(node)
@@ -10,7 +10,7 @@ import (
 	"gopkg.in/yaml.v3"
 )
 
-func getStringParamter(parameterName string, d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (string, error) {
+func getStringParameter(parameterName string, d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (string, error) {
 	result, err := d.GetMatchingNodes(context.ReadOnlyClone(), expressionNode)
 
 	if err != nil {
@@ -24,7 +24,7 @@ func getStringParamter(parameterName string, d *dataTreeNavigator, context Conte
 
 func withDateTimeFormat(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
 	if expressionNode.RHS.Operation.OperationType == blockOpType || expressionNode.RHS.Operation.OperationType == unionOpType {
-		layout, err := getStringParamter("layout", d, context, expressionNode.RHS.LHS)
+		layout, err := getStringParameter("layout", d, context, expressionNode.RHS.LHS)
 		if err != nil {
 			return Context{}, fmt.Errorf("could not get date time format: %w", err)
 		}
@@ -63,7 +63,7 @@ func parseDateTime(layout string, datestring string) (time.Time, error) {
 }
 
 func formatDateTime(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
-	format, err := getStringParamter("format", d, context, expressionNode.RHS)
+	format, err := getStringParameter("format", d, context, expressionNode.RHS)
 	layout := context.GetDateTimeLayout()
 
 	if err != nil {
@@ -97,7 +97,7 @@ func formatDateTime(d *dataTreeNavigator, context Context, expressionNode *Expre
 }
 
 func tzOp(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
-	timezoneStr, err := getStringParamter("timezone", d, context, expressionNode.RHS)
+	timezoneStr, err := getStringParameter("timezone", d, context, expressionNode.RHS)
 	layout := context.GetDateTimeLayout()
 
 	if err != nil {
@@ -14,7 +14,7 @@ var groupByOperatorScenarios = []expressionScenario{
 		},
 	},
 	{
-		description: "Group by field, with nuls",
+		description: "Group by field, with nulls",
 		document:    `[{cat: dog}, {foo: 1, bar: 10}, {foo: 3, bar: 100}, {no: foo for you}, {foo: 1, bar: 1}]`,
 		expression:  `group_by(.foo)`,
 		expected: []string{
@@ -50,7 +50,7 @@ func keysOperator(d *dataTreeNavigator, context Context, expressionNode *Express
 		if node.Kind == yaml.MappingNode {
 			targetNode = getMapKeys(node)
 		} else if node.Kind == yaml.SequenceNode {
-			targetNode = getIndicies(node)
+			targetNode = getIndices(node)
 		} else {
 			return Context{}, fmt.Errorf("Cannot get keys of %v, keys only works for maps and arrays", node.Tag)
 		}
@@ -70,7 +70,7 @@ func getMapKeys(node *yaml.Node) *yaml.Node {
 	return &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq", Content: contents}
 }
 
-func getIndicies(node *yaml.Node) *yaml.Node {
+func getIndices(node *yaml.Node) *yaml.Node {
 	var contents = make([]*yaml.Node, len(node.Content))
 
 	for index := range node.Content {
@@ -76,7 +76,7 @@ func pickOperator(d *dataTreeNavigator, context Context, expressionNode *Express
 			}
 
 		} else {
-			return Context{}, fmt.Errorf("cannot pick indicies from type %v (%v)", node.Tag, candidate.GetNicePath())
+			return Context{}, fmt.Errorf("cannot pick indices from type %v (%v)", node.Tag, candidate.GetNicePath())
 		}
 
 		results.PushBack(candidate.CreateReplacementWithDocWrappers(replacement))
@@ -202,7 +202,7 @@ func traverseArrayWithIndices(candidate *CandidateNode, indices []*yaml.Node, pr
 	contentLength := len(node.Content)
 	for contentLength <= index {
 		if contentLength == 0 {
-			// default to nice yaml formating
+			// default to nice yaml formatting
 			node.Style = 0
 		}
 
@@ -148,7 +148,7 @@ func (p *resultsPrinter) PrintResults(matchingNodes *list.List) error {
 			return errorWriting
 		}
 
-		commentsStartWithSepExp := regexp.MustCompile(`^\$yqDocSeperator\$`)
+		commentsStartWithSepExp := regexp.MustCompile(`^\$yqDocSeparator\$`)
 		commentStartsWithSeparator := commentsStartWithSepExp.MatchString(mappedDoc.LeadingContent)
 
 		if (p.previousDocIndex != mappedDoc.Document || p.previousFileIndex != mappedDoc.FileIndex) && !commentStartsWithSeparator {
@@ -82,15 +82,15 @@ func TestPrinterMultipleDocsInSequenceWithLeadingContent(t *testing.T) {
 	}
 
 	el := inputs.Front()
-	el.Value.(*CandidateNode).LeadingContent = "# go cats\n$yqDocSeperator$\n"
+	el.Value.(*CandidateNode).LeadingContent = "# go cats\n$yqDocSeparator$\n"
 	sample1 := nodeToList(el.Value.(*CandidateNode))
 
 	el = el.Next()
-	el.Value.(*CandidateNode).LeadingContent = "$yqDocSeperator$\n"
+	el.Value.(*CandidateNode).LeadingContent = "$yqDocSeparator$\n"
 	sample2 := nodeToList(el.Value.(*CandidateNode))
 
 	el = el.Next()
-	el.Value.(*CandidateNode).LeadingContent = "$yqDocSeperator$\n# cool\n"
+	el.Value.(*CandidateNode).LeadingContent = "$yqDocSeparator$\n# cool\n"
 	sample3 := nodeToList(el.Value.(*CandidateNode))
 
 	err = printer.PrintResults(sample1)
@@ -174,21 +174,21 @@ func TestPrinterMultipleFilesInSequenceWithLeadingContent(t *testing.T) {
 	elNode := el.Value.(*CandidateNode)
 	elNode.Document = 0
 	elNode.FileIndex = 0
-	elNode.LeadingContent = "# go cats\n$yqDocSeperator$\n"
+	elNode.LeadingContent = "# go cats\n$yqDocSeparator$\n"
 	sample1 := nodeToList(elNode)
 
 	el = el.Next()
 	elNode = el.Value.(*CandidateNode)
 	elNode.Document = 0
 	elNode.FileIndex = 1
-	elNode.LeadingContent = "$yqDocSeperator$\n"
+	elNode.LeadingContent = "$yqDocSeparator$\n"
 	sample2 := nodeToList(elNode)
 
 	el = el.Next()
 	elNode = el.Value.(*CandidateNode)
 	elNode.Document = 0
 	elNode.FileIndex = 2
-	elNode.LeadingContent = "$yqDocSeperator$\n# cool\n"
+	elNode.LeadingContent = "$yqDocSeparator$\n# cool\n"
 	sample3 := nodeToList(elNode)
 
 	err = printer.PrintResults(sample1)
@@ -239,7 +239,7 @@ func TestPrinterMultipleDocsInSinglePrintWithLeadingDoc(t *testing.T) {
 		panic(err)
 	}
 
-	inputs.Front().Value.(*CandidateNode).LeadingContent = "# go cats\n$yqDocSeperator$\n"
+	inputs.Front().Value.(*CandidateNode).LeadingContent = "# go cats\n$yqDocSeparator$\n"
 
 	err = printer.PrintResults(inputs)
 	if err != nil {
@@ -267,7 +267,7 @@ func TestPrinterMultipleDocsInSinglePrintWithLeadingDocTrailing(t *testing.T) {
 	if err != nil {
 		panic(err)
 	}
-	inputs.Front().Value.(*CandidateNode).LeadingContent = "$yqDocSeperator$\n"
+	inputs.Front().Value.(*CandidateNode).LeadingContent = "$yqDocSeparator$\n"
 	err = printer.PrintResults(inputs)
 	if err != nil {
 		panic(err)
@@ -313,7 +313,7 @@ func TestPrinterMultipleDocsJson(t *testing.T) {
 	var output bytes.Buffer
 	var writer = bufio.NewWriter(&output)
 	// note printDocSeparators is true, it should still not print document separators
-	// when outputing JSON.
+	// when outputting JSON.
 	encoder := NewJSONEncoder(0, false, false)
 	if encoder == nil {
 		t.Skipf("no support for %s output format", "json")
@@ -365,7 +365,7 @@ func TestPrinterNulSeparatorWithJson(t *testing.T) {
 	var output bytes.Buffer
 	var writer = bufio.NewWriter(&output)
 	// note printDocSeparators is true, it should still not print document separators
-	// when outputing JSON.
+	// when outputting JSON.
 	encoder := NewJSONEncoder(0, false, false)
 	if encoder == nil {
 		t.Skipf("no support for %s output format", "json")
@@ -35,12 +35,12 @@ var shellVariablesScenarios = []formatScenario{
 			"ascii_=_symbols: replaced with _" + "\n" +
 			"\"ascii_\t_controls\": dropped (this example uses \\t)" + "\n" +
 			"nonascii_\u05d0_characters: dropped" + "\n" +
-			"effrot_expe\u00f1ded_t\u00f2_preserve_accented_latin_letters: moderate (via unicode NFKD)" + "\n",
+			"effort_expe\u00f1ded_t\u00f2_preserve_accented_latin_letters: moderate (via unicode NFKD)" + "\n",
 		expected: "" +
 			"ascii___symbols='replaced with _'" + "\n" +
 			"ascii__controls='dropped (this example uses \\t)'" + "\n" +
 			"nonascii__characters=dropped" + "\n" +
-			"effrot_expended_to_preserve_accented_latin_letters='moderate (via unicode NFKD)'" + "\n",
+			"effort_expended_to_preserve_accented_latin_letters='moderate (via unicode NFKD)'" + "\n",
 	},
 	{
 		description: "Encode shell variables: empty values, arrays and maps",
@@ -65,10 +65,10 @@ func TestShellVariableScenarios(t *testing.T) {
 	for i, s := range shellVariablesScenarios {
 		genericScenarios[i] = s
 	}
-	documentScenarios(t, "usage", "shellvariables", genericScenarios, documentShellVaraibleScenario)
+	documentScenarios(t, "usage", "shellvariables", genericScenarios, documentShellVariableScenario)
 }
 
-func documentShellVaraibleScenario(_ *testing.T, w *bufio.Writer, i interface{}) {
+func documentShellVariableScenario(_ *testing.T, w *bufio.Writer, i interface{}) {
 	s := i.(formatScenario)
 	if s.skipDoc {
 		return
@@ -44,7 +44,7 @@ func (w *writeInPlaceHandlerImpl) CreateTempFile() (*os.File, error) {
 }
 
 func (w *writeInPlaceHandlerImpl) FinishWriteInPlace(evaluatedSuccessfully bool) error {
-	log.Debug("Going to write-inplace, evaluatedSuccessfully=%v, target=%v", evaluatedSuccessfully, w.inputFilename)
+	log.Debug("Going to write in place, evaluatedSuccessfully=%v, target=%v", evaluatedSuccessfully, w.inputFilename)
 	safelyCloseFile(w.tempFile)
 	if evaluatedSuccessfully {
 		log.Debug("Moving temp file to target")
@@ -671,7 +671,7 @@ func documentXMLScenario(t *testing.T, w *bufio.Writer, i interface{}) {
 	case "decode-raw-token-off":
 		documentXMLDecodeKeepNsRawTokenScenario(w, s)
 	case "roundtrip-skip-directives":
-		documentXMLSkipDirectrivesScenario(w, s)
+		documentXMLSkipDirectivesScenario(w, s)
 
 	default:
 		panic(fmt.Sprintf("unhandled scenario type %q", s.scenarioType))
@@ -787,7 +787,7 @@ func documentXMLRoundTripScenario(w *bufio.Writer, s formatScenario) {
 	writeOrPanic(w, fmt.Sprintf("```xml\n%v```\n\n", mustProcessFormatScenario(s, NewXMLDecoder(ConfiguredXMLPreferences), NewXMLEncoder(2, ConfiguredXMLPreferences))))
 }
 
-func documentXMLSkipDirectrivesScenario(w *bufio.Writer, s formatScenario) {
+func documentXMLSkipDirectivesScenario(w *bufio.Writer, s formatScenario) {
 	writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))
 
 	if s.subdescription != "" {
project-words.txt (new file, 253 lines)
@@ -0,0 +1,253 @@
+abxbbxdbxebxczzx
+abxbbxdbxebxczzy
+accum
+Accum
+adithyasunil
+AEDT
+água
+ÁGUA
+alecthomas
+appleapple
+Astuff
+autocreating
+autoparse
+AWST
+axbxcxdxe
+axbxcxdxexxx
+bananabanana
+barp
+bitnami
+blarp
+blddir
+Bobo
+BODMAS
+bonapite
+Brien
+Bstuff
+BUILDKIT
+buildpackage
+catmeow
+CATYPE
+CBVVE
+chardata
+chillum
+choco
+chomper
+cleanup
+cmlu
+colorise
+colors
+compinit
+coolioo
+coverprofile
+createmap
+csvd
+CSVUTF
+currentlabel
+cygpath
+czvf
+datestring
+datetime
+Datetime
+datetimes
+DEBEMAIL
+debhelper
+Debugf
+debuild
+delish
+delpaths
+DELPATHS
+devorbitus
+devscripts
+dimchansky
+Dont
+dput
+elliotchance
+endhint
+endofname
+Entriesfrom
+envsubst
+errorlevel
+Escandón
+Evalall
+fakefilename
+fakeroot
+Farah
+fatih
+Fifi
+filebytes
+Fileish
+foobar
+foobaz
+foof
+frood
+fullpath
+gitbook
+githubactions
+gnupg
+goccy
+gofmt
+gogo
+golangci
+GOMODCACHE
+GOPATH
+gosec
+gota
+goversion
+GOVERSION
+haha
+headcommentwas
+hellno
+herbygillot
+hexdump
+Hoang
+hostpath
+hotdog
+howdy
+incase
+inlinetables
+inplace
+ints
+ireduce
+iwatch
+jinzhu
+jq's
+jsond
+keygrip
+Keygrip
+KEYGRIP
+KEYID
+keyvalue
+kwak
+lalilu
+ldflags
+LDFLAGS
+lexer
+Lexer
+libdistro
+lindex
+linecomment
+magiconair
+mapvalues
+Mier
+mikefarah
+minideb
+minishift
+mipsle
+mitchellh
+mktemp
+multidoc
+multimaint
+multine
+myenv
+myenvnonexisting
+myfile
+myformat
+ndjson
+NDJSON
+NFKD
+nixpkgs
+nojson
+nonascii
+nonempty
+noninteractive
+Nonquoting
+nosec
+notoml
+noxml
+nullinput
+onea
+Oneshot
+opencollect
+opstack
+orderedmap
+orignal
+osarch
+overridign
+pacman
+Padder
+pandoc
+parsechangelog
+pcsv
+pelletier
+pflag
+prechecking
+Prerelease
+proc
+propsd
+qylib
+readline
+realnames
+realpath
+repr
+rhash
+rindex
+risentveber
+rmescandon
+Rosey
+roundtrip
+Roundtrip
+roundtripping
+runningvms
+sadface
+selfupdate
+setpath
+sharedfolder
+Sharedfolder
+shellvariables
+shellvars
+shortfunc
+shortpipe
+shunit
+Sidenote
+snapcraft
+somevalue
+splt
+squeek
+srcdir
+stackoverflow
+stiched
+Strc
+strenv
+strload
+stylig
+subarray
+subchild
+subdescription
+submatch
+submatches
+SUBSTR
+tempfile
+tfstate
+Tfstate
+thar
+timezone
+Timezone
+timezones
+Timezones
+tojson
+Tokenvalue
+traver
+tsvd
+Tuan
+tzdata
+Uhoh
+updateassign
+urid
+utfbom
+Warningf
+Wazowski
+webi
+Webi
+whereever
+winget
+withdots
+wizz
+woop
+workdir
+Writable
+xmld
+xyzzy
+yamld
+yqlib
+zabbix
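When the checker flags a term that is intentional (a contributor handle, a yq operator name, deliberate test gibberish like the entries above), the expected fix is to extend this list rather than rewrite the source. A small sketch of that maintenance step, assuming the list stays in its current case-insensitive sorted order; the word newterm and the sort step are illustrative, not part of the commit:

```bash
# Accept a new project-specific term and keep the list sorted case-insensitively.
echo "newterm" >> project-words.txt
sort -f -o project-words.txt project-words.txt
```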
@@ -26,7 +26,7 @@ if [ "$1" == "" ]; then
 fi
 
 if [ "$2" != "" ]; then
-  # so we dont match x.tar.gz when 'x' is given
+  # so we don't match x.tar.gz when 'x' is given
   file="$2\s"
 else
   file=""
@@ -47,7 +47,7 @@ fi
 
 grepMatch=$(grep -m 1 -n "$1" checksums_hashes_order)
 if [ "$grepMatch" == "" ]; then
-  echo "Could not find hash algorith '$1' in checksums_hashes_order"
+  echo "Could not find hash algorithm '$1' in checksums_hashes_order"
   exit 1
 fi
 
@@ -1,7 +1,7 @@
 #! /bin/bash
 set -e
 
-# note that this reqires pandoc to be installed.
+# note that this requires pandoc to be installed.
 
 cat ./pkg/yqlib/doc/operators/headers/Main.md > man.md
 printf "\n# HOW IT WORKS\n" >> man.md
@@ -1,7 +1,7 @@
 #! /bin/bash
 set -e
 
-# note that this reqires pandoc to be installed.
+# note that this requires pandoc to be installed.
 
 pandoc \
   --variable=title:"YQ" \
@@ -30,7 +30,7 @@ show_help() {
   echo "                          distribution is considered"
   echo "  --goversion VERSION     The version of Golang to use. Default to $GOVERSION"
   echo "  -k, --sign-key KEYID    Sign the package sources with the provided gpg key id (long format). When not provided this"
-  echo "                          paramater, the generated sources are not signed"
+  echo "                          parameter, the generated sources are not signed"
   echo "  -s, --sign              Sign the package sources with a gpg key of the maintainer"
   echo "  -m, --maintainer WHO    The maintainer used as author of the changelog. git.name and git.email (see git config) is"
   echo "                          the considered format"
scripts/spelling.sh (new executable file, 3 lines)
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+npx cspell --no-progress "**/*.{sh,go}"