mirror of
https://github.com/mikefarah/yq.git
synced 2026-03-10 15:54:26 +00:00
Compare commits
75 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
16e4df2304 | ||
|
|
79a92d0478 | ||
|
|
88a31ae8c6 | ||
|
|
5a7e72a743 | ||
|
|
562531d936 | ||
|
|
2c471b6498 | ||
|
|
f4ef6ef3cf | ||
|
|
f49f2bd2d8 | ||
|
|
6ccc7b7797 | ||
|
|
b3e1fbb7d1 | ||
|
|
288ca2d114 | ||
|
|
eb04fa87af | ||
|
|
2be0094729 | ||
|
|
3c18d5b035 | ||
|
|
2dcc2293da | ||
|
|
eb4fde4ef8 | ||
|
|
06ea4cf62e | ||
|
|
37089d24af | ||
|
|
7cf88a0291 | ||
|
|
41adc1ad18 | ||
|
|
b4b96f2a68 | ||
|
|
2824d66a65 | ||
|
|
4bbffa9022 | ||
|
|
bdeedbd275 | ||
|
|
3d918acc2a | ||
|
|
01005cc8fd | ||
|
|
c4468165f2 | ||
|
|
e35d32a0b6 | ||
|
|
78192a915b | ||
|
|
c4f4e6d416 | ||
|
|
5f90039bdc | ||
|
|
c6fa371d8d | ||
|
|
3a27e39778 | ||
|
|
414a085563 | ||
|
|
542801926f | ||
|
|
1bcc44ff9b | ||
|
|
a6f1b02340 | ||
|
|
f98028c925 | ||
|
|
c6029376a5 | ||
|
|
23abf50fef | ||
|
|
64ec1f4aa7 | ||
|
|
4973c355e6 | ||
|
|
ecbdcada9f | ||
|
|
029ba68014 | ||
|
|
4a06cce376 | ||
|
|
37e48cea44 | ||
|
|
207bec6b29 | ||
|
|
7198d16575 | ||
|
|
5d6c2047cf | ||
|
|
7f60daad20 | ||
|
|
b7cbe59fd7 | ||
|
|
9fa353b123 | ||
|
|
c6ecad1546 | ||
|
|
56eb3655b8 | ||
|
|
1de4ec59f2 | ||
|
|
c132c32731 | ||
|
|
0914121d29 | ||
|
|
aa5134e645 | ||
|
|
4d620bfa26 | ||
|
|
b8d90fd574 | ||
|
|
c1b81f1a03 | ||
|
|
ea40e14fb1 | ||
|
|
b974d973ee | ||
|
|
66ec487792 | ||
|
|
161be10791 | ||
|
|
aa858520a8 | ||
|
|
ac2889c296 | ||
|
|
626624af7b | ||
|
|
b0d2522f80 | ||
|
|
2ee38e15b6 | ||
|
|
4e9d5e8e48 | ||
|
|
1338b521ff | ||
|
|
3a5323824f | ||
|
|
8780172b33 | ||
|
|
5f9bf8d241 |
4
.github/ISSUE_TEMPLATE/bug_report_v4.md
vendored
4
.github/ISSUE_TEMPLATE/bug_report_v4.md
vendored
@ -34,13 +34,13 @@ The command you ran:
|
||||
yq eval-all 'select(fileIndex==0) | .a.b.c' data1.yml data2.yml
|
||||
```
|
||||
|
||||
**Actual behavior**
|
||||
**Actual behaviour**
|
||||
|
||||
```yaml
|
||||
cat: meow
|
||||
```
|
||||
|
||||
**Expected behavior**
|
||||
**Expected behaviour**
|
||||
|
||||
```yaml
|
||||
this: should really work
|
||||
|
||||
1
.github/instructions/instructions.md
vendored
Normal file
1
.github/instructions/instructions.md
vendored
Normal file
@ -0,0 +1 @@
|
||||
When you find a bug - make sure to include a new test that exposes the bug, as well as the fix for the bug itself.
|
||||
6
.github/workflows/docker-release.yml
vendored
6
.github/workflows/docker-release.yml
vendored
@ -17,7 +17,7 @@ jobs:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
uses: docker/setup-qemu-action@v4
|
||||
with:
|
||||
platforms: all
|
||||
|
||||
@ -31,13 +31,13 @@ jobs:
|
||||
run: echo ${{ steps.buildx.outputs.platforms }} && docker version
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@v4
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@v4
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
|
||||
2
.github/workflows/go.yml
vendored
2
.github/workflows/go.yml
vendored
@ -11,7 +11,7 @@ jobs:
|
||||
steps:
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v5
|
||||
uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: '^1.20'
|
||||
id: go
|
||||
|
||||
2
.github/workflows/release.yml
vendored
2
.github/workflows/release.yml
vendored
@ -10,7 +10,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/setup-go@v5
|
||||
- uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: '^1.20'
|
||||
check-latest: true
|
||||
|
||||
6
.gitignore
vendored
6
.gitignore
vendored
@ -43,9 +43,11 @@ yq*.snap
|
||||
|
||||
test.yml
|
||||
test*.yml
|
||||
test*.tf
|
||||
test*.xml
|
||||
test*.toml
|
||||
test*.yaml
|
||||
*.kyaml
|
||||
test_dir1/
|
||||
test_dir2/
|
||||
0.yml
|
||||
@ -68,3 +70,7 @@ debian/files
|
||||
.vscode
|
||||
|
||||
yq3
|
||||
|
||||
# Golang
|
||||
.gomodcache/
|
||||
.gocache/
|
||||
|
||||
@ -14,6 +14,11 @@ linters:
|
||||
- unconvert
|
||||
- unparam
|
||||
settings:
|
||||
misspell:
|
||||
locale: UK
|
||||
ignore-rules:
|
||||
- color
|
||||
- colors
|
||||
depguard:
|
||||
rules:
|
||||
prevent_unmaintained_packages:
|
||||
|
||||
@ -39,7 +39,6 @@ builds:
|
||||
- openbsd_amd64
|
||||
- windows_386
|
||||
- windows_amd64
|
||||
- windows_arm
|
||||
- windows_arm64
|
||||
|
||||
no_unique_dist_dir: true
|
||||
|
||||
@ -11,7 +11,7 @@ appearance, race, religion, or sexual identity and orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment
|
||||
Examples of behaviour that contributes to creating a positive environment
|
||||
include:
|
||||
|
||||
* Using welcoming and inclusive language
|
||||
@ -20,7 +20,7 @@ include:
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
Examples of unacceptable behaviour by participants include:
|
||||
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
advances
|
||||
@ -34,13 +34,13 @@ Examples of unacceptable behavior by participants include:
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable
|
||||
behavior and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behavior.
|
||||
behaviour and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behaviour.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or
|
||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||
permanently any contributor for other behaviors that they deem inappropriate,
|
||||
permanently any contributor for other behaviours that they deem inappropriate,
|
||||
threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
@ -54,7 +54,7 @@ further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
Instances of abusive, harassing, or otherwise unacceptable behaviour may be
|
||||
reported by contacting the project team at mikefarah@gmail.com. All
|
||||
complaints will be reviewed and investigated and will result in a response that
|
||||
is deemed necessary and appropriate to the circumstances. The project team is
|
||||
|
||||
@ -197,6 +197,21 @@ Note: PRs with small changes (e.g. minor typos) may not be merged (see https://j
|
||||
make [local] test # Run in Docker container
|
||||
```
|
||||
|
||||
- **Problem**: Tests fail with a VCS error:
|
||||
```bash
|
||||
error obtaining VCS status: exit status 128
|
||||
Use -buildvcs=false to disable VCS stamping.
|
||||
```
|
||||
- **Solution**:
|
||||
Git security mechanisms prevent Golang from detecting the Git details inside
|
||||
the container; either build with the `local` option, or pass GOFLAGS to
|
||||
disable Golang buildvcs behaviour.
|
||||
```bash
|
||||
make local test
|
||||
# OR
|
||||
make test GOFLAGS='-buildvcs=true'
|
||||
```
|
||||
|
||||
### Documentation Generation Issues
|
||||
- **Problem**: Generated docs don't update after test changes
|
||||
- **Solution**:
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
FROM golang:1.25.5 AS builder
|
||||
FROM golang:1.26.0 AS builder
|
||||
|
||||
WORKDIR /go/src/mikefarah/yq
|
||||
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
FROM golang:1.25.5
|
||||
FROM golang:1.26.0
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y npm && \
|
||||
|
||||
1
Makefile
1
Makefile
@ -35,6 +35,7 @@ clean:
|
||||
## prefix before other make targets to run in your local dev environment
|
||||
local: | quiet
|
||||
@$(eval ENGINERUN= )
|
||||
@$(eval GOFLAGS="$(GOFLAGS)" )
|
||||
@mkdir -p tmp
|
||||
@touch tmp/dev_image_id
|
||||
quiet: # this is silly but shuts up 'Nothing to be done for `local`'
|
||||
|
||||
@ -4,6 +4,7 @@ IMPORT_PATH := github.com/mikefarah/${PROJECT}
|
||||
export GIT_COMMIT = $(shell git rev-parse --short HEAD)
|
||||
export GIT_DIRTY = $(shell test -n "$$(git status --porcelain)" && echo "+CHANGES" || true)
|
||||
export GIT_DESCRIBE = $(shell git describe --tags --always)
|
||||
GOFLAGS :=
|
||||
LDFLAGS :=
|
||||
LDFLAGS += -X main.GitCommit=${GIT_COMMIT}${GIT_DIRTY}
|
||||
LDFLAGS += -X main.GitDescribe=${GIT_DESCRIBE}
|
||||
@ -26,13 +27,15 @@ ifeq ($(CYG_CHECK),1)
|
||||
else
|
||||
# all non-windows environments
|
||||
ROOT := $(shell pwd)
|
||||
SELINUX := $(shell which getenforce 2>&1 >/dev/null && echo :z)
|
||||
# Deliberately use `command -v` instead of `which` to be POSIX compliant
|
||||
SELINUX := $(shell command -v getenforce >/dev/null 2>&1 && echo :z)
|
||||
endif
|
||||
|
||||
DEV_IMAGE := ${PROJECT}_dev
|
||||
|
||||
ENGINERUN := ${ENGINE} run --rm \
|
||||
-e LDFLAGS="${LDFLAGS}" \
|
||||
-e GOFLAGS="${GOFLAGS}" \
|
||||
-e GITHUB_TOKEN="${GITHUB_TOKEN}" \
|
||||
-v ${ROOT}/vendor:/go/src${SELINUX} \
|
||||
-v ${ROOT}:/${PROJECT}/src/${IMPORT_PATH}${SELINUX} \
|
||||
|
||||
@ -3,7 +3,7 @@
|
||||
    
|
||||
|
||||
|
||||
A lightweight and portable command-line YAML, JSON, INI and XML processor. `yq` uses [jq](https://github.com/stedolan/jq) (a popular JSON processor) like syntax but works with yaml files as well as json, xml, ini, properties, csv and tsv. It doesn't yet support everything `jq` does - but it does support the most common operations and functions, and more is being added continuously.
|
||||
A lightweight and portable command-line YAML, JSON, INI and XML processor. `yq` uses [jq](https://github.com/stedolan/jq) (a popular JSON processor) like syntax but works with yaml files as well as json, kyaml, xml, ini, properties, csv and tsv. It doesn't yet support everything `jq` does - but it does support the most common operations and functions, and more is being added continuously.
|
||||
|
||||
yq is written in Go - so you can download a dependency free binary for your platform and you are good to go! If you prefer there are a variety of package managers that can be used as well as Docker and Podman, all listed below.
|
||||
|
||||
@ -363,6 +363,8 @@ gah install yq
|
||||
- [Load content from other files](https://mikefarah.gitbook.io/yq/operators/load)
|
||||
- [Convert to/from json/ndjson](https://mikefarah.gitbook.io/yq/v/v4.x/usage/convert)
|
||||
- [Convert to/from xml](https://mikefarah.gitbook.io/yq/v/v4.x/usage/xml)
|
||||
- [Convert to/from hcl (terraform)](https://mikefarah.gitbook.io/yq/v/v4.x/usage/hcl)
|
||||
- [Convert to/from toml](https://mikefarah.gitbook.io/yq/v/v4.x/usage/toml)
|
||||
- [Convert to/from properties](https://mikefarah.gitbook.io/yq/v/v4.x/usage/properties)
|
||||
- [Convert to/from csv/tsv](https://mikefarah.gitbook.io/yq/usage/csv-tsv)
|
||||
- [General shell completion scripts (bash/zsh/fish/powershell)](https://mikefarah.gitbook.io/yq/v/v4.x/commands/shell-completion)
|
||||
@ -413,7 +415,7 @@ Flags:
|
||||
-h, --help help for yq
|
||||
-I, --indent int sets indent level for output (default 2)
|
||||
-i, --inplace update the file in place of first file given.
|
||||
-p, --input-format string [auto|a|yaml|y|json|j|props|p|csv|c|tsv|t|xml|x|base64|uri|toml|lua|l|ini|i] parse format for input. (default "auto")
|
||||
-p, --input-format string [auto|a|yaml|y|json|j|kyaml|ky|props|p|csv|c|tsv|t|xml|x|base64|uri|toml|hcl|h|lua|l|ini|i] parse format for input. (default "auto")
|
||||
--lua-globals output keys as top-level global variables
|
||||
--lua-prefix string prefix (default "return ")
|
||||
--lua-suffix string suffix (default ";\n")
|
||||
@ -422,7 +424,7 @@ Flags:
|
||||
-N, --no-doc Don't print document separators (---)
|
||||
-0, --nul-output Use NUL char to separate values. If unwrap scalar is also set, fail if unwrapped scalar contains NUL char.
|
||||
-n, --null-input Don't read input, simply evaluate the expression given. Useful for creating docs from scratch.
|
||||
-o, --output-format string [auto|a|yaml|y|json|j|props|p|csv|c|tsv|t|xml|x|base64|uri|toml|shell|s|lua|l|ini|i] output format type. (default "auto")
|
||||
-o, --output-format string [auto|a|yaml|y|json|j|kyaml|ky|props|p|csv|c|tsv|t|xml|x|base64|uri|toml|hcl|h|shell|s|lua|l|ini|i] output format type. (default "auto")
|
||||
-P, --prettyPrint pretty print, shorthand for '... style = ""'
|
||||
--properties-array-brackets use [x] in array paths (e.g. for SpringBoot)
|
||||
--properties-separator string separator to use between keys and values (default " = ")
|
||||
|
||||
@ -6,6 +6,7 @@ setUp() {
|
||||
rm test*.csv 2>/dev/null || true
|
||||
rm test*.tsv 2>/dev/null || true
|
||||
rm test*.xml 2>/dev/null || true
|
||||
rm test*.tf 2>/dev/null || true
|
||||
}
|
||||
|
||||
testInputProperties() {
|
||||
@ -153,6 +154,37 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testInputKYaml() {
|
||||
cat >test.kyaml <<'EOL'
|
||||
# leading
|
||||
{
|
||||
a: 1, # a line
|
||||
# head b
|
||||
b: 2,
|
||||
c: [
|
||||
# head d
|
||||
"d", # d line
|
||||
],
|
||||
}
|
||||
EOL
|
||||
|
||||
read -r -d '' expected <<'EOM'
|
||||
# leading
|
||||
a: 1 # a line
|
||||
# head b
|
||||
b: 2
|
||||
c:
|
||||
# head d
|
||||
- d # d line
|
||||
EOM
|
||||
|
||||
X=$(./yq e -p=kyaml -P test.kyaml)
|
||||
assertEquals "$expected" "$X"
|
||||
|
||||
X=$(./yq ea -p=kyaml -P test.kyaml)
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@ -255,4 +287,61 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
source ./scripts/shunit2
|
||||
testInputTerraform() {
|
||||
cat >test.tf <<EOL
|
||||
resource "aws_s3_bucket" "example" {
|
||||
bucket = "my-bucket"
|
||||
tags = {
|
||||
Environment = "Dev"
|
||||
Project = "Test"
|
||||
}
|
||||
}
|
||||
EOL
|
||||
|
||||
read -r -d '' expected << EOM
|
||||
resource "aws_s3_bucket" "example" {
|
||||
bucket = "my-bucket"
|
||||
tags = {
|
||||
Environment = "Dev"
|
||||
Project = "Test"
|
||||
}
|
||||
}
|
||||
EOM
|
||||
|
||||
X=$(./yq test.tf)
|
||||
assertEquals "$expected" "$X"
|
||||
|
||||
X=$(./yq ea test.tf)
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testInputTerraformGithubAction() {
|
||||
cat >test.tf <<EOL
|
||||
resource "aws_s3_bucket" "example" {
|
||||
bucket = "my-bucket"
|
||||
|
||||
tags = {
|
||||
Environment = "Dev"
|
||||
Project = "Test"
|
||||
}
|
||||
}
|
||||
EOL
|
||||
|
||||
read -r -d '' expected << EOM
|
||||
resource "aws_s3_bucket" "example" {
|
||||
bucket = "my-bucket"
|
||||
tags = {
|
||||
Environment = "Dev"
|
||||
Project = "Test"
|
||||
}
|
||||
}
|
||||
EOM
|
||||
|
||||
X=$(cat /dev/null | ./yq test.tf)
|
||||
assertEquals "$expected" "$X"
|
||||
|
||||
X=$(cat /dev/null | ./yq ea test.tf)
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
source ./scripts/shunit2
|
||||
|
||||
@ -280,6 +280,55 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testOutputKYaml() {
|
||||
cat >test.yml <<'EOL'
|
||||
# leading
|
||||
a: 1 # a line
|
||||
# head b
|
||||
b: 2
|
||||
c:
|
||||
# head d
|
||||
- d # d line
|
||||
EOL
|
||||
|
||||
read -r -d '' expected <<'EOM'
|
||||
# leading
|
||||
{
|
||||
a: 1, # a line
|
||||
# head b
|
||||
b: 2,
|
||||
c: [
|
||||
# head d
|
||||
"d", # d line
|
||||
],
|
||||
}
|
||||
EOM
|
||||
|
||||
X=$(./yq e --output-format=kyaml test.yml)
|
||||
assertEquals "$expected" "$X"
|
||||
|
||||
X=$(./yq ea --output-format=kyaml test.yml)
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testOutputKYamlShort() {
|
||||
cat >test.yml <<EOL
|
||||
a: b
|
||||
EOL
|
||||
|
||||
read -r -d '' expected <<'EOM'
|
||||
{
|
||||
a: "b",
|
||||
}
|
||||
EOM
|
||||
|
||||
X=$(./yq e -o=ky test.yml)
|
||||
assertEquals "$expected" "$X"
|
||||
|
||||
X=$(./yq ea -o=ky test.yml)
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testOutputXmComplex() {
|
||||
cat >test.yml <<EOL
|
||||
a: {b: {c: ["cat", "dog"], +@f: meow}}
|
||||
|
||||
24
agents.md
24
agents.md
@ -1,3 +1,17 @@
|
||||
# General rules
|
||||
✅ **DO:**
|
||||
- You can use ./yq with the `--debug-node-info` flag to get a deeper understanding of the ast.
|
||||
- run ./scripts/format.sh to format the code; then ./scripts/check.sh lint and finally ./scripts/spelling.sh to check spelling.
|
||||
- Add comprehensive tests to cover the changes
|
||||
- Run test suite to ensure there is no regression
|
||||
- Use UK english spelling
|
||||
|
||||
❌ **DON'T:**
|
||||
- Git add or commit
|
||||
- Add comments to functions that are self-explanatory
|
||||
|
||||
|
||||
|
||||
# Adding a New Encoder/Decoder
|
||||
|
||||
This guide explains how to add support for a new format (encoder/decoder) to yq without modifying `candidate_node.go`.
|
||||
@ -69,6 +83,7 @@ Create a test file `pkg/yqlib/<format>_test.go` using the `formatScenario` patte
|
||||
- `scenarioType` can be `"decode"` (test decoding to YAML) or `"roundtrip"` (encode/decode preservation)
|
||||
- Create a helper function `test<Format>Scenario()` that switches on `scenarioType`
|
||||
- Create main test function `Test<Format>FormatScenarios()` that iterates over scenarios
|
||||
- The main test function should use `documentScenarios` to ensure testcase documentation is generated.
|
||||
|
||||
Test coverage must include:
|
||||
- Basic data types (scalars, arrays, objects/maps)
|
||||
@ -183,14 +198,6 @@ Tests must be implemented in `<format>_test.go` following the `formatScenario` p
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Scalar-Only Formats
|
||||
Some formats only work with scalars (like base64, uri):
|
||||
```go
|
||||
if node.guessTagFromCustomType() != "!!str" {
|
||||
return fmt.Errorf("cannot encode %v as <format>, can only operate on strings", node.Tag)
|
||||
}
|
||||
```
|
||||
|
||||
### Format with Indentation
|
||||
Use preferences to control output formatting:
|
||||
```go
|
||||
@ -332,6 +339,7 @@ Create `pkg/yqlib/operator_<type>_test.go` using the `expressionScenario` patter
|
||||
- Include `subdescription` for longer test names
|
||||
- Set `expectedError` if testing error cases
|
||||
- Create main test function that iterates over scenarios
|
||||
- The main test function should use `documentScenarios` to ensure testcase documentation is generated.
|
||||
|
||||
Test coverage must include:
|
||||
- Basic data types and nested structures
|
||||
|
||||
@ -60,7 +60,7 @@ func evaluateAll(cmd *cobra.Command, args []string) (cmdError error) {
|
||||
out := cmd.OutOrStdout()
|
||||
|
||||
if writeInplace {
|
||||
// only use colors if its forced
|
||||
// only use colours if its forced
|
||||
colorsEnabled = forceColor
|
||||
writeInPlaceHandler := yqlib.NewWriteInPlaceHandler(args[0])
|
||||
out, err = writeInPlaceHandler.CreateTempFile()
|
||||
|
||||
@ -74,7 +74,7 @@ func evaluateSequence(cmd *cobra.Command, args []string) (cmdError error) {
|
||||
}
|
||||
|
||||
if writeInplace {
|
||||
// only use colors if its forced
|
||||
// only use colours if its forced
|
||||
colorsEnabled = forceColor
|
||||
writeInPlaceHandler := yqlib.NewWriteInPlaceHandler(args[0])
|
||||
out, err = writeInPlaceHandler.CreateTempFile()
|
||||
|
||||
@ -184,7 +184,7 @@ yq -P -oy sample.json
|
||||
}
|
||||
rootCmd.Flags().BoolVarP(&version, "version", "V", false, "Print version information and quit")
|
||||
rootCmd.PersistentFlags().BoolVarP(&writeInplace, "inplace", "i", false, "update the file in place of first file given.")
|
||||
rootCmd.PersistentFlags().VarP(unwrapScalarFlag, "unwrapScalar", "r", "unwrap scalar, print the value with no quotes, colors or comments. Defaults to true for yaml")
|
||||
rootCmd.PersistentFlags().VarP(unwrapScalarFlag, "unwrapScalar", "r", "unwrap scalar, print the value with no quotes, colours or comments. Defaults to true for yaml")
|
||||
rootCmd.PersistentFlags().Lookup("unwrapScalar").NoOptDefVal = "true"
|
||||
rootCmd.PersistentFlags().BoolVarP(&nulSepOutput, "nul-output", "0", false, "Use NUL char to separate values. If unwrap scalar is also set, fail if unwrapped scalar contains NUL char.")
|
||||
|
||||
@ -203,6 +203,7 @@ yq -P -oy sample.json
|
||||
}
|
||||
rootCmd.PersistentFlags().BoolVarP(&yqlib.ConfiguredYamlPreferences.LeadingContentPreProcessing, "header-preprocess", "", true, "Slurp any header comments and separators before processing expression.")
|
||||
rootCmd.PersistentFlags().BoolVarP(&yqlib.ConfiguredYamlPreferences.FixMergeAnchorToSpec, "yaml-fix-merge-anchor-to-spec", "", false, "Fix merge anchor to match YAML spec. Will default to true in late 2025")
|
||||
rootCmd.PersistentFlags().BoolVarP(&yqlib.ConfiguredYamlPreferences.CompactSequenceIndent, "yaml-compact-seq-indent", "c", false, "Use compact sequence indentation where '- ' is considered part of the indentation.")
|
||||
|
||||
rootCmd.PersistentFlags().StringVarP(&splitFileExp, "split-exp", "s", "", "print each result (or doc) into a file named (exp). [exp] argument must return a string. You can use $index in the expression as the result counter. The necessary directories will be created.")
|
||||
if err = rootCmd.RegisterFlagCompletionFunc("split-exp", cobra.NoFileCompletions); err != nil {
|
||||
|
||||
@ -166,6 +166,9 @@ func configureDecoder(evaluateTogether bool) (yqlib.Decoder, error) {
|
||||
}
|
||||
yqlib.ConfiguredYamlPreferences.EvaluateTogether = evaluateTogether
|
||||
|
||||
if format.DecoderFactory == nil {
|
||||
return nil, fmt.Errorf("no support for %s input format", inputFormat)
|
||||
}
|
||||
yqlibDecoder := format.DecoderFactory()
|
||||
if yqlibDecoder == nil {
|
||||
return nil, fmt.Errorf("no support for %s input format", inputFormat)
|
||||
@ -197,17 +200,23 @@ func configureEncoder() (yqlib.Encoder, error) {
|
||||
}
|
||||
yqlib.ConfiguredXMLPreferences.Indent = indent
|
||||
yqlib.ConfiguredYamlPreferences.Indent = indent
|
||||
yqlib.ConfiguredKYamlPreferences.Indent = indent
|
||||
yqlib.ConfiguredJSONPreferences.Indent = indent
|
||||
|
||||
yqlib.ConfiguredYamlPreferences.UnwrapScalar = unwrapScalar
|
||||
yqlib.ConfiguredKYamlPreferences.UnwrapScalar = unwrapScalar
|
||||
yqlib.ConfiguredPropertiesPreferences.UnwrapScalar = unwrapScalar
|
||||
yqlib.ConfiguredJSONPreferences.UnwrapScalar = unwrapScalar
|
||||
yqlib.ConfiguredShellVariablesPreferences.UnwrapScalar = unwrapScalar
|
||||
|
||||
yqlib.ConfiguredYamlPreferences.ColorsEnabled = colorsEnabled
|
||||
yqlib.ConfiguredKYamlPreferences.ColorsEnabled = colorsEnabled
|
||||
yqlib.ConfiguredJSONPreferences.ColorsEnabled = colorsEnabled
|
||||
yqlib.ConfiguredHclPreferences.ColorsEnabled = colorsEnabled
|
||||
yqlib.ConfiguredTomlPreferences.ColorsEnabled = colorsEnabled
|
||||
|
||||
yqlib.ConfiguredYamlPreferences.PrintDocSeparators = !noDocSeparators
|
||||
yqlib.ConfiguredKYamlPreferences.PrintDocSeparators = !noDocSeparators
|
||||
|
||||
encoder := yqlibOutputFormat.EncoderFactory()
|
||||
|
||||
|
||||
@ -926,13 +926,13 @@ func TestSetupColors(t *testing.T) {
|
||||
expectColors bool
|
||||
}{
|
||||
{
|
||||
name: "force color enabled",
|
||||
name: "force colour enabled",
|
||||
forceColor: true,
|
||||
forceNoColor: false,
|
||||
expectColors: true,
|
||||
},
|
||||
{
|
||||
name: "force no color enabled",
|
||||
name: "force no colour enabled",
|
||||
forceColor: false,
|
||||
forceNoColor: true,
|
||||
expectColors: false,
|
||||
|
||||
@ -11,7 +11,7 @@ var (
|
||||
GitDescribe string
|
||||
|
||||
// Version is main version number that is being run at the moment.
|
||||
Version = "v4.50.1"
|
||||
Version = "v4.52.4"
|
||||
|
||||
// VersionPrerelease is a pre-release marker for the version. If this is "" (empty string)
|
||||
// then it means that it is a final release. Otherwise, this is a pre-release
|
||||
|
||||
@ -1,6 +1,9 @@
|
||||
package cmd
|
||||
|
||||
import "testing"
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestGetVersionDisplay(t *testing.T) {
|
||||
var expectedVersion = ProductName + " (https://github.com/mikefarah/yq/) version " + Version
|
||||
@ -25,6 +28,18 @@ func TestGetVersionDisplay(t *testing.T) {
|
||||
}
|
||||
|
||||
func Test_getHumanVersion(t *testing.T) {
|
||||
// Save original values
|
||||
origGitDescribe := GitDescribe
|
||||
origGitCommit := GitCommit
|
||||
origVersionPrerelease := VersionPrerelease
|
||||
|
||||
// Restore after test
|
||||
defer func() {
|
||||
GitDescribe = origGitDescribe
|
||||
GitCommit = origGitCommit
|
||||
VersionPrerelease = origVersionPrerelease
|
||||
}()
|
||||
|
||||
GitDescribe = "e42813d"
|
||||
GitCommit = "e42813d+CHANGES"
|
||||
var wanted string
|
||||
@ -49,3 +64,118 @@ func Test_getHumanVersion(t *testing.T) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Test_getHumanVersion_NoGitDescribe(t *testing.T) {
|
||||
// Save original values
|
||||
origGitDescribe := GitDescribe
|
||||
origGitCommit := GitCommit
|
||||
origVersionPrerelease := VersionPrerelease
|
||||
|
||||
// Restore after test
|
||||
defer func() {
|
||||
GitDescribe = origGitDescribe
|
||||
GitCommit = origGitCommit
|
||||
VersionPrerelease = origVersionPrerelease
|
||||
}()
|
||||
|
||||
GitDescribe = ""
|
||||
GitCommit = ""
|
||||
VersionPrerelease = ""
|
||||
|
||||
got := getHumanVersion()
|
||||
if got != Version {
|
||||
t.Errorf("getHumanVersion() = %v, want %v", got, Version)
|
||||
}
|
||||
}
|
||||
|
||||
func Test_getHumanVersion_WithPrerelease(t *testing.T) {
|
||||
// Save original values
|
||||
origGitDescribe := GitDescribe
|
||||
origGitCommit := GitCommit
|
||||
origVersionPrerelease := VersionPrerelease
|
||||
|
||||
// Restore after test
|
||||
defer func() {
|
||||
GitDescribe = origGitDescribe
|
||||
GitCommit = origGitCommit
|
||||
VersionPrerelease = origVersionPrerelease
|
||||
}()
|
||||
|
||||
GitDescribe = ""
|
||||
GitCommit = "abc123"
|
||||
VersionPrerelease = "beta"
|
||||
|
||||
got := getHumanVersion()
|
||||
expected := Version + "-beta (abc123)"
|
||||
if got != expected {
|
||||
t.Errorf("getHumanVersion() = %v, want %v", got, expected)
|
||||
}
|
||||
}
|
||||
|
||||
func Test_getHumanVersion_PrereleaseInVersion(t *testing.T) {
|
||||
// Save original values
|
||||
origGitDescribe := GitDescribe
|
||||
origGitCommit := GitCommit
|
||||
origVersionPrerelease := VersionPrerelease
|
||||
|
||||
// Restore after test
|
||||
defer func() {
|
||||
GitDescribe = origGitDescribe
|
||||
GitCommit = origGitCommit
|
||||
VersionPrerelease = origVersionPrerelease
|
||||
}()
|
||||
|
||||
GitDescribe = "v1.2.3-rc1"
|
||||
GitCommit = "xyz789"
|
||||
VersionPrerelease = "rc1"
|
||||
|
||||
got := getHumanVersion()
|
||||
// Should not duplicate "rc1" since it's already in GitDescribe
|
||||
expected := "v1.2.3-rc1 (xyz789)"
|
||||
if got != expected {
|
||||
t.Errorf("getHumanVersion() = %v, want %v", got, expected)
|
||||
}
|
||||
}
|
||||
|
||||
func Test_getHumanVersion_StripSingleQuotes(t *testing.T) {
|
||||
// Save original values
|
||||
origGitDescribe := GitDescribe
|
||||
origGitCommit := GitCommit
|
||||
origVersionPrerelease := VersionPrerelease
|
||||
|
||||
// Restore after test
|
||||
defer func() {
|
||||
GitDescribe = origGitDescribe
|
||||
GitCommit = origGitCommit
|
||||
VersionPrerelease = origVersionPrerelease
|
||||
}()
|
||||
|
||||
GitDescribe = "'v1.2.3'"
|
||||
GitCommit = "'commit123'"
|
||||
VersionPrerelease = ""
|
||||
|
||||
got := getHumanVersion()
|
||||
// Should strip single quotes
|
||||
if strings.Contains(got, "'") {
|
||||
t.Errorf("getHumanVersion() = %v, should not contain single quotes", got)
|
||||
}
|
||||
expected := "v1.2.3"
|
||||
if got != expected {
|
||||
t.Errorf("getHumanVersion() = %v, want %v", got, expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestProductName(t *testing.T) {
|
||||
if ProductName != "yq" {
|
||||
t.Errorf("ProductName = %v, want yq", ProductName)
|
||||
}
|
||||
}
|
||||
|
||||
func TestVersionIsSet(t *testing.T) {
|
||||
if Version == "" {
|
||||
t.Error("Version should not be empty")
|
||||
}
|
||||
if !strings.HasPrefix(Version, "v") {
|
||||
t.Errorf("Version %v should start with 'v'", Version)
|
||||
}
|
||||
}
|
||||
|
||||
10
examples/kyaml.kyaml
Normal file
10
examples/kyaml.kyaml
Normal file
@ -0,0 +1,10 @@
|
||||
# leading
|
||||
{
|
||||
a: 1, # a line
|
||||
# head b
|
||||
b: 2,
|
||||
c: [
|
||||
# head d
|
||||
"d", # d line
|
||||
],
|
||||
}
|
||||
7
examples/kyaml.yml
Normal file
7
examples/kyaml.yml
Normal file
@ -0,0 +1,7 @@
|
||||
# leading
|
||||
a: 1 # a line
|
||||
# head b
|
||||
b: 2
|
||||
c:
|
||||
# head d
|
||||
- d # d line
|
||||
27
examples/sample.tf
Normal file
27
examples/sample.tf
Normal file
@ -0,0 +1,27 @@
|
||||
# main.tf
|
||||
|
||||
# Define required providers and minimum Terraform version
|
||||
terraform {
|
||||
required_providers {
|
||||
aws = {
|
||||
source = "hashicorp/aws"
|
||||
version = "~> 5.0"
|
||||
}
|
||||
}
|
||||
required_version = ">= 1.2"
|
||||
}
|
||||
|
||||
# Configure the AWS provider
|
||||
provider "aws" {
|
||||
region = var.aws_region
|
||||
}
|
||||
|
||||
# Define an S3 bucket resource
|
||||
resource "aws_s3_bucket" "example_bucket" {
|
||||
bucket = var.bucket_name
|
||||
|
||||
tags = {
|
||||
Environment = "Development"
|
||||
Project = "TerraformExample"
|
||||
}
|
||||
}
|
||||
@ -1,6 +1,26 @@
|
||||
[[fruits]]
|
||||
|
||||
[animals]
|
||||
|
||||
[[fruits.varieties]] # nested array of tables
|
||||
name = "red delicious"
|
||||
# This is a TOML document
|
||||
|
||||
title = "TOML Example"
|
||||
|
||||
[owner]
|
||||
name = "Tom Preston-Werner"
|
||||
dob = 1979-05-27T07:32:00-08:00
|
||||
|
||||
[database]
|
||||
enabled = true
|
||||
ports = [ 8000, 8001, 8002 ]
|
||||
data = [ ["delta", "phi"], [3.14] ]
|
||||
temp_targets = { cpu = 79.5, case = 72.0 }
|
||||
|
||||
[servers]
|
||||
|
||||
[servers.alpha]
|
||||
ip = "10.0.0.1"
|
||||
role = "frontend"
|
||||
|
||||
[servers.beta]
|
||||
ip = "10.0.0.2"
|
||||
role = "backend"
|
||||
|
||||
|
||||
12
go.mod
12
go.mod
@ -9,7 +9,7 @@ require (
|
||||
github.com/fatih/color v1.18.0
|
||||
github.com/go-ini/ini v1.67.0
|
||||
github.com/goccy/go-json v0.10.5
|
||||
github.com/goccy/go-yaml v1.19.0
|
||||
github.com/goccy/go-yaml v1.19.2
|
||||
github.com/hashicorp/hcl/v2 v2.24.0
|
||||
github.com/jinzhu/copier v0.4.0
|
||||
github.com/magiconair/properties v1.8.10
|
||||
@ -20,8 +20,9 @@ require (
|
||||
github.com/yuin/gopher-lua v1.1.1
|
||||
github.com/zclconf/go-cty v1.17.0
|
||||
go.yaml.in/yaml/v4 v4.0.0-rc.3
|
||||
golang.org/x/net v0.48.0
|
||||
golang.org/x/text v0.32.0
|
||||
golang.org/x/mod v0.33.0
|
||||
golang.org/x/net v0.50.0
|
||||
golang.org/x/text v0.34.0
|
||||
gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473
|
||||
)
|
||||
|
||||
@ -33,10 +34,9 @@ require (
|
||||
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
|
||||
golang.org/x/mod v0.30.0 // indirect
|
||||
golang.org/x/sync v0.19.0 // indirect
|
||||
golang.org/x/sys v0.39.0 // indirect
|
||||
golang.org/x/tools v0.39.0 // indirect
|
||||
golang.org/x/sys v0.41.0 // indirect
|
||||
golang.org/x/tools v0.41.0 // indirect
|
||||
)
|
||||
|
||||
go 1.24.0
|
||||
|
||||
24
go.sum
24
go.sum
@ -26,8 +26,8 @@ github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
|
||||
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
|
||||
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/goccy/go-yaml v1.19.0 h1:EmkZ9RIsX+Uq4DYFowegAuJo8+xdX3T/2dwNPXbxEYE=
|
||||
github.com/goccy/go-yaml v1.19.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
|
||||
github.com/goccy/go-yaml v1.19.2 h1:PmFC1S6h8ljIz6gMRBopkjP1TVT7xuwrButHID66PoM=
|
||||
github.com/goccy/go-yaml v1.19.2/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/hashicorp/hcl/v2 v2.24.0 h1:2QJdZ454DSsYGoaE6QheQZjtKZSUs9Nh2izTWiwQxvE=
|
||||
@ -70,19 +70,19 @@ github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmB
|
||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||
go.yaml.in/yaml/v4 v4.0.0-rc.3 h1:3h1fjsh1CTAPjW7q/EMe+C8shx5d8ctzZTrLcs/j8Go=
|
||||
go.yaml.in/yaml/v4 v4.0.0-rc.3/go.mod h1:aZqd9kCMsGL7AuUv/m/PvWLdg5sjJsZ4oHDEnfPPfY0=
|
||||
golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=
|
||||
golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
|
||||
golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU=
|
||||
golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
|
||||
golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8=
|
||||
golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w=
|
||||
golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
|
||||
golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
|
||||
golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
|
||||
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
|
||||
golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ=
|
||||
golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
|
||||
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
|
||||
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
|
||||
golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
|
||||
golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
|
||||
golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473 h1:6D+BvnJ/j6e222UW8s2qTSe3wGBtvo0MbVQG/c5k8RE=
|
||||
gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473/go.mod h1:N1eN2tsCx0Ydtgjl4cqmbRCsY4/+z4cYDeqwZTk6zog=
|
||||
|
||||
24
go_install_test.go
Normal file
24
go_install_test.go
Normal file
@ -0,0 +1,24 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"io"
|
||||
"testing"
|
||||
|
||||
"golang.org/x/mod/module"
|
||||
"golang.org/x/mod/zip"
|
||||
)
|
||||
|
||||
// TestGoInstallCompatibility ensures the module can be zipped for go install.
|
||||
// This is an integration test that uses the same zip.CreateFromDir function
|
||||
// that go install uses internally. If this test fails, go install will fail.
|
||||
// See: https://github.com/mikefarah/yq/issues/2587
|
||||
func TestGoInstallCompatibility(t *testing.T) {
|
||||
mod := module.Version{
|
||||
Path: "github.com/mikefarah/yq/v4",
|
||||
Version: "v4.0.0", // the actual version doesn't matter for validation
|
||||
}
|
||||
|
||||
if err := zip.CreateFromDir(io.Discard, mod, "."); err != nil {
|
||||
t.Fatalf("Module cannot be zipped for go install: %v", err)
|
||||
}
|
||||
}
|
||||
@ -465,6 +465,9 @@ func (n *CandidateNode) UpdateAttributesFrom(other *CandidateNode, prefs assignP
|
||||
n.Anchor = other.Anchor
|
||||
}
|
||||
|
||||
// Preserve EncodeSeparate flag for format-specific encoding hints
|
||||
n.EncodeSeparate = other.EncodeSeparate
|
||||
|
||||
// merge will pickup the style of the new thing
|
||||
// when autocreating nodes
|
||||
|
||||
|
||||
@ -16,7 +16,7 @@ type iniDecoder struct {
|
||||
|
||||
func NewINIDecoder() Decoder {
|
||||
return &iniDecoder{
|
||||
finished: false, // Initialize the flag as false
|
||||
finished: false, // Initialise the flag as false
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -8,16 +8,19 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
toml "github.com/pelletier/go-toml/v2/unstable"
|
||||
)
|
||||
|
||||
type tomlDecoder struct {
|
||||
parser toml.Parser
|
||||
finished bool
|
||||
d DataTreeNavigator
|
||||
rootMap *CandidateNode
|
||||
parser toml.Parser
|
||||
finished bool
|
||||
d DataTreeNavigator
|
||||
rootMap *CandidateNode
|
||||
pendingComments []string // Head comments collected from Comment nodes
|
||||
firstContentSeen bool // Track if we've processed the first non-comment node
|
||||
}
|
||||
|
||||
func NewTomlDecoder() Decoder {
|
||||
@ -28,7 +31,7 @@ func NewTomlDecoder() Decoder {
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) Init(reader io.Reader) error {
|
||||
dec.parser = toml.Parser{}
|
||||
dec.parser = toml.Parser{KeepComments: true}
|
||||
buf := new(bytes.Buffer)
|
||||
_, err := buf.ReadFrom(reader)
|
||||
if err != nil {
|
||||
@ -39,9 +42,23 @@ func (dec *tomlDecoder) Init(reader io.Reader) error {
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
}
|
||||
dec.pendingComments = make([]string, 0)
|
||||
dec.firstContentSeen = false
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) attachOrphanedCommentsToNode(tableNodeValue *CandidateNode) {
|
||||
if len(dec.pendingComments) > 0 {
|
||||
comments := strings.Join(dec.pendingComments, "\n")
|
||||
if tableNodeValue.HeadComment == "" {
|
||||
tableNodeValue.HeadComment = comments
|
||||
} else {
|
||||
tableNodeValue.HeadComment = tableNodeValue.HeadComment + "\n" + comments
|
||||
}
|
||||
dec.pendingComments = make([]string, 0)
|
||||
}
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) getFullPath(tomlNode *toml.Node) []interface{} {
|
||||
path := make([]interface{}, 0)
|
||||
for {
|
||||
@ -56,13 +73,24 @@ func (dec *tomlDecoder) getFullPath(tomlNode *toml.Node) []interface{} {
|
||||
func (dec *tomlDecoder) processKeyValueIntoMap(rootMap *CandidateNode, tomlNode *toml.Node) error {
|
||||
value := tomlNode.Value()
|
||||
path := dec.getFullPath(value.Next())
|
||||
log.Debug("processKeyValueIntoMap: %v", path)
|
||||
|
||||
valueNode, err := dec.decodeNode(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Attach pending head comments
|
||||
if len(dec.pendingComments) > 0 {
|
||||
valueNode.HeadComment = strings.Join(dec.pendingComments, "\n")
|
||||
dec.pendingComments = make([]string, 0)
|
||||
}
|
||||
|
||||
// Check for inline comment chained to the KeyValue node
|
||||
nextNode := tomlNode.Next()
|
||||
if nextNode != nil && nextNode.Kind == toml.Comment {
|
||||
valueNode.LineComment = string(nextNode.Data)
|
||||
}
|
||||
|
||||
context := Context{}
|
||||
context = context.SingleChildContext(rootMap)
|
||||
|
||||
@ -79,11 +107,15 @@ func (dec *tomlDecoder) decodeKeyValuesIntoMap(rootMap *CandidateNode, tomlNode
|
||||
nextItem := dec.parser.Expression()
|
||||
log.Debug("decodeKeyValuesIntoMap -- next exp, its a %v", nextItem.Kind)
|
||||
|
||||
if nextItem.Kind == toml.KeyValue {
|
||||
switch nextItem.Kind {
|
||||
case toml.KeyValue:
|
||||
if err := dec.processKeyValueIntoMap(rootMap, nextItem); err != nil {
|
||||
return false, err
|
||||
}
|
||||
} else {
|
||||
case toml.Comment:
|
||||
// Standalone comment - add to pending for next element
|
||||
dec.pendingComments = append(dec.pendingComments, string(nextItem.Data))
|
||||
default:
|
||||
// run out of key values
|
||||
log.Debug("done in decodeKeyValuesIntoMap, gota a %v", nextItem.Kind)
|
||||
return true, nil
|
||||
@ -125,13 +157,30 @@ func (dec *tomlDecoder) createInlineTableMap(tomlNode *toml.Node) (*CandidateNod
|
||||
|
||||
func (dec *tomlDecoder) createArray(tomlNode *toml.Node) (*CandidateNode, error) {
|
||||
content := make([]*CandidateNode, 0)
|
||||
var pendingArrayComments []string
|
||||
|
||||
iterator := tomlNode.Children()
|
||||
for iterator.Next() {
|
||||
child := iterator.Node()
|
||||
|
||||
// Handle comments within arrays
|
||||
if child.Kind == toml.Comment {
|
||||
// Collect comments to attach to the next array element
|
||||
pendingArrayComments = append(pendingArrayComments, string(child.Data))
|
||||
continue
|
||||
}
|
||||
|
||||
yamlNode, err := dec.decodeNode(child)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Attach any pending comments to this array element
|
||||
if len(pendingArrayComments) > 0 {
|
||||
yamlNode.HeadComment = strings.Join(pendingArrayComments, "\n")
|
||||
pendingArrayComments = make([]string, 0)
|
||||
}
|
||||
|
||||
content = append(content, yamlNode)
|
||||
}
|
||||
|
||||
@ -250,11 +299,29 @@ func (dec *tomlDecoder) processTopLevelNode(currentNode *toml.Node) (bool, error
|
||||
var err error
|
||||
log.Debug("processTopLevelNode: Going to process %v state is current %v", currentNode.Kind, NodeToString(dec.rootMap))
|
||||
switch currentNode.Kind {
|
||||
case toml.Comment:
|
||||
// Collect comment to attach to next element
|
||||
commentText := string(currentNode.Data)
|
||||
// If we haven't seen any content yet, accumulate comments for root
|
||||
if !dec.firstContentSeen {
|
||||
if dec.rootMap.HeadComment == "" {
|
||||
dec.rootMap.HeadComment = commentText
|
||||
} else {
|
||||
dec.rootMap.HeadComment = dec.rootMap.HeadComment + "\n" + commentText
|
||||
}
|
||||
} else {
|
||||
// We've seen content, so these comments are for the next element
|
||||
dec.pendingComments = append(dec.pendingComments, commentText)
|
||||
}
|
||||
return false, nil
|
||||
case toml.Table:
|
||||
dec.firstContentSeen = true
|
||||
runAgainstCurrentExp, err = dec.processTable(currentNode)
|
||||
case toml.ArrayTable:
|
||||
dec.firstContentSeen = true
|
||||
runAgainstCurrentExp, err = dec.processArrayTable(currentNode)
|
||||
default:
|
||||
dec.firstContentSeen = true
|
||||
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(dec.rootMap, currentNode)
|
||||
}
|
||||
|
||||
@ -264,7 +331,8 @@ func (dec *tomlDecoder) processTopLevelNode(currentNode *toml.Node) (bool, error
|
||||
|
||||
func (dec *tomlDecoder) processTable(currentNode *toml.Node) (bool, error) {
|
||||
log.Debug("Enter processTable")
|
||||
fullPath := dec.getFullPath(currentNode.Child())
|
||||
child := currentNode.Child()
|
||||
fullPath := dec.getFullPath(child)
|
||||
log.Debug("fullpath: %v", fullPath)
|
||||
|
||||
c := Context{}
|
||||
@ -276,27 +344,53 @@ func (dec *tomlDecoder) processTable(currentNode *toml.Node) (bool, error) {
|
||||
}
|
||||
|
||||
tableNodeValue := &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
Content: make([]*CandidateNode, 0),
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
Content: make([]*CandidateNode, 0),
|
||||
EncodeSeparate: true,
|
||||
}
|
||||
|
||||
// Attach pending head comments to the table
|
||||
if len(dec.pendingComments) > 0 {
|
||||
tableNodeValue.HeadComment = strings.Join(dec.pendingComments, "\n")
|
||||
dec.pendingComments = make([]string, 0)
|
||||
}
|
||||
|
||||
var tableValue *toml.Node
|
||||
runAgainstCurrentExp := false
|
||||
hasValue := dec.parser.NextExpression()
|
||||
// check to see if there is any table data
|
||||
if hasValue {
|
||||
sawKeyValue := false
|
||||
for dec.parser.NextExpression() {
|
||||
tableValue = dec.parser.Expression()
|
||||
// next expression is not table data, so we are done
|
||||
if tableValue.Kind != toml.KeyValue {
|
||||
log.Debug("got an empty table")
|
||||
runAgainstCurrentExp = true
|
||||
} else {
|
||||
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(tableNodeValue, tableValue)
|
||||
if err != nil && !errors.Is(err, io.EOF) {
|
||||
return false, err
|
||||
}
|
||||
// Allow standalone comments inside the table before the first key-value.
|
||||
// These should be associated with the next element in the table (usually the first key-value),
|
||||
// not treated as "end of table" (which would cause subsequent key-values to be parsed at root).
|
||||
if tableValue.Kind == toml.Comment {
|
||||
dec.pendingComments = append(dec.pendingComments, string(tableValue.Data))
|
||||
continue
|
||||
}
|
||||
|
||||
// next expression is not table data, so we are done (but we need to re-process it at top-level)
|
||||
if tableValue.Kind != toml.KeyValue {
|
||||
log.Debug("got an empty table (or reached next section)")
|
||||
// If the table had only comments, attach them to the table itself so they don't leak to the next node.
|
||||
if !sawKeyValue {
|
||||
dec.attachOrphanedCommentsToNode(tableNodeValue)
|
||||
}
|
||||
runAgainstCurrentExp = true
|
||||
break
|
||||
}
|
||||
|
||||
sawKeyValue = true
|
||||
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(tableNodeValue, tableValue)
|
||||
if err != nil && !errors.Is(err, io.EOF) {
|
||||
return false, err
|
||||
}
|
||||
break
|
||||
}
|
||||
// If we hit EOF after only seeing comments inside this table, attach them to the table itself
|
||||
// so they don't leak to whatever comes next.
|
||||
if !sawKeyValue {
|
||||
dec.attachOrphanedCommentsToNode(tableNodeValue)
|
||||
}
|
||||
|
||||
err = dec.d.DeeplyAssign(c, fullPath, tableNodeValue)
|
||||
@ -330,7 +424,8 @@ func (dec *tomlDecoder) arrayAppend(context Context, path []interface{}, rhsNode
|
||||
|
||||
func (dec *tomlDecoder) processArrayTable(currentNode *toml.Node) (bool, error) {
|
||||
log.Debug("Enter processArrayTable")
|
||||
fullPath := dec.getFullPath(currentNode.Child())
|
||||
child := currentNode.Child()
|
||||
fullPath := dec.getFullPath(child)
|
||||
log.Debug("Fullpath: %v", fullPath)
|
||||
|
||||
c := Context{}
|
||||
@ -346,23 +441,64 @@ func (dec *tomlDecoder) processArrayTable(currentNode *toml.Node) (bool, error)
|
||||
hasValue := dec.parser.NextExpression()
|
||||
|
||||
tableNodeValue := &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
EncodeSeparate: true,
|
||||
}
|
||||
|
||||
// Attach pending head comments to the array table
|
||||
if len(dec.pendingComments) > 0 {
|
||||
tableNodeValue.HeadComment = strings.Join(dec.pendingComments, "\n")
|
||||
dec.pendingComments = make([]string, 0)
|
||||
}
|
||||
|
||||
runAgainstCurrentExp := false
|
||||
// if the next value is a ArrayTable or Table, then its not part of this declaration (not a key value pair)
|
||||
// so lets leave that expression for the next round of parsing
|
||||
if hasValue && (dec.parser.Expression().Kind == toml.ArrayTable || dec.parser.Expression().Kind == toml.Table) {
|
||||
runAgainstCurrentExp = true
|
||||
} else if hasValue {
|
||||
// otherwise, if there is a value, it must be some key value pairs of the
|
||||
// first object in the array!
|
||||
tableValue := dec.parser.Expression()
|
||||
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(tableNodeValue, tableValue)
|
||||
if err != nil && !errors.Is(err, io.EOF) {
|
||||
return false, err
|
||||
sawKeyValue := false
|
||||
if hasValue {
|
||||
for {
|
||||
exp := dec.parser.Expression()
|
||||
// Allow standalone comments inside array tables before the first key-value.
|
||||
if exp.Kind == toml.Comment {
|
||||
dec.pendingComments = append(dec.pendingComments, string(exp.Data))
|
||||
hasValue = dec.parser.NextExpression()
|
||||
if !hasValue {
|
||||
break
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// if the next value is a ArrayTable or Table, then its not part of this declaration (not a key value pair)
|
||||
// so lets leave that expression for the next round of parsing
|
||||
if exp.Kind == toml.ArrayTable || exp.Kind == toml.Table {
|
||||
// If this array-table entry had only comments, attach them to the entry so they don't leak.
|
||||
if !sawKeyValue {
|
||||
dec.attachOrphanedCommentsToNode(tableNodeValue)
|
||||
}
|
||||
runAgainstCurrentExp = true
|
||||
break
|
||||
}
|
||||
|
||||
sawKeyValue = true
|
||||
// otherwise, if there is a value, it must be some key value pairs of the
|
||||
// first object in the array!
|
||||
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(tableNodeValue, exp)
|
||||
if err != nil && !errors.Is(err, io.EOF) {
|
||||
return false, err
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
// If we hit EOF after only seeing comments inside this array-table entry, attach them to the entry
|
||||
// so they don't leak to whatever comes next.
|
||||
if !sawKeyValue && len(dec.pendingComments) > 0 {
|
||||
comments := strings.Join(dec.pendingComments, "\n")
|
||||
if tableNodeValue.HeadComment == "" {
|
||||
tableNodeValue.HeadComment = comments
|
||||
} else {
|
||||
tableNodeValue.HeadComment = tableNodeValue.HeadComment + "\n" + comments
|
||||
}
|
||||
dec.pendingComments = make([]string, 0)
|
||||
}
|
||||
|
||||
// += function
|
||||
err = dec.arrayAppend(c, fullPath, tableNodeValue)
|
||||
@ -375,23 +511,42 @@ func (dec *tomlDecoder) processArrayTable(currentNode *toml.Node) (bool, error)
|
||||
// Because TOML. So we'll inject the last index into the path.
|
||||
|
||||
func getPathToUse(fullPath []interface{}, dec *tomlDecoder, c Context) ([]interface{}, error) {
|
||||
pathToCheck := fullPath
|
||||
if len(fullPath) >= 1 {
|
||||
pathToCheck = fullPath[:len(fullPath)-1]
|
||||
}
|
||||
readOp := createTraversalTree(pathToCheck, traversePreferences{DontAutoCreate: true}, false)
|
||||
// We need to check the entire path (except the last element), not just the immediate parent,
|
||||
// because we may have nested array tables like [[array.subarray.subsubarray]]
|
||||
// where both 'array' and 'subarray' are arrays that already exist.
|
||||
|
||||
resultContext, err := dec.d.GetMatchingNodes(c, readOp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
if len(fullPath) == 0 {
|
||||
return fullPath, nil
|
||||
}
|
||||
if resultContext.MatchingNodes.Len() >= 1 {
|
||||
match := resultContext.MatchingNodes.Front().Value.(*CandidateNode)
|
||||
// path refers to an array, we need to add this to the last element in the array
|
||||
if match.Kind == SequenceNode {
|
||||
fullPath = append(pathToCheck, len(match.Content)-1, fullPath[len(fullPath)-1])
|
||||
log.Debugf("Adding to end of %v array, using path: %v", pathToCheck, fullPath)
|
||||
|
||||
resultPath := make([]interface{}, 0, len(fullPath)*2) // preallocate with extra space for indices
|
||||
|
||||
// Process all segments except the last one
|
||||
for i := 0; i < len(fullPath)-1; i++ {
|
||||
resultPath = append(resultPath, fullPath[i])
|
||||
|
||||
// Check if the current path segment points to an array
|
||||
readOp := createTraversalTree(resultPath, traversePreferences{DontAutoCreate: true}, false)
|
||||
resultContext, err := dec.d.GetMatchingNodes(c, readOp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if resultContext.MatchingNodes.Len() >= 1 {
|
||||
match := resultContext.MatchingNodes.Front().Value.(*CandidateNode)
|
||||
// If this segment points to an array, we need to add the last index
|
||||
// before continuing with the rest of the path
|
||||
if match.Kind == SequenceNode && len(match.Content) > 0 {
|
||||
lastIndex := len(match.Content) - 1
|
||||
resultPath = append(resultPath, lastIndex)
|
||||
log.Debugf("Path segment %v is an array, injecting index %d", resultPath[:len(resultPath)-1], lastIndex)
|
||||
}
|
||||
}
|
||||
}
|
||||
return fullPath, err
|
||||
|
||||
// Add the last segment
|
||||
resultPath = append(resultPath, fullPath[len(fullPath)-1])
|
||||
|
||||
log.Debugf("getPathToUse: original path %v -> result path %v", fullPath, resultPath)
|
||||
return resultPath, nil
|
||||
}
|
||||
|
||||
160
pkg/yqlib/decoder_uri_test.go
Normal file
160
pkg/yqlib/decoder_uri_test.go
Normal file
@ -0,0 +1,160 @@
|
||||
//go:build !yq_nouri
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"io"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
)
|
||||
|
||||
func TestUriDecoder_Init(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("test")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeSimpleString(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("hello%20world")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "!!str", node.Tag)
|
||||
test.AssertResult(t, "hello world", node.Value)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeSpecialCharacters(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("hello%21%40%23%24%25")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "hello!@#$%", node.Value)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeUTF8(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("%E2%9C%93%20check")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "✓ check", node.Value)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodePlusSign(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("a+b")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
// Note: url.QueryUnescape does NOT convert + to space
|
||||
// That's only for form encoding (url.ParseQuery)
|
||||
test.AssertResult(t, "a b", node.Value)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeEmptyString(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "", node.Value)
|
||||
|
||||
// Second decode should return EOF
|
||||
node, err = decoder.Decode()
|
||||
test.AssertResult(t, io.EOF, err)
|
||||
test.AssertResult(t, (*CandidateNode)(nil), node)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeMultipleCalls(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("test")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
// First decode
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "test", node.Value)
|
||||
|
||||
// Second decode should return EOF since we've consumed all input
|
||||
node, err = decoder.Decode()
|
||||
test.AssertResult(t, io.EOF, err)
|
||||
test.AssertResult(t, (*CandidateNode)(nil), node)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeInvalidEscape(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("test%ZZ")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
_, err = decoder.Decode()
|
||||
// Should return an error for invalid escape sequence
|
||||
if err == nil {
|
||||
t.Error("Expected error for invalid escape sequence, got nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeSlashAndQuery(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("path%2Fto%2Ffile%3Fquery%3Dvalue")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "path/to/file?query=value", node.Value)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodePercent(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("100%25")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "100%", node.Value)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeNoEscaping(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("simple_text-123")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "simple_text-123", node.Value)
|
||||
}
|
||||
|
||||
// Mock reader that returns an error
|
||||
type errorReader struct{}
|
||||
|
||||
func (e *errorReader) Read(_ []byte) (n int, err error) {
|
||||
return 0, io.ErrUnexpectedEOF
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeReadError(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
err := decoder.Init(&errorReader{})
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
_, err = decoder.Decode()
|
||||
test.AssertResult(t, io.ErrUnexpectedEOF, err)
|
||||
}
|
||||
@ -78,7 +78,7 @@ func (dec *yamlDecoder) processReadStream(reader *bufio.Reader) (io.Reader, stri
|
||||
if separatorPrefixRe.MatchString(line) {
|
||||
match := separatorPrefixRe.FindString(line)
|
||||
remainder := line[len(match):]
|
||||
// normalize separator newline: if original had none, default to LF
|
||||
// normalise separator newline: if original had none, default to LF
|
||||
sepNewline := newline
|
||||
if sepNewline == "" {
|
||||
sepNewline = "\n"
|
||||
|
||||
@ -22,7 +22,7 @@ see https://yaml.org/type/merge.html
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- &CENTER
|
||||
- &CENTRE
|
||||
x: 1
|
||||
y: 2
|
||||
- &LEFT
|
||||
@ -32,7 +32,7 @@ Given a sample.yml file of:
|
||||
r: 10
|
||||
- &SMALL
|
||||
r: 1
|
||||
- !!merge <<: *CENTER
|
||||
- !!merge <<: *CENTRE
|
||||
r: 10
|
||||
```
|
||||
then
|
||||
@ -288,7 +288,7 @@ see https://yaml.org/type/merge.html. This has the correct data, but the wrong k
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- &CENTER
|
||||
- &CENTRE
|
||||
x: 1
|
||||
y: 2
|
||||
- &LEFT
|
||||
@ -299,7 +299,7 @@ Given a sample.yml file of:
|
||||
- &SMALL
|
||||
r: 1
|
||||
- !!merge <<:
|
||||
- *CENTER
|
||||
- *CENTRE
|
||||
- *BIG
|
||||
```
|
||||
then
|
||||
@ -318,7 +318,7 @@ see https://yaml.org/type/merge.html. This has the correct data, but the wrong k
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- &CENTER
|
||||
- &CENTRE
|
||||
x: 1
|
||||
y: 2
|
||||
- &LEFT
|
||||
@ -401,7 +401,7 @@ Taken from https://yaml.org/type/merge.html. Same values as legacy, but with the
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- &CENTER
|
||||
- &CENTRE
|
||||
x: 1
|
||||
y: 2
|
||||
- &LEFT
|
||||
@ -412,7 +412,7 @@ Given a sample.yml file of:
|
||||
- &SMALL
|
||||
r: 1
|
||||
- !!merge <<:
|
||||
- *CENTER
|
||||
- *CENTRE
|
||||
- *BIG
|
||||
```
|
||||
then
|
||||
@ -432,7 +432,7 @@ Taken from https://yaml.org/type/merge.html. Same values as legacy, but with the
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- &CENTER
|
||||
- &CENTRE
|
||||
x: 1
|
||||
y: 2
|
||||
- &LEFT
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
|
||||
Various operators for parsing and manipulating dates.
|
||||
|
||||
## Date time formattings
|
||||
## Date time formatting
|
||||
This uses Golang's built in time library for parsing and formatting date times.
|
||||
|
||||
When not specified, the RFC3339 standard is assumed `2006-01-02T15:04:05Z07:00` for parsing.
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
|
||||
Various operators for parsing and manipulating dates.
|
||||
|
||||
## Date time formattings
|
||||
## Date time formatting
|
||||
This uses Golang's built in time library for parsing and formatting date times.
|
||||
|
||||
When not specified, the RFC3339 standard is assumed `2006-01-02T15:04:05Z07:00` for parsing.
|
||||
|
||||
@ -79,6 +79,46 @@ will output
|
||||
c: cat
|
||||
```
|
||||
|
||||
## Get the top (root) parent
|
||||
Use negative numbers to get the top parents. You can think of this as indexing into the 'parents' array above
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a:
|
||||
b:
|
||||
c: cat
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.a.b.c | parent(-1)' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
a:
|
||||
b:
|
||||
c: cat
|
||||
```
|
||||
|
||||
## Root
|
||||
Alias for parent(-1), returns the top level parent. This is usually the document node.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a:
|
||||
b:
|
||||
c: cat
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.a.b.c | root' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
a:
|
||||
b:
|
||||
c: cat
|
||||
```
|
||||
|
||||
## N-th parent
|
||||
You can optionally supply the number of levels to go up for the parent, the default being 1.
|
||||
|
||||
@ -116,6 +156,25 @@ a:
|
||||
c: cat
|
||||
```
|
||||
|
||||
## N-th negative
|
||||
Similarly, use negative numbers to index backwards from the parents array
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a:
|
||||
b:
|
||||
c: cat
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.a.b.c | parent(-2)' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
b:
|
||||
c: cat
|
||||
```
|
||||
|
||||
## No parent
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
|
||||
@ -7,7 +7,7 @@ HCL is commonly used in HashiCorp tools like Terraform for configuration files.
|
||||
- String interpolation and expressions (preserved without quotes)
|
||||
- Comments (leading, head, and line comments)
|
||||
- Nested structures (maps and lists)
|
||||
- Syntax colorization when enabled
|
||||
- Syntax colorisation when enabled
|
||||
|
||||
|
||||
## Parse HCL
|
||||
|
||||
@ -7,5 +7,5 @@ HCL is commonly used in HashiCorp tools like Terraform for configuration files.
|
||||
- String interpolation and expressions (preserved without quotes)
|
||||
- Comments (leading, head, and line comments)
|
||||
- Nested structures (maps and lists)
|
||||
- Syntax colorization when enabled
|
||||
- Syntax colorisation when enabled
|
||||
|
||||
|
||||
9
pkg/yqlib/doc/usage/headers/kyaml.md
Normal file
9
pkg/yqlib/doc/usage/headers/kyaml.md
Normal file
@ -0,0 +1,9 @@
|
||||
# KYaml
|
||||
|
||||
Encode and decode to and from KYaml (a restricted subset of YAML that uses flow-style collections).
|
||||
|
||||
KYaml is useful when you want YAML data rendered in a compact, JSON-like form while still supporting YAML features like comments.
|
||||
|
||||
Notes:
|
||||
- Strings are always double-quoted in KYaml output.
|
||||
- Anchors and aliases are expanded (KYaml output does not emit them).
|
||||
253
pkg/yqlib/doc/usage/kyaml.md
Normal file
253
pkg/yqlib/doc/usage/kyaml.md
Normal file
@ -0,0 +1,253 @@
|
||||
# KYaml
|
||||
|
||||
Encode and decode to and from KYaml (a restricted subset of YAML that uses flow-style collections).
|
||||
|
||||
KYaml is useful when you want YAML data rendered in a compact, JSON-like form while still supporting YAML features like comments.
|
||||
|
||||
Notes:
|
||||
- Strings are always double-quoted in KYaml output.
|
||||
- Anchors and aliases are expanded (KYaml output does not emit them).
|
||||
|
||||
## Encode kyaml: plain string scalar
|
||||
Strings are always double-quoted in KYaml output.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
cat
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
"cat"
|
||||
```
|
||||
|
||||
## encode flow mapping and sequence
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a: b
|
||||
c:
|
||||
- d
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
{
|
||||
a: "b",
|
||||
c: [
|
||||
"d",
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## encode non-string scalars
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a: 12
|
||||
b: true
|
||||
c: null
|
||||
d: "true"
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
{
|
||||
a: 12,
|
||||
b: true,
|
||||
c: null,
|
||||
d: "true",
|
||||
}
|
||||
```
|
||||
|
||||
## quote non-identifier keys
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
"1a": b
|
||||
"has space": c
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
{
|
||||
"1a": "b",
|
||||
"has space": "c",
|
||||
}
|
||||
```
|
||||
|
||||
## escape quoted strings
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a: "line1\nline2\t\"q\""
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
{
|
||||
a: "line1\nline2\t\"q\"",
|
||||
}
|
||||
```
|
||||
|
||||
## preserve comments when encoding
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
# leading
|
||||
a: 1 # a line
|
||||
# head b
|
||||
b: 2
|
||||
c:
|
||||
# head d
|
||||
- d # d line
|
||||
- e
|
||||
# trailing
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
# leading
|
||||
{
|
||||
a: 1, # a line
|
||||
# head b
|
||||
b: 2,
|
||||
c: [
|
||||
# head d
|
||||
"d", # d line
|
||||
"e",
|
||||
],
|
||||
# trailing
|
||||
}
|
||||
```
|
||||
|
||||
## Encode kyaml: anchors and aliases
|
||||
KYaml output does not support anchors/aliases; they are expanded to concrete values.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
base: &base
|
||||
a: b
|
||||
copy: *base
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
{
|
||||
base: {
|
||||
a: "b",
|
||||
},
|
||||
copy: {
|
||||
a: "b",
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Encode kyaml: yaml to kyaml shows formatting differences
|
||||
KYaml uses flow-style collections (braces/brackets) and explicit commas.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
person:
|
||||
name: John
|
||||
pets:
|
||||
- cat
|
||||
- dog
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
{
|
||||
person: {
|
||||
name: "John",
|
||||
pets: [
|
||||
"cat",
|
||||
"dog",
|
||||
],
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Encode kyaml: nested lists of objects
|
||||
Lists and objects can be nested arbitrarily; KYaml always uses flow-style collections.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- name: a
|
||||
items:
|
||||
- id: 1
|
||||
tags:
|
||||
- k: x
|
||||
v: y
|
||||
- k: x2
|
||||
v: y2
|
||||
- id: 2
|
||||
tags:
|
||||
- k: z
|
||||
v: w
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
[
|
||||
{
|
||||
name: "a",
|
||||
items: [
|
||||
{
|
||||
id: 1,
|
||||
tags: [
|
||||
{
|
||||
k: "x",
|
||||
v: "y",
|
||||
},
|
||||
{
|
||||
k: "x2",
|
||||
v: "y2",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
tags: [
|
||||
{
|
||||
k: "z",
|
||||
v: "w",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
```
|
||||
|
||||
@ -141,3 +141,246 @@ will output
|
||||
dependencies: {}
|
||||
```
|
||||
|
||||
## Roundtrip: inline table attribute
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
name = { first = "Tom", last = "Preston-Werner" }
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
name = { first = "Tom", last = "Preston-Werner" }
|
||||
```
|
||||
|
||||
## Roundtrip: table section
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
[owner.contact]
|
||||
name = "Tom"
|
||||
age = 36
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
[owner.contact]
|
||||
name = "Tom"
|
||||
age = 36
|
||||
```
|
||||
|
||||
## Roundtrip: array of tables
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
[[fruits]]
|
||||
name = "apple"
|
||||
[[fruits.varieties]]
|
||||
name = "red delicious"
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
[[fruits]]
|
||||
name = "apple"
|
||||
[[fruits.varieties]]
|
||||
name = "red delicious"
|
||||
```
|
||||
|
||||
## Roundtrip: arrays and scalars
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
A = ["hello", ["world", "again"]]
|
||||
B = 12
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
A = ["hello", ["world", "again"]]
|
||||
B = 12
|
||||
```
|
||||
|
||||
## Roundtrip: simple
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
A = "hello"
|
||||
B = 12
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
A = "hello"
|
||||
B = 12
|
||||
```
|
||||
|
||||
## Roundtrip: deep paths
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
[person]
|
||||
name = "hello"
|
||||
address = "12 cat st"
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
[person]
|
||||
name = "hello"
|
||||
address = "12 cat st"
|
||||
```
|
||||
|
||||
## Roundtrip: empty array
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
A = []
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
A = []
|
||||
```
|
||||
|
||||
## Roundtrip: sample table
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
var = "x"
|
||||
|
||||
[owner.contact]
|
||||
name = "Tom Preston-Werner"
|
||||
age = 36
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
var = "x"
|
||||
|
||||
[owner.contact]
|
||||
name = "Tom Preston-Werner"
|
||||
age = 36
|
||||
```
|
||||
|
||||
## Roundtrip: empty table
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
[dependencies]
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
[dependencies]
|
||||
```
|
||||
|
||||
## Roundtrip: comments
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
# This is a comment
|
||||
A = "hello" # inline comment
|
||||
B = 12
|
||||
|
||||
# Table comment
|
||||
[person]
|
||||
name = "Tom" # name comment
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
# This is a comment
|
||||
A = "hello" # inline comment
|
||||
B = 12
|
||||
|
||||
# Table comment
|
||||
[person]
|
||||
name = "Tom" # name comment
|
||||
```
|
||||
|
||||
## Roundtrip: sample from web
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
# This is a TOML document
|
||||
title = "TOML Example"
|
||||
|
||||
[owner]
|
||||
name = "Tom Preston-Werner"
|
||||
dob = 1979-05-27T07:32:00-08:00
|
||||
|
||||
[database]
|
||||
enabled = true
|
||||
ports = [8000, 8001, 8002]
|
||||
data = [["delta", "phi"], [3.14]]
|
||||
temp_targets = { cpu = 79.5, case = 72.0 }
|
||||
|
||||
# [servers] yq can't do this one yet
|
||||
[servers.alpha]
|
||||
ip = "10.0.0.1"
|
||||
role = "frontend"
|
||||
|
||||
[servers.beta]
|
||||
ip = "10.0.0.2"
|
||||
role = "backend"
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
# This is a TOML document
|
||||
title = "TOML Example"
|
||||
|
||||
[owner]
|
||||
name = "Tom Preston-Werner"
|
||||
dob = 1979-05-27T07:32:00-08:00
|
||||
|
||||
[database]
|
||||
enabled = true
|
||||
ports = [8000, 8001, 8002]
|
||||
data = [["delta", "phi"], [3.14]]
|
||||
temp_targets = { cpu = 79.5, case = 72.0 }
|
||||
|
||||
# [servers] yq can't do this one yet
|
||||
[servers.alpha]
|
||||
ip = "10.0.0.1"
|
||||
role = "frontend"
|
||||
|
||||
[servers.beta]
|
||||
ip = "10.0.0.2"
|
||||
role = "backend"
|
||||
```
|
||||
|
||||
|
||||
@ -1,7 +1,12 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
"github.com/fatih/color"
|
||||
)
|
||||
|
||||
type Encoder interface {
|
||||
@ -25,3 +30,63 @@ func mapKeysToStrings(node *CandidateNode) {
|
||||
mapKeysToStrings(child)
|
||||
}
|
||||
}
|
||||
|
||||
// Some funcs are shared between encoder_yaml and encoder_kyaml
|
||||
func PrintYAMLDocumentSeparator(writer io.Writer, PrintDocSeparators bool) error {
|
||||
if PrintDocSeparators {
|
||||
log.Debug("writing doc sep")
|
||||
if err := writeString(writer, "---\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
func PrintYAMLLeadingContent(writer io.Writer, content string, PrintDocSeparators bool, ColorsEnabled bool) error {
|
||||
reader := bufio.NewReader(strings.NewReader(content))
|
||||
|
||||
// reuse precompiled package-level regex
|
||||
// (declared in decoder_yaml.go)
|
||||
|
||||
for {
|
||||
|
||||
readline, errReading := reader.ReadString('\n')
|
||||
if errReading != nil && !errors.Is(errReading, io.EOF) {
|
||||
return errReading
|
||||
}
|
||||
if strings.Contains(readline, "$yqDocSeparator$") {
|
||||
// Preserve the original line ending (CRLF or LF)
|
||||
lineEnding := "\n"
|
||||
if strings.HasSuffix(readline, "\r\n") {
|
||||
lineEnding = "\r\n"
|
||||
}
|
||||
if PrintDocSeparators {
|
||||
if err := writeString(writer, "---"+lineEnding); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
if len(readline) > 0 && readline != "\n" && readline[0] != '%' && !commentLineRe.MatchString(readline) {
|
||||
readline = "# " + readline
|
||||
}
|
||||
if ColorsEnabled && strings.TrimSpace(readline) != "" {
|
||||
readline = format(color.FgHiBlack) + readline + format(color.Reset)
|
||||
}
|
||||
if err := writeString(writer, readline); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if errors.Is(errReading, io.EOF) {
|
||||
if readline != "" {
|
||||
// the last comment we read didn't have a newline, put one in
|
||||
if err := writeString(writer, "\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -43,6 +43,9 @@ func (he *hclEncoder) PrintLeadingContent(_ io.Writer, _ string) error {
|
||||
|
||||
func (he *hclEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
log.Debugf("I need to encode %v", NodeToString(node))
|
||||
if node.Kind == ScalarNode {
|
||||
return writeString(writer, node.Value+"\n")
|
||||
}
|
||||
|
||||
f := hclwrite.NewEmptyFile()
|
||||
body := f.Body()
|
||||
@ -63,8 +66,8 @@ func (he *hclEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
finalOutput := he.injectComments(compactOutput, commentMap)
|
||||
|
||||
if he.prefs.ColorsEnabled {
|
||||
colorized := he.colorizeHcl(finalOutput)
|
||||
_, err := writer.Write(colorized)
|
||||
colourized := he.colorizeHcl(finalOutput)
|
||||
_, err := writer.Write(colourized)
|
||||
return err
|
||||
}
|
||||
|
||||
@ -170,19 +173,18 @@ func (he *hclEncoder) injectComments(output []byte, commentMap map[string]string
|
||||
return []byte(result)
|
||||
}
|
||||
|
||||
// colorizeHcl applies syntax highlighting to HCL output using fatih/color
|
||||
func (he *hclEncoder) colorizeHcl(input []byte) []byte {
|
||||
hcl := string(input)
|
||||
result := strings.Builder{}
|
||||
|
||||
// Create color functions for different token types
|
||||
// Create colour functions for different token types
|
||||
commentColor := color.New(color.FgHiBlack).SprintFunc()
|
||||
stringColor := color.New(color.FgGreen).SprintFunc()
|
||||
numberColor := color.New(color.FgHiMagenta).SprintFunc()
|
||||
keyColor := color.New(color.FgCyan).SprintFunc()
|
||||
boolColor := color.New(color.FgHiMagenta).SprintFunc()
|
||||
|
||||
// Simple tokenization for HCL coloring
|
||||
// Simple tokenization for HCL colouring
|
||||
i := 0
|
||||
for i < len(hcl) {
|
||||
ch := hcl[i]
|
||||
@ -473,12 +475,12 @@ func (he *hclEncoder) encodeBlockIfMapping(body *hclwrite.Body, key string, valu
|
||||
}
|
||||
}
|
||||
|
||||
// If all child values are mappings, treat each child key as a labeled instance of this block type
|
||||
// If all child values are mappings, treat each child key as a labelled instance of this block type
|
||||
if handled, _ := he.encodeMappingChildrenAsBlocks(body, key, valueNode); handled {
|
||||
return true
|
||||
}
|
||||
|
||||
// No labels detected, render as unlabeled block
|
||||
// No labels detected, render as unlabelled block
|
||||
block := body.AppendNewBlock(key, nil)
|
||||
if err := he.encodeNodeAttributes(block.Body(), valueNode); err == nil {
|
||||
return true
|
||||
@ -490,7 +492,7 @@ func (he *hclEncoder) encodeBlockIfMapping(body *hclwrite.Body, key string, valu
|
||||
// encodeNode encodes a CandidateNode directly to HCL, preserving style information
|
||||
func (he *hclEncoder) encodeNode(body *hclwrite.Body, node *CandidateNode) error {
|
||||
if node.Kind != MappingNode {
|
||||
return fmt.Errorf("HCL encoder expects a mapping at the root level")
|
||||
return fmt.Errorf("HCL encoder expects a mapping at the root level, got %v", kindToString(node.Kind))
|
||||
}
|
||||
|
||||
for i := 0; i < len(node.Content); i += 2 {
|
||||
|
||||
318
pkg/yqlib/encoder_kyaml.go
Normal file
318
pkg/yqlib/encoder_kyaml.go
Normal file
@ -0,0 +1,318 @@
|
||||
//go:build !yq_nokyaml
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type kyamlEncoder struct {
|
||||
prefs KYamlPreferences
|
||||
}
|
||||
|
||||
func NewKYamlEncoder(prefs KYamlPreferences) Encoder {
|
||||
return &kyamlEncoder{prefs: prefs}
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) CanHandleAliases() bool {
|
||||
// KYAML is a restricted subset; avoid emitting anchors/aliases.
|
||||
return false
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) PrintDocumentSeparator(writer io.Writer) error {
|
||||
return PrintYAMLDocumentSeparator(writer, ke.prefs.PrintDocSeparators)
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) PrintLeadingContent(writer io.Writer, content string) error {
|
||||
return PrintYAMLLeadingContent(writer, content, ke.prefs.PrintDocSeparators, ke.prefs.ColorsEnabled)
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
log.Debug("encoderKYaml - going to print %v", NodeToString(node))
|
||||
if node.Kind == ScalarNode && ke.prefs.UnwrapScalar {
|
||||
return writeString(writer, node.Value+"\n")
|
||||
}
|
||||
|
||||
destination := writer
|
||||
tempBuffer := bytes.NewBuffer(nil)
|
||||
if ke.prefs.ColorsEnabled {
|
||||
destination = tempBuffer
|
||||
}
|
||||
|
||||
// Mirror the YAML encoder behaviour: trailing comments on the document root
|
||||
// are stored in FootComment and need to be printed after the document.
|
||||
trailingContent := node.FootComment
|
||||
|
||||
if err := ke.writeCommentBlock(destination, node.HeadComment, 0); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.writeNode(destination, node, 0); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.writeInlineComment(destination, node.LineComment); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := writeString(destination, "\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.PrintLeadingContent(destination, trailingContent); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if ke.prefs.ColorsEnabled {
|
||||
return colorizeAndPrint(tempBuffer.Bytes(), writer)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) writeNode(writer io.Writer, node *CandidateNode, indent int) error {
|
||||
switch node.Kind {
|
||||
case MappingNode:
|
||||
return ke.writeMapping(writer, node, indent)
|
||||
case SequenceNode:
|
||||
return ke.writeSequence(writer, node, indent)
|
||||
case ScalarNode:
|
||||
return writeString(writer, ke.formatScalar(node))
|
||||
case AliasNode:
|
||||
// Should have been exploded by the printer, but handle defensively.
|
||||
if node.Alias == nil {
|
||||
return writeString(writer, "null")
|
||||
}
|
||||
return ke.writeNode(writer, node.Alias, indent)
|
||||
default:
|
||||
return writeString(writer, "null")
|
||||
}
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) writeMapping(writer io.Writer, node *CandidateNode, indent int) error {
|
||||
if len(node.Content) == 0 {
|
||||
return writeString(writer, "{}")
|
||||
}
|
||||
if err := writeString(writer, "{\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for i := 0; i+1 < len(node.Content); i += 2 {
|
||||
keyNode := node.Content[i]
|
||||
valueNode := node.Content[i+1]
|
||||
|
||||
entryIndent := indent + ke.prefs.Indent
|
||||
if err := ke.writeCommentBlock(writer, keyNode.HeadComment, entryIndent); err != nil {
|
||||
return err
|
||||
}
|
||||
if valueNode.HeadComment != "" && valueNode.HeadComment != keyNode.HeadComment {
|
||||
if err := ke.writeCommentBlock(writer, valueNode.HeadComment, entryIndent); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err := ke.writeIndent(writer, entryIndent); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := writeString(writer, ke.formatKey(keyNode)); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := writeString(writer, ": "); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.writeNode(writer, valueNode, entryIndent); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Always emit a trailing comma; KYAML encourages explicit separators,
|
||||
// and this ensures all quoted strings have a trailing `",` as requested.
|
||||
if err := writeString(writer, ","); err != nil {
|
||||
return err
|
||||
}
|
||||
inline := valueNode.LineComment
|
||||
if inline == "" {
|
||||
inline = keyNode.LineComment
|
||||
}
|
||||
if err := ke.writeInlineComment(writer, inline); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := writeString(writer, "\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
foot := valueNode.FootComment
|
||||
if foot == "" {
|
||||
foot = keyNode.FootComment
|
||||
}
|
||||
if err := ke.writeCommentBlock(writer, foot, entryIndent); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err := ke.writeIndent(writer, indent); err != nil {
|
||||
return err
|
||||
}
|
||||
return writeString(writer, "}")
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) writeSequence(writer io.Writer, node *CandidateNode, indent int) error {
|
||||
if len(node.Content) == 0 {
|
||||
return writeString(writer, "[]")
|
||||
}
|
||||
if err := writeString(writer, "[\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, child := range node.Content {
|
||||
itemIndent := indent + ke.prefs.Indent
|
||||
if err := ke.writeCommentBlock(writer, child.HeadComment, itemIndent); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.writeIndent(writer, itemIndent); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.writeNode(writer, child, itemIndent); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := writeString(writer, ","); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.writeInlineComment(writer, child.LineComment); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := writeString(writer, "\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.writeCommentBlock(writer, child.FootComment, itemIndent); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err := ke.writeIndent(writer, indent); err != nil {
|
||||
return err
|
||||
}
|
||||
return writeString(writer, "]")
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) writeIndent(writer io.Writer, indent int) error {
|
||||
if indent <= 0 {
|
||||
return nil
|
||||
}
|
||||
return writeString(writer, strings.Repeat(" ", indent))
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) formatKey(keyNode *CandidateNode) string {
|
||||
// KYAML examples use bare keys. Quote keys only when needed.
|
||||
key := keyNode.Value
|
||||
if isValidKYamlBareKey(key) {
|
||||
return key
|
||||
}
|
||||
return `"` + escapeDoubleQuotedString(key) + `"`
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) formatScalar(node *CandidateNode) string {
|
||||
switch node.Tag {
|
||||
case "!!null":
|
||||
return "null"
|
||||
case "!!bool":
|
||||
return strings.ToLower(node.Value)
|
||||
case "!!int", "!!float":
|
||||
return node.Value
|
||||
case "!!str":
|
||||
return `"` + escapeDoubleQuotedString(node.Value) + `"`
|
||||
default:
|
||||
// Fall back to a string representation to avoid implicit typing surprises.
|
||||
return `"` + escapeDoubleQuotedString(node.Value) + `"`
|
||||
}
|
||||
}
|
||||
|
||||
var kyamlBareKeyRe = regexp.MustCompile(`^[A-Za-z_][A-Za-z0-9_-]*$`)
|
||||
|
||||
func isValidKYamlBareKey(s string) bool {
|
||||
// Conservative: require an identifier-like key; otherwise quote.
|
||||
if s == "" {
|
||||
return false
|
||||
}
|
||||
return kyamlBareKeyRe.MatchString(s)
|
||||
}
|
||||
|
||||
func escapeDoubleQuotedString(s string) string {
|
||||
var b strings.Builder
|
||||
b.Grow(len(s) + 2)
|
||||
|
||||
for _, r := range s {
|
||||
switch r {
|
||||
case '\\':
|
||||
b.WriteString(`\\`)
|
||||
case '"':
|
||||
b.WriteString(`\"`)
|
||||
case '\n':
|
||||
b.WriteString(`\n`)
|
||||
case '\r':
|
||||
b.WriteString(`\r`)
|
||||
case '\t':
|
||||
b.WriteString(`\t`)
|
||||
default:
|
||||
if r < 0x20 {
|
||||
// YAML double-quoted strings support \uXXXX escapes.
|
||||
b.WriteString(`\u`)
|
||||
hex := "0000" + strings.ToUpper(strconv.FormatInt(int64(r), 16))
|
||||
b.WriteString(hex[len(hex)-4:])
|
||||
} else {
|
||||
b.WriteRune(r)
|
||||
}
|
||||
}
|
||||
}
|
||||
return b.String()
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) writeCommentBlock(writer io.Writer, comment string, indent int) error {
|
||||
if strings.TrimSpace(comment) == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
lines := strings.Split(strings.ReplaceAll(comment, "\r\n", "\n"), "\n")
|
||||
for _, line := range lines {
|
||||
trimmed := strings.TrimSpace(line)
|
||||
if trimmed == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
if err := ke.writeIndent(writer, indent); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
toWrite := line
|
||||
if !commentLineRe.MatchString(toWrite) {
|
||||
toWrite = "# " + toWrite
|
||||
}
|
||||
if err := writeString(writer, toWrite); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := writeString(writer, "\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) writeInlineComment(writer io.Writer, comment string) error {
|
||||
comment = strings.TrimSpace(strings.ReplaceAll(comment, "\r\n", "\n"))
|
||||
if comment == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
lines := strings.Split(comment, "\n")
|
||||
first := strings.TrimSpace(lines[0])
|
||||
if first == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
if !strings.HasPrefix(first, "#") {
|
||||
first = "# " + first
|
||||
}
|
||||
|
||||
if err := writeString(writer, " "); err != nil {
|
||||
return err
|
||||
}
|
||||
return writeString(writer, first)
|
||||
}
|
||||
@ -57,7 +57,13 @@ func (pe *shellVariablesEncoder) doEncode(w *io.Writer, node *CandidateNode, pat
|
||||
// let's just pick a fallback key to use if we are encoding a single scalar
|
||||
nonemptyPath = "value"
|
||||
}
|
||||
_, err := io.WriteString(*w, nonemptyPath+"="+quoteValue(node.Value)+"\n")
|
||||
var valueString string
|
||||
if pe.prefs.UnwrapScalar {
|
||||
valueString = node.Value
|
||||
} else {
|
||||
valueString = quoteValue(node.Value)
|
||||
}
|
||||
_, err := io.WriteString(*w, nonemptyPath+"="+valueString+"\n")
|
||||
return err
|
||||
case SequenceNode:
|
||||
for index, child := range node.Content {
|
||||
|
||||
@ -135,3 +135,36 @@ func TestShellVariablesEncoderCustomSeparatorArray(t *testing.T) {
|
||||
func TestShellVariablesEncoderCustomSeparatorSingleChar(t *testing.T) {
|
||||
assertEncodesToWithSeparator(t, "a:\n b: value", "aXb=value", "X")
|
||||
}
|
||||
|
||||
func assertEncodesToUnwrapped(t *testing.T, yaml string, shellvars string) {
|
||||
var output bytes.Buffer
|
||||
writer := bufio.NewWriter(&output)
|
||||
|
||||
originalUnwrapScalar := ConfiguredShellVariablesPreferences.UnwrapScalar
|
||||
defer func() {
|
||||
ConfiguredShellVariablesPreferences.UnwrapScalar = originalUnwrapScalar
|
||||
}()
|
||||
|
||||
ConfiguredShellVariablesPreferences.UnwrapScalar = true
|
||||
|
||||
var encoder = NewShellVariablesEncoder()
|
||||
inputs, err := readDocuments(strings.NewReader(yaml), "test.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
node := inputs.Front().Value.(*CandidateNode)
|
||||
err = encoder.Encode(writer, node)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
writer.Flush()
|
||||
|
||||
test.AssertResult(t, shellvars, strings.TrimSuffix(output.String(), "\n"))
|
||||
}
|
||||
|
||||
func TestShellVariablesEncoderUnwrapScalar(t *testing.T) {
|
||||
assertEncodesToUnwrapped(t, "a: Lewis Carroll", "a=Lewis Carroll")
|
||||
assertEncodesToUnwrapped(t, "b: 123", "b=123")
|
||||
assertEncodesToUnwrapped(t, "c: true", "c=true")
|
||||
assertEncodesToUnwrapped(t, "d: value with spaces", "d=value with spaces")
|
||||
}
|
||||
|
||||
@ -1,22 +1,58 @@
|
||||
//go:build !yq_notoml
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
"github.com/fatih/color"
|
||||
)
|
||||
|
||||
type tomlEncoder struct {
|
||||
wroteRootAttr bool // Track if we wrote root-level attributes before tables
|
||||
prefs TomlPreferences
|
||||
}
|
||||
|
||||
func NewTomlEncoder() Encoder {
|
||||
return &tomlEncoder{}
|
||||
return NewTomlEncoderWithPrefs(ConfiguredTomlPreferences)
|
||||
}
|
||||
|
||||
func NewTomlEncoderWithPrefs(prefs TomlPreferences) Encoder {
|
||||
return &tomlEncoder{prefs: prefs}
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
if node.Kind == ScalarNode {
|
||||
return writeString(writer, node.Value+"\n")
|
||||
if node.Kind != MappingNode {
|
||||
// For standalone selections, TOML tests expect raw value for scalars
|
||||
if node.Kind == ScalarNode {
|
||||
return writeString(writer, node.Value+"\n")
|
||||
}
|
||||
return fmt.Errorf("TOML encoder expects a mapping at the root level")
|
||||
}
|
||||
return fmt.Errorf("only scalars (e.g. strings, numbers, booleans) are supported for TOML output at the moment. Please use yaml output format (-oy) until the encoder has been fully implemented")
|
||||
|
||||
// Encode to a buffer first if colors are enabled
|
||||
var buf bytes.Buffer
|
||||
var targetWriter io.Writer
|
||||
targetWriter = writer
|
||||
if te.prefs.ColorsEnabled {
|
||||
targetWriter = &buf
|
||||
}
|
||||
|
||||
// Encode a root mapping as a sequence of attributes, tables, and arrays of tables
|
||||
if err := te.encodeRootMapping(targetWriter, node); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if te.prefs.ColorsEnabled {
|
||||
colourised := te.colorizeToml(buf.Bytes())
|
||||
_, err := writer.Write(colourised)
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) PrintDocumentSeparator(_ io.Writer) error {
|
||||
@ -30,3 +66,725 @@ func (te *tomlEncoder) PrintLeadingContent(_ io.Writer, _ string) error {
|
||||
func (te *tomlEncoder) CanHandleAliases() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// ---- helpers ----
|
||||
|
||||
func (te *tomlEncoder) writeComment(w io.Writer, comment string) error {
|
||||
if comment == "" {
|
||||
return nil
|
||||
}
|
||||
lines := strings.Split(comment, "\n")
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
if !strings.HasPrefix(line, "#") {
|
||||
line = "# " + line
|
||||
}
|
||||
if _, err := w.Write([]byte(line + "\n")); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) formatScalar(node *CandidateNode) string {
|
||||
switch node.Tag {
|
||||
case "!!str":
|
||||
// Quote strings per TOML spec
|
||||
return fmt.Sprintf("%q", node.Value)
|
||||
case "!!bool", "!!int", "!!float":
|
||||
return node.Value
|
||||
case "!!null":
|
||||
// TOML does not have null; encode as empty string
|
||||
return `""`
|
||||
default:
|
||||
return node.Value
|
||||
}
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) encodeRootMapping(w io.Writer, node *CandidateNode) error {
|
||||
te.wroteRootAttr = false // Reset state
|
||||
|
||||
// Write root head comment if present (at the very beginning, no leading blank line)
|
||||
if node.HeadComment != "" {
|
||||
if err := te.writeComment(w, node.HeadComment); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Preserve existing order by iterating Content
|
||||
for i := 0; i < len(node.Content); i += 2 {
|
||||
keyNode := node.Content[i]
|
||||
valNode := node.Content[i+1]
|
||||
if err := te.encodeTopLevelEntry(w, []string{keyNode.Value}, valNode); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// encodeTopLevelEntry encodes a key/value at the root, dispatching to attribute, table, or array-of-tables
// output depending on the value node's kind.
//
// path is the dotted table path accumulated so far (a single element at the
// root); the last path element is used as the attribute/table key.
// Returns an error for empty paths, alias nodes, or unsupported node kinds.
func (te *tomlEncoder) encodeTopLevelEntry(w io.Writer, path []string, node *CandidateNode) error {
	if len(path) == 0 {
		return fmt.Errorf("cannot encode TOML entry with empty path")
	}

	switch node.Kind {
	case ScalarNode:
		// key = value
		return te.writeAttribute(w, path[len(path)-1], node)
	case SequenceNode:
		// Empty arrays should be encoded as [] attributes
		if len(node.Content) == 0 {
			return te.writeArrayAttribute(w, path[len(path)-1], node)
		}

		// If all items are mappings => array of tables; else => array attribute
		allMaps := true
		for _, it := range node.Content {
			if it.Kind != MappingNode {
				allMaps = false
				break
			}
		}
		if allMaps {
			key := path[len(path)-1]
			for _, it := range node.Content {
				// [[key]] then body
				if _, err := w.Write([]byte("[[" + key + "]]\n")); err != nil {
					return err
				}
				if err := te.encodeMappingBodyWithPath(w, []string{key}, it); err != nil {
					return err
				}
			}
			return nil
		}
		// Regular array attribute
		return te.writeArrayAttribute(w, path[len(path)-1], node)
	case MappingNode:
		// Inline table if not EncodeSeparate, else emit separate tables/arrays of tables for children under this path
		if !node.EncodeSeparate {
			// If children contain mappings or arrays of mappings, prefer separate sections
			// even when the node itself was not explicitly flagged.
			if te.hasEncodeSeparateChild(node) || te.hasStructuralChildren(node) {
				return te.encodeSeparateMapping(w, path, node)
			}
			return te.writeInlineTableAttribute(w, path[len(path)-1], node)
		}
		return te.encodeSeparateMapping(w, path, node)
	default:
		return fmt.Errorf("unsupported node kind for TOML: %v", node.Kind)
	}
}
|
||||
|
||||
func (te *tomlEncoder) writeAttribute(w io.Writer, key string, value *CandidateNode) error {
|
||||
te.wroteRootAttr = true // Mark that we wrote a root attribute
|
||||
|
||||
// Write head comment before the attribute
|
||||
if err := te.writeComment(w, value.HeadComment); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Write the attribute
|
||||
line := key + " = " + te.formatScalar(value)
|
||||
|
||||
// Add line comment if present
|
||||
if value.LineComment != "" {
|
||||
lineComment := strings.TrimSpace(value.LineComment)
|
||||
if !strings.HasPrefix(lineComment, "#") {
|
||||
lineComment = "# " + lineComment
|
||||
}
|
||||
line += " " + lineComment
|
||||
}
|
||||
|
||||
_, err := w.Write([]byte(line + "\n"))
|
||||
return err
|
||||
}
|
||||
|
||||
// writeArrayAttribute writes a sequence node as a "key = [...]" attribute.
//
// Three output shapes are produced:
//   - an empty sequence becomes "key = []";
//   - if any element carries a head comment, a multiline array is written so
//     comments can sit above their elements;
//   - otherwise a single-line inline array is written.
//
// The node's own head comment is written above the attribute; its line
// comment (normalised to start with "#") is appended after it.
func (te *tomlEncoder) writeArrayAttribute(w io.Writer, key string, seq *CandidateNode) error {
	te.wroteRootAttr = true // Mark that we wrote a root attribute

	// Write head comment before the array
	if err := te.writeComment(w, seq.HeadComment); err != nil {
		return err
	}

	// Handle empty arrays
	if len(seq.Content) == 0 {
		line := key + " = []"
		if seq.LineComment != "" {
			lineComment := strings.TrimSpace(seq.LineComment)
			if !strings.HasPrefix(lineComment, "#") {
				lineComment = "# " + lineComment
			}
			line += " " + lineComment
		}
		_, err := w.Write([]byte(line + "\n"))
		return err
	}

	// Check if any array elements have head comments - if so, use multiline format
	hasElementComments := false
	for _, it := range seq.Content {
		if it.HeadComment != "" {
			hasElementComments = true
			break
		}
	}

	if hasElementComments {
		// Write multiline array format with comments
		if _, err := w.Write([]byte(key + " = [\n")); err != nil {
			return err
		}

		for i, it := range seq.Content {
			// Write head comment for this element: one "#"-prefixed line per
			// non-blank comment line, indented to match the element.
			if it.HeadComment != "" {
				commentLines := strings.Split(it.HeadComment, "\n")
				for _, commentLine := range commentLines {
					if strings.TrimSpace(commentLine) != "" {
						if !strings.HasPrefix(strings.TrimSpace(commentLine), "#") {
							commentLine = "# " + commentLine
						}
						if _, err := w.Write([]byte(" " + commentLine + "\n")); err != nil {
							return err
						}
					}
				}
			}

			// Write the element value (scalars, nested arrays and inline
			// tables are rendered to a string first).
			var itemStr string
			switch it.Kind {
			case ScalarNode:
				itemStr = te.formatScalar(it)
			case SequenceNode:
				nested, err := te.sequenceToInlineArray(it)
				if err != nil {
					return err
				}
				itemStr = nested
			case MappingNode:
				inline, err := te.mappingToInlineTable(it)
				if err != nil {
					return err
				}
				itemStr = inline
			case AliasNode:
				return fmt.Errorf("aliases are not supported in TOML")
			default:
				return fmt.Errorf("unsupported array item kind: %v", it.Kind)
			}

			// Always add trailing comma in multiline arrays
			itemStr += ","

			if _, err := w.Write([]byte(" " + itemStr + "\n")); err != nil {
				return err
			}

			// Add blank line between elements (except after the last one)
			if i < len(seq.Content)-1 {
				if _, err := w.Write([]byte("\n")); err != nil {
					return err
				}
			}
		}

		if _, err := w.Write([]byte("]\n")); err != nil {
			return err
		}
		return nil
	}

	// Join scalars or nested arrays recursively into TOML array syntax
	items := make([]string, 0, len(seq.Content))
	for _, it := range seq.Content {
		switch it.Kind {
		case ScalarNode:
			items = append(items, te.formatScalar(it))
		case SequenceNode:
			// Nested arrays: encode inline
			nested, err := te.sequenceToInlineArray(it)
			if err != nil {
				return err
			}
			items = append(items, nested)
		case MappingNode:
			// Inline table inside array
			inline, err := te.mappingToInlineTable(it)
			if err != nil {
				return err
			}
			items = append(items, inline)
		case AliasNode:
			return fmt.Errorf("aliases are not supported in TOML")
		default:
			return fmt.Errorf("unsupported array item kind: %v", it.Kind)
		}
	}

	line := key + " = [" + strings.Join(items, ", ") + "]"

	// Add line comment if present
	if seq.LineComment != "" {
		lineComment := strings.TrimSpace(seq.LineComment)
		if !strings.HasPrefix(lineComment, "#") {
			lineComment = "# " + lineComment
		}
		line += " " + lineComment
	}

	_, err := w.Write([]byte(line + "\n"))
	return err
}
|
||||
|
||||
func (te *tomlEncoder) sequenceToInlineArray(seq *CandidateNode) (string, error) {
|
||||
items := make([]string, 0, len(seq.Content))
|
||||
for _, it := range seq.Content {
|
||||
switch it.Kind {
|
||||
case ScalarNode:
|
||||
items = append(items, te.formatScalar(it))
|
||||
case SequenceNode:
|
||||
nested, err := te.sequenceToInlineArray(it)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
items = append(items, nested)
|
||||
case MappingNode:
|
||||
inline, err := te.mappingToInlineTable(it)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
items = append(items, inline)
|
||||
default:
|
||||
return "", fmt.Errorf("unsupported array item kind: %v", it.Kind)
|
||||
}
|
||||
}
|
||||
return "[" + strings.Join(items, ", ") + "]", nil
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) mappingToInlineTable(m *CandidateNode) (string, error) {
|
||||
// key = { a = 1, b = "x" }
|
||||
parts := make([]string, 0, len(m.Content)/2)
|
||||
for i := 0; i < len(m.Content); i += 2 {
|
||||
k := m.Content[i].Value
|
||||
v := m.Content[i+1]
|
||||
switch v.Kind {
|
||||
case ScalarNode:
|
||||
parts = append(parts, fmt.Sprintf("%s = %s", k, te.formatScalar(v)))
|
||||
case SequenceNode:
|
||||
// inline array in inline table
|
||||
arr, err := te.sequenceToInlineArray(v)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
parts = append(parts, fmt.Sprintf("%s = %s", k, arr))
|
||||
case MappingNode:
|
||||
// nested inline table
|
||||
inline, err := te.mappingToInlineTable(v)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
parts = append(parts, fmt.Sprintf("%s = %s", k, inline))
|
||||
default:
|
||||
return "", fmt.Errorf("unsupported inline table value kind: %v", v.Kind)
|
||||
}
|
||||
}
|
||||
return "{ " + strings.Join(parts, ", ") + " }", nil
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) writeInlineTableAttribute(w io.Writer, key string, m *CandidateNode) error {
|
||||
inline, err := te.mappingToInlineTable(m)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = w.Write([]byte(key + " = " + inline + "\n"))
|
||||
return err
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) writeTableHeader(w io.Writer, path []string, m *CandidateNode) error {
|
||||
// Add blank line before table header (or before comment if present) if we wrote root attributes
|
||||
needsBlankLine := te.wroteRootAttr
|
||||
if needsBlankLine {
|
||||
if _, err := w.Write([]byte("\n")); err != nil {
|
||||
return err
|
||||
}
|
||||
te.wroteRootAttr = false // Only add once
|
||||
}
|
||||
|
||||
// Write head comment before the table header
|
||||
if m.HeadComment != "" {
|
||||
if err := te.writeComment(w, m.HeadComment); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Write table header [a.b.c]
|
||||
header := "[" + strings.Join(path, ".") + "]\n"
|
||||
_, err := w.Write([]byte(header))
|
||||
return err
|
||||
}
|
||||
|
||||
// encodeSeparateMapping handles a mapping that should be encoded as table sections.
// It emits the table header for this mapping if it has any content, then processes children.
//
// Strategy: if the mapping has direct attributes (scalars or non-table
// arrays) — or is empty — a "[path]" header is written and the whole body is
// delegated to encodeMappingBodyWithPath. Otherwise the mapping consists only
// of nested structures, so each child is emitted as its own "[path.key]"
// table or "[[path.key]]" array-of-tables section without a header for the
// parent itself.
func (te *tomlEncoder) encodeSeparateMapping(w io.Writer, path []string, m *CandidateNode) error {
	// Check if this mapping has any non-mapping, non-array-of-tables children (i.e., attributes)
	hasAttrs := false
	for i := 0; i < len(m.Content); i += 2 {
		v := m.Content[i+1]
		if v.Kind == ScalarNode {
			hasAttrs = true
			break
		}
		if v.Kind == SequenceNode {
			// Check if it's NOT an array of tables
			allMaps := true
			for _, it := range v.Content {
				if it.Kind != MappingNode {
					allMaps = false
					break
				}
			}
			if !allMaps {
				hasAttrs = true
				break
			}
		}
	}

	// If there are attributes or if the mapping is empty, emit the table header
	if hasAttrs || len(m.Content) == 0 {
		if err := te.writeTableHeader(w, path, m); err != nil {
			return err
		}
		if err := te.encodeMappingBodyWithPath(w, path, m); err != nil {
			return err
		}
		return nil
	}

	// No attributes, just nested structures - process children
	for i := 0; i < len(m.Content); i += 2 {
		k := m.Content[i].Value
		v := m.Content[i+1]
		switch v.Kind {
		case MappingNode:
			// Emit [path.k]
			newPath := append(append([]string{}, path...), k)
			if err := te.writeTableHeader(w, newPath, v); err != nil {
				return err
			}
			if err := te.encodeMappingBodyWithPath(w, newPath, v); err != nil {
				return err
			}
		case SequenceNode:
			// If sequence of maps, emit [[path.k]] per element
			allMaps := true
			for _, it := range v.Content {
				if it.Kind != MappingNode {
					allMaps = false
					break
				}
			}
			if allMaps {
				key := strings.Join(append(append([]string{}, path...), k), ".")
				for _, it := range v.Content {
					if _, err := w.Write([]byte("[[" + key + "]]\n")); err != nil {
						return err
					}
					if err := te.encodeMappingBodyWithPath(w, append(append([]string{}, path...), k), it); err != nil {
						return err
					}
				}
			} else {
				// Regular array attribute under the current table path
				if err := te.writeArrayAttribute(w, k, v); err != nil {
					return err
				}
			}
		case ScalarNode:
			// Attributes directly under the current table path
			// (unreachable when hasAttrs was false, kept for safety).
			if err := te.writeAttribute(w, k, v); err != nil {
				return err
			}
		}
	}
	return nil
}
|
||||
|
||||
func (te *tomlEncoder) hasEncodeSeparateChild(m *CandidateNode) bool {
|
||||
for i := 0; i < len(m.Content); i += 2 {
|
||||
v := m.Content[i+1]
|
||||
if v.Kind == MappingNode && v.EncodeSeparate {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) hasStructuralChildren(m *CandidateNode) bool {
|
||||
for i := 0; i < len(m.Content); i += 2 {
|
||||
v := m.Content[i+1]
|
||||
// Only consider it structural if mapping has EncodeSeparate or is non-empty
|
||||
if v.Kind == MappingNode && v.EncodeSeparate {
|
||||
return true
|
||||
}
|
||||
if v.Kind == SequenceNode {
|
||||
allMaps := true
|
||||
for _, it := range v.Content {
|
||||
if it.Kind != MappingNode {
|
||||
allMaps = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if allMaps {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// encodeMappingBodyWithPath encodes attributes and nested arrays of tables using full dotted path context.
//
// Output is produced in three ordered passes so simple attributes always
// precede sub-sections within a table body:
//  1. scalar attributes and non-table arrays,
//  2. arrays of tables ([[path.key]] blocks, recursing per element),
//  3. child mappings not flagged EncodeSeparate, written as inline tables.
func (te *tomlEncoder) encodeMappingBodyWithPath(w io.Writer, path []string, m *CandidateNode) error {
	// First, attributes (scalars and non-map arrays)
	for i := 0; i < len(m.Content); i += 2 {
		k := m.Content[i].Value
		v := m.Content[i+1]
		switch v.Kind {
		case ScalarNode:
			if err := te.writeAttribute(w, k, v); err != nil {
				return err
			}
		case SequenceNode:
			allMaps := true
			for _, it := range v.Content {
				if it.Kind != MappingNode {
					allMaps = false
					break
				}
			}
			// NOTE(review): an empty sequence passes the all-maps test, is
			// skipped here, and emits nothing in pass 2 either — empty arrays
			// inside a table appear to be dropped; confirm this is intended.
			if !allMaps {
				if err := te.writeArrayAttribute(w, k, v); err != nil {
					return err
				}
			}
		}
	}

	// Then, nested arrays of tables with full path
	for i := 0; i < len(m.Content); i += 2 {
		k := m.Content[i].Value
		v := m.Content[i+1]
		if v.Kind == SequenceNode {
			allMaps := true
			for _, it := range v.Content {
				if it.Kind != MappingNode {
					allMaps = false
					break
				}
			}
			if allMaps {
				dotted := strings.Join(append(append([]string{}, path...), k), ".")
				for _, it := range v.Content {
					if _, err := w.Write([]byte("[[" + dotted + "]]\n")); err != nil {
						return err
					}
					if err := te.encodeMappingBodyWithPath(w, append(append([]string{}, path...), k), it); err != nil {
						return err
					}
				}
			}
		}
	}

	// Finally, child mappings that are not marked EncodeSeparate get inlined as attributes
	for i := 0; i < len(m.Content); i += 2 {
		k := m.Content[i].Value
		v := m.Content[i+1]
		if v.Kind == MappingNode && !v.EncodeSeparate {
			if err := te.writeInlineTableAttribute(w, k, v); err != nil {
				return err
			}
		}
	}
	return nil
}
|
||||
|
||||
// colorizeToml applies syntax highlighting to TOML output using fatih/color.
//
// It makes a single pass over the rendered TOML text with a simple hand-rolled
// lexer, wrapping comments, table headers, strings, numbers, booleans and keys
// in ANSI colour codes. It is an approximation, not a full TOML parser.
func (te *tomlEncoder) colorizeToml(input []byte) []byte {
	toml := string(input)
	result := strings.Builder{}

	// Force color output (don't check for TTY)
	// NOTE(review): color.NoColor is package-global state; setting it here
	// affects any other fatih/color user in the process — confirm acceptable.
	color.NoColor = false

	// Create color functions for different token types
	// Use EnableColor() to ensure colors work even when NO_COLOR env is set
	commentColorObj := color.New(color.FgHiBlack)
	commentColorObj.EnableColor()
	stringColorObj := color.New(color.FgGreen)
	stringColorObj.EnableColor()
	numberColorObj := color.New(color.FgHiMagenta)
	numberColorObj.EnableColor()
	keyColorObj := color.New(color.FgCyan)
	keyColorObj.EnableColor()
	boolColorObj := color.New(color.FgHiMagenta)
	boolColorObj.EnableColor()
	sectionColorObj := color.New(color.FgYellow, color.Bold)
	sectionColorObj.EnableColor()

	commentColor := commentColorObj.SprintFunc()
	stringColor := stringColorObj.SprintFunc()
	numberColor := numberColorObj.SprintFunc()
	keyColor := keyColorObj.SprintFunc()
	boolColor := boolColorObj.SprintFunc()
	sectionColor := sectionColorObj.SprintFunc()

	// Simple tokenization for TOML colouring
	i := 0
	for i < len(toml) {
		ch := toml[i]

		// Comments - from # to end of line
		if ch == '#' {
			end := i
			for end < len(toml) && toml[end] != '\n' {
				end++
			}
			result.WriteString(commentColor(toml[i:end]))
			i = end
			continue
		}

		// Table sections - [section] or [[array]]
		// Only treat '[' as a table section if it appears at the start of the line
		// (possibly after whitespace). This avoids mis-colouring inline arrays like
		// "ports = [8000, 8001]" as table sections.
		if ch == '[' {
			isSectionHeader := true
			if i > 0 {
				isSectionHeader = false
				// Scan backwards to the previous newline (or start of input).
				j := i - 1
				for j >= 0 && toml[j] != '\n' {
					if toml[j] != ' ' && toml[j] != '\t' && toml[j] != '\r' {
						// Found a non-whitespace character before this '[' on the same line,
						// so this is not a table header.
						break
					}
					j--
				}
				if j < 0 || toml[j] == '\n' {
					// Reached the start of the string or a newline without encountering
					// any non-whitespace, so '[' is at the logical start of the line.
					isSectionHeader = true
				}
			}
			if isSectionHeader {
				end := i + 1
				// Check for [[
				if end < len(toml) && toml[end] == '[' {
					end++
				}
				// Find closing ]
				for end < len(toml) && toml[end] != ']' {
					end++
				}
				// Include closing ]
				if end < len(toml) {
					end++
					// Check for ]]
					if end < len(toml) && toml[end] == ']' {
						end++
					}
				}
				result.WriteString(sectionColor(toml[i:end]))
				i = end
				continue
			}
		}

		// Strings - quoted text (double or single quotes)
		if ch == '"' || ch == '\'' {
			quote := ch
			end := i + 1
			for end < len(toml) {
				if toml[end] == quote {
					break
				}
				if toml[end] == '\\' && end+1 < len(toml) {
					// Skip the backslash and the escaped character
					end += 2
					continue
				}
				end++
			}
			if end < len(toml) {
				end++ // include closing quote
			}
			result.WriteString(stringColor(toml[i:end]))
			i = end
			continue
		}

		// Numbers - sequences of digits, possibly with decimal point or minus
		if (ch >= '0' && ch <= '9') || (ch == '-' && i+1 < len(toml) && toml[i+1] >= '0' && toml[i+1] <= '9') {
			end := i
			if ch == '-' {
				end++
			}
			for end < len(toml) {
				c := toml[end]
				if (c >= '0' && c <= '9') || c == '.' || c == 'e' || c == 'E' {
					end++
				} else if (c == '+' || c == '-') && end > 0 && (toml[end-1] == 'e' || toml[end-1] == 'E') {
					// Only allow + or - immediately after 'e' or 'E' for scientific notation
					end++
				} else {
					break
				}
			}
			result.WriteString(numberColor(toml[i:end]))
			i = end
			continue
		}

		// Identifiers/keys - alphanumeric + underscore + dash
		if (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '_' {
			end := i
			for end < len(toml) && ((toml[end] >= 'a' && toml[end] <= 'z') ||
				(toml[end] >= 'A' && toml[end] <= 'Z') ||
				(toml[end] >= '0' && toml[end] <= '9') ||
				toml[end] == '_' || toml[end] == '-') {
				end++
			}
			ident := toml[i:end]

			// Check if this is a boolean/null keyword
			switch ident {
			case "true", "false":
				result.WriteString(boolColor(ident))
			default:
				// Check if followed by = or whitespace then = (it's a key)
				j := end
				for j < len(toml) && (toml[j] == ' ' || toml[j] == '\t') {
					j++
				}
				if j < len(toml) && toml[j] == '=' {
					result.WriteString(keyColor(ident))
				} else {
					result.WriteString(ident) // plain text for other identifiers
				}
			}
			i = end
			continue
		}

		// Everything else (whitespace, operators, brackets) - no color
		result.WriteByte(ch)
		i++
	}

	return []byte(result.String())
}
|
||||
|
||||
@ -1,13 +1,10 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"errors"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
"github.com/fatih/color"
|
||||
"go.yaml.in/yaml/v4"
|
||||
)
|
||||
|
||||
@ -24,63 +21,11 @@ func (ye *yamlEncoder) CanHandleAliases() bool {
|
||||
}
|
||||
|
||||
func (ye *yamlEncoder) PrintDocumentSeparator(writer io.Writer) error {
|
||||
if ye.prefs.PrintDocSeparators {
|
||||
log.Debug("writing doc sep")
|
||||
if err := writeString(writer, "---\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
return PrintYAMLDocumentSeparator(writer, ye.prefs.PrintDocSeparators)
|
||||
}
|
||||
|
||||
func (ye *yamlEncoder) PrintLeadingContent(writer io.Writer, content string) error {
|
||||
reader := bufio.NewReader(strings.NewReader(content))
|
||||
|
||||
// reuse precompiled package-level regex
|
||||
// (declared in decoder_yaml.go)
|
||||
|
||||
for {
|
||||
|
||||
readline, errReading := reader.ReadString('\n')
|
||||
if errReading != nil && !errors.Is(errReading, io.EOF) {
|
||||
return errReading
|
||||
}
|
||||
if strings.Contains(readline, "$yqDocSeparator$") {
|
||||
// Preserve the original line ending (CRLF or LF)
|
||||
lineEnding := "\n"
|
||||
if strings.HasSuffix(readline, "\r\n") {
|
||||
lineEnding = "\r\n"
|
||||
}
|
||||
if ye.prefs.PrintDocSeparators {
|
||||
if err := writeString(writer, "---"+lineEnding); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
if len(readline) > 0 && readline != "\n" && readline[0] != '%' && !commentLineRe.MatchString(readline) {
|
||||
readline = "# " + readline
|
||||
}
|
||||
if ye.prefs.ColorsEnabled && strings.TrimSpace(readline) != "" {
|
||||
readline = format(color.FgHiBlack) + readline + format(color.Reset)
|
||||
}
|
||||
if err := writeString(writer, readline); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if errors.Is(errReading, io.EOF) {
|
||||
if readline != "" {
|
||||
// the last comment we read didn't have a newline, put one in
|
||||
if err := writeString(writer, "\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
return PrintYAMLLeadingContent(writer, content, ye.prefs.PrintDocSeparators, ye.prefs.ColorsEnabled)
|
||||
}
|
||||
|
||||
func (ye *yamlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
@ -107,6 +52,9 @@ func (ye *yamlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
var encoder = yaml.NewEncoder(destination)
|
||||
|
||||
encoder.SetIndent(ye.prefs.Indent)
|
||||
if ye.prefs.CompactSequenceIndent {
|
||||
encoder.CompactSeqIndent()
|
||||
}
|
||||
|
||||
target, err := node.MarshalYAML()
|
||||
|
||||
|
||||
@ -7,7 +7,15 @@ import (
|
||||
)
|
||||
|
||||
func tryRenameFile(from string, to string) error {
|
||||
if renameError := os.Rename(from, to); renameError != nil {
|
||||
if info, err := os.Lstat(to); err == nil && info.Mode()&os.ModeSymlink != 0 {
|
||||
log.Debug("Target file is symlink, skipping rename and attempting to copy contents")
|
||||
|
||||
if copyError := copyFileContents(from, to); copyError != nil {
|
||||
return fmt.Errorf("failed copying from %v to %v: %w", from, to, copyError)
|
||||
}
|
||||
tryRemoveTempFile(from)
|
||||
return nil
|
||||
} else if renameError := os.Rename(from, to); renameError != nil {
|
||||
log.Debugf("Error renaming from %v to %v, attempting to copy contents", from, to)
|
||||
log.Debug(renameError.Error())
|
||||
log.Debug("going to try copying instead")
|
||||
|
||||
@ -22,6 +22,12 @@ var YamlFormat = &Format{"yaml", []string{"y", "yml"},
|
||||
func() Decoder { return NewYamlDecoder(ConfiguredYamlPreferences) },
|
||||
}
|
||||
|
||||
var KYamlFormat = &Format{"kyaml", []string{"ky"},
|
||||
func() Encoder { return NewKYamlEncoder(ConfiguredKYamlPreferences) },
|
||||
// KYaml is stricter YAML
|
||||
func() Decoder { return NewYamlDecoder(ConfiguredYamlPreferences) },
|
||||
}
|
||||
|
||||
var JSONFormat = &Format{"json", []string{"j"},
|
||||
func() Encoder { return NewJSONEncoder(ConfiguredJSONPreferences) },
|
||||
func() Decoder { return NewJSONDecoder() },
|
||||
@ -63,11 +69,11 @@ var ShFormat = &Format{"", nil,
|
||||
}
|
||||
|
||||
var TomlFormat = &Format{"toml", []string{},
|
||||
func() Encoder { return NewTomlEncoder() },
|
||||
func() Encoder { return NewTomlEncoderWithPrefs(ConfiguredTomlPreferences) },
|
||||
func() Decoder { return NewTomlDecoder() },
|
||||
}
|
||||
|
||||
var HclFormat = &Format{"hcl", []string{"h"},
|
||||
var HclFormat = &Format{"hcl", []string{"h", "tf"},
|
||||
func() Encoder { return NewHclEncoder(ConfiguredHclPreferences) },
|
||||
func() Decoder { return NewHclDecoder() },
|
||||
}
|
||||
@ -89,6 +95,7 @@ var INIFormat = &Format{"ini", []string{"i"},
|
||||
|
||||
var Formats = []*Format{
|
||||
YamlFormat,
|
||||
KYamlFormat,
|
||||
JSONFormat,
|
||||
PropertiesFormat,
|
||||
CSVFormat,
|
||||
|
||||
@ -4,6 +4,7 @@ package yqlib
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
@ -325,6 +326,14 @@ var hclFormatScenarios = []formatScenario{
|
||||
expected: "# Configuration\nport = 8080\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: extraction",
|
||||
skipDoc: true,
|
||||
input: simpleSample,
|
||||
expression: ".shouty_message",
|
||||
expected: "upper(message)\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: With templates, functions and arithmetic",
|
||||
input: simpleSample,
|
||||
@ -535,6 +544,35 @@ func documentHclRoundTripScenario(w *bufio.Writer, s formatScenario) {
|
||||
writeOrPanic(w, fmt.Sprintf("```hcl\n%v```\n\n", mustProcessFormatScenario(s, NewHclDecoder(), NewHclEncoder(ConfiguredHclPreferences))))
|
||||
}
|
||||
|
||||
func TestHclEncoderPrintDocumentSeparator(t *testing.T) {
|
||||
encoder := NewHclEncoder(ConfiguredHclPreferences)
|
||||
var buf bytes.Buffer
|
||||
writer := bufio.NewWriter(&buf)
|
||||
|
||||
err := encoder.PrintDocumentSeparator(writer)
|
||||
writer.Flush()
|
||||
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "", buf.String())
|
||||
}
|
||||
|
||||
func TestHclEncoderPrintLeadingContent(t *testing.T) {
|
||||
encoder := NewHclEncoder(ConfiguredHclPreferences)
|
||||
var buf bytes.Buffer
|
||||
writer := bufio.NewWriter(&buf)
|
||||
|
||||
err := encoder.PrintLeadingContent(writer, "some content")
|
||||
writer.Flush()
|
||||
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "", buf.String())
|
||||
}
|
||||
|
||||
func TestHclEncoderCanHandleAliases(t *testing.T) {
|
||||
encoder := NewHclEncoder(ConfiguredHclPreferences)
|
||||
test.AssertResult(t, false, encoder.CanHandleAliases())
|
||||
}
|
||||
|
||||
func TestHclFormatScenarios(t *testing.T) {
|
||||
for _, tt := range hclFormatScenarios {
|
||||
testHclScenario(t, tt)
|
||||
|
||||
30
pkg/yqlib/kyaml.go
Normal file
30
pkg/yqlib/kyaml.go
Normal file
@ -0,0 +1,30 @@
|
||||
//go:build !yq_nokyaml
|
||||
|
||||
package yqlib
|
||||
|
||||
// KYamlPreferences holds the output options for the KYaml encoder.
type KYamlPreferences struct {
	Indent             int  // spaces per nesting level
	ColorsEnabled      bool // emit ANSI colour codes
	PrintDocSeparators bool // write "---" between documents
	UnwrapScalar       bool // print bare scalars without quoting/structure
}

// NewDefaultKYamlPreferences returns the standard KYaml settings:
// two-space indent, colours off, document separators on, scalars unwrapped.
func NewDefaultKYamlPreferences() KYamlPreferences {
	return KYamlPreferences{
		Indent:             2,
		ColorsEnabled:      false,
		PrintDocSeparators: true,
		UnwrapScalar:       true,
	}
}

// Copy returns an independent value copy of the preferences.
func (p *KYamlPreferences) Copy() KYamlPreferences {
	// Every field is a plain value type, so a shallow copy is a full copy.
	return *p
}

// ConfiguredKYamlPreferences is the process-wide KYaml configuration,
// initialised to the defaults.
var ConfiguredKYamlPreferences = NewDefaultKYamlPreferences()
|
||||
542
pkg/yqlib/kyaml_test.go
Normal file
542
pkg/yqlib/kyaml_test.go
Normal file
@ -0,0 +1,542 @@
|
||||
//go:build !yq_nokyaml
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
)
|
||||
|
||||
// ansiRe matches a single ANSI SGR escape sequence (e.g. "\x1b[31m").
var ansiRe = regexp.MustCompile(`\x1b\[[0-9;]*m`)

// stripANSI removes every ANSI colour escape from s so test expectations can
// be compared as plain text.
func stripANSI(s string) string {
	cleaned := ansiRe.ReplaceAllString(s, "")
	return cleaned
}
|
||||
|
||||
var kyamlFormatScenarios = []formatScenario{
|
||||
{
|
||||
description: "Encode kyaml: plain string scalar",
|
||||
subdescription: "Strings are always double-quoted in KYaml output.",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "cat\n",
|
||||
expected: "\"cat\"\n",
|
||||
},
|
||||
{
|
||||
description: "encode plain int scalar",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "12\n",
|
||||
expected: "12\n",
|
||||
skipDoc: true,
|
||||
},
|
||||
{
|
||||
description: "encode plain bool scalar",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "true\n",
|
||||
expected: "true\n",
|
||||
skipDoc: true,
|
||||
},
|
||||
{
|
||||
description: "encode plain null scalar",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "null\n",
|
||||
expected: "null\n",
|
||||
skipDoc: true,
|
||||
},
|
||||
{
|
||||
description: "encode flow mapping and sequence",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "a: b\nc:\n - d\n",
|
||||
expected: "{\n" +
|
||||
" a: \"b\",\n" +
|
||||
" c: [\n" +
|
||||
" \"d\",\n" +
|
||||
" ],\n" +
|
||||
"}\n",
|
||||
},
|
||||
{
|
||||
description: "encode non-string scalars",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "a: 12\n" +
|
||||
"b: true\n" +
|
||||
"c: null\n" +
|
||||
"d: \"true\"\n",
|
||||
expected: "{\n" +
|
||||
" a: 12,\n" +
|
||||
" b: true,\n" +
|
||||
" c: null,\n" +
|
||||
" d: \"true\",\n" +
|
||||
"}\n",
|
||||
},
|
||||
{
|
||||
description: "quote non-identifier keys",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "\"1a\": b\n\"has space\": c\n",
|
||||
expected: "{\n" +
|
||||
" \"1a\": \"b\",\n" +
|
||||
" \"has space\": \"c\",\n" +
|
||||
"}\n",
|
||||
},
|
||||
{
|
||||
description: "escape quoted strings",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "a: \"line1\\nline2\\t\\\"q\\\"\"\n",
|
||||
expected: "{\n" +
|
||||
" a: \"line1\\nline2\\t\\\"q\\\"\",\n" +
|
||||
"}\n",
|
||||
},
|
||||
{
|
||||
description: "preserve comments when encoding",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "# leading\n" +
|
||||
"a: 1 # a line\n" +
|
||||
"# head b\n" +
|
||||
"b: 2\n" +
|
||||
"c:\n" +
|
||||
" # head d\n" +
|
||||
" - d # d line\n" +
|
||||
" - e\n" +
|
||||
"# trailing\n",
|
||||
expected: "# leading\n" +
|
||||
"{\n" +
|
||||
" a: 1, # a line\n" +
|
||||
" # head b\n" +
|
||||
" b: 2,\n" +
|
||||
" c: [\n" +
|
||||
" # head d\n" +
|
||||
" \"d\", # d line\n" +
|
||||
" \"e\",\n" +
|
||||
" ],\n" +
|
||||
" # trailing\n" +
|
||||
"}\n",
|
||||
},
|
||||
{
|
||||
description: "Encode kyaml: anchors and aliases",
|
||||
subdescription: "KYaml output does not support anchors/aliases; they are expanded to concrete values.",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "base: &base\n" +
|
||||
" a: b\n" +
|
||||
"copy: *base\n",
|
||||
expected: "{\n" +
|
||||
" base: {\n" +
|
||||
" a: \"b\",\n" +
|
||||
" },\n" +
|
||||
" copy: {\n" +
|
||||
" a: \"b\",\n" +
|
||||
" },\n" +
|
||||
"}\n",
|
||||
},
|
||||
{
|
||||
description: "Encode kyaml: yaml to kyaml shows formatting differences",
|
||||
subdescription: "KYaml uses flow-style collections (braces/brackets) and explicit commas.",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "person:\n" +
|
||||
" name: John\n" +
|
||||
" pets:\n" +
|
||||
" - cat\n" +
|
||||
" - dog\n",
|
||||
expected: "{\n" +
|
||||
" person: {\n" +
|
||||
" name: \"John\",\n" +
|
||||
" pets: [\n" +
|
||||
" \"cat\",\n" +
|
||||
" \"dog\",\n" +
|
||||
" ],\n" +
|
||||
" },\n" +
|
||||
"}\n",
|
||||
},
|
||||
{
|
||||
description: "Encode kyaml: nested lists of objects",
|
||||
subdescription: "Lists and objects can be nested arbitrarily; KYaml always uses flow-style collections.",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "- name: a\n" +
|
||||
" items:\n" +
|
||||
" - id: 1\n" +
|
||||
" tags:\n" +
|
||||
" - k: x\n" +
|
||||
" v: y\n" +
|
||||
" - k: x2\n" +
|
||||
" v: y2\n" +
|
||||
" - id: 2\n" +
|
||||
" tags:\n" +
|
||||
" - k: z\n" +
|
||||
" v: w\n",
|
||||
expected: "[\n" +
|
||||
" {\n" +
|
||||
" name: \"a\",\n" +
|
||||
" items: [\n" +
|
||||
" {\n" +
|
||||
" id: 1,\n" +
|
||||
" tags: [\n" +
|
||||
" {\n" +
|
||||
" k: \"x\",\n" +
|
||||
" v: \"y\",\n" +
|
||||
" },\n" +
|
||||
" {\n" +
|
||||
" k: \"x2\",\n" +
|
||||
" v: \"y2\",\n" +
|
||||
" },\n" +
|
||||
" ],\n" +
|
||||
" },\n" +
|
||||
" {\n" +
|
||||
" id: 2,\n" +
|
||||
" tags: [\n" +
|
||||
" {\n" +
|
||||
" k: \"z\",\n" +
|
||||
" v: \"w\",\n" +
|
||||
" },\n" +
|
||||
" ],\n" +
|
||||
" },\n" +
|
||||
" ],\n" +
|
||||
" },\n" +
|
||||
"]\n",
|
||||
},
|
||||
}
|
||||
|
||||
func testKYamlScenario(t *testing.T, s formatScenario) {
|
||||
prefs := ConfiguredKYamlPreferences.Copy()
|
||||
prefs.Indent = s.indent
|
||||
prefs.UnwrapScalar = false
|
||||
|
||||
switch s.scenarioType {
|
||||
case "encode":
|
||||
test.AssertResultWithContext(
|
||||
t,
|
||||
s.expected,
|
||||
mustProcessFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewKYamlEncoder(prefs)),
|
||||
s.description,
|
||||
)
|
||||
default:
|
||||
panic(fmt.Sprintf("unhandled scenario type %q", s.scenarioType))
|
||||
}
|
||||
}
|
||||
|
||||
func documentKYamlScenario(_ *testing.T, w *bufio.Writer, i interface{}) {
|
||||
s := i.(formatScenario)
|
||||
if s.skipDoc {
|
||||
return
|
||||
}
|
||||
|
||||
switch s.scenarioType {
|
||||
case "encode":
|
||||
documentKYamlEncodeScenario(w, s)
|
||||
default:
|
||||
panic(fmt.Sprintf("unhandled scenario type %q", s.scenarioType))
|
||||
}
|
||||
}
|
||||
|
||||
func documentKYamlEncodeScenario(w *bufio.Writer, s formatScenario) {
|
||||
writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))
|
||||
|
||||
if s.subdescription != "" {
|
||||
writeOrPanic(w, s.subdescription)
|
||||
writeOrPanic(w, "\n\n")
|
||||
}
|
||||
|
||||
writeOrPanic(w, "Given a sample.yml file of:\n")
|
||||
writeOrPanic(w, fmt.Sprintf("```yaml\n%v\n```\n", s.input))
|
||||
|
||||
writeOrPanic(w, "then\n")
|
||||
|
||||
expression := s.expression
|
||||
if expression == "" {
|
||||
expression = "."
|
||||
}
|
||||
|
||||
if s.indent == 2 {
|
||||
writeOrPanic(w, fmt.Sprintf("```bash\nyq -o=kyaml '%v' sample.yml\n```\n", expression))
|
||||
} else {
|
||||
writeOrPanic(w, fmt.Sprintf("```bash\nyq -o=kyaml -I=%v '%v' sample.yml\n```\n", s.indent, expression))
|
||||
}
|
||||
|
||||
writeOrPanic(w, "will output\n")
|
||||
|
||||
prefs := ConfiguredKYamlPreferences.Copy()
|
||||
prefs.Indent = s.indent
|
||||
prefs.UnwrapScalar = false
|
||||
|
||||
writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", mustProcessFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewKYamlEncoder(prefs))))
|
||||
}
|
||||
|
||||
func TestKYamlFormatScenarios(t *testing.T) {
|
||||
for _, s := range kyamlFormatScenarios {
|
||||
testKYamlScenario(t, s)
|
||||
}
|
||||
|
||||
genericScenarios := make([]interface{}, len(kyamlFormatScenarios))
|
||||
for i, s := range kyamlFormatScenarios {
|
||||
genericScenarios[i] = s
|
||||
}
|
||||
documentScenarios(t, "usage", "kyaml", genericScenarios, documentKYamlScenario)
|
||||
}
|
||||
|
||||
func TestKYamlEncoderPrintDocumentSeparator(t *testing.T) {
|
||||
t.Run("enabled", func(t *testing.T) {
|
||||
prefs := NewDefaultKYamlPreferences()
|
||||
prefs.PrintDocSeparators = true
|
||||
|
||||
var buf bytes.Buffer
|
||||
err := NewKYamlEncoder(prefs).PrintDocumentSeparator(&buf)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "---\n" {
|
||||
t.Fatalf("expected doc separator, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("disabled", func(t *testing.T) {
|
||||
prefs := NewDefaultKYamlPreferences()
|
||||
prefs.PrintDocSeparators = false
|
||||
|
||||
var buf bytes.Buffer
|
||||
err := NewKYamlEncoder(prefs).PrintDocumentSeparator(&buf)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "" {
|
||||
t.Fatalf("expected no output, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestKYamlEncoderEncodeUnwrapScalar(t *testing.T) {
|
||||
prefs := NewDefaultKYamlPreferences()
|
||||
prefs.UnwrapScalar = true
|
||||
|
||||
var buf bytes.Buffer
|
||||
err := NewKYamlEncoder(prefs).Encode(&buf, &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: "cat",
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "cat\n" {
|
||||
t.Fatalf("expected unwrapped scalar, got %q", buf.String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestKYamlEncoderEncodeColorsEnabled(t *testing.T) {
|
||||
prefs := NewDefaultKYamlPreferences()
|
||||
prefs.UnwrapScalar = false
|
||||
prefs.ColorsEnabled = true
|
||||
|
||||
var buf bytes.Buffer
|
||||
err := NewKYamlEncoder(prefs).Encode(&buf, &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Content: []*CandidateNode{
|
||||
{Kind: ScalarNode, Tag: "!!str", Value: "a"},
|
||||
{Kind: ScalarNode, Tag: "!!str", Value: "b"},
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
out := stripANSI(buf.String())
|
||||
if !strings.Contains(out, "a:") || !strings.Contains(out, "\"b\"") {
|
||||
t.Fatalf("expected colourised output to contain rendered tokens, got %q", out)
|
||||
}
|
||||
}
|
||||
|
||||
func TestKYamlEncoderWriteNodeAliasAndUnknown(t *testing.T) {
|
||||
ke := NewKYamlEncoder(NewDefaultKYamlPreferences()).(*kyamlEncoder)
|
||||
|
||||
t.Run("alias_nil", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeNode(&buf, &CandidateNode{Kind: AliasNode}, 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "null" {
|
||||
t.Fatalf("expected null for nil alias, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("alias_value", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeNode(&buf, &CandidateNode{
|
||||
Kind: AliasNode,
|
||||
Alias: &CandidateNode{Kind: ScalarNode, Tag: "!!int", Value: "12"},
|
||||
}, 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "12" {
|
||||
t.Fatalf("expected dereferenced alias value, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("unknown_kind", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeNode(&buf, &CandidateNode{Kind: Kind(12345)}, 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "null" {
|
||||
t.Fatalf("expected null for unknown kind, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestKYamlEncoderEmptyCollections(t *testing.T) {
|
||||
ke := NewKYamlEncoder(NewDefaultKYamlPreferences()).(*kyamlEncoder)
|
||||
|
||||
t.Run("empty_mapping", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeNode(&buf, &CandidateNode{Kind: MappingNode}, 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "{}" {
|
||||
t.Fatalf("expected empty mapping, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("empty_sequence", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeNode(&buf, &CandidateNode{Kind: SequenceNode}, 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "[]" {
|
||||
t.Fatalf("expected empty sequence, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestKYamlEncoderScalarFallbackAndEscaping(t *testing.T) {
|
||||
ke := NewKYamlEncoder(NewDefaultKYamlPreferences()).(*kyamlEncoder)
|
||||
|
||||
t.Run("unknown_tag_falls_back_to_string", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeNode(&buf, &CandidateNode{Kind: ScalarNode, Tag: "!!timestamp", Value: "2020-01-01T00:00:00Z"}, 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "\"2020-01-01T00:00:00Z\"" {
|
||||
t.Fatalf("expected quoted fallback, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("escape_double_quoted", func(t *testing.T) {
|
||||
got := escapeDoubleQuotedString("a\\b\"c\n\r\t" + string(rune(0x01)))
|
||||
want := "a\\\\b\\\"c\\n\\r\\t\\u0001"
|
||||
if got != want {
|
||||
t.Fatalf("expected %q, got %q", want, got)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("valid_bare_key", func(t *testing.T) {
|
||||
if isValidKYamlBareKey("") {
|
||||
t.Fatalf("expected empty string to be invalid")
|
||||
}
|
||||
if isValidKYamlBareKey("1a") {
|
||||
t.Fatalf("expected leading digit to be invalid")
|
||||
}
|
||||
if !isValidKYamlBareKey("a_b-2") {
|
||||
t.Fatalf("expected identifier-like key to be valid")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestKYamlEncoderCommentsInMapping(t *testing.T) {
|
||||
prefs := NewDefaultKYamlPreferences()
|
||||
prefs.UnwrapScalar = false
|
||||
ke := NewKYamlEncoder(prefs).(*kyamlEncoder)
|
||||
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeNode(&buf, &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Content: []*CandidateNode{
|
||||
{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: "a",
|
||||
HeadComment: "key head",
|
||||
LineComment: "key line",
|
||||
FootComment: "key foot",
|
||||
},
|
||||
{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: "b",
|
||||
HeadComment: "value head",
|
||||
},
|
||||
},
|
||||
}, 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
out := buf.String()
|
||||
if !strings.Contains(out, "# key head\n") {
|
||||
t.Fatalf("expected key head comment, got %q", out)
|
||||
}
|
||||
if !strings.Contains(out, "# value head\n") {
|
||||
t.Fatalf("expected value head comment, got %q", out)
|
||||
}
|
||||
if !strings.Contains(out, ", # key line\n") {
|
||||
t.Fatalf("expected inline key comment fallback, got %q", out)
|
||||
}
|
||||
if !strings.Contains(out, "# key foot\n") {
|
||||
t.Fatalf("expected foot comment fallback, got %q", out)
|
||||
}
|
||||
}
|
||||
|
||||
func TestKYamlEncoderCommentBlockAndInlineComment(t *testing.T) {
|
||||
ke := NewKYamlEncoder(NewDefaultKYamlPreferences()).(*kyamlEncoder)
|
||||
|
||||
t.Run("comment_block_prefixing_and_crlf", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeCommentBlock(&buf, "line1\r\n\r\n# already\r\nline2", 2)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
want := " # line1\n # already\n # line2\n"
|
||||
if buf.String() != want {
|
||||
t.Fatalf("expected %q, got %q", want, buf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("inline_comment_prefix_and_first_line_only", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeInlineComment(&buf, "hello\r\nsecond line")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != " # hello" {
|
||||
t.Fatalf("expected %q, got %q", " # hello", buf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("inline_comment_already_prefixed", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeInlineComment(&buf, "# hello")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != " # hello" {
|
||||
t.Fatalf("expected %q, got %q", " # hello", buf.String())
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -61,7 +61,7 @@ func unwrap(value string) string {
|
||||
}
|
||||
|
||||
func extractNumberParameter(value string) (int, error) {
|
||||
parameterParser := regexp.MustCompile(`.*\(([0-9]+)\)`)
|
||||
parameterParser := regexp.MustCompile(`.*\((-?[0-9]+)\)`)
|
||||
matches := parameterParser.FindStringSubmatch(value)
|
||||
var indent, errParsingInt = parseInt(matches[1])
|
||||
if errParsingInt != nil {
|
||||
|
||||
@ -57,7 +57,7 @@ var participleYqRules = []*participleYqRule{
|
||||
simpleOp("sort_?keys", sortKeysOpType),
|
||||
|
||||
{"ArrayToMap", "array_?to_?map", expressionOpToken(`(.[] | select(. != null) ) as $i ireduce({}; .[$i | key] = $i)`), 0},
|
||||
|
||||
{"Root", "root", expressionOpToken(`parent(-1)`), 0},
|
||||
{"YamlEncodeWithIndent", `to_?yaml\([0-9]+\)`, encodeParseIndent(YamlFormat), 0},
|
||||
{"XMLEncodeWithIndent", `to_?xml\([0-9]+\)`, encodeParseIndent(XMLFormat), 0},
|
||||
{"JSONEncodeWithIndent", `to_?json\([0-9]+\)`, encodeParseIndent(JSONFormat), 0},
|
||||
@ -132,7 +132,7 @@ var participleYqRules = []*participleYqRule{
|
||||
simpleOp("split", splitStringOpType),
|
||||
|
||||
simpleOp("parents", getParentsOpType),
|
||||
{"ParentWithLevel", `parent\([0-9]+\)`, parentWithLevel(), 0},
|
||||
{"ParentWithLevel", `parent\(-?[0-9]+\)`, parentWithLevel(), 0},
|
||||
{"ParentWithDefaultLevel", `parent`, parentWithDefaultLevel(), 0},
|
||||
|
||||
simpleOp("keys", keysOpType),
|
||||
@ -451,6 +451,7 @@ func multiplyWithPrefs(op *operationType) yqAction {
|
||||
prefs.AssignPrefs.ClobberCustomTags = true
|
||||
}
|
||||
prefs.TraversePrefs.DontFollowAlias = true
|
||||
prefs.TraversePrefs.ExactKeyMatch = true
|
||||
op := &Operation{OperationType: op, Value: multiplyOpType.Type, StringValue: options, Preferences: prefs}
|
||||
return &token{TokenType: operationToken, Operation: op}, nil
|
||||
}
|
||||
|
||||
7
pkg/yqlib/no_kyaml.go
Normal file
7
pkg/yqlib/no_kyaml.go
Normal file
@ -0,0 +1,7 @@
|
||||
//go:build yq_nokyaml
|
||||
|
||||
package yqlib
|
||||
|
||||
func NewKYamlEncoder(_ KYamlPreferences) Encoder {
|
||||
return nil
|
||||
}
|
||||
@ -5,3 +5,11 @@ package yqlib
|
||||
func NewTomlDecoder() Decoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewTomlEncoder() Encoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewTomlEncoderWithPrefs(prefs TomlPreferences) Encoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -4,7 +4,7 @@ import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
var specDocument = `- &CENTER { x: 1, y: 2 }
|
||||
var specDocument = `- &CENTRE { x: 1, y: 2 }
|
||||
- &LEFT { x: 0, y: 2 }
|
||||
- &BIG { r: 10 }
|
||||
- &SMALL { r: 1 }
|
||||
@ -139,7 +139,7 @@ var fixedAnchorOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
description: "FIXED: Merge multiple maps",
|
||||
subdescription: "Taken from https://yaml.org/type/merge.html. Same values as legacy, but with the correct key order.",
|
||||
document: specDocument + "- << : [ *CENTER, *BIG ]\n",
|
||||
document: specDocument + "- << : [ *CENTRE, *BIG ]\n",
|
||||
expression: ".[4] | explode(.)",
|
||||
expected: []string{"D0, P[4], (!!map)::x: 1\ny: 2\nr: 10\n"},
|
||||
},
|
||||
@ -171,7 +171,7 @@ var fixedAnchorOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "Exploding merge anchor should not explode neighbors",
|
||||
description: "Exploding merge anchor should not explode neighbours",
|
||||
subdescription: "b must not be exploded, as `r: *a` will become invalid",
|
||||
document: `{b: &b {a: &a 42}, r: *a, c: {<<: *b}}`,
|
||||
expression: `explode(.c)`,
|
||||
@ -181,7 +181,7 @@ var fixedAnchorOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "Exploding sequence merge anchor should not explode neighbors",
|
||||
description: "Exploding sequence merge anchor should not explode neighbours",
|
||||
subdescription: "b must not be exploded, as `r: *a` will become invalid",
|
||||
document: `{b: &b {a: &a 42}, r: *a, c: {<<: [*b]}}`,
|
||||
expression: `explode(.c)`,
|
||||
@ -265,7 +265,7 @@ var badAnchorOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
description: "LEGACY: Merge multiple maps",
|
||||
subdescription: "see https://yaml.org/type/merge.html. This has the correct data, but the wrong key order; set --yaml-fix-merge-anchor-to-spec=true to fix the key order.",
|
||||
document: specDocument + "- << : [ *CENTER, *BIG ]\n",
|
||||
document: specDocument + "- << : [ *CENTRE, *BIG ]\n",
|
||||
expression: ".[4] | explode(.)",
|
||||
expected: []string{"D0, P[4], (!!map)::r: 10\nx: 1\ny: 2\n"},
|
||||
},
|
||||
@ -297,7 +297,7 @@ var anchorOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
description: "Merge one map",
|
||||
subdescription: "see https://yaml.org/type/merge.html",
|
||||
document: specDocument + "- << : *CENTER\n r: 10\n",
|
||||
document: specDocument + "- << : *CENTRE\n r: 10\n",
|
||||
expression: ".[4] | explode(.)",
|
||||
expected: []string{expectedSpecResult},
|
||||
},
|
||||
|
||||
@ -30,7 +30,7 @@ func multiplyAssignOperator(d *dataTreeNavigator, context Context, expressionNod
|
||||
|
||||
func multiplyOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
log.Debugf("MultiplyOperator")
|
||||
return crossFunction(d, context, expressionNode, multiply(expressionNode.Operation.Preferences.(multiplyPreferences)), false)
|
||||
return crossFunction(d, context.ReadOnlyClone(), expressionNode, multiply(expressionNode.Operation.Preferences.(multiplyPreferences)), false)
|
||||
}
|
||||
|
||||
func getComments(lhs *CandidateNode, rhs *CandidateNode) (leadingContent string, headComment string, footComment string) {
|
||||
@ -168,7 +168,7 @@ func mergeObjects(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs
|
||||
|
||||
// only need to recurse the array if we are doing a deep merge
|
||||
prefs := recursiveDescentPreferences{RecurseArray: preferences.DeepMergeArrays,
|
||||
TraversePreferences: traversePreferences{DontFollowAlias: true, IncludeMapKeys: true}}
|
||||
TraversePreferences: traversePreferences{DontFollowAlias: true, IncludeMapKeys: true, ExactKeyMatch: true}}
|
||||
log.Debugf("merge - preferences.DeepMergeArrays %v", preferences.DeepMergeArrays)
|
||||
log.Debugf("merge - preferences.AppendArrays %v", preferences.AppendArrays)
|
||||
err := recursiveDecent(results, context.SingleChildContext(rhs), prefs)
|
||||
|
||||
@ -86,7 +86,35 @@ c:
|
||||
<<: *cat
|
||||
`
|
||||
|
||||
var mergeWithGlobA = `
|
||||
"**cat": things,
|
||||
"meow**cat": stuff
|
||||
`
|
||||
|
||||
var mergeWithGlobB = `
|
||||
"**cat": newThings,
|
||||
`
|
||||
|
||||
var multiplyOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
description: "multiple should be readonly",
|
||||
skipDoc: true,
|
||||
document: "",
|
||||
expression: ".x |= (root | (.a * .b))",
|
||||
expected: []string{
|
||||
"D0, P[], ()::x: null\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "glob keys are treated as literals when merging",
|
||||
skipDoc: true,
|
||||
document: mergeWithGlobA,
|
||||
document2: mergeWithGlobB,
|
||||
expression: `select(fi == 0) * select(fi == 1)`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!map)::\n\"**cat\": newThings,\n\"meow**cat\": stuff\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
document: mergeArrayWithAnchors,
|
||||
|
||||
@ -35,9 +35,28 @@ func getParentOperator(_ *dataTreeNavigator, context Context, expressionNode *Ex
|
||||
|
||||
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
|
||||
candidate := el.Value.(*CandidateNode)
|
||||
|
||||
// Handle negative levels: count total parents first
|
||||
levelsToGoUp := prefs.Level
|
||||
if prefs.Level < 0 {
|
||||
// Count all parents
|
||||
totalParents := 0
|
||||
temp := candidate.Parent
|
||||
for temp != nil {
|
||||
totalParents++
|
||||
temp = temp.Parent
|
||||
}
|
||||
// Convert negative index to positive
|
||||
// -1 means last parent (root), -2 means second to last, etc.
|
||||
levelsToGoUp = totalParents + prefs.Level + 1
|
||||
if levelsToGoUp < 0 {
|
||||
levelsToGoUp = 0
|
||||
}
|
||||
}
|
||||
|
||||
currentLevel := 0
|
||||
for currentLevel < prefs.Level && candidate != nil {
|
||||
log.Debugf("currentLevel: %v, desired: %v", currentLevel, prefs.Level)
|
||||
for currentLevel < levelsToGoUp && candidate != nil {
|
||||
log.Debugf("currentLevel: %v, desired: %v", currentLevel, levelsToGoUp)
|
||||
log.Debugf("candidate: %v", NodeToString(candidate))
|
||||
candidate = candidate.Parent
|
||||
currentLevel++
|
||||
|
||||
@ -38,6 +38,58 @@ var parentOperatorScenarios = []expressionScenario{
|
||||
"D0, P[], (!!seq)::- {c: cat}\n- {b: {c: cat}}\n- {a: {b: {c: cat}}}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Get the top (root) parent",
|
||||
subdescription: "Use negative numbers to get the top parents. You can think of this as indexing into the 'parents' array above",
|
||||
document: "a:\n b:\n c: cat\n",
|
||||
expression: `.a.b.c | parent(-1)`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!map)::a:\n b:\n c: cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Root",
|
||||
subdescription: "Alias for parent(-1), returns the top level parent. This is usually the document node.",
|
||||
document: "a:\n b:\n c: cat\n",
|
||||
expression: `.a.b.c | root`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!map)::a:\n b:\n c: cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "boundary negative",
|
||||
skipDoc: true,
|
||||
document: "a:\n b:\n c: cat\n",
|
||||
expression: `.a.b.c | parent(-3)`,
|
||||
expected: []string{
|
||||
"D0, P[a b], (!!map)::c: cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "large negative",
|
||||
skipDoc: true,
|
||||
document: "a:\n b:\n c: cat\n",
|
||||
expression: `.a.b.c | parent(-10)`,
|
||||
expected: []string{
|
||||
"D0, P[a b c], (!!str)::cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "parent zero",
|
||||
skipDoc: true,
|
||||
document: "a:\n b:\n c: cat\n",
|
||||
expression: `.a.b.c | parent(0)`,
|
||||
expected: []string{
|
||||
"D0, P[a b c], (!!str)::cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "large positive",
|
||||
skipDoc: true,
|
||||
document: "a:\n b:\n c: cat\n",
|
||||
expression: `.a.b.c | parent(10)`,
|
||||
expected: []string{},
|
||||
},
|
||||
{
|
||||
description: "N-th parent",
|
||||
subdescription: "You can optionally supply the number of levels to go up for the parent, the default being 1.",
|
||||
@ -55,6 +107,15 @@ var parentOperatorScenarios = []expressionScenario{
|
||||
"D0, P[], (!!map)::a:\n b:\n c: cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "N-th negative",
|
||||
subdescription: "Similarly, use negative numbers to index backwards from the parents array",
|
||||
document: "a:\n b:\n c: cat\n",
|
||||
expression: `.a.b.c | parent(-2)`,
|
||||
expected: []string{
|
||||
"D0, P[a], (!!map)::b:\n c: cat\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "No parent",
|
||||
document: `{}`,
|
||||
|
||||
@ -14,6 +14,7 @@ type traversePreferences struct {
|
||||
DontAutoCreate bool // by default, we automatically create entries on the fly.
|
||||
DontIncludeMapValues bool
|
||||
OptionalTraverse bool // e.g. .adf?
|
||||
ExactKeyMatch bool // by default we let wild/glob patterns. Don't do that for merge though.
|
||||
}
|
||||
|
||||
func splat(context Context, prefs traversePreferences) (Context, error) {
|
||||
@ -216,7 +217,11 @@ func traverseArrayWithIndices(node *CandidateNode, indices []*CandidateNode, pre
|
||||
return newMatches, nil
|
||||
}
|
||||
|
||||
func keyMatches(key *CandidateNode, wantedKey string) bool {
|
||||
func keyMatches(key *CandidateNode, wantedKey string, exactKeyMatch bool) bool {
|
||||
if exactKeyMatch {
|
||||
// this is used for merge
|
||||
return key.Value == wantedKey
|
||||
}
|
||||
return matchKey(key.Value, wantedKey)
|
||||
}
|
||||
|
||||
@ -303,7 +308,7 @@ func doTraverseMap(newMatches *orderedmap.OrderedMap, node *CandidateNode, wante
|
||||
return err
|
||||
}
|
||||
}
|
||||
} else if splat || keyMatches(key, wantedKey) {
|
||||
} else if splat || keyMatches(key, wantedKey, prefs.ExactKeyMatch) {
|
||||
log.Debug("MATCHED")
|
||||
if prefs.IncludeMapKeys {
|
||||
log.Debug("including key")
|
||||
|
||||
@ -132,7 +132,7 @@ func (p *resultsPrinter) PrintResults(matchingNodes *list.List) error {
|
||||
tempBufferBytes := tempBuffer.Bytes()
|
||||
if bytes.IndexByte(tempBufferBytes, 0) != -1 {
|
||||
return fmt.Errorf(
|
||||
"can't serialize value because it contains NUL char and you are using NUL separated output",
|
||||
"can't serialise value because it contains NUL char and you are using NUL separated output",
|
||||
)
|
||||
}
|
||||
if _, err := writer.Write(tempBufferBytes); err != nil {
|
||||
|
||||
@ -49,3 +49,179 @@ func TestNodeInfoPrinter_PrintResults(t *testing.T) {
|
||||
test.AssertResult(t, true, strings.Contains(outStr, "footComment: foot"))
|
||||
test.AssertResult(t, true, strings.Contains(outStr, "anchor: anchor"))
|
||||
}
|
||||
|
||||
func TestNodeInfoPrinter_PrintedAnything_True(t *testing.T) {
|
||||
node := &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: "test",
|
||||
}
|
||||
listNodes := list.New()
|
||||
listNodes.PushBack(node)
|
||||
|
||||
var output bytes.Buffer
|
||||
writer := bufio.NewWriter(&output)
|
||||
printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))
|
||||
|
||||
// Before printing, should be false
|
||||
test.AssertResult(t, false, printer.PrintedAnything())
|
||||
|
||||
err := printer.PrintResults(listNodes)
|
||||
writer.Flush()
|
||||
if err != nil {
|
||||
t.Fatalf("PrintResults error: %v", err)
|
||||
}
|
||||
|
||||
// After printing, should be true
|
||||
test.AssertResult(t, true, printer.PrintedAnything())
|
||||
}
|
||||
|
||||
func TestNodeInfoPrinter_PrintedAnything_False(t *testing.T) {
|
||||
listNodes := list.New()
|
||||
|
||||
var output bytes.Buffer
|
||||
writer := bufio.NewWriter(&output)
|
||||
printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))
|
||||
|
||||
err := printer.PrintResults(listNodes)
|
||||
writer.Flush()
|
||||
if err != nil {
|
||||
t.Fatalf("PrintResults error: %v", err)
|
||||
}
|
||||
|
||||
// No nodes printed, should still be false
|
||||
test.AssertResult(t, false, printer.PrintedAnything())
|
||||
}
|
||||
|
||||
func TestNodeInfoPrinter_SetNulSepOutput(_ *testing.T) {
|
||||
var output bytes.Buffer
|
||||
writer := bufio.NewWriter(&output)
|
||||
printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))
|
||||
|
||||
// Should not panic or error
|
||||
printer.SetNulSepOutput(true)
|
||||
printer.SetNulSepOutput(false)
|
||||
}
|
||||
|
||||
func TestNodeInfoPrinter_SetAppendix(t *testing.T) {
|
||||
node := &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: "test",
|
||||
}
|
||||
listNodes := list.New()
|
||||
listNodes.PushBack(node)
|
||||
|
||||
var output bytes.Buffer
|
||||
writer := bufio.NewWriter(&output)
|
||||
printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))
|
||||
|
||||
appendixText := "This is appendix text\n"
|
||||
appendixReader := strings.NewReader(appendixText)
|
||||
printer.SetAppendix(appendixReader)
|
||||
|
||||
err := printer.PrintResults(listNodes)
|
||||
writer.Flush()
|
||||
if err != nil {
|
||||
t.Fatalf("PrintResults error: %v", err)
|
||||
}
|
||||
|
||||
outStr := output.String()
|
||||
test.AssertResult(t, true, strings.Contains(outStr, "test"))
|
||||
test.AssertResult(t, true, strings.Contains(outStr, appendixText))
|
||||
}
|
||||
|
||||
func TestNodeInfoPrinter_MultipleNodes(t *testing.T) {
|
||||
node1 := &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: "first",
|
||||
}
|
||||
node2 := &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: "second",
|
||||
}
|
||||
listNodes := list.New()
|
||||
listNodes.PushBack(node1)
|
||||
listNodes.PushBack(node2)
|
||||
|
||||
var output bytes.Buffer
|
||||
writer := bufio.NewWriter(&output)
|
||||
printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))
|
||||
|
||||
err := printer.PrintResults(listNodes)
|
||||
writer.Flush()
|
||||
if err != nil {
|
||||
t.Fatalf("PrintResults error: %v", err)
|
||||
}
|
||||
|
||||
outStr := output.String()
|
||||
test.AssertResult(t, true, strings.Contains(outStr, "value: first"))
|
||||
test.AssertResult(t, true, strings.Contains(outStr, "value: second"))
|
||||
}
|
||||
|
||||
func TestNodeInfoPrinter_SequenceNode(t *testing.T) {
|
||||
node := &CandidateNode{
|
||||
Kind: SequenceNode,
|
||||
Tag: "!!seq",
|
||||
Style: FlowStyle,
|
||||
}
|
||||
listNodes := list.New()
|
||||
listNodes.PushBack(node)
|
||||
|
||||
var output bytes.Buffer
|
||||
writer := bufio.NewWriter(&output)
|
||||
printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))
|
||||
|
||||
err := printer.PrintResults(listNodes)
|
||||
writer.Flush()
|
||||
if err != nil {
|
||||
t.Fatalf("PrintResults error: %v", err)
|
||||
}
|
||||
|
||||
outStr := output.String()
|
||||
test.AssertResult(t, true, strings.Contains(outStr, "kind: SequenceNode"))
|
||||
test.AssertResult(t, true, strings.Contains(outStr, "tag: '!!seq'"))
|
||||
test.AssertResult(t, true, strings.Contains(outStr, "style: FlowStyle"))
|
||||
}
|
||||
|
||||
func TestNodeInfoPrinter_MappingNode(t *testing.T) {
|
||||
node := &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
}
|
||||
listNodes := list.New()
|
||||
listNodes.PushBack(node)
|
||||
|
||||
var output bytes.Buffer
|
||||
writer := bufio.NewWriter(&output)
|
||||
printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))
|
||||
|
||||
err := printer.PrintResults(listNodes)
|
||||
writer.Flush()
|
||||
if err != nil {
|
||||
t.Fatalf("PrintResults error: %v", err)
|
||||
}
|
||||
|
||||
outStr := output.String()
|
||||
test.AssertResult(t, true, strings.Contains(outStr, "kind: MappingNode"))
|
||||
test.AssertResult(t, true, strings.Contains(outStr, "tag: '!!map'"))
|
||||
}
|
||||
|
||||
func TestNodeInfoPrinter_EmptyList(t *testing.T) {
|
||||
listNodes := list.New()
|
||||
|
||||
var output bytes.Buffer
|
||||
writer := bufio.NewWriter(&output)
|
||||
printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))
|
||||
|
||||
err := printer.PrintResults(listNodes)
|
||||
writer.Flush()
|
||||
if err != nil {
|
||||
t.Fatalf("PrintResults error: %v", err)
|
||||
}
|
||||
|
||||
test.AssertResult(t, "", output.String())
|
||||
test.AssertResult(t, false, printer.PrintedAnything())
|
||||
}
|
||||
|
||||
@ -481,7 +481,7 @@ func TestPrinterNulSeparatorWithNullChar(t *testing.T) {
|
||||
t.Fatal("Expected error for null character in NUL separated output")
|
||||
}
|
||||
|
||||
expectedError := "can't serialize value because it contains NUL char and you are using NUL separated output"
|
||||
expectedError := "can't serialise value because it contains NUL char and you are using NUL separated output"
|
||||
if err.Error() != expectedError {
|
||||
t.Fatalf("Expected error '%s', got '%s'", expectedError, err.Error())
|
||||
}
|
||||
|
||||
@ -2,11 +2,13 @@ package yqlib
|
||||
|
||||
type ShellVariablesPreferences struct {
|
||||
KeySeparator string
|
||||
UnwrapScalar bool
|
||||
}
|
||||
|
||||
func NewDefaultShellVariablesPreferences() ShellVariablesPreferences {
|
||||
return ShellVariablesPreferences{
|
||||
KeySeparator: "_",
|
||||
UnwrapScalar: false,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
15
pkg/yqlib/toml.go
Normal file
15
pkg/yqlib/toml.go
Normal file
@ -0,0 +1,15 @@
|
||||
package yqlib
|
||||
|
||||
type TomlPreferences struct {
|
||||
ColorsEnabled bool
|
||||
}
|
||||
|
||||
func NewDefaultTomlPreferences() TomlPreferences {
|
||||
return TomlPreferences{ColorsEnabled: false}
|
||||
}
|
||||
|
||||
func (p *TomlPreferences) Copy() TomlPreferences {
|
||||
return TomlPreferences{ColorsEnabled: p.ColorsEnabled}
|
||||
}
|
||||
|
||||
var ConfiguredTomlPreferences = NewDefaultTomlPreferences()
|
||||
@ -2,9 +2,12 @@ package yqlib
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/fatih/color"
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
)
|
||||
|
||||
@ -175,6 +178,115 @@ var expectedSampleWithHeader = `servers:
|
||||
ip: 10.0.0.1
|
||||
`
|
||||
|
||||
// Roundtrip fixtures
|
||||
var rtInlineTableAttr = `name = { first = "Tom", last = "Preston-Werner" }
|
||||
`
|
||||
|
||||
var rtTableSection = `[owner.contact]
|
||||
name = "Tom"
|
||||
age = 36
|
||||
`
|
||||
|
||||
var rtArrayOfTables = `[[fruits]]
|
||||
name = "apple"
|
||||
[[fruits.varieties]]
|
||||
name = "red delicious"
|
||||
`
|
||||
|
||||
var rtArraysAndScalars = `A = ["hello", ["world", "again"]]
|
||||
B = 12
|
||||
`
|
||||
|
||||
var rtSimple = `A = "hello"
|
||||
B = 12
|
||||
`
|
||||
|
||||
var rtDeepPaths = `[person]
|
||||
name = "hello"
|
||||
address = "12 cat st"
|
||||
`
|
||||
|
||||
var rtEmptyArray = `A = []
|
||||
`
|
||||
|
||||
var rtSampleTable = `var = "x"
|
||||
|
||||
[owner.contact]
|
||||
name = "Tom Preston-Werner"
|
||||
age = 36
|
||||
`
|
||||
|
||||
var rtEmptyTable = `[dependencies]
|
||||
`
|
||||
|
||||
var rtComments = `# This is a comment
|
||||
A = "hello" # inline comment
|
||||
B = 12
|
||||
|
||||
# Table comment
|
||||
[person]
|
||||
name = "Tom" # name comment
|
||||
`
|
||||
|
||||
// Reproduce bug for https://github.com/mikefarah/yq/issues/2588
|
||||
// Bug: standalone comments inside a table cause subsequent key-values to be assigned at root.
|
||||
var issue2588RustToolchainWithComments = `[owner]
|
||||
# comment
|
||||
name = "Tomer"
|
||||
`
|
||||
|
||||
var tableWithComment = `[owner]
|
||||
# comment
|
||||
[things]
|
||||
`
|
||||
|
||||
var sampleFromWeb = `# This is a TOML document
|
||||
title = "TOML Example"
|
||||
|
||||
[owner]
|
||||
name = "Tom Preston-Werner"
|
||||
dob = 1979-05-27T07:32:00-08:00
|
||||
|
||||
[database]
|
||||
enabled = true
|
||||
ports = [8000, 8001, 8002]
|
||||
data = [["delta", "phi"], [3.14]]
|
||||
temp_targets = { cpu = 79.5, case = 72.0 }
|
||||
|
||||
# [servers] yq can't do this one yet
|
||||
[servers.alpha]
|
||||
ip = "10.0.0.1"
|
||||
role = "frontend"
|
||||
|
||||
[servers.beta]
|
||||
ip = "10.0.0.2"
|
||||
role = "backend"
|
||||
`
|
||||
|
||||
var subArrays = `
|
||||
[[array]]
|
||||
|
||||
[[array.subarray]]
|
||||
|
||||
[[array.subarray.subsubarray]]
|
||||
`
|
||||
|
||||
var tomlTableWithComments = `[section]
|
||||
the_array = [
|
||||
# comment
|
||||
"value 1",
|
||||
|
||||
# comment
|
||||
"value 2",
|
||||
]
|
||||
`
|
||||
|
||||
var expectedSubArrays = `array:
|
||||
- subarray:
|
||||
- subsubarray:
|
||||
- {}
|
||||
`
|
||||
|
||||
var tomlScenarios = []formatScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
@ -382,6 +494,126 @@ var tomlScenarios = []formatScenario{
|
||||
expected: expectedMultipleEmptyTables,
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "subArrays",
|
||||
skipDoc: true,
|
||||
input: subArrays,
|
||||
expected: expectedSubArrays,
|
||||
scenarioType: "decode",
|
||||
},
|
||||
// Roundtrip scenarios
|
||||
{
|
||||
description: "Roundtrip: inline table attribute",
|
||||
input: rtInlineTableAttr,
|
||||
expression: ".",
|
||||
expected: rtInlineTableAttr,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: table section",
|
||||
input: rtTableSection,
|
||||
expression: ".",
|
||||
expected: rtTableSection,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: array of tables",
|
||||
input: rtArrayOfTables,
|
||||
expression: ".",
|
||||
expected: rtArrayOfTables,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: arrays and scalars",
|
||||
input: rtArraysAndScalars,
|
||||
expression: ".",
|
||||
expected: rtArraysAndScalars,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: simple",
|
||||
input: rtSimple,
|
||||
expression: ".",
|
||||
expected: rtSimple,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: deep paths",
|
||||
input: rtDeepPaths,
|
||||
expression: ".",
|
||||
expected: rtDeepPaths,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: empty array",
|
||||
input: rtEmptyArray,
|
||||
expression: ".",
|
||||
expected: rtEmptyArray,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: sample table",
|
||||
input: rtSampleTable,
|
||||
expression: ".",
|
||||
expected: rtSampleTable,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: empty table",
|
||||
input: rtEmptyTable,
|
||||
expression: ".",
|
||||
expected: rtEmptyTable,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: comments",
|
||||
input: rtComments,
|
||||
expression: ".",
|
||||
expected: rtComments,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "Issue #2588: comments inside table must not flatten (.owner.name)",
|
||||
input: issue2588RustToolchainWithComments,
|
||||
expression: ".owner.name",
|
||||
expected: "Tomer\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "Issue #2588: comments inside table must not flatten (.name)",
|
||||
input: issue2588RustToolchainWithComments,
|
||||
expression: ".name",
|
||||
expected: "null\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
input: issue2588RustToolchainWithComments,
|
||||
expected: issue2588RustToolchainWithComments,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
input: tableWithComment,
|
||||
expression: ".owner | headComment",
|
||||
expected: "comment\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: sample from web",
|
||||
input: sampleFromWeb,
|
||||
expression: ".",
|
||||
expected: sampleFromWeb,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
input: tomlTableWithComments,
|
||||
expected: tomlTableWithComments,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
}
|
||||
|
||||
func testTomlScenario(t *testing.T, s formatScenario) {
|
||||
@ -471,3 +703,295 @@ func TestTomlScenarios(t *testing.T) {
|
||||
}
|
||||
documentScenarios(t, "usage", "toml", genericScenarios, documentTomlScenario)
|
||||
}
|
||||
|
||||
// TestTomlColourization tests that colourization correctly distinguishes
|
||||
// between table section headers and inline arrays
|
||||
func TestTomlColourization(t *testing.T) {
|
||||
// Save and restore color state
|
||||
oldNoColor := color.NoColor
|
||||
color.NoColor = false
|
||||
defer func() { color.NoColor = oldNoColor }()
|
||||
|
||||
// Test that inline arrays are not coloured as table sections
|
||||
encoder := &tomlEncoder{prefs: TomlPreferences{ColorsEnabled: true}}
|
||||
|
||||
// Create TOML with both table sections and inline arrays
|
||||
input := []byte(`[database]
|
||||
enabled = true
|
||||
ports = [8000, 8001, 8002]
|
||||
|
||||
[servers]
|
||||
alpha = "test"
|
||||
`)
|
||||
|
||||
result := encoder.colorizeToml(input)
|
||||
resultStr := string(result)
|
||||
|
||||
// The bug would cause the inline array [8000, 8001, 8002] to be
|
||||
// coloured with the section colour (Yellow + Bold) instead of being
|
||||
// left uncoloured or coloured differently.
|
||||
//
|
||||
// To test this, we check that the section colour codes appear only
|
||||
// for actual table sections, not for inline arrays.
|
||||
|
||||
// Get the ANSI codes for section colour (Yellow + Bold)
|
||||
sectionColourObj := color.New(color.FgYellow, color.Bold)
|
||||
sectionColourObj.EnableColor()
|
||||
sampleSection := sectionColourObj.Sprint("[database]")
|
||||
|
||||
// Extract just the ANSI codes from the sample
|
||||
// ANSI codes start with \x1b[
|
||||
var ansiStart string
|
||||
for i := 0; i < len(sampleSection); i++ {
|
||||
if sampleSection[i] == '\x1b' {
|
||||
// Find the end of the ANSI sequence (ends with 'm')
|
||||
end := i
|
||||
for end < len(sampleSection) && sampleSection[end] != 'm' {
|
||||
end++
|
||||
}
|
||||
if end < len(sampleSection) {
|
||||
ansiStart = sampleSection[i : end+1]
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Count how many times the section colour appears in the output
|
||||
// It should appear exactly twice: once for [database] and once for [servers]
|
||||
// If it appears more times (e.g., for [8000, 8001, 8002]), that's the bug
|
||||
sectionColourCount := strings.Count(resultStr, ansiStart)
|
||||
|
||||
// We expect exactly 2 occurrences (for [database] and [servers])
|
||||
// The bug would cause more occurrences (e.g., also for [8000)
|
||||
if sectionColourCount != 2 {
|
||||
t.Errorf("Expected section colour to appear exactly 2 times (for [database] and [servers]), but it appeared %d times.\nOutput: %s", sectionColourCount, resultStr)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTomlColorisationNumberBug(t *testing.T) {
|
||||
// Save and restore color state
|
||||
oldNoColor := color.NoColor
|
||||
color.NoColor = false
|
||||
defer func() { color.NoColor = oldNoColor }()
|
||||
|
||||
encoder := NewTomlEncoder()
|
||||
tomlEncoder := encoder.(*tomlEncoder)
|
||||
|
||||
// Test case that exposes the bug: "123-+-45" should NOT be colourised as a single number
|
||||
input := "A = 123-+-45\n"
|
||||
result := string(tomlEncoder.colorizeToml([]byte(input)))
|
||||
|
||||
// The bug causes "123-+-45" to be colourised as one token
|
||||
// It should stop at "123" because the next character '-' is not valid in this position
|
||||
if strings.Contains(result, "123-+-45") {
|
||||
// Check if it's colourised as a single token (no color codes in the middle)
|
||||
idx := strings.Index(result, "123-+-45")
|
||||
// Look backwards for color code
|
||||
beforeIdx := idx - 1
|
||||
for beforeIdx >= 0 && result[beforeIdx] != '\x1b' {
|
||||
beforeIdx--
|
||||
}
|
||||
// Look forward for reset code
|
||||
afterIdx := idx + 8 // length of "123-+-45"
|
||||
hasResetAfter := false
|
||||
for afterIdx < len(result) && afterIdx < idx+20 {
|
||||
if result[afterIdx] == '\x1b' {
|
||||
hasResetAfter = true
|
||||
break
|
||||
}
|
||||
afterIdx++
|
||||
}
|
||||
|
||||
if beforeIdx >= 0 && hasResetAfter {
|
||||
// The entire "123-+-45" is wrapped in color codes - this is the bug!
|
||||
t.Errorf("BUG DETECTED: '123-+-45' is incorrectly colourised as a single number")
|
||||
t.Errorf("Expected only '123' to be colourised as a number, but got the entire '123-+-45'")
|
||||
t.Logf("Full output: %q", result)
|
||||
t.Fail()
|
||||
}
|
||||
}
|
||||
|
||||
// Additional test cases for the bug
|
||||
bugTests := []struct {
|
||||
name string
|
||||
input string
|
||||
invalidSequence string
|
||||
description string
|
||||
}{
|
||||
{
|
||||
name: "consecutive minuses",
|
||||
input: "A = 123--45\n",
|
||||
invalidSequence: "123--45",
|
||||
description: "'123--45' should not be colourised as a single number",
|
||||
},
|
||||
{
|
||||
name: "plus in middle",
|
||||
input: "A = 123+45\n",
|
||||
invalidSequence: "123+45",
|
||||
description: "'123+45' should not be colourised as a single number",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range bugTests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := string(tomlEncoder.colorizeToml([]byte(tt.input)))
|
||||
if strings.Contains(result, tt.invalidSequence) {
|
||||
idx := strings.Index(result, tt.invalidSequence)
|
||||
beforeIdx := idx - 1
|
||||
for beforeIdx >= 0 && result[beforeIdx] != '\x1b' {
|
||||
beforeIdx--
|
||||
}
|
||||
afterIdx := idx + len(tt.invalidSequence)
|
||||
hasResetAfter := false
|
||||
for afterIdx < len(result) && afterIdx < idx+20 {
|
||||
if result[afterIdx] == '\x1b' {
|
||||
hasResetAfter = true
|
||||
break
|
||||
}
|
||||
afterIdx++
|
||||
}
|
||||
|
||||
if beforeIdx >= 0 && hasResetAfter {
|
||||
t.Errorf("BUG: %s", tt.description)
|
||||
t.Logf("Full output: %q", result)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Test that valid scientific notation still works
|
||||
validTests := []struct {
|
||||
name string
|
||||
input string
|
||||
}{
|
||||
{"scientific positive", "A = 1.23e+45\n"},
|
||||
{"scientific negative", "A = 6.626e-34\n"},
|
||||
{"scientific uppercase", "A = 1.23E+10\n"},
|
||||
}
|
||||
|
||||
for _, tt := range validTests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := tomlEncoder.colorizeToml([]byte(tt.input))
|
||||
if len(result) == 0 {
|
||||
t.Error("Expected non-empty colourised output")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Tests that the encoder handles empty path slices gracefully
|
||||
func TestTomlEmptyPathPanic(t *testing.T) {
|
||||
encoder := NewTomlEncoder()
|
||||
tomlEncoder := encoder.(*tomlEncoder)
|
||||
|
||||
var buf bytes.Buffer
|
||||
|
||||
// Create a simple scalar node
|
||||
scalarNode := &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: "test",
|
||||
}
|
||||
|
||||
// Test with empty path - this should not panic
|
||||
err := tomlEncoder.encodeTopLevelEntry(&buf, []string{}, scalarNode)
|
||||
if err == nil {
|
||||
t.Error("Expected error when encoding with empty path, got nil")
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// TestTomlStringEscapeColourization tests that string colourization correctly
|
||||
// handles escape sequences, particularly escaped quotes at the end of strings
|
||||
func TestTomlStringEscapeColourization(t *testing.T) {
|
||||
// Save and restore color state
|
||||
oldNoColor := color.NoColor
|
||||
color.NoColor = false
|
||||
defer func() { color.NoColor = oldNoColor }()
|
||||
|
||||
encoder := NewTomlEncoder()
|
||||
tomlEncoder := encoder.(*tomlEncoder)
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
input string
|
||||
description string
|
||||
}{
|
||||
{
|
||||
name: "escaped quote at end",
|
||||
input: `A = "test\""` + "\n",
|
||||
description: "String ending with escaped quote should be colourised correctly",
|
||||
},
|
||||
{
|
||||
name: "escaped backslash then quote",
|
||||
input: `A = "test\\\""` + "\n",
|
||||
description: "String with escaped backslash followed by escaped quote",
|
||||
},
|
||||
{
|
||||
name: "escaped quote in middle",
|
||||
input: `A = "test\"middle"` + "\n",
|
||||
description: "String with escaped quote in the middle should be colourised correctly",
|
||||
},
|
||||
{
|
||||
name: "multiple escaped quotes",
|
||||
input: `A = "\"test\""` + "\n",
|
||||
description: "String with escaped quotes at start and end",
|
||||
},
|
||||
{
|
||||
name: "escaped newline",
|
||||
input: `A = "test\n"` + "\n",
|
||||
description: "String with escaped newline should be colourised correctly",
|
||||
},
|
||||
{
|
||||
name: "single quote with escaped single quote",
|
||||
input: `A = 'test\''` + "\n",
|
||||
description: "Single-quoted string with escaped single quote",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range testCases {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
// The test should not panic and should return some output
|
||||
result := tomlEncoder.colorizeToml([]byte(tt.input))
|
||||
if len(result) == 0 {
|
||||
t.Error("Expected non-empty colourised output")
|
||||
}
|
||||
|
||||
// Check that the result contains the input string (with color codes)
|
||||
// At minimum, it should contain "A" and "="
|
||||
resultStr := string(result)
|
||||
if !strings.Contains(resultStr, "A") || !strings.Contains(resultStr, "=") {
|
||||
t.Errorf("Expected output to contain 'A' and '=', got: %q", resultStr)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestTomlEncoderPrintDocumentSeparator(t *testing.T) {
|
||||
encoder := NewTomlEncoder()
|
||||
var buf bytes.Buffer
|
||||
writer := bufio.NewWriter(&buf)
|
||||
|
||||
err := encoder.PrintDocumentSeparator(writer)
|
||||
writer.Flush()
|
||||
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "", buf.String())
|
||||
}
|
||||
|
||||
func TestTomlEncoderPrintLeadingContent(t *testing.T) {
|
||||
encoder := NewTomlEncoder()
|
||||
var buf bytes.Buffer
|
||||
writer := bufio.NewWriter(&buf)
|
||||
|
||||
err := encoder.PrintLeadingContent(writer, "some content")
|
||||
writer.Flush()
|
||||
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "", buf.String())
|
||||
}
|
||||
|
||||
func TestTomlEncoderCanHandleAliases(t *testing.T) {
|
||||
encoder := NewTomlEncoder()
|
||||
test.AssertResult(t, false, encoder.CanHandleAliases())
|
||||
}
|
||||
|
||||
@ -139,6 +139,66 @@ func TestWriteInPlaceHandlerImpl_FinishWriteInPlace_Failure(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteInPlaceHandlerImpl_FinishWriteInPlace_Symlink_Success(t *testing.T) {
|
||||
// Create a temporary directory and file for testing
|
||||
tempDir := t.TempDir()
|
||||
inputFile := filepath.Join(tempDir, "input.yaml")
|
||||
symlinkFile := filepath.Join(tempDir, "symlink.yaml")
|
||||
|
||||
// Create input file with some content
|
||||
content := []byte("test: value\n")
|
||||
err := os.WriteFile(inputFile, content, 0600)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create input file: %v", err)
|
||||
}
|
||||
|
||||
err = os.Symlink(inputFile, symlinkFile)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to symlink to input file: %v", err)
|
||||
}
|
||||
|
||||
handler := NewWriteInPlaceHandler(symlinkFile)
|
||||
tempFile, err := handler.CreateTempFile()
|
||||
if err != nil {
|
||||
t.Fatalf("CreateTempFile failed: %v", err)
|
||||
}
|
||||
defer tempFile.Close()
|
||||
|
||||
// Write some content to temp file
|
||||
tempContent := []byte("updated: content\n")
|
||||
_, err = tempFile.Write(tempContent)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to write to temp file: %v", err)
|
||||
}
|
||||
tempFile.Close()
|
||||
|
||||
// Test successful finish
|
||||
err = handler.FinishWriteInPlace(true)
|
||||
if err != nil {
|
||||
t.Fatalf("FinishWriteInPlace failed: %v", err)
|
||||
}
|
||||
|
||||
// Verify that the symlink is still present
|
||||
info, err := os.Lstat(symlinkFile)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to lstat input file: %v", err)
|
||||
}
|
||||
if info.Mode()&os.ModeSymlink == 0 {
|
||||
t.Errorf("Input file symlink is no longer present")
|
||||
}
|
||||
|
||||
// Verify the original file was updated
|
||||
updatedContent, err := os.ReadFile(inputFile)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to read updated file: %v", err)
|
||||
}
|
||||
|
||||
if string(updatedContent) != string(tempContent) {
|
||||
t.Errorf("File content not updated correctly. Expected %q, got %q",
|
||||
string(tempContent), string(updatedContent))
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteInPlaceHandlerImpl_CreateTempFile_Permissions(t *testing.T) {
|
||||
// Create a temporary directory and file for testing
|
||||
tempDir := t.TempDir()
|
||||
|
||||
@ -8,6 +8,7 @@ type YamlPreferences struct {
|
||||
UnwrapScalar bool
|
||||
EvaluateTogether bool
|
||||
FixMergeAnchorToSpec bool
|
||||
CompactSequenceIndent bool
|
||||
}
|
||||
|
||||
func NewDefaultYamlPreferences() YamlPreferences {
|
||||
@ -19,6 +20,7 @@ func NewDefaultYamlPreferences() YamlPreferences {
|
||||
UnwrapScalar: true,
|
||||
EvaluateTogether: false,
|
||||
FixMergeAnchorToSpec: false,
|
||||
CompactSequenceIndent: false,
|
||||
}
|
||||
}
|
||||
|
||||
@ -31,6 +33,7 @@ func (p *YamlPreferences) Copy() YamlPreferences {
|
||||
UnwrapScalar: p.UnwrapScalar,
|
||||
EvaluateTogether: p.EvaluateTogether,
|
||||
FixMergeAnchorToSpec: p.FixMergeAnchorToSpec,
|
||||
CompactSequenceIndent: p.CompactSequenceIndent,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -38,7 +38,8 @@ cleanup
|
||||
cmlu
|
||||
colorise
|
||||
colors
|
||||
coloring
|
||||
Colors
|
||||
colourize
|
||||
compinit
|
||||
coolioo
|
||||
coverprofile
|
||||
@ -276,4 +277,24 @@ nohcl
|
||||
zclconf
|
||||
cty
|
||||
go-cty
|
||||
unlabeled
|
||||
Colorisation
|
||||
goimports
|
||||
errorlint
|
||||
RDBMS
|
||||
expeñded
|
||||
bananabananabananabanana
|
||||
edwinjhlee
|
||||
flox
|
||||
unlabelled
|
||||
kyaml
|
||||
KYAML
|
||||
nokyaml
|
||||
buildvcs
|
||||
behaviour
|
||||
GOFLAGS
|
||||
gocache
|
||||
subsubarray
|
||||
Ffile
|
||||
Fquery
|
||||
coverpkg
|
||||
gsub
|
||||
@ -1,5 +1,34 @@
|
||||
4.52.4:
|
||||
- Dropping windows/arm - no longer supported in cross-compile
|
||||
|
||||
4.52.3:
|
||||
- Fixing comments in TOML arrays (#2592)
|
||||
- Bumped dependencies
|
||||
|
||||
|
||||
4.52.2:
|
||||
- Fixed bad instructions file breaking go-install (#2587) Thanks @theyoprst
|
||||
- Fixed TOML table scope after comments (#2588) Thanks @tomers
|
||||
- Multiply uses a readonly context (#2558)
|
||||
- Fixed merge globbing wildcards in keys (#2564)
|
||||
- Fixing TOML subarray parsing issue (#2581)
|
||||
|
||||
4.52.1:
|
||||
- TOML encoder support - you can now roundtrip! #1364
|
||||
- Parent now supports negative indices, and added a 'root' command for referencing the top level document
|
||||
- Fixed scalar encoding for HCL
|
||||
- Add --yaml-compact-seq-indent / -c flag for compact sequence indentation (#2583) Thanks @jfenal
|
||||
- Add symlink check to file rename util (#2576) Thanks @Elias-elastisys
|
||||
- Powershell fixed default command used for __completeNoDesc alias (#2568) Thanks @teejaded
|
||||
- Unwrap scalars in shell output mode. (#2548) Thanks @flintwinters
|
||||
- Added K8S KYAML output format support (#2560) Thanks @robbat2
|
||||
|
||||
- Bumped dependencies
|
||||
- Special shout out to @ccoVeille for reviewing my PRs!
|
||||
|
||||
4.50.1:
|
||||
- Added HCL support!
|
||||
- Fixing handling of CRLF #2352
|
||||
- Bumped dependencies
|
||||
|
||||
4.49.2:
|
||||
|
||||
@ -1,2 +1,2 @@
|
||||
#!/bin/bash
|
||||
go build -tags "yq_nolua yq_noini yq_notoml yq_noxml yq_nojson yq_nohcl" -ldflags "-s -w" .
|
||||
go build -tags "yq_nolua yq_noini yq_notoml yq_noxml yq_nojson yq_nohcl yq_nokyaml" -ldflags "-s -w" .
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Currently, the `yq_nojson` feature must be enabled when using TinyGo.
|
||||
tinygo build -no-debug -tags "yq_nolua yq_noini yq_notoml yq_noxml yq_nojson yq_nocsv yq_nobase64 yq_nouri yq_noprops yq_nosh yq_noshell yq_nohcl" .
|
||||
tinygo build -no-debug -tags "yq_nolua yq_noini yq_notoml yq_noxml yq_nojson yq_nocsv yq_nobase64 yq_nouri yq_noprops yq_nosh yq_noshell yq_nohcl yq_nokyaml" .
|
||||
|
||||
@ -22,4 +22,4 @@ else
|
||||
exit 1
|
||||
fi
|
||||
|
||||
"$LINT_CMD" run --verbose
|
||||
GOFLAGS="${GOFLAGS}" "$LINT_CMD" run --verbose
|
||||
|
||||
@ -3,7 +3,9 @@
|
||||
set -e
|
||||
|
||||
echo "Running tests and generating coverage..."
|
||||
go test -coverprofile=coverage.out -v $(go list ./... | grep -v -E 'examples' | grep -v -E 'test')
|
||||
packages=$(go list ./... | grep -v -E 'examples' | grep -v -E 'test' | tr '\n' ',' | sed 's/,$//')
|
||||
test_packages=$(go list ./... | grep -v -E 'examples' | grep -v -E 'test' | grep -v '^github.com/mikefarah/yq/v4$')
|
||||
go test -coverprofile=coverage.out -coverpkg="$packages" -v $test_packages
|
||||
|
||||
echo "Generating HTML coverage report..."
|
||||
go tool cover -html=coverage.out -o coverage.html
|
||||
@ -58,11 +60,31 @@ tail -n +1 coverage_sorted.txt | while read percent file; do
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo "Top 10 files needing attention (lowest coverage):"
|
||||
echo "Top 10 files by uncovered statements:"
|
||||
echo "================================================="
|
||||
grep -v "TOTAL:" coverage_sorted.txt | tail -10 | while read percent file; do
|
||||
# Calculate uncovered statements for each file and sort by that
|
||||
go tool cover -func=coverage.out | grep -E "\.go:[0-9]+:" | \
|
||||
awk '{
|
||||
# Extract filename and percentage
|
||||
split($1, parts, ":")
|
||||
file = parts[1]
|
||||
pct = $NF
|
||||
gsub(/%/, "", pct)
|
||||
|
||||
# Track stats per file
|
||||
total[file]++
|
||||
covered[file] += pct
|
||||
}
|
||||
END {
|
||||
for (file in total) {
|
||||
avg_pct = covered[file] / total[file]
|
||||
uncovered = total[file] * (100 - avg_pct) / 100
|
||||
covered_count = total[file] - uncovered
|
||||
printf "%.0f %d %.0f %.1f %s\n", uncovered, total[file], covered_count, avg_pct, file
|
||||
}
|
||||
}' | sort -rn | head -10 | while read uncovered total covered pct file; do
|
||||
filename=$(basename "$file")
|
||||
printf "%-60s %8.1f%%\n" "$filename" "$percent"
|
||||
printf "%-60s %4d uncovered (%4d/%4d, %5.1f%%)\n" "$filename" "$uncovered" "$covered" "$total" "$pct"
|
||||
done
|
||||
|
||||
echo ""
|
||||
|
||||
@ -2,4 +2,4 @@
|
||||
set -ex
|
||||
go mod download golang.org/x/tools@latest
|
||||
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v2.1.5
|
||||
curl -sSfL https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s v2.22.5
|
||||
curl -sSfL https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s v2.22.11
|
||||
@ -3,9 +3,11 @@
|
||||
set -o errexit
|
||||
set -o pipefail
|
||||
|
||||
if command -v gosec &> /dev/null
|
||||
then
|
||||
gosec "${PWD}" ./...
|
||||
else
|
||||
./bin/gosec "${PWD}" ./...
|
||||
fi
|
||||
OPTS=(
|
||||
-exclude-dir=vendor
|
||||
-exclude-dir=.gomodcache
|
||||
-exclude-dir=.gocache
|
||||
)
|
||||
|
||||
command -v gosec &> /dev/null && BIN=gosec || BIN=./bin/gosec
|
||||
"${BIN}" "${OPTS[@]}" "${PWD}" ./...
|
||||
|
||||
@ -783,7 +783,7 @@ _FAIL_NOT_SAME_='eval failNotSame --lineno "${LINENO:-}"'
|
||||
# None
|
||||
startSkipping() { __shunit_skip=${SHUNIT_TRUE}; }
|
||||
|
||||
# Resume the normal recording behavior of assert and fail calls.
|
||||
# Resume the normal recording behaviour of assert and fail calls.
|
||||
#
|
||||
# Args:
|
||||
# None
|
||||
@ -1293,7 +1293,7 @@ if command [ "${__shunit_mode}" = "${__SHUNIT_MODE_STANDALONE}" ]; then
|
||||
command . "`_shunit_prepForSourcing \"${__shunit_script}\"`"
|
||||
fi
|
||||
|
||||
# Configure default output coloring behavior.
|
||||
# Configure default output coloring behaviour.
|
||||
_shunit_configureColor "${SHUNIT_COLOR}"
|
||||
|
||||
# Execute the oneTimeSetUp function (if it exists).
|
||||
|
||||
@ -1,3 +1,3 @@
|
||||
#!/bin/bash
|
||||
|
||||
npx cspell --no-progress "**/*.{sh,go}"
|
||||
npx cspell --no-progress "**/*.{sh,go,md}"
|
||||
@ -1,5 +1,5 @@
|
||||
name: yq
|
||||
version: 'v4.50.1'
|
||||
version: 'v4.52.4'
|
||||
summary: A lightweight and portable command-line data file processor
|
||||
description: |
|
||||
`yq` uses [jq](https://github.com/stedolan/jq) like syntax but works with yaml, json, xml, csv, properties and TOML files.
|
||||
@ -32,6 +32,6 @@ parts:
|
||||
build-environment:
|
||||
- CGO_ENABLED: 0
|
||||
source: https://github.com/mikefarah/yq.git
|
||||
source-tag: v4.50.1
|
||||
source-tag: v4.52.4
|
||||
build-snaps:
|
||||
- go/latest/stable
|
||||
|
||||
2
yq.go
2
yq.go
@ -12,7 +12,7 @@ func main() {
|
||||
args := os.Args[1:]
|
||||
|
||||
_, _, err := cmd.Find(args)
|
||||
if err != nil && args[0] != "__complete" {
|
||||
if err != nil && args[0] != "__complete" && args[0] != "__completeNoDesc" {
|
||||
// default command when nothing matches...
|
||||
newArgs := []string{"eval"}
|
||||
cmd.SetArgs(append(newArgs, os.Args[1:]...))
|
||||
|
||||
34
yq_test.go
34
yq_test.go
@ -48,6 +48,12 @@ func TestMainFunctionLogic(t *testing.T) {
|
||||
if err == nil {
|
||||
t.Error("Expected error when no command found for '__complete', but got nil")
|
||||
}
|
||||
|
||||
args = []string{"__completeNoDesc"}
|
||||
_, _, err = cmd.Find(args)
|
||||
if err == nil {
|
||||
t.Error("Expected error when no command found for '__completeNoDesc', but got nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestMainFunctionWithArgs(t *testing.T) {
|
||||
@ -75,6 +81,12 @@ func TestMainFunctionWithArgs(t *testing.T) {
|
||||
if err == nil {
|
||||
t.Error("Expected error with __complete command")
|
||||
}
|
||||
|
||||
args = []string{"__completeNoDesc"}
|
||||
_, _, err = cmd.Find(args)
|
||||
if err == nil {
|
||||
t.Error("Expected error with __completeNoDesc command")
|
||||
}
|
||||
}
|
||||
|
||||
func TestMainFunctionExecution(t *testing.T) {
|
||||
@ -151,6 +163,28 @@ func TestMainFunctionWithCompletionCommand(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestMainFunctionWithCompletionNoDescCommand(t *testing.T) {
|
||||
// Test that __complete command doesn't trigger default command logic
|
||||
cmd := command.New()
|
||||
|
||||
args := []string{"__completeNoDesc"}
|
||||
_, _, err := cmd.Find(args)
|
||||
if err == nil {
|
||||
t.Error("Expected error with __completeNoDesc command")
|
||||
}
|
||||
|
||||
// The main function logic would be:
|
||||
// if err != nil && args[0] != "__completeNoDesc" {
|
||||
// // This should NOT execute for __completeNoDesc
|
||||
// }
|
||||
|
||||
// Verify that __completeNoDesc doesn't trigger the default command logic
|
||||
if args[0] == "__completeNoDesc" {
|
||||
// This means the default command logic should NOT execute
|
||||
t.Log("__completeNoDesc command correctly identified, default command logic should not execute")
|
||||
}
|
||||
}
|
||||
|
||||
func TestMainFunctionIntegration(t *testing.T) {
|
||||
// Integration test to verify the main function logic works end-to-end
|
||||
|
||||
|
||||
Loading…
Reference in New Issue
Block a user