Compare commits

...

130 Commits

Author SHA1 Message Date
dependabot[bot]
b5cb9a2f20
Bump github.com/zclconf/go-cty from 1.17.0 to 1.18.0 (#2616)
Bumps [github.com/zclconf/go-cty](https://github.com/zclconf/go-cty) from 1.17.0 to 1.18.0.
- [Release notes](https://github.com/zclconf/go-cty/releases)
- [Changelog](https://github.com/zclconf/go-cty/blob/main/CHANGELOG.md)
- [Commits](https://github.com/zclconf/go-cty/compare/v1.17.0...v1.18.0)

---
updated-dependencies:
- dependency-name: github.com/zclconf/go-cty
  dependency-version: 1.18.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-21 10:19:36 +11:00
dependabot[bot]
133ba767a6
Bump golang.org/x/mod from 0.33.0 to 0.34.0 (#2629)
Bumps [golang.org/x/mod](https://github.com/golang/mod) from 0.33.0 to 0.34.0.
- [Commits](https://github.com/golang/mod/compare/v0.33.0...v0.34.0)

---
updated-dependencies:
- dependency-name: golang.org/x/mod
  dependency-version: 0.34.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-21 10:19:22 +11:00
dependabot[bot]
5db3dcf394
Bump golang.org/x/text from 0.34.0 to 0.35.0 (#2630)
Bumps [golang.org/x/text](https://github.com/golang/text) from 0.34.0 to 0.35.0.
- [Release notes](https://github.com/golang/text/releases)
- [Commits](https://github.com/golang/text/compare/v0.34.0...v0.35.0)

---
updated-dependencies:
- dependency-name: golang.org/x/text
  dependency-version: 0.35.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-21 10:11:12 +11:00
Oleksandr Redko
4c148178e2
Fix typo in filename (#2611) 2026-03-21 09:29:07 +11:00
dependabot[bot]
4df6e46f95
Bump docker/setup-buildx-action from 3 to 4 (#2627)
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3 to 4.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v3...v4)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-version: '4'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-21 09:28:17 +11:00
Mike Farah
6a965bc39a Bumping golint 2026-03-21 09:25:31 +11:00
dependabot[bot]
34d3a29308
Bump golang from 1.26.0 to 1.26.1 (#2626)
Bumps golang from 1.26.0 to 1.26.1.

---
updated-dependencies:
- dependency-name: golang
  dependency-version: 1.26.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-20 20:30:05 +11:00
dependabot[bot]
16e4df2304
Bump docker/login-action from 3 to 4 (#2620)
Bumps [docker/login-action](https://github.com/docker/login-action) from 3 to 4.
- [Release notes](https://github.com/docker/login-action/releases)
- [Commits](https://github.com/docker/login-action/compare/v3...v4)

---
updated-dependencies:
- dependency-name: docker/login-action
  dependency-version: '4'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-07 13:48:13 +11:00
dependabot[bot]
79a92d0478
Bump docker/setup-qemu-action from 3 to 4 (#2621)
Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 3 to 4.
- [Release notes](https://github.com/docker/setup-qemu-action/releases)
- [Commits](https://github.com/docker/setup-qemu-action/compare/v3...v4)

---
updated-dependencies:
- dependency-name: docker/setup-qemu-action
  dependency-version: '4'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-07 13:47:56 +11:00
Mike Farah
88a31ae8c6 updating release notes 2026-02-14 18:43:51 +11:00
Mike Farah
5a7e72a743 Bumping version 2026-02-14 18:43:09 +11:00
Mike Farah
562531d936 Dropping windows/arm 2026-02-14 18:42:31 +11:00
Mike Farah
2c471b6498 Bumping version 2026-02-14 11:51:00 +11:00
Mike Farah
f4ef6ef3cf Release notes 2026-02-14 11:50:51 +11:00
dependabot[bot]
f49f2bd2d8
Bump golang.org/x/mod from 0.31.0 to 0.33.0 (#2606)
Bumps [golang.org/x/mod](https://github.com/golang/mod) from 0.31.0 to 0.33.0.
- [Commits](https://github.com/golang/mod/compare/v0.31.0...v0.33.0)

---
updated-dependencies:
- dependency-name: golang.org/x/mod
  dependency-version: 0.33.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-02-14 11:42:40 +11:00
dependabot[bot]
6ccc7b7797
Bump golang.org/x/net from 0.49.0 to 0.50.0 (#2604)
Bumps [golang.org/x/net](https://github.com/golang/net) from 0.49.0 to 0.50.0.
- [Commits](https://github.com/golang/net/compare/v0.49.0...v0.50.0)

---
updated-dependencies:
- dependency-name: golang.org/x/net
  dependency-version: 0.50.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-02-14 11:32:58 +11:00
dependabot[bot]
b3e1fbb7d1
Bump golang from 1.25.6 to 1.26.0 (#2603)
Bumps golang from 1.25.6 to 1.26.0.

---
updated-dependencies:
- dependency-name: golang
  dependency-version: 1.26.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-02-14 11:32:47 +11:00
Mike Farah
288ca2d114
Fixing comments in TOML arrays #2592 (#2595) 2026-02-03 19:42:49 +11:00
Mike Farah
eb04fa87af More tests 2026-02-01 10:27:18 +11:00
Mike Farah
2be0094729 Bumping version 2026-02-01 09:19:54 +11:00
Mike Farah
3c18d5b035 Preparing release 2026-02-01 09:19:45 +11:00
Mike Farah
2dcc2293da Merge branch 'tomers-fix/toml-comments-table-scope-2588' 2026-02-01 09:14:32 +11:00
Mike Farah
eb4fde4ef8 Pulling out common code 2026-02-01 09:14:18 +11:00
Mike Farah
06ea4cf62e Fixing spelling 2026-02-01 09:10:48 +11:00
Mike Farah
37089d24af Merge branch 'fix/toml-comments-table-scope-2588' of github.com:tomers/yq into tomers-fix/toml-comments-table-scope-2588 2026-02-01 09:08:20 +11:00
Slava Ezhkin
7cf88a0291
Add regression test for go install compatibility #2587 (#2591) 2026-02-01 09:01:53 +11:00
Mike Farah
41adc1ad18 Fixing wrongly named instructions file 2026-02-01 08:53:12 +11:00
Tomer Shalev
b4b96f2a68 Fix TOML table parsing after standalone comments
Standalone TOML comments immediately inside a table/array-table no longer end the table scope, preventing subsequent keys from being flattened to the document root.
2026-01-31 14:41:30 +02:00
Mike Farah
2824d66a65 Multiply uses a readonly context #2558 2026-01-31 16:47:58 +11:00
Mike Farah
4bbffa9022 Fixed merge globbing wildcards in keys #2564 2026-01-31 15:44:50 +11:00
Mike Farah
bdeedbd275 Fixing TOML subarray parsing issue #2581 2026-01-31 15:25:11 +11:00
Mike Farah
3d918acc2a Bumping version 2026-01-31 15:03:32 +11:00
Mike Farah
01005cc8fd Preparing release notes 2026-01-31 15:03:23 +11:00
Mike Farah
c4468165f2 Formatting 2026-01-31 14:55:36 +11:00
sydarn
e35d32a0b6
buildfix: which -> command -v (#2582) 2026-01-31 14:51:56 +11:00
jfenal
78192a915b
feat: Add --yaml-compact-seq-indent / -c flag for compact sequence indentation (#2583)
Adds a new CLI flag that enables compact sequence indentation where '- ' is
considered part of the indentation. This leverages the CompactSeqIndent()
method from the underlying go.yaml.in/yaml/v4 library.

Example output with --yaml-compact-seq-indent:
  parent:
    items:
    - one
    - two

Instead of the default:
  parent:
    items:
      - one
      - two

Closes #1841
2026-01-31 14:50:01 +11:00
jfenal
c4f4e6d416
fix: TOML colorization now works when NO_COLOR env is set (#2584)
The colorizeToml function intended to force colors by setting
color.NoColor = false, but SprintFunc() still respects the NO_COLOR
environment variable. This caused TestTomlColourization to fail in
CI environments where NO_COLOR=1 is set.

Fixed by calling EnableColor() on each color object, which explicitly
forces colors regardless of environment settings.

Vibe-coded with Cursor (Claude Opus 4)
2026-01-31 14:49:42 +11:00
dependabot[bot]
5f90039bdc
Bump golang from 1.25.5 to 1.25.6 (#2580)
Bumps golang from 1.25.5 to 1.25.6.

---
updated-dependencies:
- dependency-name: golang
  dependency-version: 1.25.6
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-22 14:59:24 +11:00
Elias-elastisys
c6fa371d8d
Add symlink check to file rename util (#2576) 2026-01-22 13:43:32 +11:00
dependabot[bot]
3a27e39778
Bump actions/setup-go from 5 to 6 (#2471)
Bumps [actions/setup-go](https://github.com/actions/setup-go) from 5 to 6.
- [Release notes](https://github.com/actions/setup-go/releases)
- [Commits](https://github.com/actions/setup-go/compare/v5...v6)

---
updated-dependencies:
- dependency-name: actions/setup-go
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-22 13:42:54 +11:00
TJ Miller
414a085563
Fix default command used for __completeNoDesc alias (#2568) 2026-01-22 13:41:42 +11:00
dependabot[bot]
542801926f
Bump github.com/goccy/go-yaml from 1.19.1 to 1.19.2 (#2566)
Bumps [github.com/goccy/go-yaml](https://github.com/goccy/go-yaml) from 1.19.1 to 1.19.2.
- [Release notes](https://github.com/goccy/go-yaml/releases)
- [Changelog](https://github.com/goccy/go-yaml/blob/master/CHANGELOG.md)
- [Commits](https://github.com/goccy/go-yaml/compare/v1.19.1...v1.19.2)

---
updated-dependencies:
- dependency-name: github.com/goccy/go-yaml
  dependency-version: 1.19.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-21 10:17:43 +11:00
Robin H. Johnson
1bcc44ff9b
ci: ensure lint has goflags (#2570)
Signed-off-by: Robin H. Johnson <rjohnson@coreweave.com>
2026-01-21 10:17:26 +11:00
dependabot[bot]
a6f1b02340
Bump golang.org/x/net from 0.48.0 to 0.49.0 (#2575)
Bumps [golang.org/x/net](https://github.com/golang/net) from 0.48.0 to 0.49.0.
- [Commits](https://github.com/golang/net/compare/v0.48.0...v0.49.0)

---
updated-dependencies:
- dependency-name: golang.org/x/net
  dependency-version: 0.49.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-21 10:15:44 +11:00
Flint Winters
f98028c925
Unwrap scalars in shell output mode. (#2548)
* feat: Add UnwrapScalar to ShellVariablesPreferences

- Add UnwrapScalar boolean field to ShellVariablesPreferences struct.
- Initialize UnwrapScalar to false in NewDefaultShellVariablesPreferences.
- This preference will control whether shell output should be quoted or raw.

* feat: Propagate unwrapScalar to ShellVariablesPreferences

- In configureEncoder function, set UnwrapScalar in ConfiguredShellVariablesPreferences.
- This ensures the -r flag's state is passed to the shell encoder for raw output control.

* feat: Implement conditional quoting in shellVariablesEncoder

- Modify doEncode method to check pe.prefs.UnwrapScalar.
- If UnwrapScalar is true, output raw node.Value.
- Otherwise, use quoteValue for shell-safe quoting.
- This enables quote-free output for Kubernetes workflows when -r is used.

* test: Add tests for UnwrapScalar in shell encoder

- Introduce assertEncodesToUnwrapped helper function.
- Add TestShellVariablesEncoderUnwrapScalar to verify quote-free output with -r.
- Add TestShellVariablesEncoderDefaultQuoting to confirm default quoting behavior without -r.
- Ensure comprehensive testing of conditional quoting logic for shell output.

* remove redundant test
2026-01-01 15:21:55 +11:00
Robin H. Johnson
c6029376a5
feat: K8S KYAML output format support (#2560)
* feat: K8S KYAML output format support

Reference: https://github.com/kubernetes/enhancements/blob/master/keps/sig-cli/5295-kyaml/README.md
Co-authored-by: Codex <codex@openai.com>
Generated-with: OpenAI Codex CLI (partial)
Signed-off-by: Robin H. Johnson <rjohnson@coreweave.com>

* build: gomodcache/gocache should not be committed

Signed-off-by: Robin H. Johnson <rjohnson@coreweave.com>

* chore: fix spelling of behaviour

Signed-off-by: Robin H. Johnson <robbat2@gentoo.org>

* build: pass GOFLAGS to docker to support buildvcs=false

In trying to develop the KYAML support, various tests gave false
positive results because they made assumptions about Git functionality
Make it possible to avoid that by passing GOFLAGS='-buildvcs=false' to
to Makefile.

Signed-off-by: Robin H. Johnson <robbat2@gentoo.org>

* doc: cover documentScenarios for tests

Signed-off-by: Robin H. Johnson <rjohnson@coreweave.com>

* build: exclude go caches from gosec

Without tuning, gosec scans all of the vendor/gocache/gomodcache, taking
several minutes (3m35 here), whereas the core of the yq takes only 15
seconds to scan.

If we intend to remediate upstream issues in future; add a separate
target to scan those.

Signed-off-by: Robin H. Johnson <rjohnson@coreweave.com>

---------

Signed-off-by: Robin H. Johnson <rjohnson@coreweave.com>
Signed-off-by: Robin H. Johnson <robbat2@gentoo.org>
Co-authored-by: Codex <codex@openai.com>
2026-01-01 15:14:53 +11:00
Mike Farah
23abf50fef Adding notoml directive to encoder 2025-12-26 11:08:24 +11:00
Mike Farah
64ec1f4aa7 Adding negative parent example 2025-12-26 10:53:42 +11:00
Mike Farah
4973c355e6 Bumping version 2025-12-20 19:21:32 +11:00
Mike Farah
ecbdcada9f Preparing release 2025-12-20 19:21:24 +11:00
Mike Farah
029ba68014 Bump gosec version 2025-12-20 19:15:36 +11:00
Mike Farah
4a06cce376
Switch to uk (#2557)
* Setting golangci to UK english (that's what we use in AU)

* Fixing more spelling

* Fixing

* Include MD files in spell checker
2025-12-20 19:11:48 +11:00
Mike Farah
37e48cea44 Refining agents.md 2025-12-20 16:04:09 +11:00
Mike Farah
207bec6b29 whitespace 2025-12-20 16:01:07 +11:00
Mike Farah
7198d16575 Merge branch 'master' into toml_encoder 2025-12-20 15:58:57 +11:00
copilot-swe-agent[bot]
5d6c2047cf Fix spelling: use British English Colourization
Co-authored-by: mikefarah <1151925+mikefarah@users.noreply.github.com>
2025-12-20 15:55:17 +11:00
copilot-swe-agent[bot]
7f60daad20 Add test for string escape bug and implement fix
Co-authored-by: mikefarah <1151925+mikefarah@users.noreply.github.com>
2025-12-20 15:55:17 +11:00
Mike Farah
b7cbe59fd7
Update pkg/yqlib/encoder_toml.go
Co-authored-by: ccoVeille <3875889+ccoVeille@users.noreply.github.com>
2025-12-20 15:37:55 +11:00
copilot-swe-agent[bot]
9fa353b123 Add test coverage for parent(0) and parent(-3) edge cases
Co-authored-by: mikefarah <1151925+mikefarah@users.noreply.github.com>
2025-12-20 15:36:49 +11:00
Mike Farah
c6ecad1546 Support negative parent indices 2025-12-20 15:36:49 +11:00
Mike Farah
56eb3655b8 Formatting 2025-12-20 15:35:41 +11:00
copilot-swe-agent[bot]
1de4ec59f2 Merge remote-tracking branch 'origin/pr/2552' into copilot/sub-pr-2552
# Conflicts:
#	pkg/yqlib/toml_test.go
2025-12-20 04:26:11 +00:00
copilot-swe-agent[bot]
c132c32731 Convert to UK English spelling (colourization, coloured)
Co-authored-by: mikefarah <1151925+mikefarah@users.noreply.github.com>
2025-12-20 04:17:39 +00:00
Mike Farah
0914121d29 Fixing number color issue 2025-12-20 15:12:30 +11:00
copilot-swe-agent[bot]
aa5134e645 Add test case and fix colorization bug for inline arrays in TOML
Co-authored-by: mikefarah <1151925+mikefarah@users.noreply.github.com>
2025-12-20 04:09:04 +00:00
Mike Farah
4d620bfa26
Update pkg/yqlib/encoder_toml.go
Co-authored-by: ccoVeille <3875889+ccoVeille@users.noreply.github.com>
2025-12-20 15:07:00 +11:00
Mike Farah
b8d90fd574
Update pkg/yqlib/candidate_node.go
Co-authored-by: ccoVeille <3875889+ccoVeille@users.noreply.github.com>
2025-12-20 15:05:03 +11:00
copilot-swe-agent[bot]
c1b81f1a03 Initial plan 2025-12-20 04:04:24 +00:00
Mike Farah
ea40e14fb1
Create *.instructions.md 2025-12-20 15:02:22 +11:00
Mike Farah
b974d973ee spelling 2025-12-20 09:55:29 +11:00
dependabot[bot]
66ec487792 Bump github.com/goccy/go-yaml from 1.19.0 to 1.19.1
Bumps [github.com/goccy/go-yaml](https://github.com/goccy/go-yaml) from 1.19.0 to 1.19.1.
- [Release notes](https://github.com/goccy/go-yaml/releases)
- [Changelog](https://github.com/goccy/go-yaml/blob/master/CHANGELOG.md)
- [Commits](https://github.com/goccy/go-yaml/compare/v1.19.0...v1.19.1)

---
updated-dependencies:
- dependency-name: github.com/goccy/go-yaml
  dependency-version: 1.19.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-19 10:19:44 +11:00
Mike Farah
161be10791 Comments! 2025-12-16 20:47:15 +11:00
Mike Farah
aa858520a8 Merge branch 'master' into toml_encoder 2025-12-16 14:27:22 +11:00
Mike Farah
ac2889c296 Fixed scalar encoding for HCL 2025-12-16 14:22:50 +11:00
Mike Farah
626624af7b Adding tf to hcl format names 2025-12-16 14:17:38 +11:00
Mike Farah
b0d2522f80 Readme update 2025-12-16 14:05:58 +11:00
Mike Farah
2ee38e15b6 Adding HCL to readme 2025-12-15 11:45:08 +11:00
Mike Farah
4e9d5e8e48 wip 2025-12-15 11:40:28 +11:00
Mike Farah
1338b521ff Colours! 2025-12-14 19:41:45 +11:00
Mike Farah
3a5323824f Handles comments! 2025-12-14 19:33:00 +11:00
Mike Farah
8780172b33 Added missing fix in release notes 2025-12-14 19:05:55 +11:00
Mike Farah
5f9bf8d241 wip toml encoder 2025-12-14 19:03:54 +11:00
Mike Farah
065b200af9 Bumping version 2025-12-14 17:07:57 +11:00
Mike Farah
745a7ffb3c Preparing release 2025-12-14 17:07:45 +11:00
dependabot[bot]
a305d706d4 Bump golang.org/x/net from 0.47.0 to 0.48.0
Bumps [golang.org/x/net](https://github.com/golang/net) from 0.47.0 to 0.48.0.
- [Commits](https://github.com/golang/net/compare/v0.47.0...v0.48.0)

---
updated-dependencies:
- dependency-name: golang.org/x/net
  dependency-version: 0.48.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-14 08:45:25 +11:00
dependabot[bot]
0671ccd2cc Bump github.com/zclconf/go-cty from 1.16.3 to 1.17.0
Bumps [github.com/zclconf/go-cty](https://github.com/zclconf/go-cty) from 1.16.3 to 1.17.0.
- [Release notes](https://github.com/zclconf/go-cty/releases)
- [Changelog](https://github.com/zclconf/go-cty/blob/main/CHANGELOG.md)
- [Commits](https://github.com/zclconf/go-cty/compare/v1.16.3...v1.17.0)

---
updated-dependencies:
- dependency-name: github.com/zclconf/go-cty
  dependency-version: 1.17.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-14 08:45:16 +11:00
dependabot[bot]
4d8cd450bd Bump golang.org/x/text from 0.31.0 to 0.32.0
Bumps [golang.org/x/text](https://github.com/golang/text) from 0.31.0 to 0.32.0.
- [Release notes](https://github.com/golang/text/releases)
- [Commits](https://github.com/golang/text/compare/v0.31.0...v0.32.0)

---
updated-dependencies:
- dependency-name: golang.org/x/text
  dependency-version: 0.32.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-13 22:37:33 +11:00
Mike Farah
d2d657eacc HCL improvements 2025-12-09 19:49:34 +11:00
Mike Farah
f4fd8c585a Better roundtripping of HCL 2025-12-08 21:09:21 +11:00
Mike Farah
e4bf8a1e0a Simplifying HCL decoder 2025-12-08 20:31:28 +11:00
copilot-swe-agent[bot]
fd405749f9 Add build tag to hcl_test.go to skip tests when HCL is disabled
Co-authored-by: mikefarah <1151925+mikefarah@users.noreply.github.com>
2025-12-08 20:30:47 +11:00
Mike Farah
51ddf8d357 Update pkg/yqlib/format.go
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-08 20:30:47 +11:00
Mike Farah
77eccfd3db Fixed comment parsing, added generated docs 2025-12-08 20:30:47 +11:00
Mike Farah
554bf5a2f2 colorise output 2025-12-08 20:30:47 +11:00
Mike Farah
8162f3a100 wip - comments 2025-12-08 20:30:47 +11:00
Mike Farah
48707369a0 wip - comments 2025-12-08 20:30:47 +11:00
Mike Farah
4f72c37de7 cleanup 2025-12-08 20:30:47 +11:00
Mike Farah
795f9c954c Fixing formatting 2025-12-08 20:30:47 +11:00
Mike Farah
3d35386ad9 Update pkg/yqlib/decoder_hcl.go
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-08 20:30:47 +11:00
Mike Farah
154a4ace01 Update pkg/yqlib/encoder_hcl.go
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-08 20:30:47 +11:00
Mike Farah
effdfe1221 Update agents.md
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-08 20:30:47 +11:00
Mike Farah
8af768a015 wip 2025-12-08 20:30:47 +11:00
Mike Farah
5f3dcb1ccf Fixed processing of block labels 2025-12-08 20:30:47 +11:00
Mike Farah
6270c29f54 Fixed processing of nested data 2025-12-08 20:30:47 +11:00
Mike Farah
df3101ce53 Refining 2025-12-08 20:30:47 +11:00
Mike Farah
65e79845d4 Refining 2025-12-08 20:30:47 +11:00
Mike Farah
b4d8131197 Adding HCL no build directive 2025-12-08 20:30:47 +11:00
Mike Farah
c75a2fad86 Use HCL encoder 2025-12-08 20:30:47 +11:00
Mike Farah
8d430cf41c Use HCL encoder 2025-12-08 20:30:47 +11:00
Mike Farah
2e96a28270 adding agents file 2025-12-08 20:30:47 +11:00
Mike Farah
656f07d0c2 wip 2025-12-08 20:30:47 +11:00
Mike Farah
1852073f29 hcl - sorted decoding 2025-12-08 20:30:47 +11:00
Mike Farah
7d2c774e8f maps and strings 2025-12-08 20:30:47 +11:00
Mike Farah
69076dfe81 wip 2025-12-08 20:30:47 +11:00
Mike Farah
9e17cd683f First cut 2025-12-08 20:30:47 +11:00
Mike Farah
eb3d0e63e3 Fixing handling of CRLF #2352 2025-12-06 19:08:37 +11:00
dependabot[bot]
2072808def Bump golang from 1.25.4 to 1.25.5
Bumps golang from 1.25.4 to 1.25.5.

---
updated-dependencies:
- dependency-name: golang
  dependency-version: 1.25.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-06 14:29:15 +11:00
dependabot[bot]
7d47b36b69 Bump github.com/spf13/cobra from 1.10.1 to 1.10.2
Bumps [github.com/spf13/cobra](https://github.com/spf13/cobra) from 1.10.1 to 1.10.2.
- [Release notes](https://github.com/spf13/cobra/releases)
- [Commits](https://github.com/spf13/cobra/compare/v1.10.1...v1.10.2)

---
updated-dependencies:
- dependency-name: github.com/spf13/cobra
  dependency-version: 1.10.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-06 14:29:08 +11:00
dependabot[bot]
53f10ae360 Bump github.com/goccy/go-yaml from 1.18.0 to 1.19.0
Bumps [github.com/goccy/go-yaml](https://github.com/goccy/go-yaml) from 1.18.0 to 1.19.0.
- [Release notes](https://github.com/goccy/go-yaml/releases)
- [Changelog](https://github.com/goccy/go-yaml/blob/master/CHANGELOG.md)
- [Commits](https://github.com/goccy/go-yaml/compare/v1.18.0...v1.19.0)

---
updated-dependencies:
- dependency-name: github.com/goccy/go-yaml
  dependency-version: 1.19.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-06 14:29:01 +11:00
dependabot[bot]
22510ab8d5 Bump actions/checkout from 5 to 6
Bumps [actions/checkout](https://github.com/actions/checkout) from 5 to 6.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v5...v6)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-06 14:28:39 +11:00
Alexander
588d0bb3dd Bumped to core24 and removed riscv64 2025-11-26 09:31:58 +11:00
Mike Farah
7ccaf8e700 Bumping version 2025-11-25 10:45:39 +11:00
Mike Farah
a1a27b8536 Updating release notes 2025-11-25 10:45:35 +11:00
Mike Farah
1b91fc63ea Removing escape char processing from strenv #2517 2025-11-25 10:44:03 +11:00
Mike Farah
9e0c5fd3c9 Fixing escape characters again 😢 #2517 2025-11-25 10:17:43 +11:00
Alexander
5d0481c0d2 Running build step on launchpad remote builder with supported architectures 2025-11-25 08:55:36 +11:00
Alexander
f91176a204 Fixed architecture builders 2025-11-25 08:55:36 +11:00
Mike Farah
8e86bdb876 Attempting to fix snap again 2025-11-22 18:33:44 +11:00
Mike Farah
fc164ca9c3 Updating README with latest yq help 2025-11-22 15:04:39 +11:00
Mike Farah
810e9d921e Syncing how-it-works from gitbook branch 2025-11-22 15:02:03 +11:00
107 changed files with 7165 additions and 432 deletions

View File

@ -34,13 +34,13 @@ The command you ran:
yq eval-all 'select(fileIndex==0) | .a.b.c' data1.yml data2.yml
```
**Actual behavior**
**Actual behaviour**
```yaml
cat: meow
```
**Expected behavior**
**Expected behaviour**
```yaml
this: should really work

1
.github/instructions/instructions.md vendored Normal file
View File

@ -0,0 +1 @@
When you find a bug - make sure to include a new test that exposes the bug, as well as the fix for the bug itself.

View File

@ -38,7 +38,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL

View File

@ -14,16 +14,16 @@ jobs:
IMAGE_NAME: mikefarah/yq
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@v4
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v4
with:
version: latest
@ -31,13 +31,13 @@ jobs:
run: echo ${{ steps.buildx.outputs.platforms }} && docker version
- name: Login to Docker Hub
uses: docker/login-action@v3
uses: docker/login-action@v4
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v4
with:
registry: ghcr.io
username: ${{ github.actor }}

View File

@ -11,13 +11,13 @@ jobs:
steps:
- name: Set up Go
uses: actions/setup-go@v5
uses: actions/setup-go@v6
with:
go-version: '^1.20'
id: go
- name: Check out code into the Go module directory
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Get dependencies
run: |

View File

@ -9,8 +9,8 @@ jobs:
publishGitRelease:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/setup-go@v5
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version: '^1.20'
check-latest: true

View File

@ -12,12 +12,16 @@ jobs:
environment: snap
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: snapcore/action-build@v1
id: build
env:
SNAPCRAFT_STORE_CREDENTIALS: ${{ secrets.STORE_LOGIN }}
with:
snapcraft-args: "remote-build --launchpad-accept-public-upload"
- uses: snapcore/action-publish@v1
env:
SNAPCRAFT_STORE_CREDENTIALS: ${{ secrets.STORE_LOGIN }}
with:
snap: ${{ steps.build.outputs.snap }}
release: stable
release: stable

View File

@ -13,7 +13,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- name: Get test
id: get_value
uses: mikefarah/yq@master

6
.gitignore vendored
View File

@ -43,9 +43,11 @@ yq*.snap
test.yml
test*.yml
test*.tf
test*.xml
test*.toml
test*.yaml
*.kyaml
test_dir1/
test_dir2/
0.yml
@ -68,3 +70,7 @@ debian/files
.vscode
yq3
# Golang
.gomodcache/
.gocache/

View File

@ -14,6 +14,11 @@ linters:
- unconvert
- unparam
settings:
misspell:
locale: UK
ignore-rules:
- color
- colors
depguard:
rules:
prevent_unmaintained_packages:

View File

@ -39,7 +39,6 @@ builds:
- openbsd_amd64
- windows_386
- windows_amd64
- windows_arm
- windows_arm64
no_unique_dist_dir: true

View File

@ -11,7 +11,7 @@ appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
Examples of behaviour that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
@ -20,7 +20,7 @@ include:
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
Examples of unacceptable behaviour by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
@ -34,13 +34,13 @@ Examples of unacceptable behavior by participants include:
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
behaviour and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behaviour.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
permanently any contributor for other behaviours that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
@ -54,7 +54,7 @@ further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
Instances of abusive, harassing, or otherwise unacceptable behaviour may be
reported by contacting the project team at mikefarah@gmail.com. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is

View File

@ -197,6 +197,21 @@ Note: PRs with small changes (e.g. minor typos) may not be merged (see https://j
make [local] test # Run in Docker container
```
- **Problem**: Tests fail with a VCS error:
```bash
error obtaining VCS status: exit status 128
Use -buildvcs=false to disable VCS stamping.
```
- **Solution**:
Git security mechanisms prevent Golang from detecting the Git details inside
the container; either build with the `local` option, or pass GOFLAGS to
disable Golang buildvcs behaviour.
```bash
make local test
# OR
make test GOFLAGS='-buildvcs=false'
```
### Documentation Generation Issues
- **Problem**: Generated docs don't update after test changes
- **Solution**:

View File

@ -1,4 +1,4 @@
FROM golang:1.25.4 AS builder
FROM golang:1.26.1 AS builder
WORKDIR /go/src/mikefarah/yq

View File

@ -1,4 +1,4 @@
FROM golang:1.25.4
FROM golang:1.26.1
RUN apt-get update && \
apt-get install -y npm && \

View File

@ -35,6 +35,7 @@ clean:
## prefix before other make targets to run in your local dev environment
local: | quiet
@$(eval ENGINERUN= )
@$(eval GOFLAGS="$(GOFLAGS)" )
@mkdir -p tmp
@touch tmp/dev_image_id
quiet: # this is silly but shuts up 'Nothing to be done for `local`'

View File

@ -4,6 +4,7 @@ IMPORT_PATH := github.com/mikefarah/${PROJECT}
export GIT_COMMIT = $(shell git rev-parse --short HEAD)
export GIT_DIRTY = $(shell test -n "$$(git status --porcelain)" && echo "+CHANGES" || true)
export GIT_DESCRIBE = $(shell git describe --tags --always)
GOFLAGS :=
LDFLAGS :=
LDFLAGS += -X main.GitCommit=${GIT_COMMIT}${GIT_DIRTY}
LDFLAGS += -X main.GitDescribe=${GIT_DESCRIBE}
@ -26,13 +27,15 @@ ifeq ($(CYG_CHECK),1)
else
# all non-windows environments
ROOT := $(shell pwd)
SELINUX := $(shell which getenforce 2>&1 >/dev/null && echo :z)
# Deliberately use `command -v` instead of `which` to be POSIX compliant
SELINUX := $(shell command -v getenforce >/dev/null 2>&1 && echo :z)
endif
DEV_IMAGE := ${PROJECT}_dev
ENGINERUN := ${ENGINE} run --rm \
-e LDFLAGS="${LDFLAGS}" \
-e GOFLAGS="${GOFLAGS}" \
-e GITHUB_TOKEN="${GITHUB_TOKEN}" \
-v ${ROOT}/vendor:/go/src${SELINUX} \
-v ${ROOT}:/${PROJECT}/src/${IMPORT_PATH}${SELINUX} \

103
README.md
View File

@ -3,7 +3,7 @@
![Build](https://github.com/mikefarah/yq/workflows/Build/badge.svg) ![Docker Pulls](https://img.shields.io/docker/pulls/mikefarah/yq.svg) ![Github Releases (by Release)](https://img.shields.io/github/downloads/mikefarah/yq/total.svg) ![Go Report](https://goreportcard.com/badge/github.com/mikefarah/yq) ![CodeQL](https://github.com/mikefarah/yq/workflows/CodeQL/badge.svg)
A lightweight and portable command-line YAML, JSON, INI and XML processor. `yq` uses [jq](https://github.com/stedolan/jq) (a popular JSON processor) like syntax but works with yaml files as well as json, xml, ini, properties, csv and tsv. It doesn't yet support everything `jq` does - but it does support the most common operations and functions, and more is being added continuously.
A lightweight and portable command-line YAML, JSON, INI and XML processor. `yq` uses [jq](https://github.com/stedolan/jq) (a popular JSON processor) like syntax but works with yaml files as well as json, kyaml, xml, ini, properties, csv and tsv. It doesn't yet support everything `jq` does - but it does support the most common operations and functions, and more is being added continuously.
yq is written in Go - so you can download a dependency free binary for your platform and you are good to go! If you prefer there are a variety of package managers that can be used as well as Docker and Podman, all listed below.
@ -363,6 +363,8 @@ gah install yq
- [Load content from other files](https://mikefarah.gitbook.io/yq/operators/load)
- [Convert to/from json/ndjson](https://mikefarah.gitbook.io/yq/v/v4.x/usage/convert)
- [Convert to/from xml](https://mikefarah.gitbook.io/yq/v/v4.x/usage/xml)
- [Convert to/from hcl (terraform)](https://mikefarah.gitbook.io/yq/v/v4.x/usage/hcl)
- [Convert to/from toml](https://mikefarah.gitbook.io/yq/v/v4.x/usage/toml)
- [Convert to/from properties](https://mikefarah.gitbook.io/yq/v/v4.x/usage/properties)
- [Convert to/from csv/tsv](https://mikefarah.gitbook.io/yq/usage/csv-tsv)
- [General shell completion scripts (bash/zsh/fish/powershell)](https://mikefarah.gitbook.io/yq/v/v4.x/commands/shell-completion)
@ -380,10 +382,18 @@ Usage:
Examples:
# yq defaults to 'eval' command if no command is specified. See "yq eval --help" for more examples.
yq '.stuff' < myfile.yml # outputs the data at the "stuff" node from "myfile.yml"
# yq tries to auto-detect the file format based off the extension, and defaults to YAML if it's unknown (or piping through STDIN)
# Use the '-p/--input-format' flag to specify a format type.
cat file.xml | yq -p xml
yq -i '.stuff = "foo"' myfile.yml # update myfile.yml in place
# read the "stuff" node from "myfile.yml"
yq '.stuff' < myfile.yml
# update myfile.yml in place
yq -i '.stuff = "foo"' myfile.yml
# print contents of sample.json as idiomatic YAML
yq -P -oy sample.json
Available Commands:
@ -393,46 +403,51 @@ Available Commands:
help Help about any command
Flags:
-C, --colors force print with colors
--csv-auto-parse parse CSV YAML/JSON values (default true)
--csv-separator char CSV Separator character (default ,)
-e, --exit-status set exit status if there are no matches or null or false is returned
--expression string forcibly set the expression argument. Useful when yq argument detection thinks your expression is a file.
--from-file string Load expression from specified file.
-f, --front-matter string (extract|process) first input as yaml front-matter. Extract will pull out the yaml content, process will run the expression against the yaml content, leaving the remaining data intact
--header-preprocess Slurp any header comments and separators before processing expression. (default true)
-h, --help help for yq
-I, --indent int sets indent level for output (default 2)
-i, --inplace update the file in place of first file given.
-p, --input-format string [auto|a|yaml|y|json|j|props|p|csv|c|tsv|t|xml|x|base64|uri|toml|lua|l|ini|i] parse format for input. (default "auto")
--lua-globals output keys as top-level global variables
--lua-prefix string prefix (default "return ")
--lua-suffix string suffix (default ";\n")
--lua-unquoted output unquoted string keys (e.g. {foo="bar"})
-M, --no-colors force print with no colors
-N, --no-doc Don't print document separators (---)
-0, --nul-output Use NUL char to separate values. If unwrap scalar is also set, fail if unwrapped scalar contains NUL char.
-n, --null-input Don't read input, simply evaluate the expression given. Useful for creating docs from scratch.
-o, --output-format string [auto|a|yaml|y|json|j|props|p|csv|c|tsv|t|xml|x|base64|uri|toml|shell|s|lua|l|ini|i] output format type. (default "auto")
-P, --prettyPrint pretty print, shorthand for '... style = ""'
--properties-array-brackets use [x] in array paths (e.g. for SpringBoot)
--properties-separator string separator to use between keys and values (default " = ")
-s, --split-exp string print each result (or doc) into a file named (exp). [exp] argument must return a string. You can use $index in the expression as the result counter. The necessary directories will be created.
--split-exp-file string Use a file to specify the split-exp expression.
--string-interpolation Toggles strings interpolation of \(exp) (default true)
--tsv-auto-parse parse TSV YAML/JSON values (default true)
-r, --unwrapScalar unwrap scalar, print the value with no quotes, colors or comments. Defaults to true for yaml (default true)
-v, --verbose verbose mode
-V, --version Print version information and quit
--xml-attribute-prefix string prefix for xml attributes (default "+@")
--xml-content-name string name for xml content (if no attribute name is present). (default "+content")
--xml-directive-name string name for xml directives (e.g. <!DOCTYPE thing cat>) (default "+directive")
--xml-keep-namespace enables keeping namespace after parsing attributes (default true)
--xml-proc-inst-prefix string prefix for xml processing instructions (e.g. <?xml version="1"?>) (default "+p_")
--xml-raw-token enables using RawToken method instead Token. Commonly disables namespace translations. See https://pkg.go.dev/encoding/xml#Decoder.RawToken for details. (default true)
--xml-skip-directives skip over directives (e.g. <!DOCTYPE thing cat>)
--xml-skip-proc-inst skip over process instructions (e.g. <?xml version="1"?>)
--xml-strict-mode enables strict parsing of XML. See https://pkg.go.dev/encoding/xml for more details.
-C, --colors force print with colors
--csv-auto-parse parse CSV YAML/JSON values (default true)
--csv-separator char CSV Separator character (default ,)
--debug-node-info debug node info
-e, --exit-status set exit status if there are no matches or null or false is returned
--expression string forcibly set the expression argument. Useful when yq argument detection thinks your expression is a file.
--from-file string Load expression from specified file.
-f, --front-matter string (extract|process) first input as yaml front-matter. Extract will pull out the yaml content, process will run the expression against the yaml content, leaving the remaining data intact
--header-preprocess Slurp any header comments and separators before processing expression. (default true)
-h, --help help for yq
-I, --indent int sets indent level for output (default 2)
-i, --inplace update the file in place of first file given.
-p, --input-format string [auto|a|yaml|y|json|j|kyaml|ky|props|p|csv|c|tsv|t|xml|x|base64|uri|toml|hcl|h|lua|l|ini|i] parse format for input. (default "auto")
--lua-globals output keys as top-level global variables
--lua-prefix string prefix (default "return ")
--lua-suffix string suffix (default ";\n")
--lua-unquoted output unquoted string keys (e.g. {foo="bar"})
-M, --no-colors force print with no colors
-N, --no-doc Don't print document separators (---)
-0, --nul-output Use NUL char to separate values. If unwrap scalar is also set, fail if unwrapped scalar contains NUL char.
-n, --null-input Don't read input, simply evaluate the expression given. Useful for creating docs from scratch.
-o, --output-format string [auto|a|yaml|y|json|j|kyaml|ky|props|p|csv|c|tsv|t|xml|x|base64|uri|toml|hcl|h|shell|s|lua|l|ini|i] output format type. (default "auto")
-P, --prettyPrint pretty print, shorthand for '... style = ""'
--properties-array-brackets use [x] in array paths (e.g. for SpringBoot)
--properties-separator string separator to use between keys and values (default " = ")
--security-disable-env-ops Disable env related operations.
--security-disable-file-ops Disable file related operations (e.g. load)
--shell-key-separator string separator for shell variable key paths (default "_")
-s, --split-exp string print each result (or doc) into a file named (exp). [exp] argument must return a string. You can use $index in the expression as the result counter. The necessary directories will be created.
--split-exp-file string Use a file to specify the split-exp expression.
--string-interpolation Toggles strings interpolation of \(exp) (default true)
--tsv-auto-parse parse TSV YAML/JSON values (default true)
-r, --unwrapScalar unwrap scalar, print the value with no quotes, colors or comments. Defaults to true for yaml (default true)
-v, --verbose verbose mode
-V, --version Print version information and quit
--xml-attribute-prefix string prefix for xml attributes (default "+@")
--xml-content-name string name for xml content (if no attribute name is present). (default "+content")
--xml-directive-name string name for xml directives (e.g. <!DOCTYPE thing cat>) (default "+directive")
--xml-keep-namespace enables keeping namespace after parsing attributes (default true)
--xml-proc-inst-prefix string prefix for xml processing instructions (e.g. <?xml version="1"?>) (default "+p_")
--xml-raw-token enables using RawToken method instead Token. Commonly disables namespace translations. See https://pkg.go.dev/encoding/xml#Decoder.RawToken for details. (default true)
--xml-skip-directives skip over directives (e.g. <!DOCTYPE thing cat>)
--xml-skip-proc-inst skip over process instructions (e.g. <?xml version="1"?>)
--xml-strict-mode enables strict parsing of XML. See https://pkg.go.dev/encoding/xml for more details.
--yaml-fix-merge-anchor-to-spec Fix merge anchor to match YAML spec. Will default to true in late 2025
Use "yq [command] --help" for more information about a command.
```

View File

@ -6,6 +6,7 @@ setUp() {
rm test*.csv 2>/dev/null || true
rm test*.tsv 2>/dev/null || true
rm test*.xml 2>/dev/null || true
rm test*.tf 2>/dev/null || true
}
testInputProperties() {
@ -153,6 +154,37 @@ EOM
assertEquals "$expected" "$X"
}
testInputKYaml() {
cat >test.kyaml <<'EOL'
# leading
{
a: 1, # a line
# head b
b: 2,
c: [
# head d
"d", # d line
],
}
EOL
read -r -d '' expected <<'EOM'
# leading
a: 1 # a line
# head b
b: 2
c:
# head d
- d # d line
EOM
X=$(./yq e -p=kyaml -P test.kyaml)
assertEquals "$expected" "$X"
X=$(./yq ea -p=kyaml -P test.kyaml)
assertEquals "$expected" "$X"
}
@ -255,4 +287,61 @@ EOM
assertEquals "$expected" "$X"
}
source ./scripts/shunit2
testInputTerraform() {
cat >test.tf <<EOL
resource "aws_s3_bucket" "example" {
bucket = "my-bucket"
tags = {
Environment = "Dev"
Project = "Test"
}
}
EOL
read -r -d '' expected << EOM
resource "aws_s3_bucket" "example" {
bucket = "my-bucket"
tags = {
Environment = "Dev"
Project = "Test"
}
}
EOM
X=$(./yq test.tf)
assertEquals "$expected" "$X"
X=$(./yq ea test.tf)
assertEquals "$expected" "$X"
}
testInputTerraformGithubAction() {
cat >test.tf <<EOL
resource "aws_s3_bucket" "example" {
bucket = "my-bucket"
tags = {
Environment = "Dev"
Project = "Test"
}
}
EOL
read -r -d '' expected << EOM
resource "aws_s3_bucket" "example" {
bucket = "my-bucket"
tags = {
Environment = "Dev"
Project = "Test"
}
}
EOM
X=$(cat /dev/null | ./yq test.tf)
assertEquals "$expected" "$X"
X=$(cat /dev/null | ./yq ea test.tf)
assertEquals "$expected" "$X"
}
source ./scripts/shunit2

View File

@ -280,6 +280,55 @@ EOM
assertEquals "$expected" "$X"
}
testOutputKYaml() {
cat >test.yml <<'EOL'
# leading
a: 1 # a line
# head b
b: 2
c:
# head d
- d # d line
EOL
read -r -d '' expected <<'EOM'
# leading
{
a: 1, # a line
# head b
b: 2,
c: [
# head d
"d", # d line
],
}
EOM
X=$(./yq e --output-format=kyaml test.yml)
assertEquals "$expected" "$X"
X=$(./yq ea --output-format=kyaml test.yml)
assertEquals "$expected" "$X"
}
testOutputKYamlShort() {
cat >test.yml <<EOL
a: b
EOL
read -r -d '' expected <<'EOM'
{
a: "b",
}
EOM
X=$(./yq e -o=ky test.yml)
assertEquals "$expected" "$X"
X=$(./yq ea -o=ky test.yml)
assertEquals "$expected" "$X"
}
testOutputXmComplex() {
cat >test.yml <<EOL
a: {b: {c: ["cat", "dog"], +@f: meow}}

422
agents.md Normal file
View File

@ -0,0 +1,422 @@
# General rules
✅ **DO:**
- You can use ./yq with the `--debug-node-info` flag to get a deeper understanding of the ast.
- run ./scripts/format.sh to format the code; then ./scripts/check.sh to lint; and finally ./scripts/spelling.sh to check spelling.
- Add comprehensive tests to cover the changes
- Run test suite to ensure there is no regression
- Use UK english spelling
❌ **DON'T:**
- Git add or commit
- Add comments to functions that are self-explanatory
# Adding a New Encoder/Decoder
This guide explains how to add support for a new format (encoder/decoder) to yq without modifying `candidate_node.go`.
## Overview
The encoder/decoder architecture in yq is based on two main interfaces:
- **Encoder**: Converts a `CandidateNode` to output in a specific format
- **Decoder**: Reads input in a specific format and creates a `CandidateNode`
Each format is registered in `pkg/yqlib/format.go` and made available through factory functions.
## Architecture
### Key Files
- `pkg/yqlib/encoder.go` - Defines the `Encoder` interface
- `pkg/yqlib/decoder.go` - Defines the `Decoder` interface
- `pkg/yqlib/format.go` - Format registry and factory functions
- `pkg/yqlib/operator_encoder_decoder.go` - Encode/decode operators
- `pkg/yqlib/encoder_*.go` - Encoder implementations
- `pkg/yqlib/decoder_*.go` - Decoder implementations
### Interfaces
**Encoder Interface:**
```go
type Encoder interface {
Encode(writer io.Writer, node *CandidateNode) error
PrintDocumentSeparator(writer io.Writer) error
PrintLeadingContent(writer io.Writer, content string) error
CanHandleAliases() bool
}
```
**Decoder Interface:**
```go
type Decoder interface {
Init(reader io.Reader) error
Decode() (*CandidateNode, error)
}
```
## Step-by-Step: Adding a New Encoder/Decoder
### Step 1: Create the Encoder File
Create `pkg/yqlib/encoder_<format>.go` implementing the `Encoder` interface:
- `Encode()` - Convert a `CandidateNode` to your format and write to the output writer
- `PrintDocumentSeparator()` - Handle document separators if your format requires them
- `PrintLeadingContent()` - Handle leading content/comments if supported
- `CanHandleAliases()` - Return whether your format supports YAML aliases
See `encoder_json.go` or `encoder_base64.go` for examples.
### Step 2: Create the Decoder File
Create `pkg/yqlib/decoder_<format>.go` implementing the `Decoder` interface:
- `Init()` - Initialize the decoder with the input reader and set up any needed state
- `Decode()` - Decode one document from the input and return a `CandidateNode`, or `io.EOF` when finished
See `decoder_json.go` or `decoder_base64.go` for examples.
### Step 3: Create Tests (Mandatory)
Create a test file `pkg/yqlib/<format>_test.go` using the `formatScenario` pattern:
- Define test scenarios as `formatScenario` structs with fields: `description`, `input`, `expected`, `scenarioType`
- `scenarioType` can be `"decode"` (test decoding to YAML) or `"roundtrip"` (encode/decode preservation)
- Create a helper function `test<Format>Scenario()` that switches on `scenarioType`
- Create main test function `Test<Format>FormatScenarios()` that iterates over scenarios
- The main test function should use `documentScenarios` to ensure testcase documentation is generated.
Test coverage must include:
- Basic data types (scalars, arrays, objects/maps)
- Nested structures
- Edge cases (empty inputs, special characters, escape sequences)
- Format-specific features or syntax
- Round-trip tests: decode → encode → decode should preserve data
See `hcl_test.go` for a complete example.
### Step 4: Register the Format in format.go
Edit `pkg/yqlib/format.go`:
1. Add a new format variable:
- `"<format>"` is the formal name (e.g., "json", "yaml")
- `[]string{...}` contains short aliases (can be empty)
- The first function creates an encoder (can be nil for encode-only formats)
- The second function creates a decoder (can be nil for decode-only formats)
2. Add the format to the `Formats` slice in the same file
See existing formats in `format.go` for the exact structure.
### Step 5: Handle Encoder Configuration (if needed)
If your format has preferences/configuration options:
1. Create a preferences struct with your configuration fields
2. Update the encoder to accept preferences in its factory function
3. Update `format.go` to pass the configured preferences
4. Update `operator_encoder_decoder.go` if special indent handling is needed (see existing formats like JSON and YAML for the pattern)
This pattern is optional and only needed if your format has user-configurable options.
## Build Tags
Use build tags to allow optional compilation of formats:
- Add `//go:build !yq_no<format>` at the top of your encoder and decoder files
- Create a no-build version in `pkg/yqlib/no_<format>.go` that returns nil for encoder/decoder factories
This allows users to compile yq without certain formats using: `go build -tags yq_no<format>`
## Working with CandidateNode
The `CandidateNode` struct represents a YAML node with:
- `Kind`: The node type (ScalarNode, SequenceNode, MappingNode)
- `Tag`: The YAML tag (e.g., "!!str", "!!int", "!!map")
- `Value`: The scalar value (for ScalarNode only)
- `Content`: Child nodes (for SequenceNode and MappingNode)
Key methods:
- `node.guessTagFromCustomType()` - Infer the tag from Go type
- `node.AsList()` - Convert to a list for processing
- `node.CreateReplacement()` - Create a new replacement node
- `NewCandidate()` - Create a new CandidateNode
## Key Points
✅ **DO:**
- Implement only the `Encoder` and `Decoder` interfaces
- Register your format in `format.go` only
- Keep format-specific logic in your encoder/decoder files
- Use the candidate_node style attribute to store style information for round-trip. Ask if this needs to be updated with new styles.
- Use build tags for optional compilation
- Add comprehensive tests
- Run the specific encoder/decoder test (e.g. <format>_test.go) whenever you make any changes to the encoder_<format> or decoder_<format>
- Handle errors gracefully
- Add the no-build directive, like the xml encoder and decoder, that enables minimal yq builds, e.g. `//go:build !yq_no<format>`. Be sure to also update the build_small-yq.sh and build-tinygo-yq.sh to not include the new format.
❌ **DON'T:**
- Modify `candidate_node.go` to add format-specific logic
- Add format-specific fields to `CandidateNode`
- Create special cases in core navigation or evaluation logic
- Bypass the encoder/decoder interfaces
- Use candidate_node tag attribute for anything other than indicate the data type
## Examples
Refer to existing format implementations for patterns:
- **Simple encoder/decoder**: `encoder_json.go`, `decoder_json.go`
- **Complex with preferences**: `encoder_yaml.go`, `decoder_yaml.go`
- **Encoder-only**: `encoder_sh.go` (ShFormat has nil decoder)
- **String-only operations**: `encoder_base64.go`, `decoder_base64.go`
## Testing Your Implementation (Mandatory)
Tests must be implemented in `<format>_test.go` following the `formatScenario` pattern:
1. **Create test scenarios** using the `formatScenario` struct with fields:
- `description`: Brief description of what's being tested
- `input`: Sample input in your format
- `expected`: Expected output (typically in YAML for decode tests)
- `scenarioType`: Either `"decode"` or `"roundtrip"`
2. **Test coverage must include:**
- Basic data types (scalars, arrays, objects/maps)
- Nested structures
- Edge cases (empty inputs, special characters, escape sequences)
- Format-specific features or syntax
- Round-trip tests: decode → encode → decode should preserve data
3. **Test function pattern:**
- `test<Format>Scenario()`: Helper function that switches on `scenarioType`
- `Test<Format>FormatScenarios()`: Main test function that iterates over scenarios
4. **Example from existing formats:**
- See `hcl_test.go` for a complete example
- See `yaml_test.go` for YAML-specific patterns
- See `json_test.go` for more complex scenarios
## Common Patterns
### Format with Indentation
Use preferences to control output formatting:
```go
type <format>Preferences struct {
Indent int
}
func (prefs *<format>Preferences) Copy() <format>Preferences {
return *prefs
}
```
### Multiple Documents
Decoders should support reading multiple documents:
```go
func (dec *<format>Decoder) Decode() (*CandidateNode, error) {
if dec.finished {
return nil, io.EOF
}
// ... decode next document ...
if noMoreDocuments {
dec.finished = true
}
return candidate, nil
}
```
---
# Adding a New Operator
This guide explains how to add a new operator to yq. Operators are the core of yq's expression language and process `CandidateNode` objects without requiring modifications to `candidate_node.go` itself.
## Overview
Operators transform data by implementing a handler function that processes a `Context` containing `CandidateNode` objects. Each operator is:
1. Defined as an `operationType` in `operation.go`
2. Registered in the lexer in `lexer_participle.go`
3. Implemented in its own `operator_<type>.go` file
4. Tested in `operator_<type>_test.go`
5. Documented in `pkg/yqlib/doc/operators/headers/<type>.md`
## Architecture
### Key Files
- `pkg/yqlib/operation.go` - Defines `operationType` and operator registry
- `pkg/yqlib/lexer_participle.go` - Registers operators with their syntax patterns
- `pkg/yqlib/operator_<type>.go` - Operator implementation
- `pkg/yqlib/operator_<type>_test.go` - Operator tests using `expressionScenario`
- `pkg/yqlib/doc/operators/headers/<type>.md` - Documentation header
### Core Types
**operationType:**
```go
type operationType struct {
Type string // Unique operator name (e.g., "REVERSE")
NumArgs uint // Number of arguments (0 for no args)
Precedence uint // Operator precedence (higher = higher precedence)
Handler operatorHandler // The function that executes the operator
CheckForPostTraverse bool // Whether to apply post-traversal logic
ToString func(*Operation) string // Custom string representation
}
```
**operatorHandler signature:**
```go
type operatorHandler func(*dataTreeNavigator, Context, *ExpressionNode) (Context, error)
```
**expressionScenario for tests:**
```go
type expressionScenario struct {
description string
subdescription string
document string
expression string
expected []string
skipDoc bool
expectedError string
}
```
## Step-by-Step: Adding a New Operator
### Step 1: Create the Operator Implementation File
Create `pkg/yqlib/operator_<type>.go` implementing the operator handler function:
- Implement the `operatorHandler` function signature
- Process nodes from `context.MatchingNodes`
- Return a new `Context` with results using `context.ChildContext()`
- Use `candidate.CreateReplacement()` or `candidate.CreateReplacementWithComments()` to create new nodes
- Handle errors gracefully with meaningful error messages
See `operator_reverse.go` or `operator_keys.go` for examples.
### Step 2: Register the Operator in operation.go
Add the operator type definition to `pkg/yqlib/operation.go`:
```go
var <type>OpType = &operationType{
Type: "<TYPE>", // All caps, matches pattern in lexer
NumArgs: 0, // 0 for no args, 1+ for args
Precedence: 50, // Typical range: 40-55
Handler: <type>Operator, // Reference to handler function
}
```
**Precedence guidelines:**
- 10-20: Logical operators (OR, AND, UNION)
- 30: Pipe operator
- 40: Assignment and comparison operators
- 42: Arithmetic operators (ADD, SUBTRACT, MULTIPLY, DIVIDE)
- 50-52: Most other operators
- 55: High precedence (e.g., GET_VARIABLE)
**Optional fields:**
- `CheckForPostTraverse: true` - If your operator can have another directly after it without the pipe character. Most of the time this is false.
- `ToString: customToString` - Custom string representation (rarely needed)
### Step 3: Register the Operator in lexer_participle.go
Edit `pkg/yqlib/lexer_participle.go` to add the operator to the lexer rules:
- Use `simpleOp()` for simple keyword patterns
- Use object syntax for regex patterns or complex syntax
- Support optional characters with `_?` and aliases with `|`
See existing operators in `lexer_participle.go` for pattern examples.
### Step 4: Create Tests (Mandatory)
Create `pkg/yqlib/operator_<type>_test.go` using the `expressionScenario` pattern:
- Define test scenarios with `description`, `document`, `expression`, and `expected` fields
- `expected` is a slice of strings showing output format: `"D<doc>, P[<path>], (<tag>)::<value>\n"`
- Set `skipDoc: true` for edge cases you don't want in generated documentation
- Include `subdescription` for longer test names
- Set `expectedError` if testing error cases
- Create main test function that iterates over scenarios
- The main test function should use `documentScenarios` to ensure testcase documentation is generated.
Test coverage must include:
- Basic data types and nested structures
- Edge cases (empty inputs, special characters, type errors)
- Multiple outputs if applicable
- Format-specific features
See `operator_reverse_test.go` for a simple example and `operator_keys_test.go` for complex cases.
### Step 5: Create Documentation Header
Create `pkg/yqlib/doc/operators/headers/<type>.md`:
- Use the exact operator name as the title
- Include a concise 1-2 sentence summary
- Add additional context or examples if the operator is complex
See existing headers in `doc/operators/headers/` for examples.
## Working with Context and CandidateNode
### Context Management
- `context.ChildContext(results)` - Create child context with results
- `context.GetVariable("varName")` - Get variables stored in context
- `context.SetVariable("varName", value)` - Set variables in context
### CandidateNode Operations
- `candidate.CreateReplacement(ScalarNode, "!!str", stringValue)` - Create a replacement node
- `candidate.CreateReplacementWithComments(SequenceNode, "!!seq", candidate.Style)` - With style preserved
- `candidate.Kind` - The node type (ScalarNode, SequenceNode, MappingNode)
- `candidate.Tag` - The YAML tag (!!str, !!int, etc.)
- `candidate.Value` - The scalar value (for ScalarNode only)
- `candidate.Content` - Child nodes (for SequenceNode and MappingNode)
- `candidate.guessTagFromCustomType()` - Infer the tag from Go type
- `candidate.AsList()` - Convert to a list representation
## Key Points
✅ **DO:**
- Implement the operator handler with the correct signature
- Register in `operation.go` with appropriate precedence
- Add the lexer pattern in `lexer_participle.go`
- Write comprehensive tests covering normal and edge cases
- Create a documentation header in `doc/operators/headers/`
- Use `Context.ChildContext()` for proper context threading
- Handle all node types gracefully
- Return meaningful error messages
❌ **DON'T:**
- Modify `candidate_node.go` (operators shouldn't need this)
- Modify core navigation or evaluation logic
- Bypass the handler function pattern
- Add format-specific or operator-specific fields to `CandidateNode`
- Skip tests or documentation
## Examples
Refer to existing operator implementations for patterns:
- **No-argument operator**: `operator_reverse.go` - Processes arrays/sequences
- **Single-argument operator**: `operator_map.go` - Takes an expression argument
- **Complex multi-output**: `operator_keys.go` - Produces multiple results
- **With preferences**: `operator_to_number.go` - Configuration options
- **Error handling**: `operator_error.go` - Control flow with errors
- **String operations**: `operator_strings.go` - Multiple related operators
## Testing Patterns
Refer to existing test files for specific patterns:
- Basic expression tests in `operator_reverse_test.go`
- Multi-output tests in `operator_keys_test.go`
- Error handling tests in `operator_error_test.go`
- Tests with `skipDoc` flag to exclude from generated documentation
## Common Patterns
Refer to existing operator implementations for these patterns:
- Simple transformation: see `operator_reverse.go`
- Type checking: see `operator_error.go`
- Working with arguments: see `operator_map.go`
- Post-traversal operators: see `operator_with.go`

View File

@ -60,7 +60,7 @@ func evaluateAll(cmd *cobra.Command, args []string) (cmdError error) {
out := cmd.OutOrStdout()
if writeInplace {
// only use colors if its forced
// only use colours if it's forced
colorsEnabled = forceColor
writeInPlaceHandler := yqlib.NewWriteInPlaceHandler(args[0])
out, err = writeInPlaceHandler.CreateTempFile()

View File

@ -74,7 +74,7 @@ func evaluateSequence(cmd *cobra.Command, args []string) (cmdError error) {
}
if writeInplace {
// only use colors if its forced
// only use colours if it's forced
colorsEnabled = forceColor
writeInPlaceHandler := yqlib.NewWriteInPlaceHandler(args[0])
out, err = writeInPlaceHandler.CreateTempFile()

View File

@ -184,7 +184,7 @@ yq -P -oy sample.json
}
rootCmd.Flags().BoolVarP(&version, "version", "V", false, "Print version information and quit")
rootCmd.PersistentFlags().BoolVarP(&writeInplace, "inplace", "i", false, "update the file in place of first file given.")
rootCmd.PersistentFlags().VarP(unwrapScalarFlag, "unwrapScalar", "r", "unwrap scalar, print the value with no quotes, colors or comments. Defaults to true for yaml")
rootCmd.PersistentFlags().VarP(unwrapScalarFlag, "unwrapScalar", "r", "unwrap scalar, print the value with no quotes, colours or comments. Defaults to true for yaml")
rootCmd.PersistentFlags().Lookup("unwrapScalar").NoOptDefVal = "true"
rootCmd.PersistentFlags().BoolVarP(&nulSepOutput, "nul-output", "0", false, "Use NUL char to separate values. If unwrap scalar is also set, fail if unwrapped scalar contains NUL char.")
@ -203,6 +203,7 @@ yq -P -oy sample.json
}
rootCmd.PersistentFlags().BoolVarP(&yqlib.ConfiguredYamlPreferences.LeadingContentPreProcessing, "header-preprocess", "", true, "Slurp any header comments and separators before processing expression.")
rootCmd.PersistentFlags().BoolVarP(&yqlib.ConfiguredYamlPreferences.FixMergeAnchorToSpec, "yaml-fix-merge-anchor-to-spec", "", false, "Fix merge anchor to match YAML spec. Will default to true in late 2025")
rootCmd.PersistentFlags().BoolVarP(&yqlib.ConfiguredYamlPreferences.CompactSequenceIndent, "yaml-compact-seq-indent", "c", false, "Use compact sequence indentation where '- ' is considered part of the indentation.")
rootCmd.PersistentFlags().StringVarP(&splitFileExp, "split-exp", "s", "", "print each result (or doc) into a file named (exp). [exp] argument must return a string. You can use $index in the expression as the result counter. The necessary directories will be created.")
if err = rootCmd.RegisterFlagCompletionFunc("split-exp", cobra.NoFileCompletions); err != nil {

View File

@ -166,6 +166,9 @@ func configureDecoder(evaluateTogether bool) (yqlib.Decoder, error) {
}
yqlib.ConfiguredYamlPreferences.EvaluateTogether = evaluateTogether
if format.DecoderFactory == nil {
return nil, fmt.Errorf("no support for %s input format", inputFormat)
}
yqlibDecoder := format.DecoderFactory()
if yqlibDecoder == nil {
return nil, fmt.Errorf("no support for %s input format", inputFormat)
@ -197,16 +200,23 @@ func configureEncoder() (yqlib.Encoder, error) {
}
yqlib.ConfiguredXMLPreferences.Indent = indent
yqlib.ConfiguredYamlPreferences.Indent = indent
yqlib.ConfiguredKYamlPreferences.Indent = indent
yqlib.ConfiguredJSONPreferences.Indent = indent
yqlib.ConfiguredYamlPreferences.UnwrapScalar = unwrapScalar
yqlib.ConfiguredKYamlPreferences.UnwrapScalar = unwrapScalar
yqlib.ConfiguredPropertiesPreferences.UnwrapScalar = unwrapScalar
yqlib.ConfiguredJSONPreferences.UnwrapScalar = unwrapScalar
yqlib.ConfiguredShellVariablesPreferences.UnwrapScalar = unwrapScalar
yqlib.ConfiguredYamlPreferences.ColorsEnabled = colorsEnabled
yqlib.ConfiguredKYamlPreferences.ColorsEnabled = colorsEnabled
yqlib.ConfiguredJSONPreferences.ColorsEnabled = colorsEnabled
yqlib.ConfiguredHclPreferences.ColorsEnabled = colorsEnabled
yqlib.ConfiguredTomlPreferences.ColorsEnabled = colorsEnabled
yqlib.ConfiguredYamlPreferences.PrintDocSeparators = !noDocSeparators
yqlib.ConfiguredKYamlPreferences.PrintDocSeparators = !noDocSeparators
encoder := yqlibOutputFormat.EncoderFactory()

View File

@ -911,7 +911,7 @@ func stringsEqual(a, b []string) bool {
return false
}
for i := range a {
if a[i] != b[i] {
if a[i] != b[i] { //nolint:gosec // G602 false positive: b length equality is checked above
return false
}
}
@ -926,13 +926,13 @@ func TestSetupColors(t *testing.T) {
expectColors bool
}{
{
name: "force color enabled",
name: "force colour enabled",
forceColor: true,
forceNoColor: false,
expectColors: true,
},
{
name: "force no color enabled",
name: "force no colour enabled",
forceColor: false,
forceNoColor: true,
expectColors: false,

View File

@ -11,7 +11,7 @@ var (
GitDescribe string
// Version is main version number that is being run at the moment.
Version = "v4.49.1"
Version = "v4.52.4"
// VersionPrerelease is a pre-release marker for the version. If this is "" (empty string)
// then it means that it is a final release. Otherwise, this is a pre-release

View File

@ -1,6 +1,9 @@
package cmd
import "testing"
import (
"strings"
"testing"
)
func TestGetVersionDisplay(t *testing.T) {
var expectedVersion = ProductName + " (https://github.com/mikefarah/yq/) version " + Version
@ -25,6 +28,18 @@ func TestGetVersionDisplay(t *testing.T) {
}
func Test_getHumanVersion(t *testing.T) {
// Save original values
origGitDescribe := GitDescribe
origGitCommit := GitCommit
origVersionPrerelease := VersionPrerelease
// Restore after test
defer func() {
GitDescribe = origGitDescribe
GitCommit = origGitCommit
VersionPrerelease = origVersionPrerelease
}()
GitDescribe = "e42813d"
GitCommit = "e42813d+CHANGES"
var wanted string
@ -49,3 +64,118 @@ func Test_getHumanVersion(t *testing.T) {
}
}
}
// Test_getHumanVersion_NoGitDescribe verifies that getHumanVersion falls
// back to the bare Version string when no git metadata is available.
func Test_getHumanVersion_NoGitDescribe(t *testing.T) {
	// Snapshot the package-level version variables so they can be restored.
	savedDescribe, savedCommit, savedPrerelease := GitDescribe, GitCommit, VersionPrerelease
	defer func() {
		GitDescribe, GitCommit, VersionPrerelease = savedDescribe, savedCommit, savedPrerelease
	}()

	GitDescribe = ""
	GitCommit = ""
	VersionPrerelease = ""

	if got := getHumanVersion(); got != Version {
		t.Errorf("getHumanVersion() = %v, want %v", got, Version)
	}
}
// Test_getHumanVersion_WithPrerelease verifies that an explicit prerelease
// marker and commit hash are appended when GitDescribe is unset.
func Test_getHumanVersion_WithPrerelease(t *testing.T) {
	// Snapshot the package-level version variables so they can be restored.
	savedDescribe, savedCommit, savedPrerelease := GitDescribe, GitCommit, VersionPrerelease
	defer func() {
		GitDescribe, GitCommit, VersionPrerelease = savedDescribe, savedCommit, savedPrerelease
	}()

	GitDescribe = ""
	GitCommit = "abc123"
	VersionPrerelease = "beta"

	expected := Version + "-beta (abc123)"
	if got := getHumanVersion(); got != expected {
		t.Errorf("getHumanVersion() = %v, want %v", got, expected)
	}
}
// Test_getHumanVersion_PrereleaseInVersion verifies that the prerelease
// marker is not appended a second time when GitDescribe already carries it.
func Test_getHumanVersion_PrereleaseInVersion(t *testing.T) {
	// Snapshot the package-level version variables so they can be restored.
	savedDescribe, savedCommit, savedPrerelease := GitDescribe, GitCommit, VersionPrerelease
	defer func() {
		GitDescribe, GitCommit, VersionPrerelease = savedDescribe, savedCommit, savedPrerelease
	}()

	GitDescribe = "v1.2.3-rc1"
	GitCommit = "xyz789"
	VersionPrerelease = "rc1"

	// Should not duplicate "rc1" since it's already in GitDescribe
	expected := "v1.2.3-rc1 (xyz789)"
	if got := getHumanVersion(); got != expected {
		t.Errorf("getHumanVersion() = %v, want %v", got, expected)
	}
}
// Test_getHumanVersion_StripSingleQuotes verifies that single quotes (as
// injected by some build-time ldflags) are stripped from the reported version.
func Test_getHumanVersion_StripSingleQuotes(t *testing.T) {
	// Snapshot the package-level version variables so they can be restored.
	savedDescribe, savedCommit, savedPrerelease := GitDescribe, GitCommit, VersionPrerelease
	defer func() {
		GitDescribe, GitCommit, VersionPrerelease = savedDescribe, savedCommit, savedPrerelease
	}()

	GitDescribe = "'v1.2.3'"
	GitCommit = "'commit123'"
	VersionPrerelease = ""

	got := getHumanVersion()
	// Should strip single quotes
	if strings.Contains(got, "'") {
		t.Errorf("getHumanVersion() = %v, should not contain single quotes", got)
	}
	expected := "v1.2.3"
	if got != expected {
		t.Errorf("getHumanVersion() = %v, want %v", got, expected)
	}
}
// TestProductName pins the product name reported in version output.
func TestProductName(t *testing.T) {
	const want = "yq"
	if ProductName != want {
		t.Errorf("ProductName = %v, want yq", ProductName)
	}
}
// TestVersionIsSet ensures the baked-in Version is non-empty and follows
// the "vX.Y.Z" naming convention.
func TestVersionIsSet(t *testing.T) {
	if len(Version) == 0 {
		t.Error("Version should not be empty")
	}
	if !strings.HasPrefix(Version, "v") {
		t.Errorf("Version %v should start with 'v'", Version)
	}
}

View File

@ -1,3 +1 @@
[[fruits]]
[[fruits.varieties]] # nested array of tables
name = "red delicious
a: apple

10
examples/kyaml.kyaml Normal file
View File

@ -0,0 +1,10 @@
# leading
{
a: 1, # a line
# head b
b: 2,
c: [
# head d
"d", # d line
],
}

7
examples/kyaml.yml Normal file
View File

@ -0,0 +1,7 @@
# leading
a: 1 # a line
# head b
b: 2
c:
# head d
- d # d line

8
examples/sample.hcl Normal file
View File

@ -0,0 +1,8 @@
# Arithmetic with literals and application-provided variables
sum = 1 + addend
# String interpolation and templates
message = "Hello, ${name}!"
# Application-provided functions
shouty_message = upper(message)

27
examples/sample.tf Normal file
View File

@ -0,0 +1,27 @@
# main.tf
# Define required providers and minimum Terraform version
terraform {
required_providers {
aws = {
source = "hashicorp/aws"
version = "~> 5.0"
}
}
required_version = ">= 1.2"
}
# Configure the AWS provider
provider "aws" {
region = var.aws_region
}
# Define an S3 bucket resource
resource "aws_s3_bucket" "example_bucket" {
bucket = var.bucket_name
tags = {
Environment = "Development"
Project = "TerraformExample"
}
}

View File

@ -1,6 +1,26 @@
[[fruits]]
[animals]
[[fruits.varieties]] # nested array of tables
name = "red delicious"
# This is a TOML document
title = "TOML Example"
[owner]
name = "Tom Preston-Werner"
dob = 1979-05-27T07:32:00-08:00
[database]
enabled = true
ports = [ 8000, 8001, 8002 ]
data = [ ["delta", "phi"], [3.14] ]
temp_targets = { cpu = 79.5, case = 72.0 }
[servers]
[servers.alpha]
ip = "10.0.0.1"
role = "frontend"
[servers.beta]
ip = "10.0.0.2"
role = "backend"

8
examples/sample2.hcl Normal file
View File

@ -0,0 +1,8 @@
# Arithmetic with literals and application-provided variables
sum = 1 + addend
# String interpolation and templates
message = "Hello, ${name}!"
# Application-provided functions
shouty_message = upper(message)

23
go.mod
View File

@ -9,27 +9,34 @@ require (
github.com/fatih/color v1.18.0
github.com/go-ini/ini v1.67.0
github.com/goccy/go-json v0.10.5
github.com/goccy/go-yaml v1.18.0
github.com/goccy/go-yaml v1.19.2
github.com/hashicorp/hcl/v2 v2.24.0
github.com/jinzhu/copier v0.4.0
github.com/magiconair/properties v1.8.10
github.com/pelletier/go-toml/v2 v2.2.4
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e
github.com/spf13/cobra v1.10.1
github.com/spf13/cobra v1.10.2
github.com/spf13/pflag v1.0.10
github.com/yuin/gopher-lua v1.1.1
github.com/zclconf/go-cty v1.18.0
go.yaml.in/yaml/v4 v4.0.0-rc.3
golang.org/x/net v0.47.0
golang.org/x/text v0.31.0
golang.org/x/mod v0.34.0
golang.org/x/net v0.50.0
golang.org/x/text v0.35.0
gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473
)
require (
github.com/agext/levenshtein v1.2.1 // indirect
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
github.com/google/go-cmp v0.6.0 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
golang.org/x/sys v0.38.0 // indirect
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
golang.org/x/sync v0.20.0 // indirect
golang.org/x/sys v0.41.0 // indirect
golang.org/x/tools v0.42.0 // indirect
)
go 1.24.0
toolchain go1.24.1
go 1.25.0

49
go.sum
View File

@ -1,14 +1,19 @@
github.com/a8m/envsubst v1.4.3 h1:kDF7paGK8QACWYaQo6KtyYBozY2jhQrTuNNuUxQkhJY=
github.com/a8m/envsubst v1.4.3/go.mod h1:4jjHWQlZoaXPoLQUb7H2qT4iLkZDdmEQiOUogdUmqVU=
github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8=
github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0=
github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
github.com/alecthomas/participle/v2 v2.1.4 h1:W/H79S8Sat/krZ3el6sQMvMaahJ+XcM9WSI2naI7w2U=
github.com/alecthomas/participle/v2 v2.1.4/go.mod h1:8tqVbpTX20Ru4NfYQgZf4mP18eXPTBViyMWiArNEgGI=
github.com/alecthomas/repr v0.5.2 h1:SU73FTI9D1P5UNtvseffFSGmdNci/O6RsqzeXJtP0Qs=
github.com/alecthomas/repr v0.5.2/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U=
github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE=
github.com/elliotchance/orderedmap v1.8.0 h1:TrOREecvh3JbS+NCgwposXG5ZTFHtEsQiCGOhPElnMw=
@ -17,10 +22,16 @@ github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A=
github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/goccy/go-yaml v1.19.2 h1:PmFC1S6h8ljIz6gMRBopkjP1TVT7xuwrButHID66PoM=
github.com/goccy/go-yaml v1.19.2/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/hashicorp/hcl/v2 v2.24.0 h1:2QJdZ454DSsYGoaE6QheQZjtKZSUs9Nh2izTWiwQxvE=
github.com/hashicorp/hcl/v2 v2.24.0/go.mod h1:oGoO1FIQYfn/AgyOhlg9qLC6/nOJPX3qGbkZpYAcqfM=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
@ -33,6 +44,8 @@ github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHP
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgcC4zIxODThtZNPirFr42+A=
@ -40,8 +53,8 @@ github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsK
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
@ -50,18 +63,28 @@ github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5Cc
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M=
github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
github.com/zclconf/go-cty v1.18.0 h1:pJ8+HNI4gFoyRNqVE37wWbJWVw43BZczFo7KUoRczaA=
github.com/zclconf/go-cty v1.18.0/go.mod h1:qpnV6EDNgC1sns/AleL1fvatHw72j+S+nS+MJ+T2CSg=
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo=
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
go.yaml.in/yaml/v4 v4.0.0-rc.3 h1:3h1fjsh1CTAPjW7q/EMe+C8shx5d8ctzZTrLcs/j8Go=
go.yaml.in/yaml/v4 v4.0.0-rc.3/go.mod h1:aZqd9kCMsGL7AuUv/m/PvWLdg5sjJsZ4oHDEnfPPfY0=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/mod v0.34.0 h1:xIHgNUUnW6sYkcM5Jleh05DvLOtwc6RitGHbDk4akRI=
golang.org/x/mod v0.34.0/go.mod h1:ykgH52iCZe79kzLLMhyCUzhMci+nQj+0XkbXpNYtVjY=
golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4=
golang.org/x/sync v0.20.0/go.mod h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.35.0 h1:JOVx6vVDFokkpaq1AEptVzLTpDe9KGpj5tR4/X+ybL8=
golang.org/x/text v0.35.0/go.mod h1:khi/HExzZJ2pGnjenulevKNX1W67CUy0AsXcNubPGCA=
golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k=
golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473 h1:6D+BvnJ/j6e222UW8s2qTSe3wGBtvo0MbVQG/c5k8RE=
gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473/go.mod h1:N1eN2tsCx0Ydtgjl4cqmbRCsY4/+z4cYDeqwZTk6zog=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

24
go_install_test.go Normal file
View File

@ -0,0 +1,24 @@
package main
import (
"io"
"testing"
"golang.org/x/mod/module"
"golang.org/x/mod/zip"
)
// TestGoInstallCompatibility ensures the module can be zipped for go install.
// It drives the same zip.CreateFromDir function that go install uses
// internally, so a failure here means go install will fail too.
// See: https://github.com/mikefarah/yq/issues/2587
func TestGoInstallCompatibility(t *testing.T) {
	target := module.Version{
		Path:    "github.com/mikefarah/yq/v4",
		Version: "v4.0.0", // the actual version doesn't matter for validation
	}
	err := zip.CreateFromDir(io.Discard, target, ".")
	if err != nil {
		t.Fatalf("Module cannot be zipped for go install: %v", err)
	}
}

View File

@ -1,8 +1,63 @@
# How it works
# Expression Syntax: A Visual Guide
In `yq`, expressions are made up of operators and pipes. A context of nodes is passed through the expression, and each operation takes the context as input and returns a new context as output. That output is piped in as input for the next operation in the expression.
In `yq` expressions are made up of operators and pipes. A context of nodes is passed through the expression and each operation takes the context as input and returns a new context as output. That output is piped in as input for the next operation in the expression. To begin with, the context is set to the first yaml document of the first yaml file (if processing in sequence using eval).
Let's break down the process step by step using a diagram. We'll start with a single YAML document, apply an expression, and observe how the context changes at each step.
Lets look at a couple of examples.
Given a document like:
```yaml
root:
items:
- name: apple
type: fruit
- name: carrot
type: vegetable
- name: banana
type: fruit
```
You can use dot notation to access nested structures. For example, to access the `name` of the first item, you would use the expression `.root.items[0].name`, which would return `apple`.
But let's see how we could find all the fruit under `items`.
## Step 1: Initial Context
The context starts at the root of the YAML document. In this case, the entire document is the initial context.
```
root
└── items
├── name: apple
│ type: fruit
├── name: carrot
│ type: vegetable
└── name: banana
type: fruit
```
## Step 2: Splatting the Array
Using the expression `.root.items[]`, we "splat" the items array. This means each element of the array becomes its own node in the context:
```
Node 1: { name: apple, type: fruit }
Node 2: { name: carrot, type: vegetable }
Node 3: { name: banana, type: fruit }
```
## Step 3: Filtering the Nodes
Next, we apply a filter to select only the nodes where type is fruit. The expression `.root.items[] | select(.type == "fruit")` filters the nodes:
```
Filtered Node 1: { name: apple, type: fruit }
Filtered Node 2: { name: banana, type: fruit }
```
## Step 4: Extracting a Field
Finally, we extract the name field from the filtered nodes using `.root.items[] | select(.type == "fruit") | .name` This results in:
```
apple
banana
```
## Simple assignment example
@ -44,7 +99,6 @@ a: dog
b: dog
```
## Complex assignment, operator precedence rules
Just like math expressions - `yq` expressions have an order of precedence. The pipe `|` operator has a low order of precedence, so operators with higher precedence will get evaluated first.
@ -73,7 +127,7 @@ name: sally
fruit: mango
```
To properly update this yaml, you will need to use brackets (think BODMAS from maths) and wrap the entire LHS:
**Important**: To properly update this YAML, you must wrap the entire LHS in parentheses. Think of it like using brackets in math to ensure the correct order of operations.
`(.[] | select(.name == "sally") | .fruit) = "mango"`
@ -126,4 +180,4 @@ The assignment operator then copies across the value from the RHS to the value o
```yaml
a: 2
b: thing
```
```

View File

@ -97,6 +97,9 @@ type CandidateNode struct {
// (e.g. top level cross document merge). This property does not propagate to child nodes.
EvaluateTogether bool
IsMapKey bool
// For formats like HCL and TOML: indicates that child entries should be emitted as separate blocks/tables
// rather than consolidated into nested mappings (default behaviour)
EncodeSeparate bool
}
func (n *CandidateNode) CreateChild() *CandidateNode {
@ -277,7 +280,7 @@ func (n *CandidateNode) AddChild(rawChild *CandidateNode) {
func (n *CandidateNode) AddChildren(children []*CandidateNode) {
if n.Kind == MappingNode {
for i := 0; i < len(children); i += 2 {
for i := 0; i < len(children)-1; i += 2 {
key := children[i]
value := children[i+1]
n.AddKeyValueChild(key, value)
@ -407,6 +410,8 @@ func (n *CandidateNode) doCopy(cloneContent bool) *CandidateNode {
EvaluateTogether: n.EvaluateTogether,
IsMapKey: n.IsMapKey,
EncodeSeparate: n.EncodeSeparate,
}
if cloneContent {
@ -460,6 +465,9 @@ func (n *CandidateNode) UpdateAttributesFrom(other *CandidateNode, prefs assignP
n.Anchor = other.Anchor
}
// Preserve EncodeSeparate flag for format-specific encoding hints
n.EncodeSeparate = other.EncodeSeparate
// merge will pickup the style of the new thing
// when autocreating nodes

471
pkg/yqlib/decoder_hcl.go Normal file
View File

@ -0,0 +1,471 @@
//go:build !yq_nohcl
package yqlib
import (
"fmt"
"io"
"math/big"
"sort"
"strconv"
"strings"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
"github.com/zclconf/go-cty/cty"
)
// hclDecoder decodes HCL input into a CandidateNode tree. An HCL file is
// treated as a single document: Init parses the whole input, and Decode
// emits it once.
type hclDecoder struct {
	file          *hcl.File // parsed HCL file, set by Init
	fileBytes     []byte    // raw input bytes, kept for comment/source extraction
	readAnything  bool      // true once Decode has emitted the document
	documentIndex uint      // document counter used to stamp the emitted root node
}
// NewHclDecoder returns a Decoder that reads HCL input.
func NewHclDecoder() Decoder {
	return &hclDecoder{}
}
// sortedAttributes flattens the attribute map into a slice ordered by each
// attribute's starting byte offset, i.e. its declaration order in the source.
func sortedAttributes(attrs hclsyntax.Attributes) []*attributeWithName {
	var ordered []*attributeWithName
	for attrName, attribute := range attrs {
		ordered = append(ordered, &attributeWithName{Name: attrName, Attr: attribute})
	}
	sort.Slice(ordered, func(a, b int) bool {
		return ordered[a].Attr.Range().Start.Byte < ordered[b].Attr.Range().Start.Byte
	})
	return ordered
}
// attributeWithName pairs an HCL attribute with its map key so attributes
// can be carried in a slice and sorted by source position.
type attributeWithName struct {
	Name string               // attribute name (the map key in hclsyntax.Attributes)
	Attr *hclsyntax.Attribute // the parsed attribute itself
}
// extractLineComment returns the trailing "# ..." comment, if any, found
// between endPos and the end of that source line. It returns "" when the
// remainder of the line contains no '#' before the newline.
// NOTE(review): only '#' comments are recognised here; HCL also allows
// "//" line comments — confirm whether those need handling.
func extractLineComment(src []byte, endPos int) string {
	for pos := endPos; pos < len(src) && src[pos] != '\n'; pos++ {
		if src[pos] != '#' {
			continue
		}
		// Found the comment marker: take everything up to end of line / EOF.
		commentEnd := pos
		for commentEnd < len(src) && src[commentEnd] != '\n' {
			commentEnd++
		}
		return strings.TrimSpace(string(src[pos:commentEnd]))
	}
	return ""
}
// extractHeadComment collects the run of full-line "#" comments that precede
// startPos (skipping intervening whitespace/newlines) and returns them joined
// with newlines, in top-to-bottom source order. Returns "" if the nearest
// preceding non-blank line is not a comment.
func extractHeadComment(src []byte, startPos int) string {
	var comments []string
	// Start just before the token and skip trailing whitespace
	i := startPos - 1
	for i >= 0 && (src[i] == ' ' || src[i] == '\t' || src[i] == '\n' || src[i] == '\r') {
		i--
	}
	// Walk backwards one line at a time, collecting comment lines.
	for i >= 0 {
		// Find line boundaries
		lineEnd := i
		for i >= 0 && src[i] != '\n' {
			i--
		}
		lineStart := i + 1
		line := strings.TrimRight(string(src[lineStart:lineEnd+1]), " \t\r")
		trimmed := strings.TrimSpace(line)
		if trimmed == "" {
			break
		}
		if !strings.HasPrefix(trimmed, "#") {
			break
		}
		// Prepend, since we are scanning upwards but want source order.
		comments = append([]string{trimmed}, comments...)
		// Move to previous line (skip any whitespace/newlines)
		i = lineStart - 1
		for i >= 0 && (src[i] == ' ' || src[i] == '\t' || src[i] == '\n' || src[i] == '\r') {
			i--
		}
	}
	if len(comments) > 0 {
		return strings.Join(comments, "\n")
	}
	return ""
}
// Init reads the entire input and parses it as HCL, resetting the decoder
// state so the next Decode call emits the parsed document. Returns a parse
// error (wrapping the HCL diagnostics) if the input is not valid HCL.
func (dec *hclDecoder) Init(reader io.Reader) error {
	source, readErr := io.ReadAll(reader)
	if readErr != nil {
		return readErr
	}
	parsed, diagnostics := hclsyntax.ParseConfig(source, "input.hcl", hcl.Pos{Line: 1, Column: 1})
	if diagnostics != nil && diagnostics.HasErrors() {
		return fmt.Errorf("hcl parse error: %w", diagnostics)
	}
	dec.file = parsed
	dec.fileBytes = source
	dec.readAnything = false
	dec.documentIndex = 0
	return nil
}
// Decode emits the parsed HCL file as a single MappingNode document.
// It returns io.EOF on every call after the first, since an HCL file is
// treated as exactly one document.
func (dec *hclDecoder) Decode() (*CandidateNode, error) {
	if dec.readAnything {
		return nil, io.EOF
	}
	dec.readAnything = true
	if dec.file == nil {
		return nil, fmt.Errorf("no hcl file parsed")
	}
	root := &CandidateNode{Kind: MappingNode}
	// process attributes in declaration order
	body := dec.file.Body.(*hclsyntax.Body)
	firstAttr := true
	for _, attrWithName := range sortedAttributes(body.Attributes) {
		keyNode := createStringScalarNode(attrWithName.Name)
		valNode := convertHclExprToNode(attrWithName.Attr.Expr, dec.fileBytes)
		// Attach comments if any
		attrRange := attrWithName.Attr.Range()
		headComment := extractHeadComment(dec.fileBytes, attrRange.Start.Byte)
		if firstAttr && headComment != "" {
			// For the first attribute, apply its head comment to the root
			// NOTE(review): firstAttr is only cleared when a head comment is
			// present, so if the first attribute has no comment, a LATER
			// attribute's head comment gets promoted to the root — confirm
			// this is intended.
			root.HeadComment = headComment
			firstAttr = false
		} else if headComment != "" {
			keyNode.HeadComment = headComment
		}
		if lineComment := extractLineComment(dec.fileBytes, attrRange.End.Byte); lineComment != "" {
			valNode.LineComment = lineComment
		}
		root.AddKeyValueChild(keyNode, valNode)
	}
	// process blocks
	// Count blocks by type at THIS level to detect multiple separate blocks
	blocksByType := make(map[string]int)
	for _, block := range body.Blocks {
		blocksByType[block.Type]++
	}
	for _, block := range body.Blocks {
		addBlockToMapping(root, block, dec.fileBytes, blocksByType[block.Type] > 1)
	}
	// Stamp the document index on the emitted root.
	dec.documentIndex++
	root.document = dec.documentIndex - 1
	return root, nil
}
// hclBodyToNode converts an HCL body (its attributes plus nested blocks)
// into a MappingNode, preserving attribute declaration order and attaching
// head/line comments extracted from the raw source bytes.
func hclBodyToNode(body *hclsyntax.Body, src []byte) *CandidateNode {
	node := &CandidateNode{Kind: MappingNode}
	for _, attrWithName := range sortedAttributes(body.Attributes) {
		key := createStringScalarNode(attrWithName.Name)
		val := convertHclExprToNode(attrWithName.Attr.Expr, src)
		// Attach comments if any
		attrRange := attrWithName.Attr.Range()
		if headComment := extractHeadComment(src, attrRange.Start.Byte); headComment != "" {
			key.HeadComment = headComment
		}
		if lineComment := extractLineComment(src, attrRange.End.Byte); lineComment != "" {
			val.LineComment = lineComment
		}
		node.AddKeyValueChild(key, val)
	}
	// Process nested blocks, counting blocks by type at THIS level
	// to detect which block types appear multiple times
	blocksByType := make(map[string]int)
	for _, block := range body.Blocks {
		blocksByType[block.Type]++
	}
	for _, block := range body.Blocks {
		addBlockToMapping(node, block, src, blocksByType[block.Type] > 1)
	}
	return node
}
// addBlockToMapping nests block type and labels into the parent mapping, merging children.
// For example, `resource "aws_s3_bucket" "x" { ... }` becomes
// parent.resource.aws_s3_bucket.x with the block body merged in.
// isMultipleBlocksOfType indicates if there are multiple blocks of this type at THIS level.
func addBlockToMapping(parent *CandidateNode, block *hclsyntax.Block, src []byte, isMultipleBlocksOfType bool) {
	bodyNode := hclBodyToNode(block.Body, src)
	current := parent
	// ensure block type mapping exists
	// Content of a MappingNode alternates [key, value, key, value, ...],
	// hence the step-by-2 scans below.
	var typeNode *CandidateNode
	for i := 0; i < len(current.Content); i += 2 {
		if current.Content[i].Value == block.Type {
			typeNode = current.Content[i+1]
			break
		}
	}
	if typeNode == nil {
		_, typeNode = current.AddKeyValueChild(createStringScalarNode(block.Type), &CandidateNode{Kind: MappingNode})
		// Mark the type node if there are multiple blocks of this type at this level
		// This tells the encoder to emit them as separate blocks rather than consolidating them
		if isMultipleBlocksOfType {
			typeNode.EncodeSeparate = true
		}
	}
	current = typeNode
	// walk labels, creating/merging mappings
	for _, label := range block.Labels {
		var next *CandidateNode
		for i := 0; i < len(current.Content); i += 2 {
			if current.Content[i].Value == label {
				next = current.Content[i+1]
				break
			}
		}
		if next == nil {
			_, next = current.AddKeyValueChild(createStringScalarNode(label), &CandidateNode{Kind: MappingNode})
		}
		current = next
	}
	// merge body attributes/blocks into the final mapping
	for i := 0; i < len(bodyNode.Content); i += 2 {
		current.AddKeyValueChild(bodyNode.Content[i], bodyNode.Content[i+1])
	}
}
// convertHclExprToNode converts a parsed HCL expression into a yq CandidateNode.
// Statically-known literals become scalars/sequences/mappings; expressions that
// cannot be evaluated without a context (templates, traversals, function calls,
// arithmetic) are preserved as raw text sliced out of src using the
// expression's source byte range, so they survive an HCL roundtrip unevaluated.
func convertHclExprToNode(expr hclsyntax.Expression, src []byte) *CandidateNode {
	// handle literal values directly
	switch e := expr.(type) {
	case *hclsyntax.LiteralValueExpr:
		v := e.Val
		if v.IsNull() {
			return createScalarNode(nil, "")
		}
		switch {
		case v.Type().Equals(cty.String):
			// Use the actual cty string value (not the raw source text)
			s := v.AsString()
			node := createScalarNode(s, s)
			// Don't set style for regular quoted strings - let YAML handle naturally
			return node
		case v.Type().Equals(cty.Bool):
			b := v.True()
			return createScalarNode(b, strconv.FormatBool(b))
		case v.Type() == cty.Number:
			// prefer integers when the numeric value is integral
			bf := v.AsBigFloat()
			if bf == nil {
				// fallback to string
				return createStringScalarNode(v.GoString())
			}
			// check if bf represents an exact integer
			if intVal, acc := bf.Int(nil); acc == big.Exact {
				s := intVal.String()
				return createScalarNode(intVal.Int64(), s)
			}
			// non-integral number: keep the shortest round-trippable text form
			s := bf.Text('g', -1)
			return createScalarNode(0.0, s)
		case v.Type().IsTupleType() || v.Type().IsListType() || v.Type().IsSetType():
			seq := &CandidateNode{Kind: SequenceNode}
			it := v.ElementIterator()
			for it.Next() {
				_, val := it.Element()
				// convert each cty element value directly to a node
				child := convertCtyValueToNode(val)
				seq.AddChild(child)
			}
			return seq
		case v.Type().IsMapType() || v.Type().IsObjectType():
			m := &CandidateNode{Kind: MappingNode}
			it := v.ElementIterator()
			for it.Next() {
				key, val := it.Element()
				keyStr := key.AsString()
				keyNode := createStringScalarNode(keyStr)
				valNode := convertCtyValueToNode(val)
				m.AddKeyValueChild(keyNode, valNode)
			}
			return m
		default:
			// fallback to string
			s := v.GoString()
			return createStringScalarNode(s)
		}
	case *hclsyntax.TupleConsExpr:
		// parse tuple/list into YAML sequence
		seq := &CandidateNode{Kind: SequenceNode}
		for _, exprVal := range e.Exprs {
			child := convertHclExprToNode(exprVal, src)
			seq.AddChild(child)
		}
		return seq
	case *hclsyntax.ObjectConsExpr:
		// parse object into YAML mapping
		m := &CandidateNode{Kind: MappingNode}
		m.Style = FlowStyle // Mark as inline object (flow style) for encoder
		for _, item := range e.Items {
			// evaluate key expression to get the key string
			keyVal, keyDiags := item.KeyExpr.Value(nil)
			if keyDiags != nil && keyDiags.HasErrors() {
				// fallback: try to extract key from source
				r := item.KeyExpr.Range()
				start := r.Start.Byte
				end := r.End.Byte
				if start >= 0 && end >= start && end <= len(src) {
					keyNode := createStringScalarNode(strings.TrimSpace(string(src[start:end])))
					valNode := convertHclExprToNode(item.ValueExpr, src)
					m.AddKeyValueChild(keyNode, valNode)
				}
				// NOTE(review): if the range is invalid the item is silently
				// dropped — presumably unreachable for parser-produced ranges
				continue
			}
			keyStr := keyVal.AsString()
			keyNode := createStringScalarNode(keyStr)
			valNode := convertHclExprToNode(item.ValueExpr, src)
			m.AddKeyValueChild(keyNode, valNode)
		}
		return m
	case *hclsyntax.TemplateExpr:
		// Reconstruct template string, preserving ${} syntax for interpolations
		var parts []string
		for _, p := range e.Parts {
			switch lp := p.(type) {
			case *hclsyntax.LiteralValueExpr:
				if lp.Val.Type().Equals(cty.String) {
					parts = append(parts, lp.Val.AsString())
				} else {
					parts = append(parts, lp.Val.GoString())
				}
			default:
				// Non-literal expression - reconstruct with ${} wrapper
				r := p.Range()
				start := r.Start.Byte
				end := r.End.Byte
				if start >= 0 && end >= start && end <= len(src) {
					exprText := string(src[start:end])
					parts = append(parts, "${"+exprText+"}")
				} else {
					parts = append(parts, fmt.Sprintf("${%v}", p))
				}
			}
		}
		combined := strings.Join(parts, "")
		node := createScalarNode(combined, combined)
		// Set DoubleQuotedStyle for all templates (which includes all quoted strings in HCL)
		// This ensures HCL roundtrips preserve quotes, and YAML properly quotes strings with ${}
		node.Style = DoubleQuotedStyle
		return node
	case *hclsyntax.ScopeTraversalExpr:
		// Simple identifier/traversal (e.g. unquoted string literal in HCL)
		r := e.Range()
		start := r.Start.Byte
		end := r.End.Byte
		if start >= 0 && end >= start && end <= len(src) {
			text := strings.TrimSpace(string(src[start:end]))
			return createStringScalarNode(text)
		}
		// Fallback to root name if source unavailable
		if len(e.Traversal) > 0 {
			if root, ok := e.Traversal[0].(hcl.TraverseRoot); ok {
				return createStringScalarNode(root.Name)
			}
		}
		return createStringScalarNode(""))
	case *hclsyntax.FunctionCallExpr:
		// Preserve function calls as raw expressions for roundtrip
		r := e.Range()
		start := r.Start.Byte
		end := r.End.Byte
		if start >= 0 && end >= start && end <= len(src) {
			text := strings.TrimSpace(string(src[start:end]))
			node := createStringScalarNode(text)
			// Style 0 (no explicit style) so the encoder emits the call unquoted
			node.Style = 0
			return node
		}
		// source unavailable: fall back to just the function name
		node := createStringScalarNode(e.Name)
		node.Style = 0
		return node
	default:
		// try to evaluate the expression (handles unary, binary ops, etc.)
		val, diags := expr.Value(nil)
		if diags == nil || !diags.HasErrors() {
			// successfully evaluated, convert cty.Value to node
			return convertCtyValueToNode(val)
		}
		// fallback: extract source text for the expression
		r := expr.Range()
		start := r.Start.Byte
		end := r.End.Byte
		if start >= 0 && end >= start && end <= len(src) {
			text := string(src[start:end])
			// Mark as unquoted expression so encoder emits without quoting
			node := createStringScalarNode(text)
			node.Style = 0
			return node
		}
		return createStringScalarNode(fmt.Sprintf("%v", expr))
	}
}
// convertCtyValueToNode maps a cty.Value onto an equivalent CandidateNode:
// null becomes a null scalar; string/bool/number become scalars (integers are
// preferred when the number is exactly integral); tuples/lists/sets become
// sequences; maps/objects become mappings; anything else falls back to its
// GoString representation.
func convertCtyValueToNode(v cty.Value) *CandidateNode {
	if v.IsNull() {
		return createScalarNode(nil, "")
	}
	valueType := v.Type()
	switch {
	case valueType.Equals(cty.String):
		return createScalarNode("", v.AsString())
	case valueType.Equals(cty.Bool):
		boolVal := v.True()
		return createScalarNode(boolVal, strconv.FormatBool(boolVal))
	case valueType == cty.Number:
		asBigFloat := v.AsBigFloat()
		if asBigFloat == nil {
			return createStringScalarNode(v.GoString())
		}
		// exact integers keep their integer representation
		if asInt, accuracy := asBigFloat.Int(nil); accuracy == big.Exact {
			return createScalarNode(asInt.Int64(), asInt.String())
		}
		return createScalarNode(0.0, asBigFloat.Text('g', -1))
	case valueType.IsTupleType() || valueType.IsListType() || valueType.IsSetType():
		sequence := &CandidateNode{Kind: SequenceNode}
		for elements := v.ElementIterator(); elements.Next(); {
			_, element := elements.Element()
			sequence.AddChild(convertCtyValueToNode(element))
		}
		return sequence
	case valueType.IsMapType() || valueType.IsObjectType():
		mapping := &CandidateNode{Kind: MappingNode}
		for entries := v.ElementIterator(); entries.Next(); {
			entryKey, entryValue := entries.Element()
			mapping.AddKeyValueChild(createStringScalarNode(entryKey.AsString()), convertCtyValueToNode(entryValue))
		}
		return mapping
	default:
		return createStringScalarNode(v.GoString())
	}
}

View File

@ -16,7 +16,7 @@ type iniDecoder struct {
func NewINIDecoder() Decoder {
return &iniDecoder{
finished: false, // Initialize the flag as false
finished: false, // Initialise the flag as false
}
}

View File

@ -8,16 +8,19 @@ import (
"fmt"
"io"
"strconv"
"strings"
"time"
toml "github.com/pelletier/go-toml/v2/unstable"
)
type tomlDecoder struct {
parser toml.Parser
finished bool
d DataTreeNavigator
rootMap *CandidateNode
parser toml.Parser
finished bool
d DataTreeNavigator
rootMap *CandidateNode
pendingComments []string // Head comments collected from Comment nodes
firstContentSeen bool // Track if we've processed the first non-comment node
}
func NewTomlDecoder() Decoder {
@ -28,7 +31,7 @@ func NewTomlDecoder() Decoder {
}
func (dec *tomlDecoder) Init(reader io.Reader) error {
dec.parser = toml.Parser{}
dec.parser = toml.Parser{KeepComments: true}
buf := new(bytes.Buffer)
_, err := buf.ReadFrom(reader)
if err != nil {
@ -39,9 +42,23 @@ func (dec *tomlDecoder) Init(reader io.Reader) error {
Kind: MappingNode,
Tag: "!!map",
}
dec.pendingComments = make([]string, 0)
dec.firstContentSeen = false
return nil
}
func (dec *tomlDecoder) attachOrphanedCommentsToNode(tableNodeValue *CandidateNode) {
if len(dec.pendingComments) > 0 {
comments := strings.Join(dec.pendingComments, "\n")
if tableNodeValue.HeadComment == "" {
tableNodeValue.HeadComment = comments
} else {
tableNodeValue.HeadComment = tableNodeValue.HeadComment + "\n" + comments
}
dec.pendingComments = make([]string, 0)
}
}
func (dec *tomlDecoder) getFullPath(tomlNode *toml.Node) []interface{} {
path := make([]interface{}, 0)
for {
@ -56,13 +73,24 @@ func (dec *tomlDecoder) getFullPath(tomlNode *toml.Node) []interface{} {
func (dec *tomlDecoder) processKeyValueIntoMap(rootMap *CandidateNode, tomlNode *toml.Node) error {
value := tomlNode.Value()
path := dec.getFullPath(value.Next())
log.Debug("processKeyValueIntoMap: %v", path)
valueNode, err := dec.decodeNode(value)
if err != nil {
return err
}
// Attach pending head comments
if len(dec.pendingComments) > 0 {
valueNode.HeadComment = strings.Join(dec.pendingComments, "\n")
dec.pendingComments = make([]string, 0)
}
// Check for inline comment chained to the KeyValue node
nextNode := tomlNode.Next()
if nextNode != nil && nextNode.Kind == toml.Comment {
valueNode.LineComment = string(nextNode.Data)
}
context := Context{}
context = context.SingleChildContext(rootMap)
@ -79,11 +107,15 @@ func (dec *tomlDecoder) decodeKeyValuesIntoMap(rootMap *CandidateNode, tomlNode
nextItem := dec.parser.Expression()
log.Debug("decodeKeyValuesIntoMap -- next exp, its a %v", nextItem.Kind)
if nextItem.Kind == toml.KeyValue {
switch nextItem.Kind {
case toml.KeyValue:
if err := dec.processKeyValueIntoMap(rootMap, nextItem); err != nil {
return false, err
}
} else {
case toml.Comment:
// Standalone comment - add to pending for next element
dec.pendingComments = append(dec.pendingComments, string(nextItem.Data))
default:
// run out of key values
log.Debug("done in decodeKeyValuesIntoMap, gota a %v", nextItem.Kind)
return true, nil
@ -125,13 +157,30 @@ func (dec *tomlDecoder) createInlineTableMap(tomlNode *toml.Node) (*CandidateNod
func (dec *tomlDecoder) createArray(tomlNode *toml.Node) (*CandidateNode, error) {
content := make([]*CandidateNode, 0)
var pendingArrayComments []string
iterator := tomlNode.Children()
for iterator.Next() {
child := iterator.Node()
// Handle comments within arrays
if child.Kind == toml.Comment {
// Collect comments to attach to the next array element
pendingArrayComments = append(pendingArrayComments, string(child.Data))
continue
}
yamlNode, err := dec.decodeNode(child)
if err != nil {
return nil, err
}
// Attach any pending comments to this array element
if len(pendingArrayComments) > 0 {
yamlNode.HeadComment = strings.Join(pendingArrayComments, "\n")
pendingArrayComments = make([]string, 0)
}
content = append(content, yamlNode)
}
@ -250,11 +299,29 @@ func (dec *tomlDecoder) processTopLevelNode(currentNode *toml.Node) (bool, error
var err error
log.Debug("processTopLevelNode: Going to process %v state is current %v", currentNode.Kind, NodeToString(dec.rootMap))
switch currentNode.Kind {
case toml.Comment:
// Collect comment to attach to next element
commentText := string(currentNode.Data)
// If we haven't seen any content yet, accumulate comments for root
if !dec.firstContentSeen {
if dec.rootMap.HeadComment == "" {
dec.rootMap.HeadComment = commentText
} else {
dec.rootMap.HeadComment = dec.rootMap.HeadComment + "\n" + commentText
}
} else {
// We've seen content, so these comments are for the next element
dec.pendingComments = append(dec.pendingComments, commentText)
}
return false, nil
case toml.Table:
dec.firstContentSeen = true
runAgainstCurrentExp, err = dec.processTable(currentNode)
case toml.ArrayTable:
dec.firstContentSeen = true
runAgainstCurrentExp, err = dec.processArrayTable(currentNode)
default:
dec.firstContentSeen = true
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(dec.rootMap, currentNode)
}
@ -264,7 +331,8 @@ func (dec *tomlDecoder) processTopLevelNode(currentNode *toml.Node) (bool, error
func (dec *tomlDecoder) processTable(currentNode *toml.Node) (bool, error) {
log.Debug("Enter processTable")
fullPath := dec.getFullPath(currentNode.Child())
child := currentNode.Child()
fullPath := dec.getFullPath(child)
log.Debug("fullpath: %v", fullPath)
c := Context{}
@ -276,27 +344,53 @@ func (dec *tomlDecoder) processTable(currentNode *toml.Node) (bool, error) {
}
tableNodeValue := &CandidateNode{
Kind: MappingNode,
Tag: "!!map",
Content: make([]*CandidateNode, 0),
Kind: MappingNode,
Tag: "!!map",
Content: make([]*CandidateNode, 0),
EncodeSeparate: true,
}
// Attach pending head comments to the table
if len(dec.pendingComments) > 0 {
tableNodeValue.HeadComment = strings.Join(dec.pendingComments, "\n")
dec.pendingComments = make([]string, 0)
}
var tableValue *toml.Node
runAgainstCurrentExp := false
hasValue := dec.parser.NextExpression()
// check to see if there is any table data
if hasValue {
sawKeyValue := false
for dec.parser.NextExpression() {
tableValue = dec.parser.Expression()
// next expression is not table data, so we are done
if tableValue.Kind != toml.KeyValue {
log.Debug("got an empty table")
runAgainstCurrentExp = true
} else {
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(tableNodeValue, tableValue)
if err != nil && !errors.Is(err, io.EOF) {
return false, err
}
// Allow standalone comments inside the table before the first key-value.
// These should be associated with the next element in the table (usually the first key-value),
// not treated as "end of table" (which would cause subsequent key-values to be parsed at root).
if tableValue.Kind == toml.Comment {
dec.pendingComments = append(dec.pendingComments, string(tableValue.Data))
continue
}
// next expression is not table data, so we are done (but we need to re-process it at top-level)
if tableValue.Kind != toml.KeyValue {
log.Debug("got an empty table (or reached next section)")
// If the table had only comments, attach them to the table itself so they don't leak to the next node.
if !sawKeyValue {
dec.attachOrphanedCommentsToNode(tableNodeValue)
}
runAgainstCurrentExp = true
break
}
sawKeyValue = true
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(tableNodeValue, tableValue)
if err != nil && !errors.Is(err, io.EOF) {
return false, err
}
break
}
// If we hit EOF after only seeing comments inside this table, attach them to the table itself
// so they don't leak to whatever comes next.
if !sawKeyValue {
dec.attachOrphanedCommentsToNode(tableNodeValue)
}
err = dec.d.DeeplyAssign(c, fullPath, tableNodeValue)
@ -330,7 +424,8 @@ func (dec *tomlDecoder) arrayAppend(context Context, path []interface{}, rhsNode
func (dec *tomlDecoder) processArrayTable(currentNode *toml.Node) (bool, error) {
log.Debug("Enter processArrayTable")
fullPath := dec.getFullPath(currentNode.Child())
child := currentNode.Child()
fullPath := dec.getFullPath(child)
log.Debug("Fullpath: %v", fullPath)
c := Context{}
@ -346,23 +441,64 @@ func (dec *tomlDecoder) processArrayTable(currentNode *toml.Node) (bool, error)
hasValue := dec.parser.NextExpression()
tableNodeValue := &CandidateNode{
Kind: MappingNode,
Tag: "!!map",
Kind: MappingNode,
Tag: "!!map",
EncodeSeparate: true,
}
// Attach pending head comments to the array table
if len(dec.pendingComments) > 0 {
tableNodeValue.HeadComment = strings.Join(dec.pendingComments, "\n")
dec.pendingComments = make([]string, 0)
}
runAgainstCurrentExp := false
// if the next value is a ArrayTable or Table, then its not part of this declaration (not a key value pair)
// so lets leave that expression for the next round of parsing
if hasValue && (dec.parser.Expression().Kind == toml.ArrayTable || dec.parser.Expression().Kind == toml.Table) {
runAgainstCurrentExp = true
} else if hasValue {
// otherwise, if there is a value, it must be some key value pairs of the
// first object in the array!
tableValue := dec.parser.Expression()
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(tableNodeValue, tableValue)
if err != nil && !errors.Is(err, io.EOF) {
return false, err
sawKeyValue := false
if hasValue {
for {
exp := dec.parser.Expression()
// Allow standalone comments inside array tables before the first key-value.
if exp.Kind == toml.Comment {
dec.pendingComments = append(dec.pendingComments, string(exp.Data))
hasValue = dec.parser.NextExpression()
if !hasValue {
break
}
continue
}
// if the next value is a ArrayTable or Table, then its not part of this declaration (not a key value pair)
// so lets leave that expression for the next round of parsing
if exp.Kind == toml.ArrayTable || exp.Kind == toml.Table {
// If this array-table entry had only comments, attach them to the entry so they don't leak.
if !sawKeyValue {
dec.attachOrphanedCommentsToNode(tableNodeValue)
}
runAgainstCurrentExp = true
break
}
sawKeyValue = true
// otherwise, if there is a value, it must be some key value pairs of the
// first object in the array!
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(tableNodeValue, exp)
if err != nil && !errors.Is(err, io.EOF) {
return false, err
}
break
}
}
// If we hit EOF after only seeing comments inside this array-table entry, attach them to the entry
// so they don't leak to whatever comes next.
if !sawKeyValue && len(dec.pendingComments) > 0 {
comments := strings.Join(dec.pendingComments, "\n")
if tableNodeValue.HeadComment == "" {
tableNodeValue.HeadComment = comments
} else {
tableNodeValue.HeadComment = tableNodeValue.HeadComment + "\n" + comments
}
dec.pendingComments = make([]string, 0)
}
// += function
err = dec.arrayAppend(c, fullPath, tableNodeValue)
@ -375,23 +511,42 @@ func (dec *tomlDecoder) processArrayTable(currentNode *toml.Node) (bool, error)
// Because TOML. So we'll inject the last index into the path.
func getPathToUse(fullPath []interface{}, dec *tomlDecoder, c Context) ([]interface{}, error) {
pathToCheck := fullPath
if len(fullPath) >= 1 {
pathToCheck = fullPath[:len(fullPath)-1]
}
readOp := createTraversalTree(pathToCheck, traversePreferences{DontAutoCreate: true}, false)
// We need to check the entire path (except the last element), not just the immediate parent,
// because we may have nested array tables like [[array.subarray.subsubarray]]
// where both 'array' and 'subarray' are arrays that already exist.
resultContext, err := dec.d.GetMatchingNodes(c, readOp)
if err != nil {
return nil, err
if len(fullPath) == 0 {
return fullPath, nil
}
if resultContext.MatchingNodes.Len() >= 1 {
match := resultContext.MatchingNodes.Front().Value.(*CandidateNode)
// path refers to an array, we need to add this to the last element in the array
if match.Kind == SequenceNode {
fullPath = append(pathToCheck, len(match.Content)-1, fullPath[len(fullPath)-1])
log.Debugf("Adding to end of %v array, using path: %v", pathToCheck, fullPath)
resultPath := make([]interface{}, 0, len(fullPath)*2) // preallocate with extra space for indices
// Process all segments except the last one
for i := 0; i < len(fullPath)-1; i++ {
resultPath = append(resultPath, fullPath[i])
// Check if the current path segment points to an array
readOp := createTraversalTree(resultPath, traversePreferences{DontAutoCreate: true}, false)
resultContext, err := dec.d.GetMatchingNodes(c, readOp)
if err != nil {
return nil, err
}
if resultContext.MatchingNodes.Len() >= 1 {
match := resultContext.MatchingNodes.Front().Value.(*CandidateNode)
// If this segment points to an array, we need to add the last index
// before continuing with the rest of the path
if match.Kind == SequenceNode && len(match.Content) > 0 {
lastIndex := len(match.Content) - 1
resultPath = append(resultPath, lastIndex)
log.Debugf("Path segment %v is an array, injecting index %d", resultPath[:len(resultPath)-1], lastIndex)
}
}
}
return fullPath, err
// Add the last segment
resultPath = append(resultPath, fullPath[len(fullPath)-1])
log.Debugf("getPathToUse: original path %v -> result path %v", fullPath, resultPath)
return resultPath, nil
}

View File

@ -0,0 +1,160 @@
//go:build !yq_nouri
package yqlib
import (
"io"
"strings"
"testing"
"github.com/mikefarah/yq/v4/test"
)
// Init should accept any reader without error.
func TestUriDecoder_Init(t *testing.T) {
	dec := NewUriDecoder()
	test.AssertResult(t, nil, dec.Init(strings.NewReader("test")))
}
// A %20 escape decodes to a space, and the node is tagged as a string.
func TestUriDecoder_DecodeSimpleString(t *testing.T) {
	dec := NewUriDecoder()
	test.AssertResult(t, nil, dec.Init(strings.NewReader("hello%20world")))
	result, decodeErr := dec.Decode()
	test.AssertResult(t, nil, decodeErr)
	test.AssertResult(t, "!!str", result.Tag)
	test.AssertResult(t, "hello world", result.Value)
}
// Percent-escaped punctuation should be decoded back to the raw characters.
func TestUriDecoder_DecodeSpecialCharacters(t *testing.T) {
	dec := NewUriDecoder()
	test.AssertResult(t, nil, dec.Init(strings.NewReader("hello%21%40%23%24%25")))
	result, decodeErr := dec.Decode()
	test.AssertResult(t, nil, decodeErr)
	test.AssertResult(t, "hello!@#$%", result.Value)
}
// Multi-byte UTF-8 sequences must survive percent-decoding intact.
func TestUriDecoder_DecodeUTF8(t *testing.T) {
	dec := NewUriDecoder()
	test.AssertResult(t, nil, dec.Init(strings.NewReader("%E2%9C%93%20check")))
	result, decodeErr := dec.Decode()
	test.AssertResult(t, nil, decodeErr)
	test.AssertResult(t, "✓ check", result.Value)
}
// A '+' in the input decodes to a space.
func TestUriDecoder_DecodePlusSign(t *testing.T) {
	decoder := NewUriDecoder()
	reader := strings.NewReader("a+b")
	err := decoder.Init(reader)
	test.AssertResult(t, nil, err)
	node, err := decoder.Decode()
	test.AssertResult(t, nil, err)
	// Note: url.QueryUnescape DOES convert + to a space (form/query
	// encoding); it is url.PathUnescape that leaves + untouched.
	test.AssertResult(t, "a b", node.Value)
}
// Empty input decodes to an empty string once, then reports EOF.
func TestUriDecoder_DecodeEmptyString(t *testing.T) {
	dec := NewUriDecoder()
	test.AssertResult(t, nil, dec.Init(strings.NewReader("")))
	result, decodeErr := dec.Decode()
	test.AssertResult(t, nil, decodeErr)
	test.AssertResult(t, "", result.Value)

	// Second decode should return EOF
	result, decodeErr = dec.Decode()
	test.AssertResult(t, io.EOF, decodeErr)
	test.AssertResult(t, (*CandidateNode)(nil), result)
}
// After the single input value has been decoded, further Decode calls
// must signal io.EOF with a nil node.
func TestUriDecoder_DecodeMultipleCalls(t *testing.T) {
	dec := NewUriDecoder()
	test.AssertResult(t, nil, dec.Init(strings.NewReader("test")))

	// First decode consumes the entire input
	result, decodeErr := dec.Decode()
	test.AssertResult(t, nil, decodeErr)
	test.AssertResult(t, "test", result.Value)

	// Second decode should return EOF since we've consumed all input
	result, decodeErr = dec.Decode()
	test.AssertResult(t, io.EOF, decodeErr)
	test.AssertResult(t, (*CandidateNode)(nil), result)
}
// A malformed escape sequence (%ZZ) must surface a decode error.
func TestUriDecoder_DecodeInvalidEscape(t *testing.T) {
	dec := NewUriDecoder()
	test.AssertResult(t, nil, dec.Init(strings.NewReader("test%ZZ")))
	if _, decodeErr := dec.Decode(); decodeErr == nil {
		t.Error("Expected error for invalid escape sequence, got nil")
	}
}
// Encoded path separators and query markers decode to their literal forms.
func TestUriDecoder_DecodeSlashAndQuery(t *testing.T) {
	dec := NewUriDecoder()
	test.AssertResult(t, nil, dec.Init(strings.NewReader("path%2Fto%2Ffile%3Fquery%3Dvalue")))
	result, decodeErr := dec.Decode()
	test.AssertResult(t, nil, decodeErr)
	test.AssertResult(t, "path/to/file?query=value", result.Value)
}
// An escaped percent sign (%25) decodes to a literal '%'.
func TestUriDecoder_DecodePercent(t *testing.T) {
	dec := NewUriDecoder()
	test.AssertResult(t, nil, dec.Init(strings.NewReader("100%25")))
	result, decodeErr := dec.Decode()
	test.AssertResult(t, nil, decodeErr)
	test.AssertResult(t, "100%", result.Value)
}
// Input containing no escape sequences passes through unchanged.
func TestUriDecoder_DecodeNoEscaping(t *testing.T) {
	dec := NewUriDecoder()
	test.AssertResult(t, nil, dec.Init(strings.NewReader("simple_text-123")))
	result, decodeErr := dec.Decode()
	test.AssertResult(t, nil, decodeErr)
	test.AssertResult(t, "simple_text-123", result.Value)
}
// errorReader is a stub io.Reader whose Read always fails with
// io.ErrUnexpectedEOF, used to exercise the decoder's read-error path.
type errorReader struct{}

func (e *errorReader) Read(_ []byte) (n int, err error) {
	return 0, io.ErrUnexpectedEOF
}
// A failing reader should have its error propagated from Decode.
func TestUriDecoder_DecodeReadError(t *testing.T) {
	dec := NewUriDecoder()
	test.AssertResult(t, nil, dec.Init(&errorReader{}))
	_, decodeErr := dec.Decode()
	test.AssertResult(t, io.ErrUnexpectedEOF, decodeErr)
}

View File

@ -11,6 +11,13 @@ import (
yaml "go.yaml.in/yaml/v4"
)
var (
commentLineRe = regexp.MustCompile(`^\s*#`)
yamlDirectiveLineRe = regexp.MustCompile(`^\s*%YAML`)
separatorLineRe = regexp.MustCompile(`^\s*---\s*$`)
separatorPrefixRe = regexp.MustCompile(`^\s*---\s+`)
)
type yamlDecoder struct {
decoder yaml.Decoder
@ -33,51 +40,72 @@ func NewYamlDecoder(prefs YamlPreferences) Decoder {
}
func (dec *yamlDecoder) processReadStream(reader *bufio.Reader) (io.Reader, string, error) {
var commentLineRegEx = regexp.MustCompile(`^\s*#`)
var yamlDirectiveLineRegEx = regexp.MustCompile(`^\s*%YA`)
var sb strings.Builder
for {
peekBytes, err := reader.Peek(4)
if errors.Is(err, io.EOF) {
// EOF are handled else where..
return reader, sb.String(), nil
} else if err != nil {
return reader, sb.String(), err
} else if string(peekBytes[0]) == "\n" {
_, err := reader.ReadString('\n')
sb.WriteString("\n")
if errors.Is(err, io.EOF) {
return reader, sb.String(), nil
} else if err != nil {
return reader, sb.String(), err
}
} else if string(peekBytes) == "--- " {
_, err := reader.ReadString(' ')
sb.WriteString("$yqDocSeparator$\n")
if errors.Is(err, io.EOF) {
return reader, sb.String(), nil
} else if err != nil {
return reader, sb.String(), err
}
} else if string(peekBytes) == "---\n" {
_, err := reader.ReadString('\n')
sb.WriteString("$yqDocSeparator$\n")
if errors.Is(err, io.EOF) {
return reader, sb.String(), nil
} else if err != nil {
return reader, sb.String(), err
}
} else if commentLineRegEx.MatchString(string(peekBytes)) || yamlDirectiveLineRegEx.MatchString(string(peekBytes)) {
line, err := reader.ReadString('\n')
sb.WriteString(line)
if errors.Is(err, io.EOF) {
return reader, sb.String(), nil
} else if err != nil {
return reader, sb.String(), err
}
} else {
line, err := reader.ReadString('\n')
if errors.Is(err, io.EOF) && line == "" {
// no more data
return reader, sb.String(), nil
}
if err != nil && !errors.Is(err, io.EOF) {
return reader, sb.String(), err
}
// Determine newline style and strip it for inspection
newline := ""
if strings.HasSuffix(line, "\r\n") {
newline = "\r\n"
line = strings.TrimSuffix(line, "\r\n")
} else if strings.HasSuffix(line, "\n") {
newline = "\n"
line = strings.TrimSuffix(line, "\n")
}
trimmed := strings.TrimSpace(line)
// Document separator: exact line '---' or a '--- ' prefix followed by content
if separatorLineRe.MatchString(trimmed) {
sb.WriteString("$yqDocSeparator$")
sb.WriteString(newline)
if errors.Is(err, io.EOF) {
return reader, sb.String(), nil
}
continue
}
// Handle lines that start with '--- ' followed by more content (e.g. '--- cat')
if separatorPrefixRe.MatchString(line) {
match := separatorPrefixRe.FindString(line)
remainder := line[len(match):]
// normalise separator newline: if original had none, default to LF
sepNewline := newline
if sepNewline == "" {
sepNewline = "\n"
}
sb.WriteString("$yqDocSeparator$")
sb.WriteString(sepNewline)
// push the remainder back onto the reader and continue processing
reader = bufio.NewReader(io.MultiReader(strings.NewReader(remainder), reader))
if errors.Is(err, io.EOF) && remainder == "" {
return reader, sb.String(), nil
}
continue
}
// Comments, YAML directives, and blank lines are leading content
if commentLineRe.MatchString(line) || yamlDirectiveLineRe.MatchString(line) || trimmed == "" {
sb.WriteString(line)
sb.WriteString(newline)
if errors.Is(err, io.EOF) {
return reader, sb.String(), nil
}
continue
}
// First non-leading line: push it back onto a reader and return
originalLine := line + newline
return io.MultiReader(strings.NewReader(originalLine), reader), sb.String(), nil
}
}

View File

@ -22,7 +22,7 @@ see https://yaml.org/type/merge.html
Given a sample.yml file of:
```yaml
- &CENTER
- &CENTRE
x: 1
y: 2
- &LEFT
@ -32,7 +32,7 @@ Given a sample.yml file of:
r: 10
- &SMALL
r: 1
- !!merge <<: *CENTER
- !!merge <<: *CENTRE
r: 10
```
then
@ -288,7 +288,7 @@ see https://yaml.org/type/merge.html. This has the correct data, but the wrong k
Given a sample.yml file of:
```yaml
- &CENTER
- &CENTRE
x: 1
y: 2
- &LEFT
@ -299,7 +299,7 @@ Given a sample.yml file of:
- &SMALL
r: 1
- !!merge <<:
- *CENTER
- *CENTRE
- *BIG
```
then
@ -318,7 +318,7 @@ see https://yaml.org/type/merge.html. This has the correct data, but the wrong k
Given a sample.yml file of:
```yaml
- &CENTER
- &CENTRE
x: 1
y: 2
- &LEFT
@ -401,7 +401,7 @@ Taken from https://yaml.org/type/merge.html. Same values as legacy, but with the
Given a sample.yml file of:
```yaml
- &CENTER
- &CENTRE
x: 1
y: 2
- &LEFT
@ -412,7 +412,7 @@ Given a sample.yml file of:
- &SMALL
r: 1
- !!merge <<:
- *CENTER
- *CENTRE
- *BIG
```
then
@ -432,7 +432,7 @@ Taken from https://yaml.org/type/merge.html. Same values as legacy, but with the
Given a sample.yml file of:
```yaml
- &CENTER
- &CENTRE
x: 1
y: 2
- &LEFT

View File

@ -2,7 +2,7 @@
Various operators for parsing and manipulating dates.
## Date time formattings
## Date time formatting
This uses Golang's built in time library for parsing and formatting date times.
When not specified, the RFC3339 standard is assumed `2006-01-02T15:04:05Z07:00` for parsing.

View File

@ -2,7 +2,7 @@
Various operators for parsing and manipulating dates.
## Date time formattings
## Date time formatting
This uses Golang's built in time library for parsing and formatting date times.
When not specified, the RFC3339 standard is assumed `2006-01-02T15:04:05Z07:00` for parsing.

View File

@ -79,6 +79,46 @@ will output
c: cat
```
## Get the top (root) parent
Use negative numbers to reach the top-level parents. You can think of this as indexing into the 'parents' array described above
Given a sample.yml file of:
```yaml
a:
b:
c: cat
```
then
```bash
yq '.a.b.c | parent(-1)' sample.yml
```
will output
```yaml
a:
b:
c: cat
```
## Root
Alias for parent(-1), returns the top level parent. This is usually the document node.
Given a sample.yml file of:
```yaml
a:
b:
c: cat
```
then
```bash
yq '.a.b.c | root' sample.yml
```
will output
```yaml
a:
b:
c: cat
```
## N-th parent
You can optionally supply the number of levels to go up for the parent, the default being 1.
@ -116,6 +156,25 @@ a:
c: cat
```
## N-th negative
Similarly, use negative numbers to index backwards from the parents array
Given a sample.yml file of:
```yaml
a:
b:
c: cat
```
then
```bash
yq '.a.b.c | parent(-2)' sample.yml
```
will output
```yaml
b:
c: cat
```
## No parent
Given a sample.yml file of:
```yaml

201
pkg/yqlib/doc/usage/hcl.md Normal file
View File

@ -0,0 +1,201 @@
# HCL
Encode and decode to and from [HashiCorp Configuration Language (HCL)](https://github.com/hashicorp/hcl).
HCL is commonly used in HashiCorp tools like Terraform for configuration files. The yq HCL encoder and decoder support:
- Blocks and attributes
- String interpolation and expressions (preserved without quotes)
- Comments (leading, head, and line comments)
- Nested structures (maps and lists)
- Syntax colorisation when enabled
## Parse HCL
Given a sample.hcl file of:
```hcl
io_mode = "async"
```
then
```bash
yq -oy sample.hcl
```
will output
```yaml
io_mode: "async"
```
## Roundtrip: Sample Doc
Given a sample.hcl file of:
```hcl
service "cat" {
process "main" {
command = ["/usr/local/bin/awesome-app", "server"]
}
process "management" {
command = ["/usr/local/bin/awesome-app", "management"]
}
}
```
then
```bash
yq sample.hcl
```
will output
```hcl
service "cat" {
process "main" {
command = ["/usr/local/bin/awesome-app", "server"]
}
process "management" {
command = ["/usr/local/bin/awesome-app", "management"]
}
}
```
## Roundtrip: With an update
Given a sample.hcl file of:
```hcl
service "cat" {
process "main" {
command = ["/usr/local/bin/awesome-app", "server"]
}
process "management" {
command = ["/usr/local/bin/awesome-app", "management"]
}
}
```
then
```bash
yq '.service.cat.process.main.command += "meow"' sample.hcl
```
will output
```hcl
service "cat" {
process "main" {
command = ["/usr/local/bin/awesome-app", "server", "meow"]
}
process "management" {
command = ["/usr/local/bin/awesome-app", "management"]
}
}
```
## Parse HCL: Sample Doc
Given a sample.hcl file of:
```hcl
service "cat" {
process "main" {
command = ["/usr/local/bin/awesome-app", "server"]
}
process "management" {
command = ["/usr/local/bin/awesome-app", "management"]
}
}
```
then
```bash
yq -oy sample.hcl
```
will output
```yaml
service:
cat:
process:
main:
command:
- "/usr/local/bin/awesome-app"
- "server"
management:
command:
- "/usr/local/bin/awesome-app"
- "management"
```
## Parse HCL: with comments
Given a sample.hcl file of:
```hcl
# Configuration
port = 8080 # server port
```
then
```bash
yq -oy sample.hcl
```
will output
```yaml
# Configuration
port: 8080 # server port
```
## Roundtrip: with comments
Given a sample.hcl file of:
```hcl
# Configuration
port = 8080
```
then
```bash
yq sample.hcl
```
will output
```hcl
# Configuration
port = 8080
```
## Roundtrip: With templates, functions and arithmetic
Given a sample.hcl file of:
```hcl
# Arithmetic with literals and application-provided variables
sum = 1 + addend
# String interpolation and templates
message = "Hello, ${name}!"
# Application-provided functions
shouty_message = upper(message)
```
then
```bash
yq sample.hcl
```
will output
```hcl
# Arithmetic with literals and application-provided variables
sum = 1 + addend
# String interpolation and templates
message = "Hello, ${name}!"
# Application-provided functions
shouty_message = upper(message)
```
## Roundtrip: Separate blocks with same name.
Given a sample.hcl file of:
```hcl
resource "aws_instance" "web" {
ami = "ami-12345"
}
resource "aws_instance" "db" {
ami = "ami-67890"
}
```
then
```bash
yq sample.hcl
```
will output
```hcl
resource "aws_instance" "web" {
ami = "ami-12345"
}
resource "aws_instance" "db" {
ami = "ami-67890"
}
```

View File

@ -0,0 +1,11 @@
# HCL
Encode and decode to and from [HashiCorp Configuration Language (HCL)](https://github.com/hashicorp/hcl).
HCL is commonly used in HashiCorp tools like Terraform for configuration files. The yq HCL encoder and decoder support:
- Blocks and attributes
- String interpolation and expressions (preserved without quotes)
- Comments (leading, head, and line comments)
- Nested structures (maps and lists)
- Syntax colorisation when enabled

View File

@ -0,0 +1,9 @@
# KYaml
Encode and decode to and from KYaml (a restricted subset of YAML that uses flow-style collections).
KYaml is useful when you want YAML data rendered in a compact, JSON-like form while still supporting YAML features like comments.
Notes:
- Strings are always double-quoted in KYaml output.
- Anchors and aliases are expanded (KYaml output does not emit them).

View File

@ -0,0 +1,253 @@
# KYaml
Encode and decode to and from KYaml (a restricted subset of YAML that uses flow-style collections).
KYaml is useful when you want YAML data rendered in a compact, JSON-like form while still supporting YAML features like comments.
Notes:
- Strings are always double-quoted in KYaml output.
- Anchors and aliases are expanded (KYaml output does not emit them).
## Encode kyaml: plain string scalar
Strings are always double-quoted in KYaml output.
Given a sample.yml file of:
```yaml
cat
```
then
```bash
yq -o=kyaml '.' sample.yml
```
will output
```yaml
"cat"
```
## encode flow mapping and sequence
Given a sample.yml file of:
```yaml
a: b
c:
- d
```
then
```bash
yq -o=kyaml '.' sample.yml
```
will output
```yaml
{
a: "b",
c: [
"d",
],
}
```
## encode non-string scalars
Given a sample.yml file of:
```yaml
a: 12
b: true
c: null
d: "true"
```
then
```bash
yq -o=kyaml '.' sample.yml
```
will output
```yaml
{
a: 12,
b: true,
c: null,
d: "true",
}
```
## quote non-identifier keys
Given a sample.yml file of:
```yaml
"1a": b
"has space": c
```
then
```bash
yq -o=kyaml '.' sample.yml
```
will output
```yaml
{
"1a": "b",
"has space": "c",
}
```
## escape quoted strings
Given a sample.yml file of:
```yaml
a: "line1\nline2\t\"q\""
```
then
```bash
yq -o=kyaml '.' sample.yml
```
will output
```yaml
{
a: "line1\nline2\t\"q\"",
}
```
## preserve comments when encoding
Given a sample.yml file of:
```yaml
# leading
a: 1 # a line
# head b
b: 2
c:
# head d
- d # d line
- e
# trailing
```
then
```bash
yq -o=kyaml '.' sample.yml
```
will output
```yaml
# leading
{
a: 1, # a line
# head b
b: 2,
c: [
# head d
"d", # d line
"e",
],
# trailing
}
```
## Encode kyaml: anchors and aliases
KYaml output does not support anchors/aliases; they are expanded to concrete values.
Given a sample.yml file of:
```yaml
base: &base
a: b
copy: *base
```
then
```bash
yq -o=kyaml '.' sample.yml
```
will output
```yaml
{
base: {
a: "b",
},
copy: {
a: "b",
},
}
```
## Encode kyaml: yaml to kyaml shows formatting differences
KYaml uses flow-style collections (braces/brackets) and explicit commas.
Given a sample.yml file of:
```yaml
person:
name: John
pets:
- cat
- dog
```
then
```bash
yq -o=kyaml '.' sample.yml
```
will output
```yaml
{
person: {
name: "John",
pets: [
"cat",
"dog",
],
},
}
```
## Encode kyaml: nested lists of objects
Lists and objects can be nested arbitrarily; KYaml always uses flow-style collections.
Given a sample.yml file of:
```yaml
- name: a
items:
- id: 1
tags:
- k: x
v: y
- k: x2
v: y2
- id: 2
tags:
- k: z
v: w
```
then
```bash
yq -o=kyaml '.' sample.yml
```
will output
```yaml
[
{
name: "a",
items: [
{
id: 1,
tags: [
{
k: "x",
v: "y",
},
{
k: "x2",
v: "y2",
},
],
},
{
id: 2,
tags: [
{
k: "z",
v: "w",
},
],
},
],
},
]
```

View File

@ -141,3 +141,246 @@ will output
dependencies: {}
```
## Roundtrip: inline table attribute
Given a sample.toml file of:
```toml
name = { first = "Tom", last = "Preston-Werner" }
```
then
```bash
yq '.' sample.toml
```
will output
```toml
name = { first = "Tom", last = "Preston-Werner" }
```
## Roundtrip: table section
Given a sample.toml file of:
```toml
[owner.contact]
name = "Tom"
age = 36
```
then
```bash
yq '.' sample.toml
```
will output
```toml
[owner.contact]
name = "Tom"
age = 36
```
## Roundtrip: array of tables
Given a sample.toml file of:
```toml
[[fruits]]
name = "apple"
[[fruits.varieties]]
name = "red delicious"
```
then
```bash
yq '.' sample.toml
```
will output
```toml
[[fruits]]
name = "apple"
[[fruits.varieties]]
name = "red delicious"
```
## Roundtrip: arrays and scalars
Given a sample.toml file of:
```toml
A = ["hello", ["world", "again"]]
B = 12
```
then
```bash
yq '.' sample.toml
```
will output
```toml
A = ["hello", ["world", "again"]]
B = 12
```
## Roundtrip: simple
Given a sample.toml file of:
```toml
A = "hello"
B = 12
```
then
```bash
yq '.' sample.toml
```
will output
```toml
A = "hello"
B = 12
```
## Roundtrip: deep paths
Given a sample.toml file of:
```toml
[person]
name = "hello"
address = "12 cat st"
```
then
```bash
yq '.' sample.toml
```
will output
```toml
[person]
name = "hello"
address = "12 cat st"
```
## Roundtrip: empty array
Given a sample.toml file of:
```toml
A = []
```
then
```bash
yq '.' sample.toml
```
will output
```toml
A = []
```
## Roundtrip: sample table
Given a sample.toml file of:
```toml
var = "x"
[owner.contact]
name = "Tom Preston-Werner"
age = 36
```
then
```bash
yq '.' sample.toml
```
will output
```toml
var = "x"
[owner.contact]
name = "Tom Preston-Werner"
age = 36
```
## Roundtrip: empty table
Given a sample.toml file of:
```toml
[dependencies]
```
then
```bash
yq '.' sample.toml
```
will output
```toml
[dependencies]
```
## Roundtrip: comments
Given a sample.toml file of:
```toml
# This is a comment
A = "hello" # inline comment
B = 12
# Table comment
[person]
name = "Tom" # name comment
```
then
```bash
yq '.' sample.toml
```
will output
```toml
# This is a comment
A = "hello" # inline comment
B = 12
# Table comment
[person]
name = "Tom" # name comment
```
## Roundtrip: sample from web
Given a sample.toml file of:
```toml
# This is a TOML document
title = "TOML Example"
[owner]
name = "Tom Preston-Werner"
dob = 1979-05-27T07:32:00-08:00
[database]
enabled = true
ports = [8000, 8001, 8002]
data = [["delta", "phi"], [3.14]]
temp_targets = { cpu = 79.5, case = 72.0 }
# [servers] yq can't do this one yet
[servers.alpha]
ip = "10.0.0.1"
role = "frontend"
[servers.beta]
ip = "10.0.0.2"
role = "backend"
```
then
```bash
yq '.' sample.toml
```
will output
```toml
# This is a TOML document
title = "TOML Example"
[owner]
name = "Tom Preston-Werner"
dob = 1979-05-27T07:32:00-08:00
[database]
enabled = true
ports = [8000, 8001, 8002]
data = [["delta", "phi"], [3.14]]
temp_targets = { cpu = 79.5, case = 72.0 }
# [servers] yq can't do this one yet
[servers.alpha]
ip = "10.0.0.1"
role = "frontend"
[servers.beta]
ip = "10.0.0.2"
role = "backend"
```

View File

@ -53,7 +53,7 @@ Given a sample.xml file of:
```
then
```bash
yq -oy '.' sample.xml
yq -oy sample.xml
```
will output
```yaml
@ -100,7 +100,7 @@ Given a sample.xml file of:
```
then
```bash
yq -oy '.' sample.xml
yq -oy sample.xml
```
will output
```yaml
@ -157,7 +157,7 @@ Given a sample.xml file of:
```
then
```bash
yq -oy '.' sample.xml
yq -oy sample.xml
```
will output
```yaml
@ -177,7 +177,7 @@ Given a sample.xml file of:
```
then
```bash
yq -oy '.' sample.xml
yq -oy sample.xml
```
will output
```yaml
@ -196,7 +196,7 @@ Given a sample.xml file of:
```
then
```bash
yq -oy '.' sample.xml
yq -oy sample.xml
```
will output
```yaml
@ -225,7 +225,7 @@ Given a sample.xml file of:
```
then
```bash
yq '.' sample.xml
yq sample.xml
```
will output
```xml
@ -256,7 +256,7 @@ Given a sample.xml file of:
```
then
```bash
yq --xml-skip-directives '.' sample.xml
yq --xml-skip-directives sample.xml
```
will output
```xml
@ -292,7 +292,7 @@ for x --></x>
```
then
```bash
yq -oy '.' sample.xml
yq -oy sample.xml
```
will output
```yaml
@ -327,7 +327,7 @@ Given a sample.xml file of:
```
then
```bash
yq --xml-keep-namespace=false '.' sample.xml
yq --xml-keep-namespace=false sample.xml
```
will output
```xml
@ -361,7 +361,7 @@ Given a sample.xml file of:
```
then
```bash
yq --xml-raw-token=false '.' sample.xml
yq --xml-raw-token=false sample.xml
```
will output
```xml
@ -542,7 +542,7 @@ for x --></x>
```
then
```bash
yq '.' sample.xml
yq sample.xml
```
will output
```xml
@ -575,7 +575,7 @@ Given a sample.xml file of:
```
then
```bash
yq '.' sample.xml
yq sample.xml
```
will output
```xml

View File

@ -1,7 +1,12 @@
package yqlib
import (
"bufio"
"errors"
"io"
"strings"
"github.com/fatih/color"
)
type Encoder interface {
@ -25,3 +30,63 @@ func mapKeysToStrings(node *CandidateNode) {
mapKeysToStrings(child)
}
}
// Some funcs are shared between encoder_yaml and encoder_kyaml
// PrintYAMLDocumentSeparator writes a "---" YAML document separator to writer
// when PrintDocSeparators is enabled; otherwise it is a no-op.
// Returns any error from the underlying write.
func PrintYAMLDocumentSeparator(writer io.Writer, PrintDocSeparators bool) error {
	if !PrintDocSeparators {
		return nil
	}
	log.Debug("writing doc sep")
	return writeString(writer, "---\n")
}
// PrintYAMLLeadingContent writes previously-captured leading content
// (comments, directives and internal document-separator placeholders) back
// out as YAML, line by line:
//   - a line containing "$yqDocSeparator$" becomes a "---" separator (only
//     when PrintDocSeparators is true), preserving that line's CRLF/LF ending;
//   - any other non-blank line that is not a lone newline, not a "%" directive
//     and not already recognised by commentLineRe as a comment is prefixed
//     with "# " so it round-trips as a YAML comment;
//   - when ColorsEnabled, non-blank lines are wrapped in hi-black ANSI codes.
//
// Returns the first read or write error encountered, or nil on success.
func PrintYAMLLeadingContent(writer io.Writer, content string, PrintDocSeparators bool, ColorsEnabled bool) error {
	reader := bufio.NewReader(strings.NewReader(content))
	// reuse precompiled package-level regex
	// (declared in decoder_yaml.go)
	for {
		readline, errReading := reader.ReadString('\n')
		if errReading != nil && !errors.Is(errReading, io.EOF) {
			return errReading
		}
		if strings.Contains(readline, "$yqDocSeparator$") {
			// Preserve the original line ending (CRLF or LF)
			lineEnding := "\n"
			if strings.HasSuffix(readline, "\r\n") {
				lineEnding = "\r\n"
			}
			if PrintDocSeparators {
				if err := writeString(writer, "---"+lineEnding); err != nil {
					return err
				}
			}
		} else {
			// Re-comment plain text; skip blank lines, "%" directives and
			// lines that are already comments.
			if len(readline) > 0 && readline != "\n" && readline[0] != '%' && !commentLineRe.MatchString(readline) {
				readline = "# " + readline
			}
			if ColorsEnabled && strings.TrimSpace(readline) != "" {
				readline = format(color.FgHiBlack) + readline + format(color.Reset)
			}
			if err := writeString(writer, readline); err != nil {
				return err
			}
		}
		if errors.Is(errReading, io.EOF) {
			if readline != "" {
				// the last comment we read didn't have a newline, put one in
				if err := writeString(writer, "\n"); err != nil {
					return err
				}
			}
			break
		}
	}
	return nil
}

690
pkg/yqlib/encoder_hcl.go Normal file
View File

@ -0,0 +1,690 @@
//go:build !yq_nohcl
package yqlib
import (
	"fmt"
	"io"
	"regexp"
	"strconv"
	"strings"

	"github.com/fatih/color"
	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsyntax"
	hclwrite "github.com/hashicorp/hcl/v2/hclwrite"
	"github.com/zclconf/go-cty/cty"
)
// hclEncoder writes CandidateNodes out as HCL text.
type hclEncoder struct {
	prefs HclPreferences // output options (e.g. ColorsEnabled)
}

// commentPathSep is used to join path segments when collecting comments.
// It uses a rarely used ASCII control character to avoid collisions with
// normal key names (including dots).
const commentPathSep = "\x1e"

// NewHclEncoder creates a new HCL encoder
func NewHclEncoder(prefs HclPreferences) Encoder {
	return &hclEncoder{prefs: prefs}
}
// CanHandleAliases reports false: HCL output cannot represent YAML
// anchors/aliases, so the printer must expand them first.
func (he *hclEncoder) CanHandleAliases() bool {
	return false
}

// PrintDocumentSeparator is a no-op: HCL has no document-separator concept.
func (he *hclEncoder) PrintDocumentSeparator(_ io.Writer) error {
	return nil
}

// PrintLeadingContent is a no-op; comments are instead re-injected into the
// rendered text by Encode (via collectComments/injectComments).
func (he *hclEncoder) PrintLeadingContent(_ io.Writer, _ string) error {
	return nil
}
// Encode renders node as HCL.
//
// Bare scalars are written verbatim. Mappings are built with hclwrite, then
// post-processed: spacing before '=' is compacted, comments collected from
// the node tree are injected back into the text, and the result is
// optionally colourized before being written.
func (he *hclEncoder) Encode(writer io.Writer, node *CandidateNode) error {
	log.Debugf("I need to encode %v", NodeToString(node))
	if node.Kind == ScalarNode {
		return writeString(writer, node.Value+"\n")
	}
	f := hclwrite.NewEmptyFile()
	body := f.Body()
	// Collect comments as we encode
	commentMap := make(map[string]string)
	he.collectComments(node, "", commentMap)
	if err := he.encodeNode(body, node); err != nil {
		return fmt.Errorf("failed to encode HCL: %w", err)
	}
	// Get the formatted output and remove extra spacing before '='
	output := f.Bytes()
	compactOutput := he.compactSpacing(output)
	// Inject comments back into the output
	finalOutput := he.injectComments(compactOutput, commentMap)
	if he.prefs.ColorsEnabled {
		colourized := he.colorizeHcl(finalOutput)
		_, err := writer.Write(colourized)
		return err
	}
	_, err := writer.Write(finalOutput)
	return err
}
// compactSpacingRe matches two-or-more whitespace characters between a
// non-space character and '='; compiled once at package init instead of on
// every compactSpacing call (Encode runs this on every document).
var compactSpacingRe = regexp.MustCompile(`(\S)\s{2,}=`)

// compactSpacing removes extra whitespace before '=' in attribute assignments,
// undoing hclwrite's column-aligned formatting.
func (he *hclEncoder) compactSpacing(input []byte) []byte {
	// $1 restores the captured non-space character, leaving a single space.
	return compactSpacingRe.ReplaceAll(input, []byte("$1 ="))
}
// collectComments recursively collects comments from nodes for later injection.
//
// Comments are keyed by a commentPathSep-joined path of mapping keys plus a
// trailing type segment ("head", "line" or "foot"); the root mapping's own
// head comment is stored under the synthetic "__root__"/"head" path. Only
// mapping nodes are walked — comments on sequence items are not collected.
func (he *hclEncoder) collectComments(node *CandidateNode, prefix string, commentMap map[string]string) {
	if node == nil {
		return
	}
	// For mapping nodes, collect comments from keys and values
	if node.Kind == MappingNode {
		// Collect root-level head comment if at root (prefix is empty)
		if prefix == "" && node.HeadComment != "" {
			commentMap[joinCommentPath("__root__", "head")] = node.HeadComment
		}
		for i := 0; i < len(node.Content); i += 2 {
			keyNode := node.Content[i]
			valueNode := node.Content[i+1]
			key := keyNode.Value
			// Create a path for this key
			path := joinCommentPath(prefix, key)
			// Store comments from the key (head comments appear before the attribute)
			if keyNode.HeadComment != "" {
				commentMap[joinCommentPath(path, "head")] = keyNode.HeadComment
			}
			// Store comments from the value (line comments appear after the value)
			if valueNode.LineComment != "" {
				commentMap[joinCommentPath(path, "line")] = valueNode.LineComment
			}
			if valueNode.FootComment != "" {
				commentMap[joinCommentPath(path, "foot")] = valueNode.FootComment
			}
			// Recurse into nested mappings
			if valueNode.Kind == MappingNode {
				he.collectComments(valueNode, path, commentMap)
			}
		}
	}
}
// joinCommentPath appends segment to prefix using commentPathSep; an empty
// prefix yields the segment on its own (no leading separator).
func joinCommentPath(prefix, segment string) string {
	if prefix != "" {
		segment = prefix + commentPathSep + segment
	}
	return segment
}
// injectComments adds collected comments back into the HCL output.
//
// Two comment kinds are handled: the root head comment (prepended once,
// unless the output already starts with it) and per-attribute "head"
// comments, inserted on the line above a matching `key =` assignment.
//
// NOTE(review): the head-comment regex matches on the key name alone, so if
// the same attribute name appears in several blocks the comment is inserted
// above every occurrence — confirm this is acceptable. "line" and "foot"
// comments gathered by collectComments are not injected here.
func (he *hclEncoder) injectComments(output []byte, commentMap map[string]string) []byte {
	// Convert output to string for easier manipulation
	result := string(output)
	// Root-level head comment (stored on the synthetic __root__/head path)
	for path, comment := range commentMap {
		if path == joinCommentPath("__root__", "head") {
			trimmed := strings.TrimSpace(comment)
			if trimmed != "" && !strings.HasPrefix(result, trimmed) {
				result = trimmed + "\n" + result
			}
		}
	}
	// Attribute head comments: insert above matching assignment
	for path, comment := range commentMap {
		parts := strings.Split(path, commentPathSep)
		if len(parts) < 2 {
			continue
		}
		commentType := parts[len(parts)-1]
		key := parts[len(parts)-2]
		if commentType != "head" || key == "" {
			continue
		}
		trimmed := strings.TrimSpace(comment)
		if trimmed == "" {
			continue
		}
		re := regexp.MustCompile(`(?m)^(\s*)` + regexp.QuoteMeta(key) + `\s*=`)
		if re.MatchString(result) {
			// $1 is the matched line's indentation; $0 re-emits the whole match.
			result = re.ReplaceAllString(result, "$1"+trimmed+"\n$0")
		}
	}
	return []byte(result)
}
// colorizeHcl applies ANSI colours to rendered HCL using a small hand-rolled
// byte tokenizer: '#' comments (hi-black), quoted strings (green), numbers
// (hi-magenta), the keywords true/false/null (hi-magenta), and identifiers
// followed by '=' or '{' — attribute keys and block types — in cyan. All
// other bytes (whitespace, operators, brackets) pass through unchanged.
func (he *hclEncoder) colorizeHcl(input []byte) []byte {
	hcl := string(input)
	result := strings.Builder{}
	// Create colour functions for different token types
	commentColor := color.New(color.FgHiBlack).SprintFunc()
	stringColor := color.New(color.FgGreen).SprintFunc()
	numberColor := color.New(color.FgHiMagenta).SprintFunc()
	keyColor := color.New(color.FgCyan).SprintFunc()
	boolColor := color.New(color.FgHiMagenta).SprintFunc()
	// Simple tokenization for HCL colouring
	i := 0
	for i < len(hcl) {
		ch := hcl[i]
		// Comments - from # to end of line
		if ch == '#' {
			end := i
			for end < len(hcl) && hcl[end] != '\n' {
				end++
			}
			result.WriteString(commentColor(hcl[i:end]))
			i = end
			continue
		}
		// Strings - quoted text
		if ch == '"' || ch == '\'' {
			quote := ch
			end := i + 1
			for end < len(hcl) && hcl[end] != quote {
				if hcl[end] == '\\' {
					end++ // skip escaped char
				}
				end++
			}
			if end < len(hcl) {
				end++ // include closing quote
			}
			result.WriteString(stringColor(hcl[i:end]))
			i = end
			continue
		}
		// Numbers - sequences of digits, possibly with decimal point or minus
		if (ch >= '0' && ch <= '9') || (ch == '-' && i+1 < len(hcl) && hcl[i+1] >= '0' && hcl[i+1] <= '9') {
			end := i
			if ch == '-' {
				end++
			}
			for end < len(hcl) && ((hcl[end] >= '0' && hcl[end] <= '9') || hcl[end] == '.') {
				end++
			}
			result.WriteString(numberColor(hcl[i:end]))
			i = end
			continue
		}
		// Identifiers/keys - alphanumeric + underscore
		if (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '_' {
			end := i
			for end < len(hcl) && ((hcl[end] >= 'a' && hcl[end] <= 'z') ||
				(hcl[end] >= 'A' && hcl[end] <= 'Z') ||
				(hcl[end] >= '0' && hcl[end] <= '9') ||
				hcl[end] == '_' || hcl[end] == '-') {
				end++
			}
			ident := hcl[i:end]
			// Check if this is a keyword/reserved word
			switch ident {
			case "true", "false", "null":
				result.WriteString(boolColor(ident))
			default:
				// Check if followed by = (it's a key)
				j := end
				for j < len(hcl) && (hcl[j] == ' ' || hcl[j] == '\t') {
					j++
				}
				if j < len(hcl) && hcl[j] == '=' {
					result.WriteString(keyColor(ident))
				} else if j < len(hcl) && hcl[j] == '{' {
					// Block type
					result.WriteString(keyColor(ident))
				} else {
					result.WriteString(ident) // plain text for other identifiers
				}
			}
			i = end
			continue
		}
		// Everything else (whitespace, operators, brackets) - no color
		result.WriteByte(ch)
		i++
	}
	return []byte(result.String())
}
// isHCLIdentifierStart reports whether r may begin a bare HCL identifier:
// an ASCII letter or underscore.
func isHCLIdentifierStart(r rune) bool {
	switch {
	case r >= 'a' && r <= 'z':
		return true
	case r >= 'A' && r <= 'Z':
		return true
	default:
		return r == '_'
	}
}

// isHCLIdentifierPart reports whether r may appear after the first character
// of a bare HCL identifier: a letter, digit, underscore or hyphen.
func isHCLIdentifierPart(r rune) bool {
	if r >= '0' && r <= '9' {
		return true
	}
	if r == '-' {
		return true
	}
	return isHCLIdentifierStart(r)
}

// isValidHCLIdentifier reports whether s can be emitted as an unquoted HCL
// identifier: non-empty, starting with a letter or underscore, and containing
// only letters, digits, underscores and hyphens thereafter.
func isValidHCLIdentifier(s string) bool {
	for i, r := range s {
		ok := isHCLIdentifierPart(r)
		if i == 0 {
			ok = isHCLIdentifierStart(r)
		}
		if !ok {
			return false
		}
	}
	return s != ""
}
// tokensForRawHCLExpr produces a minimal token stream for a simple HCL expression so we can
// write it without introducing quotes (e.g. function calls like upper(message)).
//
// Supported characters: identifiers, decimal numbers, parentheses, commas,
// dots and the + - * / operators; spaces and tabs are dropped. Any other
// character (including quotes, braces and '$') yields an error.
func tokensForRawHCLExpr(expr string) (hclwrite.Tokens, error) {
	var tokens hclwrite.Tokens
	for i := 0; i < len(expr); {
		ch := expr[i]
		switch {
		case ch == ' ' || ch == '\t':
			i++
			continue
		case isHCLIdentifierStart(rune(ch)):
			start := i
			i++
			for i < len(expr) && isHCLIdentifierPart(rune(expr[i])) {
				i++
			}
			tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenIdent, Bytes: []byte(expr[start:i])})
			continue
		case ch >= '0' && ch <= '9':
			start := i
			i++
			for i < len(expr) && ((expr[i] >= '0' && expr[i] <= '9') || expr[i] == '.') {
				i++
			}
			tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenNumberLit, Bytes: []byte(expr[start:i])})
			continue
		case ch == '(':
			tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenOParen, Bytes: []byte{'('}})
		case ch == ')':
			tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenCParen, Bytes: []byte{')'}})
		case ch == ',':
			tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenComma, Bytes: []byte{','}})
		case ch == '.':
			tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenDot, Bytes: []byte{'.'}})
		case ch == '+':
			tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenPlus, Bytes: []byte{'+'}})
		case ch == '-':
			tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenMinus, Bytes: []byte{'-'}})
		case ch == '*':
			tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenStar, Bytes: []byte{'*'}})
		case ch == '/':
			tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenSlash, Bytes: []byte{'/'}})
		default:
			return nil, fmt.Errorf("unsupported character %q in raw HCL expression", ch)
		}
		// Single-character token cases fall through to this increment;
		// multi-character cases advance i themselves and `continue` above.
		i++
	}
	return tokens, nil
}
// encodeAttribute encodes a value as an HCL attribute (`key = value`).
//
// String scalars get special handling so expression semantics survive a
// round-trip:
//   - unquoted (Style==0) or literal-style strings are re-tokenized with
//     tokensForRawHCLExpr and written without quotes; tokenization errors
//     propagate to the caller (no quoted fallback);
//   - double-quoted strings containing "${" become templates with real
//     interpolation tokens.
//
// NOTE(review): the identifier/traversal branch below requires Style==0,
// which the first branch has already consumed — it appears unreachable;
// confirm the intended branch order.
//
// All other values are converted via nodeToCtyValue and set conventionally.
func (he *hclEncoder) encodeAttribute(body *hclwrite.Body, key string, valueNode *CandidateNode) error {
	if valueNode.Kind == ScalarNode && valueNode.Tag == "!!str" {
		// Handle unquoted expressions (as-is, without quotes)
		if valueNode.Style == 0 {
			tokens, err := tokensForRawHCLExpr(valueNode.Value)
			if err != nil {
				return err
			}
			body.SetAttributeRaw(key, tokens)
			return nil
		}
		if valueNode.Style&LiteralStyle != 0 {
			tokens, err := tokensForRawHCLExpr(valueNode.Value)
			if err != nil {
				return err
			}
			body.SetAttributeRaw(key, tokens)
			return nil
		}
		// Check if template with interpolation
		if valueNode.Style&DoubleQuotedStyle != 0 && strings.Contains(valueNode.Value, "${") {
			return he.encodeTemplateAttribute(body, key, valueNode.Value)
		}
		// Check if unquoted identifier
		if isValidHCLIdentifier(valueNode.Value) && valueNode.Style == 0 {
			traversal := hcl.Traversal{
				hcl.TraverseRoot{Name: valueNode.Value},
			}
			body.SetAttributeTraversal(key, traversal)
			return nil
		}
	}
	// Default: use cty.Value for quoted strings and all other types
	ctyValue, err := nodeToCtyValue(valueNode)
	if err != nil {
		return err
	}
	body.SetAttributeValue(key, ctyValue)
	return nil
}
// encodeTemplateAttribute encodes a template string with ${} interpolations
// as a quoted HCL template, emitting real interpolation tokens so hclwrite
// does not escape the "${" sequences.
//
// Plain characters are written one byte at a time as quoted literals; each
// "${...}" span becomes TokenTemplateInterp, the inner expression as a single
// ident token, and TokenTemplateSeqEnd. Brace depth is tracked so nested
// '{' '}' pairs inside the interpolation stay within one span.
func (he *hclEncoder) encodeTemplateAttribute(body *hclwrite.Body, key string, templateStr string) error {
	tokens := hclwrite.Tokens{
		{Type: hclsyntax.TokenOQuote, Bytes: []byte{'"'}},
	}
	for i := 0; i < len(templateStr); i++ {
		if i < len(templateStr)-1 && templateStr[i] == '$' && templateStr[i+1] == '{' {
			// Start of template interpolation
			tokens = append(tokens, &hclwrite.Token{
				Type:  hclsyntax.TokenTemplateInterp,
				Bytes: []byte("${"),
			})
			i++ // skip the '{'
			// Find the matching '}'
			start := i + 1
			depth := 1
			for i++; i < len(templateStr) && depth > 0; i++ {
				switch templateStr[i] {
				case '{':
					depth++
				case '}':
					depth--
				}
			}
			i-- // back up to the '}'
			interpExpr := templateStr[start:i]
			tokens = append(tokens, &hclwrite.Token{
				Type:  hclsyntax.TokenIdent,
				Bytes: []byte(interpExpr),
			})
			tokens = append(tokens, &hclwrite.Token{
				Type:  hclsyntax.TokenTemplateSeqEnd,
				Bytes: []byte("}"),
			})
		} else {
			// Regular character
			tokens = append(tokens, &hclwrite.Token{
				Type:  hclsyntax.TokenQuotedLit,
				Bytes: []byte{templateStr[i]},
			})
		}
	}
	tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenCQuote, Bytes: []byte{'"'}})
	body.SetAttributeRaw(key, tokens)
	return nil
}
// encodeBlockIfMapping attempts to encode a value as a block. Returns true if it was encoded as a block.
//
// Strategies, tried in order:
//  1. flow-style mappings and non-mappings are rejected (the caller renders
//     them as `key = value` attributes instead);
//  2. nodes marked EncodeSeparate emit each child mapping as its own
//     labelled block;
//  3. a chain of single-entry mappings is unwrapped into block labels
//     (e.g. `resource "aws_instance" "web" { ... }`);
//  4. if every child value is a mapping, each child key becomes a label;
//  5. otherwise the mapping is rendered as a single unlabelled block.
//
// NOTE(review): AppendNewBlock runs before the body is encoded, so a failed
// attempt may leave an empty block appended before a later strategy also
// appends one — confirm errors here are effectively impossible in practice.
func (he *hclEncoder) encodeBlockIfMapping(body *hclwrite.Body, key string, valueNode *CandidateNode) bool {
	if valueNode.Kind != MappingNode || valueNode.Style == FlowStyle {
		return false
	}
	// If EncodeSeparate is set, emit children as separate blocks regardless of label extraction
	if valueNode.EncodeSeparate {
		if handled, _ := he.encodeMappingChildrenAsBlocks(body, key, valueNode); handled {
			return true
		}
	}
	// Try to extract block labels from a single-entry mapping chain
	if labels, bodyNode, ok := extractBlockLabels(valueNode); ok {
		if len(labels) > 1 && mappingChildrenAllMappings(bodyNode) {
			primaryLabels := labels[:len(labels)-1]
			nestedType := labels[len(labels)-1]
			block := body.AppendNewBlock(key, primaryLabels)
			if handled, err := he.encodeMappingChildrenAsBlocks(block.Body(), nestedType, bodyNode); err == nil && handled {
				return true
			}
			if err := he.encodeNodeAttributes(block.Body(), bodyNode); err == nil {
				return true
			}
		}
		block := body.AppendNewBlock(key, labels)
		if err := he.encodeNodeAttributes(block.Body(), bodyNode); err == nil {
			return true
		}
	}
	// If all child values are mappings, treat each child key as a labelled instance of this block type
	if handled, _ := he.encodeMappingChildrenAsBlocks(body, key, valueNode); handled {
		return true
	}
	// No labels detected, render as unlabelled block
	block := body.AppendNewBlock(key, nil)
	if err := he.encodeNodeAttributes(block.Body(), valueNode); err == nil {
		return true
	}
	return false
}
// encodeNode encodes a CandidateNode directly to HCL, preserving style information.
//
// The root must be a mapping. Rendering each key/value pair as either a
// nested block or a `key = value` attribute is delegated to
// encodeNodeAttributes, which implements exactly that loop (previously
// duplicated here verbatim); only the root-specific error message differs.
func (he *hclEncoder) encodeNode(body *hclwrite.Body, node *CandidateNode) error {
	if node.Kind != MappingNode {
		return fmt.Errorf("HCL encoder expects a mapping at the root level, got %v", kindToString(node.Kind))
	}
	// Kind is already verified, so encodeNodeAttributes' own mapping check
	// cannot fire; behaviour is identical to the previous inline loop.
	return he.encodeNodeAttributes(body, node)
}
// mappingChildrenAllMappings reports whether node is a non-empty, non-flow
// mapping whose every value is itself a non-flow mapping.
func mappingChildrenAllMappings(node *CandidateNode) bool {
	if node == nil {
		return false
	}
	isBlockMapping := func(n *CandidateNode) bool {
		return n.Kind == MappingNode && n.Style != FlowStyle
	}
	if !isBlockMapping(node) || len(node.Content) == 0 {
		return false
	}
	// Content alternates key/value; values sit at the odd indices.
	for i := 1; i < len(node.Content); i += 2 {
		if !isBlockMapping(node.Content[i]) {
			return false
		}
	}
	return true
}
// encodeMappingChildrenAsBlocks emits a block for each mapping child, treating the child key as a label.
// Returns handled=true when it emitted blocks.
//
// Applies only when every child is a non-flow mapping AND the node is marked
// EncodeSeparate. Children whose own values are all mappings are expanded one
// level further, producing two labels (e.g. `resource "type" "name"`), and
// extractBlockLabels picks up any remaining single-entry label chains.
func (he *hclEncoder) encodeMappingChildrenAsBlocks(body *hclwrite.Body, blockType string, valueNode *CandidateNode) (bool, error) {
	if !mappingChildrenAllMappings(valueNode) {
		return false, nil
	}
	// Only emit as separate blocks if EncodeSeparate is true
	// This allows the encoder to respect the original block structure preserved by the decoder
	if !valueNode.EncodeSeparate {
		return false, nil
	}
	for i := 0; i < len(valueNode.Content); i += 2 {
		childKey := valueNode.Content[i].Value
		childVal := valueNode.Content[i+1]
		// Check if this child also represents multiple blocks (all children are mappings)
		if mappingChildrenAllMappings(childVal) {
			// Recursively emit each grandchild as a separate block with extended labels
			for j := 0; j < len(childVal.Content); j += 2 {
				grandchildKey := childVal.Content[j].Value
				grandchildVal := childVal.Content[j+1]
				labels := []string{childKey, grandchildKey}
				// Try to extract additional labels if this is a single-entry chain
				if extraLabels, bodyNode, ok := extractBlockLabels(grandchildVal); ok {
					labels = append(labels, extraLabels...)
					grandchildVal = bodyNode
				}
				block := body.AppendNewBlock(blockType, labels)
				if err := he.encodeNodeAttributes(block.Body(), grandchildVal); err != nil {
					return true, err
				}
			}
		} else {
			// Single block with this child as label(s)
			labels := []string{childKey}
			if extraLabels, bodyNode, ok := extractBlockLabels(childVal); ok {
				labels = append(labels, extraLabels...)
				childVal = bodyNode
			}
			block := body.AppendNewBlock(blockType, labels)
			if err := he.encodeNodeAttributes(block.Body(), childVal); err != nil {
				return true, err
			}
		}
	}
	return true, nil
}
// encodeNodeAttributes encodes the attributes of a mapping node (used for blocks).
// Each key/value pair becomes either a nested block (non-flow mapping values,
// via encodeBlockIfMapping) or a `key = value` attribute.
func (he *hclEncoder) encodeNodeAttributes(body *hclwrite.Body, node *CandidateNode) error {
	if node.Kind != MappingNode {
		return fmt.Errorf("expected mapping node for block body")
	}
	for i := 0; i < len(node.Content); i += 2 {
		keyNode := node.Content[i]
		valueNode := node.Content[i+1]
		key := keyNode.Value
		// Render as block or attribute depending on value type
		if he.encodeBlockIfMapping(body, key, valueNode) {
			continue
		}
		// Render attribute for non-block value
		if err := he.encodeAttribute(body, key, valueNode); err != nil {
			return err
		}
	}
	return nil
}
// extractBlockLabels detects a chain of single-entry mappings that encode block labels.
// It returns the collected labels and the final mapping to be used as the block body.
// Pattern: {label1: {label2: { ... {bodyMap} }}}
//
// Each step requires exactly one key/value pair (Content length 2) whose
// value is a mapping; the descent stops at the first value that is not
// another single-entry mapping-of-mapping, which becomes the body.
// Returns ok=false when the node never matches the pattern.
func extractBlockLabels(node *CandidateNode) ([]string, *CandidateNode, bool) {
	var labels []string
	current := node
	for current != nil && current.Kind == MappingNode && len(current.Content) == 2 {
		keyNode := current.Content[0]
		valNode := current.Content[1]
		if valNode.Kind != MappingNode {
			break
		}
		labels = append(labels, keyNode.Value)
		// If the child is itself a single mapping entry with a mapping value, keep descending.
		if len(valNode.Content) == 2 && valNode.Content[1].Kind == MappingNode {
			current = valNode
			continue
		}
		// Otherwise, we have reached the body mapping.
		return labels, valNode, true
	}
	return nil, nil, false
}
// nodeToCtyValue converts a CandidateNode to a cty.Value so hclwrite can
// render it. Scalars are parsed according to their YAML tag; mappings become
// cty objects (note: cty objects are unordered maps, so key order is NOT
// preserved here); sequences become tuples so elements may have mixed types.
//
// Returns an error for malformed numeric scalars, aliases (unsupported in
// HCL output) and unknown node kinds.
func nodeToCtyValue(node *CandidateNode) (cty.Value, error) {
	switch node.Kind {
	case ScalarNode:
		// Parse scalar value based on its tag
		switch node.Tag {
		case "!!bool":
			return cty.BoolVal(node.Value == "true"), nil
		case "!!int":
			// strconv is stricter than the previous fmt.Sscanf: trailing
			// garbage such as "12abc" is rejected instead of silently
			// truncated to 12.
			i, err := strconv.ParseInt(node.Value, 10, 64)
			if err != nil {
				return cty.NilVal, err
			}
			return cty.NumberIntVal(i), nil
		case "!!float":
			f, err := strconv.ParseFloat(node.Value, 64)
			if err != nil {
				return cty.NilVal, err
			}
			return cty.NumberFloatVal(f), nil
		case "!!null":
			return cty.NullVal(cty.DynamicPseudoType), nil
		default:
			// Default to string
			return cty.StringVal(node.Value), nil
		}
	case MappingNode:
		m := make(map[string]cty.Value)
		for i := 0; i < len(node.Content); i += 2 {
			keyNode := node.Content[i]
			valueNode := node.Content[i+1]
			v, err := nodeToCtyValue(valueNode)
			if err != nil {
				return cty.NilVal, err
			}
			m[keyNode.Value] = v
		}
		return cty.ObjectVal(m), nil
	case SequenceNode:
		vals := make([]cty.Value, len(node.Content))
		for i, item := range node.Content {
			v, err := nodeToCtyValue(item)
			if err != nil {
				return cty.NilVal, err
			}
			vals[i] = v
		}
		return cty.TupleVal(vals), nil
	case AliasNode:
		return cty.NilVal, fmt.Errorf("HCL encoder does not support aliases")
	default:
		return cty.NilVal, fmt.Errorf("unsupported node kind: %v", node.Kind)
	}
}

318
pkg/yqlib/encoder_kyaml.go Normal file
View File

@ -0,0 +1,318 @@
//go:build !yq_nokyaml
package yqlib
import (
"bytes"
"io"
"regexp"
"strconv"
"strings"
)
// kyamlEncoder renders documents as KYAML: a restricted YAML subset using
// flow-style collections with explicit trailing commas.
type kyamlEncoder struct {
	prefs KYamlPreferences // indent width, colour/doc-separator options
}

// NewKYamlEncoder creates a new KYAML encoder with the given preferences.
func NewKYamlEncoder(prefs KYamlPreferences) Encoder {
	return &kyamlEncoder{prefs: prefs}
}
// CanHandleAliases reports false so the printer expands anchors/aliases
// before this encoder sees them.
func (ke *kyamlEncoder) CanHandleAliases() bool {
	// KYAML is a restricted subset; avoid emitting anchors/aliases.
	return false
}

// PrintDocumentSeparator writes "---" between documents when enabled,
// sharing the YAML encoder's implementation.
func (ke *kyamlEncoder) PrintDocumentSeparator(writer io.Writer) error {
	return PrintYAMLDocumentSeparator(writer, ke.prefs.PrintDocSeparators)
}

// PrintLeadingContent re-emits captured leading comments/directives,
// sharing the YAML encoder's implementation.
func (ke *kyamlEncoder) PrintLeadingContent(writer io.Writer, content string) error {
	return PrintYAMLLeadingContent(writer, content, ke.prefs.PrintDocSeparators, ke.prefs.ColorsEnabled)
}
// Encode writes node to writer in KYAML form.
//
// Unwrapped scalars are printed verbatim. Otherwise the head comment, the
// node itself, its inline comment and a trailing newline are written in
// order; when colours are enabled everything is buffered first and
// colourized in one pass. The root FootComment is replayed afterwards via
// PrintLeadingContent, mirroring the YAML encoder's handling of trailing
// document comments.
func (ke *kyamlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
	// Debugf, not Debug: the message is a printf-style format string with an
	// argument (matches the logging convention used by the other encoders).
	log.Debugf("encoderKYaml - going to print %v", NodeToString(node))
	if node.Kind == ScalarNode && ke.prefs.UnwrapScalar {
		return writeString(writer, node.Value+"\n")
	}
	destination := writer
	tempBuffer := bytes.NewBuffer(nil)
	if ke.prefs.ColorsEnabled {
		destination = tempBuffer
	}
	// Mirror the YAML encoder behaviour: trailing comments on the document root
	// are stored in FootComment and need to be printed after the document.
	trailingContent := node.FootComment
	if err := ke.writeCommentBlock(destination, node.HeadComment, 0); err != nil {
		return err
	}
	if err := ke.writeNode(destination, node, 0); err != nil {
		return err
	}
	if err := ke.writeInlineComment(destination, node.LineComment); err != nil {
		return err
	}
	if err := writeString(destination, "\n"); err != nil {
		return err
	}
	if err := ke.PrintLeadingContent(destination, trailingContent); err != nil {
		return err
	}
	if ke.prefs.ColorsEnabled {
		return colorizeAndPrint(tempBuffer.Bytes(), writer)
	}
	return nil
}
// writeNode dispatches on node kind: mappings and sequences render in flow
// style, scalars via formatScalar, and aliases are followed to their anchor.
// Anything unresolvable (nil-anchor alias, unknown kind) is written as "null".
func (ke *kyamlEncoder) writeNode(writer io.Writer, node *CandidateNode, indent int) error {
	if node.Kind == MappingNode {
		return ke.writeMapping(writer, node, indent)
	}
	if node.Kind == SequenceNode {
		return ke.writeSequence(writer, node, indent)
	}
	if node.Kind == ScalarNode {
		return writeString(writer, ke.formatScalar(node))
	}
	if node.Kind == AliasNode && node.Alias != nil {
		// Aliases should have been exploded by the printer; resolve defensively.
		return ke.writeNode(writer, node.Alias, indent)
	}
	return writeString(writer, "null")
}
// writeMapping renders a mapping as a flow map: "{" then one "key: value,"
// entry per line at indent+prefs.Indent, then "}". Empty maps collapse to
// "{}".
//
// Comment placement per entry: head comments (key's, then the value's when
// it differs) go on lines above; the inline comment (value's, falling back
// to the key's) follows the trailing comma; foot comments (value's, falling
// back to the key's) go below. Every entry — including the last — gets a
// trailing comma.
func (ke *kyamlEncoder) writeMapping(writer io.Writer, node *CandidateNode, indent int) error {
	if len(node.Content) == 0 {
		return writeString(writer, "{}")
	}
	if err := writeString(writer, "{\n"); err != nil {
		return err
	}
	// Content alternates key/value; i+1 < len guards a malformed odd length.
	for i := 0; i+1 < len(node.Content); i += 2 {
		keyNode := node.Content[i]
		valueNode := node.Content[i+1]
		entryIndent := indent + ke.prefs.Indent
		if err := ke.writeCommentBlock(writer, keyNode.HeadComment, entryIndent); err != nil {
			return err
		}
		// Avoid printing the same head comment twice when key and value share it.
		if valueNode.HeadComment != "" && valueNode.HeadComment != keyNode.HeadComment {
			if err := ke.writeCommentBlock(writer, valueNode.HeadComment, entryIndent); err != nil {
				return err
			}
		}
		if err := ke.writeIndent(writer, entryIndent); err != nil {
			return err
		}
		if err := writeString(writer, ke.formatKey(keyNode)); err != nil {
			return err
		}
		if err := writeString(writer, ": "); err != nil {
			return err
		}
		if err := ke.writeNode(writer, valueNode, entryIndent); err != nil {
			return err
		}
		// Always emit a trailing comma; KYAML encourages explicit separators,
		// and this ensures all quoted strings have a trailing `",` as requested.
		if err := writeString(writer, ","); err != nil {
			return err
		}
		// Inline comment: prefer the value's, fall back to the key's.
		inline := valueNode.LineComment
		if inline == "" {
			inline = keyNode.LineComment
		}
		if err := ke.writeInlineComment(writer, inline); err != nil {
			return err
		}
		if err := writeString(writer, "\n"); err != nil {
			return err
		}
		// Foot comment below the entry: value's, falling back to the key's.
		foot := valueNode.FootComment
		if foot == "" {
			foot = keyNode.FootComment
		}
		if err := ke.writeCommentBlock(writer, foot, entryIndent); err != nil {
			return err
		}
	}
	if err := ke.writeIndent(writer, indent); err != nil {
		return err
	}
	return writeString(writer, "}")
}
// writeSequence renders a sequence in KYAML's bracket style; an empty
// sequence renders inline as "[]". Every item gets a trailing comma, plus
// head/inline/foot comments where present.
func (ke *kyamlEncoder) writeSequence(writer io.Writer, node *CandidateNode, indent int) error {
	if len(node.Content) == 0 {
		return writeString(writer, "[]")
	}
	if err := writeString(writer, "[\n"); err != nil {
		return err
	}
	itemIndent := indent + ke.prefs.Indent
	for _, item := range node.Content {
		// Per item: head comment, indented value, comma, optional inline
		// comment, newline, then any foot comment.
		if err := ke.writeCommentBlock(writer, item.HeadComment, itemIndent); err != nil {
			return err
		}
		if err := ke.writeIndent(writer, itemIndent); err != nil {
			return err
		}
		if err := ke.writeNode(writer, item, itemIndent); err != nil {
			return err
		}
		if err := writeString(writer, ","); err != nil {
			return err
		}
		if err := ke.writeInlineComment(writer, item.LineComment); err != nil {
			return err
		}
		if err := writeString(writer, "\n"); err != nil {
			return err
		}
		if err := ke.writeCommentBlock(writer, item.FootComment, itemIndent); err != nil {
			return err
		}
	}
	if err := ke.writeIndent(writer, indent); err != nil {
		return err
	}
	return writeString(writer, "]")
}
// writeIndent emits `indent` spaces; non-positive indents write nothing.
func (ke *kyamlEncoder) writeIndent(writer io.Writer, indent int) error {
	if indent > 0 {
		return writeString(writer, strings.Repeat(" ", indent))
	}
	return nil
}
// formatKey renders a mapping key: bare when identifier-like, otherwise
// double-quoted with escaping (KYAML examples favour bare keys).
func (ke *kyamlEncoder) formatKey(keyNode *CandidateNode) string {
	if isValidKYamlBareKey(keyNode.Value) {
		return keyNode.Value
	}
	return `"` + escapeDoubleQuotedString(keyNode.Value) + `"`
}
// formatScalar renders a scalar in KYAML form: null, booleans (lower-cased)
// and numbers stay bare; strings — and any unrecognised tag — are
// double-quoted to avoid implicit typing surprises.
func (ke *kyamlEncoder) formatScalar(node *CandidateNode) string {
	if node.Tag == "!!null" {
		return "null"
	}
	if node.Tag == "!!bool" {
		return strings.ToLower(node.Value)
	}
	if node.Tag == "!!int" || node.Tag == "!!float" {
		return node.Value
	}
	return `"` + escapeDoubleQuotedString(node.Value) + `"`
}
// kyamlBareKeyRe matches identifier-like keys: a letter or underscore
// followed by letters, digits, underscores, or dashes.
var kyamlBareKeyRe = regexp.MustCompile(`^[A-Za-z_][A-Za-z0-9_-]*$`)

// isValidKYamlBareKey reports whether s may be emitted as an unquoted KYAML
// key. Conservative: anything not identifier-like gets quoted by the caller.
func isValidKYamlBareKey(s string) bool {
	return s != "" && kyamlBareKeyRe.MatchString(s)
}
// escapeDoubleQuotedString escapes s for use inside a YAML double-quoted
// string: backslash, double quote, \n, \r and \t get short escapes, other
// control characters (< 0x20) become \uXXXX, and everything else passes
// through unchanged.
func escapeDoubleQuotedString(s string) string {
	var out strings.Builder
	out.Grow(len(s) + 2)
	for _, ch := range s {
		switch {
		case ch == '\\':
			out.WriteString(`\\`)
		case ch == '"':
			out.WriteString(`\"`)
		case ch == '\n':
			out.WriteString(`\n`)
		case ch == '\r':
			out.WriteString(`\r`)
		case ch == '\t':
			out.WriteString(`\t`)
		case ch < 0x20:
			// YAML double-quoted strings support \uXXXX escapes.
			digits := strings.ToUpper(strconv.FormatInt(int64(ch), 16))
			out.WriteString(`\u`)
			for pad := 4 - len(digits); pad > 0; pad-- {
				out.WriteByte('0')
			}
			out.WriteString(digits)
		default:
			out.WriteRune(ch)
		}
	}
	return out.String()
}
// writeCommentBlock emits a (possibly multi-line) comment, one indented line
// at a time. Blank lines are dropped; lines that do not already look like
// comments (per commentLineRe) are prefixed with "# ". A wholly blank
// comment writes nothing.
func (ke *kyamlEncoder) writeCommentBlock(writer io.Writer, comment string, indent int) error {
	if strings.TrimSpace(comment) == "" {
		return nil
	}
	normalised := strings.ReplaceAll(comment, "\r\n", "\n")
	for _, line := range strings.Split(normalised, "\n") {
		if strings.TrimSpace(line) == "" {
			continue
		}
		rendered := line
		if !commentLineRe.MatchString(rendered) {
			rendered = "# " + rendered
		}
		if err := ke.writeIndent(writer, indent); err != nil {
			return err
		}
		if err := writeString(writer, rendered); err != nil {
			return err
		}
		if err := writeString(writer, "\n"); err != nil {
			return err
		}
	}
	return nil
}
// writeInlineComment appends a single-line comment after a value. Only the
// first line of a multi-line comment is used; it is prefixed with "# " when
// needed and preceded by one space. Blank comments write nothing.
func (ke *kyamlEncoder) writeInlineComment(writer io.Writer, comment string) error {
	normalised := strings.TrimSpace(strings.ReplaceAll(comment, "\r\n", "\n"))
	if normalised == "" {
		return nil
	}
	firstLine := strings.TrimSpace(strings.Split(normalised, "\n")[0])
	if firstLine == "" {
		return nil
	}
	if !strings.HasPrefix(firstLine, "#") {
		firstLine = "# " + firstLine
	}
	if err := writeString(writer, " "); err != nil {
		return err
	}
	return writeString(writer, firstLine)
}

View File

@ -57,7 +57,13 @@ func (pe *shellVariablesEncoder) doEncode(w *io.Writer, node *CandidateNode, pat
// let's just pick a fallback key to use if we are encoding a single scalar
nonemptyPath = "value"
}
_, err := io.WriteString(*w, nonemptyPath+"="+quoteValue(node.Value)+"\n")
var valueString string
if pe.prefs.UnwrapScalar {
valueString = node.Value
} else {
valueString = quoteValue(node.Value)
}
_, err := io.WriteString(*w, nonemptyPath+"="+valueString+"\n")
return err
case SequenceNode:
for index, child := range node.Content {

View File

@ -135,3 +135,36 @@ func TestShellVariablesEncoderCustomSeparatorArray(t *testing.T) {
// Verifies that a single-character custom separator ("X") joins nested map
// keys in the emitted shell-variable name (a + X + b).
func TestShellVariablesEncoderCustomSeparatorSingleChar(t *testing.T) {
	assertEncodesToWithSeparator(t, "a:\n b: value", "aXb=value", "X")
}
// assertEncodesToUnwrapped encodes the given YAML with the shell-variables
// encoder while the global UnwrapScalar preference is enabled, then asserts
// the rendered output (sans trailing newline) equals shellvars. The global
// preference is restored on exit so other tests are unaffected.
//
// Fix: the original used panic(err) on failure, which aborts the whole test
// process and bypasses the deferred preference restore's sibling cleanup
// reporting; t.Fatalf fails just this test and reports the caller's line
// thanks to t.Helper.
func assertEncodesToUnwrapped(t *testing.T, yaml string, shellvars string) {
	t.Helper()
	var output bytes.Buffer
	writer := bufio.NewWriter(&output)

	originalUnwrapScalar := ConfiguredShellVariablesPreferences.UnwrapScalar
	defer func() {
		ConfiguredShellVariablesPreferences.UnwrapScalar = originalUnwrapScalar
	}()
	ConfiguredShellVariablesPreferences.UnwrapScalar = true

	var encoder = NewShellVariablesEncoder()
	inputs, err := readDocuments(strings.NewReader(yaml), "test.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
	if err != nil {
		t.Fatalf("failed to read documents: %v", err)
	}
	node := inputs.Front().Value.(*CandidateNode)

	if err := encoder.Encode(writer, node); err != nil {
		t.Fatalf("failed to encode: %v", err)
	}
	writer.Flush()

	test.AssertResult(t, shellvars, strings.TrimSuffix(output.String(), "\n"))
}
// TestShellVariablesEncoderUnwrapScalar checks that scalar values are
// emitted unquoted when UnwrapScalar is enabled, including values that
// would otherwise require quoting (spaces), numbers, and booleans.
func TestShellVariablesEncoderUnwrapScalar(t *testing.T) {
	cases := []struct {
		yaml     string
		expected string
	}{
		{"a: Lewis Carroll", "a=Lewis Carroll"},
		{"b: 123", "b=123"},
		{"c: true", "c=true"},
		{"d: value with spaces", "d=value with spaces"},
	}
	for _, tc := range cases {
		assertEncodesToUnwrapped(t, tc.yaml, tc.expected)
	}
}

View File

@ -1,22 +1,58 @@
//go:build !yq_notoml
package yqlib
import (
"bytes"
"fmt"
"io"
"strings"
"github.com/fatih/color"
)
type tomlEncoder struct {
wroteRootAttr bool // Track if we wrote root-level attributes before tables
prefs TomlPreferences
}
func NewTomlEncoder() Encoder {
return &tomlEncoder{}
return NewTomlEncoderWithPrefs(ConfiguredTomlPreferences)
}
func NewTomlEncoderWithPrefs(prefs TomlPreferences) Encoder {
return &tomlEncoder{prefs: prefs}
}
func (te *tomlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
if node.Kind == ScalarNode {
return writeString(writer, node.Value+"\n")
if node.Kind != MappingNode {
// For standalone selections, TOML tests expect raw value for scalars
if node.Kind == ScalarNode {
return writeString(writer, node.Value+"\n")
}
return fmt.Errorf("TOML encoder expects a mapping at the root level")
}
return fmt.Errorf("only scalars (e.g. strings, numbers, booleans) are supported for TOML output at the moment. Please use yaml output format (-oy) until the encoder has been fully implemented")
// Encode to a buffer first if colors are enabled
var buf bytes.Buffer
var targetWriter io.Writer
targetWriter = writer
if te.prefs.ColorsEnabled {
targetWriter = &buf
}
// Encode a root mapping as a sequence of attributes, tables, and arrays of tables
if err := te.encodeRootMapping(targetWriter, node); err != nil {
return err
}
if te.prefs.ColorsEnabled {
colourised := te.colorizeToml(buf.Bytes())
_, err := writer.Write(colourised)
return err
}
return nil
}
func (te *tomlEncoder) PrintDocumentSeparator(_ io.Writer) error {
@ -30,3 +66,725 @@ func (te *tomlEncoder) PrintLeadingContent(_ io.Writer, _ string) error {
func (te *tomlEncoder) CanHandleAliases() bool {
return false
}
// ---- helpers ----
// writeComment emits each line of a (possibly multi-line) comment, trimming
// surrounding whitespace and prefixing "# " where the line is not already a
// comment. Note: a blank interior line still produces a bare "# " line.
// An empty comment writes nothing.
func (te *tomlEncoder) writeComment(w io.Writer, comment string) error {
	if comment == "" {
		return nil
	}
	for _, raw := range strings.Split(comment, "\n") {
		rendered := strings.TrimSpace(raw)
		if !strings.HasPrefix(rendered, "#") {
			rendered = "# " + rendered
		}
		if _, err := w.Write([]byte(rendered + "\n")); err != nil {
			return err
		}
	}
	return nil
}
// formatScalar renders a scalar for TOML: strings are quoted via %q, null
// becomes "" (TOML has no null), and booleans, numbers and any unrecognised
// tag pass through verbatim.
func (te *tomlEncoder) formatScalar(node *CandidateNode) string {
	if node.Tag == "!!str" {
		return fmt.Sprintf("%q", node.Value)
	}
	if node.Tag == "!!null" {
		return `""`
	}
	// !!bool, !!int, !!float, and unknown tags: emit the raw value.
	return node.Value
}
// encodeRootMapping writes the document root: the root head comment first
// (at the very top, with no leading blank line), then each key/value entry
// in its original order.
func (te *tomlEncoder) encodeRootMapping(w io.Writer, node *CandidateNode) error {
	// Reset the "wrote a root attribute" flag for this document.
	te.wroteRootAttr = false
	if node.HeadComment != "" {
		if err := te.writeComment(w, node.HeadComment); err != nil {
			return err
		}
	}
	for i := 0; i < len(node.Content); i += 2 {
		key := node.Content[i]
		value := node.Content[i+1]
		if err := te.encodeTopLevelEntry(w, []string{key.Value}, value); err != nil {
			return err
		}
	}
	return nil
}
// encodeTopLevelEntry encodes a key/value at the root, dispatching to attribute, table, or array-of-tables.
//
// Dispatch rules, as implemented below:
//   - scalar                     -> "key = value" attribute
//   - empty sequence             -> "key = []" attribute
//   - sequence of only mappings  -> "[[key]]" array-of-tables sections
//   - any other sequence         -> inline "key = [...]" attribute
//   - mapping with EncodeSeparate (or with structural-looking children)
//     -> separate "[key]" sections; otherwise an inline table attribute
func (te *tomlEncoder) encodeTopLevelEntry(w io.Writer, path []string, node *CandidateNode) error {
	if len(path) == 0 {
		return fmt.Errorf("cannot encode TOML entry with empty path")
	}
	switch node.Kind {
	case ScalarNode:
		// key = value
		return te.writeAttribute(w, path[len(path)-1], node)
	case SequenceNode:
		// Empty arrays should be encoded as [] attributes
		if len(node.Content) == 0 {
			return te.writeArrayAttribute(w, path[len(path)-1], node)
		}
		// If all items are mappings => array of tables; else => array attribute
		allMaps := true
		for _, it := range node.Content {
			if it.Kind != MappingNode {
				allMaps = false
				break
			}
		}
		if allMaps {
			key := path[len(path)-1]
			for _, it := range node.Content {
				// [[key]] then body
				if _, err := w.Write([]byte("[[" + key + "]]\n")); err != nil {
					return err
				}
				if err := te.encodeMappingBodyWithPath(w, []string{key}, it); err != nil {
					return err
				}
			}
			return nil
		}
		// Regular array attribute
		return te.writeArrayAttribute(w, path[len(path)-1], node)
	case MappingNode:
		// Inline table if not EncodeSeparate, else emit separate tables/arrays of tables for children under this path
		if !node.EncodeSeparate {
			// If children contain mappings or arrays of mappings, prefer separate sections
			if te.hasEncodeSeparateChild(node) || te.hasStructuralChildren(node) {
				return te.encodeSeparateMapping(w, path, node)
			}
			return te.writeInlineTableAttribute(w, path[len(path)-1], node)
		}
		return te.encodeSeparateMapping(w, path, node)
	default:
		return fmt.Errorf("unsupported node kind for TOML: %v", node.Kind)
	}
}
// writeAttribute emits "key = value" with the value's head comment above it
// and its line comment (normalised to start with '#') appended after it.
func (te *tomlEncoder) writeAttribute(w io.Writer, key string, value *CandidateNode) error {
	// Remember that a root-level attribute was emitted so a later table
	// header can insert a separating blank line.
	te.wroteRootAttr = true
	if err := te.writeComment(w, value.HeadComment); err != nil {
		return err
	}
	rendered := key + " = " + te.formatScalar(value)
	if value.LineComment != "" {
		trailing := strings.TrimSpace(value.LineComment)
		if !strings.HasPrefix(trailing, "#") {
			trailing = "# " + trailing
		}
		rendered += " " + trailing
	}
	_, err := w.Write([]byte(rendered + "\n"))
	return err
}
// writeArrayAttribute emits `key = [...]` for a sequence, in one of three
// layouts:
//   - empty sequence:                   key = []
//   - any element has a head comment:   a multi-line array, one element per
//     line with its comment(s) above it and a blank line between elements
//   - otherwise:                        a single-line inline array
//
// The sequence's own line comment is emitted in the empty and single-line
// layouts only; in the multi-line layout element line comments are not
// rendered — NOTE(review): confirm that dropping them there is intended.
func (te *tomlEncoder) writeArrayAttribute(w io.Writer, key string, seq *CandidateNode) error {
	te.wroteRootAttr = true // Mark that we wrote a root attribute
	// Write head comment before the array
	if err := te.writeComment(w, seq.HeadComment); err != nil {
		return err
	}
	// Handle empty arrays
	if len(seq.Content) == 0 {
		line := key + " = []"
		if seq.LineComment != "" {
			lineComment := strings.TrimSpace(seq.LineComment)
			if !strings.HasPrefix(lineComment, "#") {
				lineComment = "# " + lineComment
			}
			line += " " + lineComment
		}
		_, err := w.Write([]byte(line + "\n"))
		return err
	}
	// Check if any array elements have head comments - if so, use multiline format
	hasElementComments := false
	for _, it := range seq.Content {
		if it.HeadComment != "" {
			hasElementComments = true
			break
		}
	}
	if hasElementComments {
		// Write multiline array format with comments
		if _, err := w.Write([]byte(key + " = [\n")); err != nil {
			return err
		}
		for i, it := range seq.Content {
			// Write head comment for this element
			if it.HeadComment != "" {
				commentLines := strings.Split(it.HeadComment, "\n")
				for _, commentLine := range commentLines {
					if strings.TrimSpace(commentLine) != "" {
						if !strings.HasPrefix(strings.TrimSpace(commentLine), "#") {
							commentLine = "# " + commentLine
						}
						if _, err := w.Write([]byte(" " + commentLine + "\n")); err != nil {
							return err
						}
					}
				}
			}
			// Write the element value
			var itemStr string
			switch it.Kind {
			case ScalarNode:
				itemStr = te.formatScalar(it)
			case SequenceNode:
				nested, err := te.sequenceToInlineArray(it)
				if err != nil {
					return err
				}
				itemStr = nested
			case MappingNode:
				inline, err := te.mappingToInlineTable(it)
				if err != nil {
					return err
				}
				itemStr = inline
			case AliasNode:
				return fmt.Errorf("aliases are not supported in TOML")
			default:
				return fmt.Errorf("unsupported array item kind: %v", it.Kind)
			}
			// Always add trailing comma in multiline arrays
			itemStr += ","
			if _, err := w.Write([]byte(" " + itemStr + "\n")); err != nil {
				return err
			}
			// Add blank line between elements (except after the last one)
			if i < len(seq.Content)-1 {
				if _, err := w.Write([]byte("\n")); err != nil {
					return err
				}
			}
		}
		if _, err := w.Write([]byte("]\n")); err != nil {
			return err
		}
		return nil
	}
	// Join scalars or nested arrays recursively into TOML array syntax
	items := make([]string, 0, len(seq.Content))
	for _, it := range seq.Content {
		switch it.Kind {
		case ScalarNode:
			items = append(items, te.formatScalar(it))
		case SequenceNode:
			// Nested arrays: encode inline
			nested, err := te.sequenceToInlineArray(it)
			if err != nil {
				return err
			}
			items = append(items, nested)
		case MappingNode:
			// Inline table inside array
			inline, err := te.mappingToInlineTable(it)
			if err != nil {
				return err
			}
			items = append(items, inline)
		case AliasNode:
			return fmt.Errorf("aliases are not supported in TOML")
		default:
			return fmt.Errorf("unsupported array item kind: %v", it.Kind)
		}
	}
	line := key + " = [" + strings.Join(items, ", ") + "]"
	// Add line comment if present
	if seq.LineComment != "" {
		lineComment := strings.TrimSpace(seq.LineComment)
		if !strings.HasPrefix(lineComment, "#") {
			lineComment = "# " + lineComment
		}
		line += " " + lineComment
	}
	_, err := w.Write([]byte(line + "\n"))
	return err
}
// sequenceToInlineArray renders a sequence as a TOML inline array, e.g.
// `[1, "two", [3]]`, recursing into nested sequences and inline tables.
// Returns an error for aliases and any other unsupported node kind.
//
// Fix: aliases previously fell into the default branch and produced the
// generic "unsupported array item kind" error; they now get the same
// explicit "aliases are not supported in TOML" error used by
// writeArrayAttribute, so callers see a consistent message.
func (te *tomlEncoder) sequenceToInlineArray(seq *CandidateNode) (string, error) {
	items := make([]string, 0, len(seq.Content))
	for _, it := range seq.Content {
		switch it.Kind {
		case ScalarNode:
			items = append(items, te.formatScalar(it))
		case SequenceNode:
			nested, err := te.sequenceToInlineArray(it)
			if err != nil {
				return "", err
			}
			items = append(items, nested)
		case MappingNode:
			inline, err := te.mappingToInlineTable(it)
			if err != nil {
				return "", err
			}
			items = append(items, inline)
		case AliasNode:
			// Consistent with writeArrayAttribute's alias handling.
			return "", fmt.Errorf("aliases are not supported in TOML")
		default:
			return "", fmt.Errorf("unsupported array item kind: %v", it.Kind)
		}
	}
	return "[" + strings.Join(items, ", ") + "]", nil
}
// mappingToInlineTable renders a mapping as a TOML inline table, e.g.
// `{ a = 1, b = "x" }`, recursing into nested sequences and mappings.
// Returns an error for aliases and any other unsupported value kind.
//
// Fix: aliases previously fell into the default branch and produced the
// generic "unsupported inline table value kind" error; they now get the
// same explicit "aliases are not supported in TOML" error used by the
// array paths, so callers see a consistent message.
func (te *tomlEncoder) mappingToInlineTable(m *CandidateNode) (string, error) {
	// key = { a = 1, b = "x" }
	parts := make([]string, 0, len(m.Content)/2)
	for i := 0; i < len(m.Content); i += 2 {
		k := m.Content[i].Value
		v := m.Content[i+1]
		switch v.Kind {
		case ScalarNode:
			parts = append(parts, fmt.Sprintf("%s = %s", k, te.formatScalar(v)))
		case SequenceNode:
			// inline array in inline table
			arr, err := te.sequenceToInlineArray(v)
			if err != nil {
				return "", err
			}
			parts = append(parts, fmt.Sprintf("%s = %s", k, arr))
		case MappingNode:
			// nested inline table
			inline, err := te.mappingToInlineTable(v)
			if err != nil {
				return "", err
			}
			parts = append(parts, fmt.Sprintf("%s = %s", k, inline))
		case AliasNode:
			// Consistent with the sequence/array alias handling.
			return "", fmt.Errorf("aliases are not supported in TOML")
		default:
			return "", fmt.Errorf("unsupported inline table value kind: %v", v.Kind)
		}
	}
	return "{ " + strings.Join(parts, ", ") + " }", nil
}
// writeInlineTableAttribute emits a mapping as `key = { a = 1, ... }`.
func (te *tomlEncoder) writeInlineTableAttribute(w io.Writer, key string, m *CandidateNode) error {
	rendered, err := te.mappingToInlineTable(m)
	if err != nil {
		return err
	}
	if _, writeErr := w.Write([]byte(key + " = " + rendered + "\n")); writeErr != nil {
		return writeErr
	}
	return nil
}
// writeTableHeader emits "[a.b.c]" for the given path. If root-level
// attributes were written earlier, a single separating blank line is emitted
// first (before the head comment, when one is present), and the flag is
// cleared so the blank line is only added once.
func (te *tomlEncoder) writeTableHeader(w io.Writer, path []string, m *CandidateNode) error {
	if te.wroteRootAttr {
		if _, err := w.Write([]byte("\n")); err != nil {
			return err
		}
		te.wroteRootAttr = false
	}
	if m.HeadComment != "" {
		if err := te.writeComment(w, m.HeadComment); err != nil {
			return err
		}
	}
	_, err := w.Write([]byte("[" + strings.Join(path, ".") + "]\n"))
	return err
}
// encodeSeparateMapping handles a mapping that should be encoded as table sections.
// It emits the table header for this mapping if it has any content, then processes children.
//
// "Attributes" here are scalar children, or sequence children that are NOT
// arrays of tables (i.e. not made up solely of mappings). When the mapping
// has attributes — or is empty — a [path] header is written and the body is
// delegated to encodeMappingBodyWithPath. Otherwise each child becomes its
// own [path.key] table or [[path.key]] array-of-tables section.
func (te *tomlEncoder) encodeSeparateMapping(w io.Writer, path []string, m *CandidateNode) error {
	// Check if this mapping has any non-mapping, non-array-of-tables children (i.e., attributes)
	hasAttrs := false
	for i := 0; i < len(m.Content); i += 2 {
		v := m.Content[i+1]
		if v.Kind == ScalarNode {
			hasAttrs = true
			break
		}
		if v.Kind == SequenceNode {
			// Check if it's NOT an array of tables
			allMaps := true
			for _, it := range v.Content {
				if it.Kind != MappingNode {
					allMaps = false
					break
				}
			}
			if !allMaps {
				hasAttrs = true
				break
			}
		}
	}
	// If there are attributes or if the mapping is empty, emit the table header
	if hasAttrs || len(m.Content) == 0 {
		if err := te.writeTableHeader(w, path, m); err != nil {
			return err
		}
		if err := te.encodeMappingBodyWithPath(w, path, m); err != nil {
			return err
		}
		return nil
	}
	// No attributes, just nested structures - process children.
	// NOTE(review): AliasNode children fall through this switch silently and
	// produce no output — confirm aliases are always exploded before encoding.
	for i := 0; i < len(m.Content); i += 2 {
		k := m.Content[i].Value
		v := m.Content[i+1]
		switch v.Kind {
		case MappingNode:
			// Emit [path.k]
			newPath := append(append([]string{}, path...), k)
			if err := te.writeTableHeader(w, newPath, v); err != nil {
				return err
			}
			if err := te.encodeMappingBodyWithPath(w, newPath, v); err != nil {
				return err
			}
		case SequenceNode:
			// If sequence of maps, emit [[path.k]] per element
			allMaps := true
			for _, it := range v.Content {
				if it.Kind != MappingNode {
					allMaps = false
					break
				}
			}
			if allMaps {
				key := strings.Join(append(append([]string{}, path...), k), ".")
				for _, it := range v.Content {
					if _, err := w.Write([]byte("[[" + key + "]]\n")); err != nil {
						return err
					}
					if err := te.encodeMappingBodyWithPath(w, append(append([]string{}, path...), k), it); err != nil {
						return err
					}
				}
			} else {
				// Regular array attribute under the current table path
				if err := te.writeArrayAttribute(w, k, v); err != nil {
					return err
				}
			}
		case ScalarNode:
			// Attributes directly under the current table path
			if err := te.writeAttribute(w, k, v); err != nil {
				return err
			}
		}
	}
	return nil
}
// hasEncodeSeparateChild reports whether any direct child value is a mapping
// explicitly marked EncodeSeparate.
func (te *tomlEncoder) hasEncodeSeparateChild(m *CandidateNode) bool {
	for i := 0; i < len(m.Content); i += 2 {
		child := m.Content[i+1]
		if child.Kind == MappingNode && child.EncodeSeparate {
			return true
		}
	}
	return false
}
// hasStructuralChildren reports whether any direct child forces this mapping
// to be emitted as separate table sections: a child mapping marked
// EncodeSeparate, or a child sequence consisting solely of mappings (an
// array of tables).
//
// NOTE(review): an empty sequence vacuously satisfies "all items are
// mappings" and therefore counts as structural here, while
// encodeTopLevelEntry treats empty sequences as plain [] attributes —
// confirm that asymmetry is intended.
func (te *tomlEncoder) hasStructuralChildren(m *CandidateNode) bool {
	for i := 0; i < len(m.Content); i += 2 {
		v := m.Content[i+1]
		// Only consider it structural if mapping has EncodeSeparate or is non-empty
		if v.Kind == MappingNode && v.EncodeSeparate {
			return true
		}
		if v.Kind == SequenceNode {
			allMaps := true
			for _, it := range v.Content {
				if it.Kind != MappingNode {
					allMaps = false
					break
				}
			}
			if allMaps {
				return true
			}
		}
	}
	return false
}
// encodeMappingBodyWithPath encodes attributes and nested arrays of tables using full dotted path context.
//
// Output order is three passes over the mapping's children:
//  1. attributes: scalars, plus sequences that are NOT arrays of tables
//  2. arrays of tables: [[path.key]] sections for sequences of mappings
//  3. child mappings not marked EncodeSeparate, emitted as inline tables
//
// NOTE(review): an empty sequence is vacuously "all mappings", so it is
// skipped by pass 1 and produces zero sections in pass 2 — i.e. it emits
// nothing here; confirm empty arrays are handled by the callers.
// NOTE(review): child mappings WITH EncodeSeparate are not handled in any
// pass — presumably the caller emits those as their own sections; confirm.
func (te *tomlEncoder) encodeMappingBodyWithPath(w io.Writer, path []string, m *CandidateNode) error {
	// First, attributes (scalars and non-map arrays)
	for i := 0; i < len(m.Content); i += 2 {
		k := m.Content[i].Value
		v := m.Content[i+1]
		switch v.Kind {
		case ScalarNode:
			if err := te.writeAttribute(w, k, v); err != nil {
				return err
			}
		case SequenceNode:
			allMaps := true
			for _, it := range v.Content {
				if it.Kind != MappingNode {
					allMaps = false
					break
				}
			}
			if !allMaps {
				if err := te.writeArrayAttribute(w, k, v); err != nil {
					return err
				}
			}
		}
	}
	// Then, nested arrays of tables with full path
	for i := 0; i < len(m.Content); i += 2 {
		k := m.Content[i].Value
		v := m.Content[i+1]
		if v.Kind == SequenceNode {
			allMaps := true
			for _, it := range v.Content {
				if it.Kind != MappingNode {
					allMaps = false
					break
				}
			}
			if allMaps {
				dotted := strings.Join(append(append([]string{}, path...), k), ".")
				for _, it := range v.Content {
					if _, err := w.Write([]byte("[[" + dotted + "]]\n")); err != nil {
						return err
					}
					if err := te.encodeMappingBodyWithPath(w, append(append([]string{}, path...), k), it); err != nil {
						return err
					}
				}
			}
		}
	}
	// Finally, child mappings that are not marked EncodeSeparate get inlined as attributes
	for i := 0; i < len(m.Content); i += 2 {
		k := m.Content[i].Value
		v := m.Content[i+1]
		if v.Kind == MappingNode && !v.EncodeSeparate {
			if err := te.writeInlineTableAttribute(w, k, v); err != nil {
				return err
			}
		}
	}
	return nil
}
// colorizeToml applies syntax highlighting to TOML output using fatih/color.
// It performs a single left-to-right pass over the rendered TOML, colouring
// comments, table headers, strings, numbers, booleans and keys; all other
// bytes pass through unchanged.
//
// NOTE(review): this sets the package-global color.NoColor to false, which
// affects every other fatih/color user in the process — confirm that side
// effect is acceptable.
func (te *tomlEncoder) colorizeToml(input []byte) []byte {
	toml := string(input)
	result := strings.Builder{}
	// Force color output (don't check for TTY)
	color.NoColor = false
	// Create color functions for different token types
	// Use EnableColor() to ensure colors work even when NO_COLOR env is set
	commentColorObj := color.New(color.FgHiBlack)
	commentColorObj.EnableColor()
	stringColorObj := color.New(color.FgGreen)
	stringColorObj.EnableColor()
	numberColorObj := color.New(color.FgHiMagenta)
	numberColorObj.EnableColor()
	keyColorObj := color.New(color.FgCyan)
	keyColorObj.EnableColor()
	boolColorObj := color.New(color.FgHiMagenta)
	boolColorObj.EnableColor()
	sectionColorObj := color.New(color.FgYellow, color.Bold)
	sectionColorObj.EnableColor()
	commentColor := commentColorObj.SprintFunc()
	stringColor := stringColorObj.SprintFunc()
	numberColor := numberColorObj.SprintFunc()
	keyColor := keyColorObj.SprintFunc()
	boolColor := boolColorObj.SprintFunc()
	sectionColor := sectionColorObj.SprintFunc()
	// Simple tokenization for TOML colouring
	i := 0
	for i < len(toml) {
		ch := toml[i]
		// Comments - from # to end of line
		if ch == '#' {
			end := i
			for end < len(toml) && toml[end] != '\n' {
				end++
			}
			result.WriteString(commentColor(toml[i:end]))
			i = end
			continue
		}
		// Table sections - [section] or [[array]]
		// Only treat '[' as a table section if it appears at the start of the line
		// (possibly after whitespace). This avoids mis-colouring inline arrays like
		// "ports = [8000, 8001]" as table sections.
		if ch == '[' {
			isSectionHeader := true
			if i > 0 {
				isSectionHeader = false
				j := i - 1
				for j >= 0 && toml[j] != '\n' {
					if toml[j] != ' ' && toml[j] != '\t' && toml[j] != '\r' {
						// Found a non-whitespace character before this '[' on the same line,
						// so this is not a table header.
						break
					}
					j--
				}
				if j < 0 || toml[j] == '\n' {
					// Reached the start of the string or a newline without encountering
					// any non-whitespace, so '[' is at the logical start of the line.
					isSectionHeader = true
				}
			}
			if isSectionHeader {
				end := i + 1
				// Check for [[
				if end < len(toml) && toml[end] == '[' {
					end++
				}
				// Find closing ]
				for end < len(toml) && toml[end] != ']' {
					end++
				}
				// Include closing ]
				if end < len(toml) {
					end++
					// Check for ]]
					if end < len(toml) && toml[end] == ']' {
						end++
					}
				}
				result.WriteString(sectionColor(toml[i:end]))
				i = end
				continue
			}
		}
		// Strings - quoted text (double or single quotes)
		// NOTE(review): backslash-escape skipping is applied to single-quoted
		// strings too, but TOML literal (single-quoted) strings have no
		// escapes — a trailing backslash there can over-extend the coloured
		// span; confirm whether single-quoted output can occur here.
		if ch == '"' || ch == '\'' {
			quote := ch
			end := i + 1
			for end < len(toml) {
				if toml[end] == quote {
					break
				}
				if toml[end] == '\\' && end+1 < len(toml) {
					// Skip the backslash and the escaped character
					end += 2
					continue
				}
				end++
			}
			if end < len(toml) {
				end++ // include closing quote
			}
			result.WriteString(stringColor(toml[i:end]))
			i = end
			continue
		}
		// Numbers - sequences of digits, possibly with decimal point or minus
		if (ch >= '0' && ch <= '9') || (ch == '-' && i+1 < len(toml) && toml[i+1] >= '0' && toml[i+1] <= '9') {
			end := i
			if ch == '-' {
				end++
			}
			for end < len(toml) {
				c := toml[end]
				if (c >= '0' && c <= '9') || c == '.' || c == 'e' || c == 'E' {
					end++
				} else if (c == '+' || c == '-') && end > 0 && (toml[end-1] == 'e' || toml[end-1] == 'E') {
					// Only allow + or - immediately after 'e' or 'E' for scientific notation
					end++
				} else {
					break
				}
			}
			result.WriteString(numberColor(toml[i:end]))
			i = end
			continue
		}
		// Identifiers/keys - alphanumeric + underscore + dash
		if (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '_' {
			end := i
			for end < len(toml) && ((toml[end] >= 'a' && toml[end] <= 'z') ||
				(toml[end] >= 'A' && toml[end] <= 'Z') ||
				(toml[end] >= '0' && toml[end] <= '9') ||
				toml[end] == '_' || toml[end] == '-') {
				end++
			}
			ident := toml[i:end]
			// Check if this is a boolean/null keyword
			switch ident {
			case "true", "false":
				result.WriteString(boolColor(ident))
			default:
				// Check if followed by = or whitespace then = (it's a key)
				j := end
				for j < len(toml) && (toml[j] == ' ' || toml[j] == '\t') {
					j++
				}
				if j < len(toml) && toml[j] == '=' {
					result.WriteString(keyColor(ident))
				} else {
					result.WriteString(ident) // plain text for other identifiers
				}
			}
			i = end
			continue
		}
		// Everything else (whitespace, operators, brackets) - no color
		result.WriteByte(ch)
		i++
	}
	return []byte(result.String())
}

View File

@ -1,14 +1,10 @@
package yqlib
import (
"bufio"
"bytes"
"errors"
"io"
"regexp"
"strings"
"github.com/fatih/color"
"go.yaml.in/yaml/v4"
)
@ -25,64 +21,24 @@ func (ye *yamlEncoder) CanHandleAliases() bool {
}
func (ye *yamlEncoder) PrintDocumentSeparator(writer io.Writer) error {
if ye.prefs.PrintDocSeparators {
log.Debug("writing doc sep")
if err := writeString(writer, "---\n"); err != nil {
return err
}
}
return nil
return PrintYAMLDocumentSeparator(writer, ye.prefs.PrintDocSeparators)
}
func (ye *yamlEncoder) PrintLeadingContent(writer io.Writer, content string) error {
reader := bufio.NewReader(strings.NewReader(content))
var commentLineRegEx = regexp.MustCompile(`^\s*#`)
for {
readline, errReading := reader.ReadString('\n')
if errReading != nil && !errors.Is(errReading, io.EOF) {
return errReading
}
if strings.Contains(readline, "$yqDocSeparator$") {
if err := ye.PrintDocumentSeparator(writer); err != nil {
return err
}
} else {
if len(readline) > 0 && readline != "\n" && readline[0] != '%' && !commentLineRegEx.MatchString(readline) {
readline = "# " + readline
}
if ye.prefs.ColorsEnabled && strings.TrimSpace(readline) != "" {
readline = format(color.FgHiBlack) + readline + format(color.Reset)
}
if err := writeString(writer, readline); err != nil {
return err
}
}
if errors.Is(errReading, io.EOF) {
if readline != "" {
// the last comment we read didn't have a newline, put one in
if err := writeString(writer, "\n"); err != nil {
return err
}
}
break
}
}
return nil
return PrintYAMLLeadingContent(writer, content, ye.prefs.PrintDocSeparators, ye.prefs.ColorsEnabled)
}
func (ye *yamlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
log.Debug("encoderYaml - going to print %v", NodeToString(node))
// Detect line ending style from LeadingContent
lineEnding := "\n"
if strings.Contains(node.LeadingContent, "\r\n") {
lineEnding = "\r\n"
}
if node.Kind == ScalarNode && ye.prefs.UnwrapScalar {
valueToPrint := node.Value
if node.LeadingContent == "" || valueToPrint != "" {
valueToPrint = valueToPrint + "\n"
valueToPrint = valueToPrint + lineEnding
}
return writeString(writer, valueToPrint)
}
@ -96,6 +52,9 @@ func (ye *yamlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
var encoder = yaml.NewEncoder(destination)
encoder.SetIndent(ye.prefs.Indent)
if ye.prefs.CompactSequenceIndent {
encoder.CompactSeqIndent()
}
target, err := node.MarshalYAML()

View File

@ -7,7 +7,15 @@ import (
)
func tryRenameFile(from string, to string) error {
if renameError := os.Rename(from, to); renameError != nil {
if info, err := os.Lstat(to); err == nil && info.Mode()&os.ModeSymlink != 0 {
log.Debug("Target file is symlink, skipping rename and attempting to copy contents")
if copyError := copyFileContents(from, to); copyError != nil {
return fmt.Errorf("failed copying from %v to %v: %w", from, to, copyError)
}
tryRemoveTempFile(from)
return nil
} else if renameError := os.Rename(from, to); renameError != nil {
log.Debugf("Error renaming from %v to %v, attempting to copy contents", from, to)
log.Debug(renameError.Error())
log.Debug("going to try copying instead")

View File

@ -22,6 +22,12 @@ var YamlFormat = &Format{"yaml", []string{"y", "yml"},
func() Decoder { return NewYamlDecoder(ConfiguredYamlPreferences) },
}
var KYamlFormat = &Format{"kyaml", []string{"ky"},
func() Encoder { return NewKYamlEncoder(ConfiguredKYamlPreferences) },
// KYaml is stricter YAML
func() Decoder { return NewYamlDecoder(ConfiguredYamlPreferences) },
}
var JSONFormat = &Format{"json", []string{"j"},
func() Encoder { return NewJSONEncoder(ConfiguredJSONPreferences) },
func() Decoder { return NewJSONDecoder() },
@ -63,10 +69,15 @@ var ShFormat = &Format{"", nil,
}
var TomlFormat = &Format{"toml", []string{},
func() Encoder { return NewTomlEncoder() },
func() Encoder { return NewTomlEncoderWithPrefs(ConfiguredTomlPreferences) },
func() Decoder { return NewTomlDecoder() },
}
var HclFormat = &Format{"hcl", []string{"h", "tf"},
func() Encoder { return NewHclEncoder(ConfiguredHclPreferences) },
func() Decoder { return NewHclDecoder() },
}
var ShellVariablesFormat = &Format{"shell", []string{"s", "sh"},
func() Encoder { return NewShellVariablesEncoder() },
nil,
@ -84,6 +95,7 @@ var INIFormat = &Format{"ini", []string{"i"},
var Formats = []*Format{
YamlFormat,
KYamlFormat,
JSONFormat,
PropertiesFormat,
CSVFormat,
@ -93,6 +105,7 @@ var Formats = []*Format{
UriFormat,
ShFormat,
TomlFormat,
HclFormat,
ShellVariablesFormat,
LuaFormat,
INIFormat,

15
pkg/yqlib/hcl.go Normal file
View File

@ -0,0 +1,15 @@
package yqlib
// HclPreferences holds the configurable options for the HCL format.
type HclPreferences struct {
	// ColorsEnabled toggles coloured output.
	ColorsEnabled bool
}

// NewDefaultHclPreferences returns the default preferences: colours off.
func NewDefaultHclPreferences() HclPreferences {
	return HclPreferences{}
}

// Copy returns an independent copy of these preferences.
func (p *HclPreferences) Copy() HclPreferences {
	return *p
}

// ConfiguredHclPreferences is the package-wide HCL configuration.
var ConfiguredHclPreferences = NewDefaultHclPreferences()

585
pkg/yqlib/hcl_test.go Normal file
View File

@ -0,0 +1,585 @@
//go:build !yq_nohcl
package yqlib
import (
"bufio"
"bytes"
"fmt"
"testing"
"github.com/mikefarah/yq/v4/test"
)
// nestedExample: HCL block with two labels; decoding nests each label as
// a map key.
var nestedExample = `service "http" "web_proxy" {
listen_addr = "127.0.0.1:8080"
}`
// nestedExampleYaml: expected YAML rendering of nestedExample.
var nestedExampleYaml = "service:\n http:\n web_proxy:\n listen_addr: \"127.0.0.1:8080\"\n"
// multipleBlockLabelKeys: HCL document with repeated labelled blocks of
// the same type ("process"), used by the sample-doc scenarios.
var multipleBlockLabelKeys = `service "cat" {
process "main" {
command = ["/usr/local/bin/awesome-app", "server"]
}
process "management" {
command = ["/usr/local/bin/awesome-app", "management"]
}
}
`
// multipleBlockLabelKeysExpected: expected HCL after a decode/encode
// roundtrip of multipleBlockLabelKeys (unchanged content).
var multipleBlockLabelKeysExpected = `service "cat" {
process "main" {
command = ["/usr/local/bin/awesome-app", "server"]
}
process "management" {
command = ["/usr/local/bin/awesome-app", "management"]
}
}
`
// multipleBlockLabelKeysExpectedUpdate: expected HCL after appending
// "meow" to the main process command via a yq expression.
var multipleBlockLabelKeysExpectedUpdate = `service "cat" {
process "main" {
command = ["/usr/local/bin/awesome-app", "server", "meow"]
}
process "management" {
command = ["/usr/local/bin/awesome-app", "management"]
}
}
`
// multipleBlockLabelKeysExpectedYaml: expected YAML decoding of
// multipleBlockLabelKeys.
var multipleBlockLabelKeysExpectedYaml = `service:
cat:
process:
main:
command:
- "/usr/local/bin/awesome-app"
- "server"
management:
command:
- "/usr/local/bin/awesome-app"
- "management"
`
// simpleSample: HCL featuring comments, arithmetic, string templates and
// a function call.
var simpleSample = `# Arithmetic with literals and application-provided variables
sum = 1 + addend
# String interpolation and templates
message = "Hello, ${name}!"
# Application-provided functions
shouty_message = upper(message)`
// simpleSampleExpected: expected HCL roundtrip output of simpleSample
// (identical content plus a trailing newline).
var simpleSampleExpected = `# Arithmetic with literals and application-provided variables
sum = 1 + addend
# String interpolation and templates
message = "Hello, ${name}!"
# Application-provided functions
shouty_message = upper(message)
`
// simpleSampleExpectedYaml: expected YAML decoding of simpleSample; the
// HCL expressions are carried across as literal strings.
var simpleSampleExpectedYaml = `# Arithmetic with literals and application-provided variables
sum: 1 + addend
# String interpolation and templates
message: "Hello, ${name}!"
# Application-provided functions
shouty_message: upper(message)
`
var hclFormatScenarios = []formatScenario{
{
description: "Parse HCL",
input: `io_mode = "async"`,
expected: "io_mode: \"async\"\n",
scenarioType: "decode",
},
{
description: "Simple decode, no quotes",
skipDoc: true,
input: `io_mode = async`,
expected: "io_mode: async\n",
scenarioType: "decode",
},
{
description: "Simple roundtrip, no quotes",
skipDoc: true,
input: `io_mode = async`,
expected: "io_mode = async\n",
scenarioType: "roundtrip",
},
{
description: "Nested decode",
skipDoc: true,
input: nestedExample,
expected: nestedExampleYaml,
scenarioType: "decode",
},
{
description: "Template decode",
skipDoc: true,
input: `message = "Hello, ${name}!"`,
expected: "message: \"Hello, ${name}!\"\n",
scenarioType: "decode",
},
{
description: "Roundtrip: with template",
skipDoc: true,
input: `message = "Hello, ${name}!"`,
expected: "message = \"Hello, ${name}!\"\n",
scenarioType: "roundtrip",
},
{
description: "Roundtrip: with function",
skipDoc: true,
input: `shouty_message = upper(message)`,
expected: "shouty_message = upper(message)\n",
scenarioType: "roundtrip",
},
{
description: "Roundtrip: with arithmetic",
skipDoc: true,
input: `sum = 1 + addend`,
expected: "sum = 1 + addend\n",
scenarioType: "roundtrip",
},
{
description: "Arithmetic decode",
skipDoc: true,
input: `sum = 1 + addend`,
expected: "sum: 1 + addend\n",
scenarioType: "decode",
},
{
description: "number attribute",
skipDoc: true,
input: `port = 8080`,
expected: "port: 8080\n",
scenarioType: "decode",
},
{
description: "float attribute",
skipDoc: true,
input: `pi = 3.14`,
expected: "pi: 3.14\n",
scenarioType: "decode",
},
{
description: "boolean attribute",
skipDoc: true,
input: `enabled = true`,
expected: "enabled: true\n",
scenarioType: "decode",
},
{
description: "object/map attribute",
skipDoc: true,
input: `obj = { a = 1, b = "two" }`,
expected: "obj: {a: 1, b: \"two\"}\n",
scenarioType: "decode",
},
{
description: "nested block",
skipDoc: true,
input: `server { port = 8080 }`,
expected: "server:\n port: 8080\n",
scenarioType: "decode",
},
{
description: "multiple attributes",
skipDoc: true,
input: "name = \"app\"\nversion = 1\nenabled = true",
expected: "name: \"app\"\nversion: 1\nenabled: true\n",
scenarioType: "decode",
},
{
description: "binary expression",
skipDoc: true,
input: `count = 0 - 42`,
expected: "count: -42\n",
scenarioType: "decode",
},
{
description: "negative number",
skipDoc: true,
input: `count = -42`,
expected: "count: -42\n",
scenarioType: "decode",
},
{
description: "scientific notation",
skipDoc: true,
input: `value = 1e-3`,
expected: "value: 0.001\n",
scenarioType: "decode",
},
{
description: "nested object",
skipDoc: true,
input: `config = { db = { host = "localhost", port = 5432 } }`,
expected: "config: {db: {host: \"localhost\", port: 5432}}\n",
scenarioType: "decode",
},
{
description: "mixed list",
skipDoc: true,
input: `values = [1, "two", true]`,
expected: "values:\n - 1\n - \"two\"\n - true\n",
scenarioType: "decode",
},
{
description: "Roundtrip: Sample Doc",
input: multipleBlockLabelKeys,
expected: multipleBlockLabelKeysExpected,
scenarioType: "roundtrip",
},
{
description: "Roundtrip: With an update",
input: multipleBlockLabelKeys,
expression: `.service.cat.process.main.command += "meow"`,
expected: multipleBlockLabelKeysExpectedUpdate,
scenarioType: "roundtrip",
},
{
description: "Parse HCL: Sample Doc",
input: multipleBlockLabelKeys,
expected: multipleBlockLabelKeysExpectedYaml,
scenarioType: "decode",
},
{
description: "block with labels",
skipDoc: true,
input: `resource "aws_instance" "example" { ami = "ami-12345" }`,
expected: "resource:\n aws_instance:\n example:\n ami: \"ami-12345\"\n",
scenarioType: "decode",
},
{
description: "block with labels roundtrip",
skipDoc: true,
input: `resource "aws_instance" "example" { ami = "ami-12345" }`,
expected: "resource \"aws_instance\" \"example\" {\n ami = \"ami-12345\"\n}\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip simple attribute",
skipDoc: true,
input: `io_mode = "async"`,
expected: `io_mode = "async"` + "\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip number attribute",
skipDoc: true,
input: `port = 8080`,
expected: "port = 8080\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip float attribute",
skipDoc: true,
input: `pi = 3.14`,
expected: "pi = 3.14\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip boolean attribute",
skipDoc: true,
input: `enabled = true`,
expected: "enabled = true\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip list of strings",
skipDoc: true,
input: `tags = ["a", "b"]`,
expected: "tags = [\"a\", \"b\"]\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip object/map attribute",
skipDoc: true,
input: `obj = { a = 1, b = "two" }`,
expected: "obj = {\n a = 1\n b = \"two\"\n}\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip nested block",
skipDoc: true,
input: `server { port = 8080 }`,
expected: "server {\n port = 8080\n}\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip multiple attributes",
skipDoc: true,
input: "name = \"app\"\nversion = 1\nenabled = true",
expected: "name = \"app\"\nversion = 1\nenabled = true\n",
scenarioType: "roundtrip",
},
{
description: "Parse HCL: with comments",
input: "# Configuration\nport = 8080 # server port",
expected: "# Configuration\nport: 8080 # server port\n",
scenarioType: "decode",
},
{
description: "Roundtrip: with comments",
input: "# Configuration\nport = 8080",
expected: "# Configuration\nport = 8080\n",
scenarioType: "roundtrip",
},
{
description: "Roundtrip: extraction",
skipDoc: true,
input: simpleSample,
expression: ".shouty_message",
expected: "upper(message)\n",
scenarioType: "roundtrip",
},
{
description: "Roundtrip: With templates, functions and arithmetic",
input: simpleSample,
expected: simpleSampleExpected,
scenarioType: "roundtrip",
},
{
description: "roundtrip example",
skipDoc: true,
input: simpleSample,
expected: simpleSampleExpectedYaml,
scenarioType: "decode",
},
{
description: "Parse HCL: List of strings",
skipDoc: true,
input: `tags = ["a", "b"]`,
expected: "tags:\n - \"a\"\n - \"b\"\n",
scenarioType: "decode",
},
{
description: "roundtrip list of objects",
skipDoc: true,
input: `items = [{ name = "a", value = 1 }, { name = "b", value = 2 }]`,
expected: "items = [{\n name = \"a\"\n value = 1\n }, {\n name = \"b\"\n value = 2\n}]\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip nested blocks with same name",
skipDoc: true,
input: "database \"primary\" {\n host = \"localhost\"\n port = 5432\n}\ndatabase \"replica\" {\n host = \"replica.local\"\n port = 5433\n}",
expected: "database \"primary\" {\n host = \"localhost\"\n port = 5432\n}\ndatabase \"replica\" {\n host = \"replica.local\"\n port = 5433\n}\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip mixed nested structure",
skipDoc: true,
input: "servers \"web\" {\n addresses = [\"10.0.1.1\", \"10.0.1.2\"]\n port = 8080\n}",
expected: "servers \"web\" {\n addresses = [\"10.0.1.1\", \"10.0.1.2\"]\n port = 8080\n}\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip null value",
skipDoc: true,
input: `value = null`,
expected: "value = null\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip empty list",
skipDoc: true,
input: `items = []`,
expected: "items = []\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip empty object",
skipDoc: true,
input: `config = {}`,
expected: "config = {}\n",
scenarioType: "roundtrip",
},
{
description: "Roundtrip: Separate blocks with same name.",
input: "resource \"aws_instance\" \"web\" {\n ami = \"ami-12345\"\n}\nresource \"aws_instance\" \"db\" {\n ami = \"ami-67890\"\n}",
expected: "resource \"aws_instance\" \"web\" {\n ami = \"ami-12345\"\n}\nresource \"aws_instance\" \"db\" {\n ami = \"ami-67890\"\n}\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip deeply nested structure",
skipDoc: true,
input: "app \"database\" \"primary\" \"connection\" {\n host = \"db.local\"\n port = 5432\n}",
expected: "app \"database\" \"primary\" \"connection\" {\n host = \"db.local\"\n port = 5432\n}\n",
scenarioType: "roundtrip",
},
{
description: "roundtrip with leading comments",
skipDoc: true,
input: "# Main config\nenabled = true\nport = 8080",
expected: "# Main config\nenabled = true\nport = 8080\n",
scenarioType: "roundtrip",
},
{
description: "Multiple attributes with comments (comment safety with safe path separator)",
skipDoc: true,
input: "# Database config\ndb_host = \"localhost\"\n# Connection pool\ndb_pool = 10",
expected: "# Database config\ndb_host = \"localhost\"\n# Connection pool\ndb_pool = 10\n",
scenarioType: "roundtrip",
},
{
description: "Nested blocks with head comments",
skipDoc: true,
input: "service \"api\" {\n # Listen address\n listen = \"0.0.0.0:8080\"\n # TLS enabled\n tls = true\n}",
expected: "service \"api\" {\n # Listen address\n listen = \"0.0.0.0:8080\"\n # TLS enabled\n tls = true\n}\n",
scenarioType: "roundtrip",
},
{
description: "Multiple blocks with EncodeSeparate preservation",
skipDoc: true,
input: "resource \"aws_s3_bucket\" \"bucket1\" {\n bucket = \"my-bucket-1\"\n}\nresource \"aws_s3_bucket\" \"bucket2\" {\n bucket = \"my-bucket-2\"\n}",
expected: "resource \"aws_s3_bucket\" \"bucket1\" {\n bucket = \"my-bucket-1\"\n}\nresource \"aws_s3_bucket\" \"bucket2\" {\n bucket = \"my-bucket-2\"\n}\n",
scenarioType: "roundtrip",
},
{
description: "Blocks with same name handled separately",
skipDoc: true,
input: "server \"primary\" { port = 8080 }\nserver \"backup\" { port = 8081 }",
expected: "server \"primary\" {\n port = 8080\n}\nserver \"backup\" {\n port = 8081\n}\n",
scenarioType: "roundtrip",
},
{
description: "Block label with dot roundtrip (commentPathSep)",
skipDoc: true,
input: "service \"api.service\" {\n port = 8080\n}",
expected: "service \"api.service\" {\n port = 8080\n}\n",
scenarioType: "roundtrip",
},
{
description: "Nested template expression",
skipDoc: true,
input: `message = "User: ${username}, Role: ${user_role}"`,
expected: "message = \"User: ${username}, Role: ${user_role}\"\n",
scenarioType: "roundtrip",
},
{
description: "Empty object roundtrip",
skipDoc: true,
input: `obj = {}`,
expected: "obj = {}\n",
scenarioType: "roundtrip",
},
{
description: "Null value in block",
skipDoc: true,
input: `service { optional_field = null }`,
expected: "service {\n optional_field = null\n}\n",
scenarioType: "roundtrip",
},
}
// testHclScenario executes a single HCL format scenario: "decode" checks
// HCL -> YAML conversion, "roundtrip" checks HCL -> HCL re-encoding.
func testHclScenario(t *testing.T, s formatScenario) {
	switch s.scenarioType {
	case "decode":
		result := mustProcessFormatScenario(s, NewHclDecoder(), NewYamlEncoder(ConfiguredYamlPreferences))
		test.AssertResultWithContext(t, s.expected, result, s.description)
	case "roundtrip":
		test.AssertResultWithContext(t, s.expected, mustProcessFormatScenario(s, NewHclDecoder(), NewHclEncoder(ConfiguredHclPreferences)), s.description)
	default:
		// Previously an unknown scenarioType was silently skipped, so a
		// typo would make the scenario pass without running. Fail loudly,
		// matching documentHclScenario and testKYamlScenario.
		panic(fmt.Sprintf("unhandled scenario type %q", s.scenarioType))
	}
}
// documentHclScenario writes the documentation entry for one HCL format
// scenario, dispatching on its scenario type.
func documentHclScenario(_ *testing.T, w *bufio.Writer, i interface{}) {
	scenario := i.(formatScenario)
	if scenario.skipDoc {
		return
	}
	switch scenario.scenarioType {
	case "", "decode":
		documentHclDecodeScenario(w, scenario)
	case "roundtrip":
		documentHclRoundTripScenario(w, scenario)
	default:
		panic(fmt.Sprintf("unhandled scenario type %q", scenario.scenarioType))
	}
}
// documentHclDecodeScenario renders one decode scenario as markdown: the
// HCL input, the yq command line, and the resulting YAML output.
func documentHclDecodeScenario(w *bufio.Writer, s formatScenario) {
	writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))
	if s.subdescription != "" {
		writeOrPanic(w, s.subdescription)
		writeOrPanic(w, "\n\n")
	}
	writeOrPanic(w, "Given a sample.hcl file of:\n")
	writeOrPanic(w, fmt.Sprintf("```hcl\n%v\n```\n", s.input))
	writeOrPanic(w, "then\n")
	// Only show an expression argument when the scenario defines one.
	exprArg := ""
	if s.expression != "" {
		exprArg = fmt.Sprintf(" '%v'", s.expression)
	}
	writeOrPanic(w, fmt.Sprintf("```bash\nyq -oy%v sample.hcl\n```\n", exprArg))
	writeOrPanic(w, "will output\n")
	writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", mustProcessFormatScenario(s, NewHclDecoder(), NewYamlEncoder(ConfiguredYamlPreferences))))
}
// documentHclRoundTripScenario renders one roundtrip scenario as markdown:
// the HCL input, the yq command line, and the re-encoded HCL output.
func documentHclRoundTripScenario(w *bufio.Writer, s formatScenario) {
	writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))
	if s.subdescription != "" {
		writeOrPanic(w, s.subdescription)
		writeOrPanic(w, "\n\n")
	}
	writeOrPanic(w, "Given a sample.hcl file of:\n")
	writeOrPanic(w, fmt.Sprintf("```hcl\n%v\n```\n", s.input))
	writeOrPanic(w, "then\n")
	// Only show an expression argument when the scenario defines one.
	exprArg := ""
	if s.expression != "" {
		exprArg = fmt.Sprintf(" '%v'", s.expression)
	}
	writeOrPanic(w, fmt.Sprintf("```bash\nyq%v sample.hcl\n```\n", exprArg))
	writeOrPanic(w, "will output\n")
	writeOrPanic(w, fmt.Sprintf("```hcl\n%v```\n\n", mustProcessFormatScenario(s, NewHclDecoder(), NewHclEncoder(ConfiguredHclPreferences))))
}
// TestHclEncoderPrintDocumentSeparator: HCL has no document-separator
// concept, so this must write nothing and return no error.
func TestHclEncoderPrintDocumentSeparator(t *testing.T) {
	var out bytes.Buffer
	bufferedWriter := bufio.NewWriter(&out)
	separatorErr := NewHclEncoder(ConfiguredHclPreferences).PrintDocumentSeparator(bufferedWriter)
	bufferedWriter.Flush()
	test.AssertResult(t, nil, separatorErr)
	test.AssertResult(t, "", out.String())
}
// TestHclEncoderPrintLeadingContent: leading content is ignored by the
// HCL encoder; nothing is written and no error is returned.
func TestHclEncoderPrintLeadingContent(t *testing.T) {
	var out bytes.Buffer
	bufferedWriter := bufio.NewWriter(&out)
	leadingErr := NewHclEncoder(ConfiguredHclPreferences).PrintLeadingContent(bufferedWriter, "some content")
	bufferedWriter.Flush()
	test.AssertResult(t, nil, leadingErr)
	test.AssertResult(t, "", out.String())
}
// TestHclEncoderCanHandleAliases verifies the HCL encoder reports that it
// cannot handle YAML anchors/aliases.
func TestHclEncoderCanHandleAliases(t *testing.T) {
encoder := NewHclEncoder(ConfiguredHclPreferences)
test.AssertResult(t, false, encoder.CanHandleAliases())
}
// TestHclFormatScenarios runs every HCL scenario and regenerates the
// usage documentation from the same scenario list.
func TestHclFormatScenarios(t *testing.T) {
	generic := make([]interface{}, 0, len(hclFormatScenarios))
	for _, scenario := range hclFormatScenarios {
		testHclScenario(t, scenario)
		generic = append(generic, scenario)
	}
	documentScenarios(t, "usage", "hcl", generic, documentHclScenario)
}

30
pkg/yqlib/kyaml.go Normal file
View File

@ -0,0 +1,30 @@
//go:build !yq_nokyaml
package yqlib
// KYamlPreferences controls how the KYaml encoder renders output.
type KYamlPreferences struct {
	// Indent is the number of spaces per nesting level.
	Indent int
	// ColorsEnabled turns on colourised output when true.
	ColorsEnabled bool
	// PrintDocSeparators emits "---" between documents when true.
	PrintDocSeparators bool
	// UnwrapScalar prints top-level scalars bare (unquoted) when true.
	UnwrapScalar bool
}

// NewDefaultKYamlPreferences returns the standard KYaml settings:
// two-space indent, no colours, document separators on, scalars unwrapped.
func NewDefaultKYamlPreferences() KYamlPreferences {
	return KYamlPreferences{
		Indent:             2,
		PrintDocSeparators: true,
		UnwrapScalar:       true,
	}
}

// Copy returns an independent value copy of these preferences.
func (p *KYamlPreferences) Copy() KYamlPreferences {
	return *p
}

// ConfiguredKYamlPreferences is the package-level preference instance
// used by default when encoding KYaml.
var ConfiguredKYamlPreferences = NewDefaultKYamlPreferences()

542
pkg/yqlib/kyaml_test.go Normal file
View File

@ -0,0 +1,542 @@
//go:build !yq_nokyaml
package yqlib
import (
"bufio"
"bytes"
"fmt"
"regexp"
"strings"
"testing"
"github.com/mikefarah/yq/v4/test"
)
// ansiRe matches ANSI SGR escape sequences such as "\x1b[31m".
var ansiRe = regexp.MustCompile(`\x1b\[[0-9;]*m`)

// stripANSI returns s with every ANSI colour escape code removed.
func stripANSI(s string) string {
	cleaned := ansiRe.ReplaceAllString(s, "")
	return cleaned
}
var kyamlFormatScenarios = []formatScenario{
{
description: "Encode kyaml: plain string scalar",
subdescription: "Strings are always double-quoted in KYaml output.",
scenarioType: "encode",
indent: 2,
input: "cat\n",
expected: "\"cat\"\n",
},
{
description: "encode plain int scalar",
scenarioType: "encode",
indent: 2,
input: "12\n",
expected: "12\n",
skipDoc: true,
},
{
description: "encode plain bool scalar",
scenarioType: "encode",
indent: 2,
input: "true\n",
expected: "true\n",
skipDoc: true,
},
{
description: "encode plain null scalar",
scenarioType: "encode",
indent: 2,
input: "null\n",
expected: "null\n",
skipDoc: true,
},
{
description: "encode flow mapping and sequence",
scenarioType: "encode",
indent: 2,
input: "a: b\nc:\n - d\n",
expected: "{\n" +
" a: \"b\",\n" +
" c: [\n" +
" \"d\",\n" +
" ],\n" +
"}\n",
},
{
description: "encode non-string scalars",
scenarioType: "encode",
indent: 2,
input: "a: 12\n" +
"b: true\n" +
"c: null\n" +
"d: \"true\"\n",
expected: "{\n" +
" a: 12,\n" +
" b: true,\n" +
" c: null,\n" +
" d: \"true\",\n" +
"}\n",
},
{
description: "quote non-identifier keys",
scenarioType: "encode",
indent: 2,
input: "\"1a\": b\n\"has space\": c\n",
expected: "{\n" +
" \"1a\": \"b\",\n" +
" \"has space\": \"c\",\n" +
"}\n",
},
{
description: "escape quoted strings",
scenarioType: "encode",
indent: 2,
input: "a: \"line1\\nline2\\t\\\"q\\\"\"\n",
expected: "{\n" +
" a: \"line1\\nline2\\t\\\"q\\\"\",\n" +
"}\n",
},
{
description: "preserve comments when encoding",
scenarioType: "encode",
indent: 2,
input: "# leading\n" +
"a: 1 # a line\n" +
"# head b\n" +
"b: 2\n" +
"c:\n" +
" # head d\n" +
" - d # d line\n" +
" - e\n" +
"# trailing\n",
expected: "# leading\n" +
"{\n" +
" a: 1, # a line\n" +
" # head b\n" +
" b: 2,\n" +
" c: [\n" +
" # head d\n" +
" \"d\", # d line\n" +
" \"e\",\n" +
" ],\n" +
" # trailing\n" +
"}\n",
},
{
description: "Encode kyaml: anchors and aliases",
subdescription: "KYaml output does not support anchors/aliases; they are expanded to concrete values.",
scenarioType: "encode",
indent: 2,
input: "base: &base\n" +
" a: b\n" +
"copy: *base\n",
expected: "{\n" +
" base: {\n" +
" a: \"b\",\n" +
" },\n" +
" copy: {\n" +
" a: \"b\",\n" +
" },\n" +
"}\n",
},
{
description: "Encode kyaml: yaml to kyaml shows formatting differences",
subdescription: "KYaml uses flow-style collections (braces/brackets) and explicit commas.",
scenarioType: "encode",
indent: 2,
input: "person:\n" +
" name: John\n" +
" pets:\n" +
" - cat\n" +
" - dog\n",
expected: "{\n" +
" person: {\n" +
" name: \"John\",\n" +
" pets: [\n" +
" \"cat\",\n" +
" \"dog\",\n" +
" ],\n" +
" },\n" +
"}\n",
},
{
description: "Encode kyaml: nested lists of objects",
subdescription: "Lists and objects can be nested arbitrarily; KYaml always uses flow-style collections.",
scenarioType: "encode",
indent: 2,
input: "- name: a\n" +
" items:\n" +
" - id: 1\n" +
" tags:\n" +
" - k: x\n" +
" v: y\n" +
" - k: x2\n" +
" v: y2\n" +
" - id: 2\n" +
" tags:\n" +
" - k: z\n" +
" v: w\n",
expected: "[\n" +
" {\n" +
" name: \"a\",\n" +
" items: [\n" +
" {\n" +
" id: 1,\n" +
" tags: [\n" +
" {\n" +
" k: \"x\",\n" +
" v: \"y\",\n" +
" },\n" +
" {\n" +
" k: \"x2\",\n" +
" v: \"y2\",\n" +
" },\n" +
" ],\n" +
" },\n" +
" {\n" +
" id: 2,\n" +
" tags: [\n" +
" {\n" +
" k: \"z\",\n" +
" v: \"w\",\n" +
" },\n" +
" ],\n" +
" },\n" +
" ],\n" +
" },\n" +
"]\n",
},
}
// testKYamlScenario runs one KYaml scenario: encode the YAML input with
// scenario-specific preferences (wrapped scalars) and assert the result.
func testKYamlScenario(t *testing.T, s formatScenario) {
	if s.scenarioType != "encode" {
		panic(fmt.Sprintf("unhandled scenario type %q", s.scenarioType))
	}
	encoderPrefs := ConfiguredKYamlPreferences.Copy()
	encoderPrefs.Indent = s.indent
	encoderPrefs.UnwrapScalar = false
	actual := mustProcessFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewKYamlEncoder(encoderPrefs))
	test.AssertResultWithContext(t, s.expected, actual, s.description)
}
// documentKYamlScenario writes the documentation entry for one KYaml
// scenario; only "encode" scenarios are supported.
func documentKYamlScenario(_ *testing.T, w *bufio.Writer, i interface{}) {
	scenario := i.(formatScenario)
	if scenario.skipDoc {
		return
	}
	if scenario.scenarioType != "encode" {
		panic(fmt.Sprintf("unhandled scenario type %q", scenario.scenarioType))
	}
	documentKYamlEncodeScenario(w, scenario)
}
// documentKYamlEncodeScenario renders one encode scenario as markdown:
// the YAML input, the yq command line, and the KYaml output.
func documentKYamlEncodeScenario(w *bufio.Writer, s formatScenario) {
	writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))
	if s.subdescription != "" {
		writeOrPanic(w, s.subdescription)
		writeOrPanic(w, "\n\n")
	}
	writeOrPanic(w, "Given a sample.yml file of:\n")
	writeOrPanic(w, fmt.Sprintf("```yaml\n%v\n```\n", s.input))
	writeOrPanic(w, "then\n")
	exprText := s.expression
	if exprText == "" {
		exprText = "."
	}
	// Only show the -I indent flag when it differs from the default of 2.
	if s.indent == 2 {
		writeOrPanic(w, fmt.Sprintf("```bash\nyq -o=kyaml '%v' sample.yml\n```\n", exprText))
	} else {
		writeOrPanic(w, fmt.Sprintf("```bash\nyq -o=kyaml -I=%v '%v' sample.yml\n```\n", s.indent, exprText))
	}
	writeOrPanic(w, "will output\n")
	docPrefs := ConfiguredKYamlPreferences.Copy()
	docPrefs.Indent = s.indent
	docPrefs.UnwrapScalar = false
	writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", mustProcessFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewKYamlEncoder(docPrefs))))
}
// TestKYamlFormatScenarios runs every KYaml scenario and regenerates the
// usage documentation from the same scenario list.
func TestKYamlFormatScenarios(t *testing.T) {
	generic := make([]interface{}, 0, len(kyamlFormatScenarios))
	for _, scenario := range kyamlFormatScenarios {
		testKYamlScenario(t, scenario)
		generic = append(generic, scenario)
	}
	documentScenarios(t, "usage", "kyaml", generic, documentKYamlScenario)
}
// TestKYamlEncoderPrintDocumentSeparator checks that "---" is emitted only
// when PrintDocSeparators is enabled.
func TestKYamlEncoderPrintDocumentSeparator(t *testing.T) {
	t.Run("enabled", func(t *testing.T) {
		preferences := NewDefaultKYamlPreferences()
		preferences.PrintDocSeparators = true
		var out bytes.Buffer
		if err := NewKYamlEncoder(preferences).PrintDocumentSeparator(&out); err != nil {
			t.Fatal(err)
		}
		if out.String() != "---\n" {
			t.Fatalf("expected doc separator, got %q", out.String())
		}
	})
	t.Run("disabled", func(t *testing.T) {
		preferences := NewDefaultKYamlPreferences()
		preferences.PrintDocSeparators = false
		var out bytes.Buffer
		if err := NewKYamlEncoder(preferences).PrintDocumentSeparator(&out); err != nil {
			t.Fatal(err)
		}
		if out.String() != "" {
			t.Fatalf("expected no output, got %q", out.String())
		}
	})
}
// TestKYamlEncoderEncodeUnwrapScalar checks a top-level string scalar is
// emitted bare (unquoted) when UnwrapScalar is on.
func TestKYamlEncoderEncodeUnwrapScalar(t *testing.T) {
	preferences := NewDefaultKYamlPreferences()
	preferences.UnwrapScalar = true
	scalar := &CandidateNode{Kind: ScalarNode, Tag: "!!str", Value: "cat"}
	var out bytes.Buffer
	if err := NewKYamlEncoder(preferences).Encode(&out, scalar); err != nil {
		t.Fatal(err)
	}
	if out.String() != "cat\n" {
		t.Fatalf("expected unwrapped scalar, got %q", out.String())
	}
}
// TestKYamlEncoderEncodeColorsEnabled checks colourised output still
// contains the rendered tokens once ANSI escapes are stripped.
func TestKYamlEncoderEncodeColorsEnabled(t *testing.T) {
	preferences := NewDefaultKYamlPreferences()
	preferences.UnwrapScalar = false
	preferences.ColorsEnabled = true
	mapping := &CandidateNode{
		Kind: MappingNode,
		Content: []*CandidateNode{
			{Kind: ScalarNode, Tag: "!!str", Value: "a"},
			{Kind: ScalarNode, Tag: "!!str", Value: "b"},
		},
	}
	var out bytes.Buffer
	if err := NewKYamlEncoder(preferences).Encode(&out, mapping); err != nil {
		t.Fatal(err)
	}
	plain := stripANSI(out.String())
	if !strings.Contains(plain, "a:") || !strings.Contains(plain, "\"b\"") {
		t.Fatalf("expected colourised output to contain rendered tokens, got %q", plain)
	}
}
// TestKYamlEncoderWriteNodeAliasAndUnknown covers alias dereferencing and
// the null fallback for nil aliases and unrecognised node kinds.
func TestKYamlEncoderWriteNodeAliasAndUnknown(t *testing.T) {
	enc := NewKYamlEncoder(NewDefaultKYamlPreferences()).(*kyamlEncoder)
	t.Run("alias_nil", func(t *testing.T) {
		var out bytes.Buffer
		if err := enc.writeNode(&out, &CandidateNode{Kind: AliasNode}, 0); err != nil {
			t.Fatal(err)
		}
		if out.String() != "null" {
			t.Fatalf("expected null for nil alias, got %q", out.String())
		}
	})
	t.Run("alias_value", func(t *testing.T) {
		aliased := &CandidateNode{
			Kind:  AliasNode,
			Alias: &CandidateNode{Kind: ScalarNode, Tag: "!!int", Value: "12"},
		}
		var out bytes.Buffer
		if err := enc.writeNode(&out, aliased, 0); err != nil {
			t.Fatal(err)
		}
		if out.String() != "12" {
			t.Fatalf("expected dereferenced alias value, got %q", out.String())
		}
	})
	t.Run("unknown_kind", func(t *testing.T) {
		var out bytes.Buffer
		if err := enc.writeNode(&out, &CandidateNode{Kind: Kind(12345)}, 0); err != nil {
			t.Fatal(err)
		}
		if out.String() != "null" {
			t.Fatalf("expected null for unknown kind, got %q", out.String())
		}
	})
}
// TestKYamlEncoderEmptyCollections checks empty maps and sequences render
// as "{}" and "[]" respectively.
func TestKYamlEncoderEmptyCollections(t *testing.T) {
	enc := NewKYamlEncoder(NewDefaultKYamlPreferences()).(*kyamlEncoder)
	t.Run("empty_mapping", func(t *testing.T) {
		var out bytes.Buffer
		if err := enc.writeNode(&out, &CandidateNode{Kind: MappingNode}, 0); err != nil {
			t.Fatal(err)
		}
		if out.String() != "{}" {
			t.Fatalf("expected empty mapping, got %q", out.String())
		}
	})
	t.Run("empty_sequence", func(t *testing.T) {
		var out bytes.Buffer
		if err := enc.writeNode(&out, &CandidateNode{Kind: SequenceNode}, 0); err != nil {
			t.Fatal(err)
		}
		if out.String() != "[]" {
			t.Fatalf("expected empty sequence, got %q", out.String())
		}
	})
}
// TestKYamlEncoderScalarFallbackAndEscaping covers the quoted-string
// fallback for unknown scalar tags, double-quote escaping, and bare-key
// validation.
func TestKYamlEncoderScalarFallbackAndEscaping(t *testing.T) {
	enc := NewKYamlEncoder(NewDefaultKYamlPreferences()).(*kyamlEncoder)
	t.Run("unknown_tag_falls_back_to_string", func(t *testing.T) {
		node := &CandidateNode{Kind: ScalarNode, Tag: "!!timestamp", Value: "2020-01-01T00:00:00Z"}
		var out bytes.Buffer
		if err := enc.writeNode(&out, node, 0); err != nil {
			t.Fatal(err)
		}
		if out.String() != "\"2020-01-01T00:00:00Z\"" {
			t.Fatalf("expected quoted fallback, got %q", out.String())
		}
	})
	t.Run("escape_double_quoted", func(t *testing.T) {
		// Backslash, quote, control chars and a non-printable rune.
		actual := escapeDoubleQuotedString("a\\b\"c\n\r\t" + string(rune(0x01)))
		expected := "a\\\\b\\\"c\\n\\r\\t\\u0001"
		if actual != expected {
			t.Fatalf("expected %q, got %q", expected, actual)
		}
	})
	t.Run("valid_bare_key", func(t *testing.T) {
		if isValidKYamlBareKey("") {
			t.Fatalf("expected empty string to be invalid")
		}
		if isValidKYamlBareKey("1a") {
			t.Fatalf("expected leading digit to be invalid")
		}
		if !isValidKYamlBareKey("a_b-2") {
			t.Fatalf("expected identifier-like key to be valid")
		}
	})
}
// TestKYamlEncoderCommentsInMapping checks that head, line and foot
// comments attached to mapping keys/values survive encoding.
func TestKYamlEncoderCommentsInMapping(t *testing.T) {
	preferences := NewDefaultKYamlPreferences()
	preferences.UnwrapScalar = false
	enc := NewKYamlEncoder(preferences).(*kyamlEncoder)
	keyNode := &CandidateNode{
		Kind:        ScalarNode,
		Tag:         "!!str",
		Value:       "a",
		HeadComment: "key head",
		LineComment: "key line",
		FootComment: "key foot",
	}
	valueNode := &CandidateNode{
		Kind:        ScalarNode,
		Tag:         "!!str",
		Value:       "b",
		HeadComment: "value head",
	}
	mapping := &CandidateNode{Kind: MappingNode, Content: []*CandidateNode{keyNode, valueNode}}
	var out bytes.Buffer
	if err := enc.writeNode(&out, mapping, 0); err != nil {
		t.Fatal(err)
	}
	rendered := out.String()
	if !strings.Contains(rendered, "# key head\n") {
		t.Fatalf("expected key head comment, got %q", rendered)
	}
	if !strings.Contains(rendered, "# value head\n") {
		t.Fatalf("expected value head comment, got %q", rendered)
	}
	if !strings.Contains(rendered, ", # key line\n") {
		t.Fatalf("expected inline key comment fallback, got %q", rendered)
	}
	if !strings.Contains(rendered, "# key foot\n") {
		t.Fatalf("expected foot comment fallback, got %q", rendered)
	}
}
// TestKYamlEncoderCommentBlockAndInlineComment covers block-comment
// prefixing/CRLF normalisation and inline-comment rendering.
func TestKYamlEncoderCommentBlockAndInlineComment(t *testing.T) {
	enc := NewKYamlEncoder(NewDefaultKYamlPreferences()).(*kyamlEncoder)
	t.Run("comment_block_prefixing_and_crlf", func(t *testing.T) {
		var out bytes.Buffer
		if err := enc.writeCommentBlock(&out, "line1\r\n\r\n# already\r\nline2", 2); err != nil {
			t.Fatal(err)
		}
		expected := " # line1\n # already\n # line2\n"
		if out.String() != expected {
			t.Fatalf("expected %q, got %q", expected, out.String())
		}
	})
	t.Run("inline_comment_prefix_and_first_line_only", func(t *testing.T) {
		var out bytes.Buffer
		if err := enc.writeInlineComment(&out, "hello\r\nsecond line"); err != nil {
			t.Fatal(err)
		}
		if out.String() != " # hello" {
			t.Fatalf("expected %q, got %q", " # hello", out.String())
		}
	})
	t.Run("inline_comment_already_prefixed", func(t *testing.T) {
		var out bytes.Buffer
		if err := enc.writeInlineComment(&out, "# hello"); err != nil {
			t.Fatal(err)
		}
		if out.String() != " # hello" {
			t.Fatalf("expected %q, got %q", " # hello", out.String())
		}
	})
}

View File

@ -61,7 +61,7 @@ func unwrap(value string) string {
}
func extractNumberParameter(value string) (int, error) {
parameterParser := regexp.MustCompile(`.*\(([0-9]+)\)`)
parameterParser := regexp.MustCompile(`.*\((-?[0-9]+)\)`)
matches := parameterParser.FindStringSubmatch(value)
var indent, errParsingInt = parseInt(matches[1])
if errParsingInt != nil {

View File

@ -57,7 +57,7 @@ var participleYqRules = []*participleYqRule{
simpleOp("sort_?keys", sortKeysOpType),
{"ArrayToMap", "array_?to_?map", expressionOpToken(`(.[] | select(. != null) ) as $i ireduce({}; .[$i | key] = $i)`), 0},
{"Root", "root", expressionOpToken(`parent(-1)`), 0},
{"YamlEncodeWithIndent", `to_?yaml\([0-9]+\)`, encodeParseIndent(YamlFormat), 0},
{"XMLEncodeWithIndent", `to_?xml\([0-9]+\)`, encodeParseIndent(XMLFormat), 0},
{"JSONEncodeWithIndent", `to_?json\([0-9]+\)`, encodeParseIndent(JSONFormat), 0},
@ -132,7 +132,7 @@ var participleYqRules = []*participleYqRule{
simpleOp("split", splitStringOpType),
simpleOp("parents", getParentsOpType),
{"ParentWithLevel", `parent\([0-9]+\)`, parentWithLevel(), 0},
{"ParentWithLevel", `parent\(-?[0-9]+\)`, parentWithLevel(), 0},
{"ParentWithDefaultLevel", `parent`, parentWithDefaultLevel(), 0},
simpleOp("keys", keysOpType),
@ -451,6 +451,7 @@ func multiplyWithPrefs(op *operationType) yqAction {
prefs.AssignPrefs.ClobberCustomTags = true
}
prefs.TraversePrefs.DontFollowAlias = true
prefs.TraversePrefs.ExactKeyMatch = true
op := &Operation{OperationType: op, Value: multiplyOpType.Type, StringValue: options, Preferences: prefs}
return &token{TokenType: operationToken, Operation: op}, nil
}

View File

@ -29,7 +29,7 @@ func GetLogger() *logging.Logger {
}
func getContentValueByKey(content []*CandidateNode, key string) *CandidateNode {
for index := 0; index < len(content); index = index + 2 {
for index := 0; index < len(content)-1; index = index + 2 {
keyNode := content[index]
valueNode := content[index+1]
if keyNode.Value == key {
@ -187,15 +187,69 @@ func parseInt(numberString string) (int, error) {
}
func processEscapeCharacters(original string) string {
value := original
value = strings.ReplaceAll(value, "\\\"", "\"")
value = strings.ReplaceAll(value, "\\n", "\n")
value = strings.ReplaceAll(value, "\\t", "\t")
value = strings.ReplaceAll(value, "\\r", "\r")
value = strings.ReplaceAll(value, "\\f", "\f")
value = strings.ReplaceAll(value, "\\v", "\v")
value = strings.ReplaceAll(value, "\\b", "\b")
value = strings.ReplaceAll(value, "\\a", "\a")
if original == "" {
return original
}
var result strings.Builder
runes := []rune(original)
for i := 0; i < len(runes); i++ {
if runes[i] == '\\' && i < len(runes)-1 {
next := runes[i+1]
switch next {
case '\\':
// Check if followed by opening bracket - if so, preserve both backslashes
// this is required for string interpolation to work correctly.
if i+2 < len(runes) && runes[i+2] == '(' {
// Preserve \\ when followed by (
result.WriteRune('\\')
result.WriteRune('\\')
i++ // Skip the next backslash (we'll process the ( normally on next iteration)
continue
}
// Escaped backslash: \\ -> \
result.WriteRune('\\')
i++ // Skip the next backslash
continue
case '"':
result.WriteRune('"')
i++ // Skip the quote
continue
case 'n':
result.WriteRune('\n')
i++ // Skip the 'n'
continue
case 't':
result.WriteRune('\t')
i++ // Skip the 't'
continue
case 'r':
result.WriteRune('\r')
i++ // Skip the 'r'
continue
case 'f':
result.WriteRune('\f')
i++ // Skip the 'f'
continue
case 'v':
result.WriteRune('\v')
i++ // Skip the 'v'
continue
case 'b':
result.WriteRune('\b')
i++ // Skip the 'b'
continue
case 'a':
result.WriteRune('\a')
i++ // Skip the 'a'
continue
}
}
result.WriteRune(runes[i])
}
value := result.String()
if value != original {
log.Debug("processEscapeCharacters from [%v] to [%v]", original, value)
}

View File

@ -408,3 +408,122 @@ func TestKindString(t *testing.T) {
test.AssertResult(t, "AliasNode", KindString(AliasNode))
test.AssertResult(t, "unknown!", KindString(Kind(999))) // Invalid kind
}
// processEscapeCharactersScenario pairs a raw input containing backslash
// escape sequences with the string processEscapeCharacters should produce.
type processEscapeCharactersScenario struct {
input string
expected string
}
var processEscapeCharactersScenarios = []processEscapeCharactersScenario{
{
input: "",
expected: "",
},
{
input: "hello",
expected: "hello",
},
{
input: "\\\"",
expected: "\"",
},
{
input: "hello\\\"world",
expected: "hello\"world",
},
{
input: "\\n",
expected: "\n",
},
{
input: "line1\\nline2",
expected: "line1\nline2",
},
{
input: "\\t",
expected: "\t",
},
{
input: "hello\\tworld",
expected: "hello\tworld",
},
{
input: "\\r",
expected: "\r",
},
{
input: "hello\\rworld",
expected: "hello\rworld",
},
{
input: "\\f",
expected: "\f",
},
{
input: "hello\\fworld",
expected: "hello\fworld",
},
{
input: "\\v",
expected: "\v",
},
{
input: "hello\\vworld",
expected: "hello\vworld",
},
{
input: "\\b",
expected: "\b",
},
{
input: "hello\\bworld",
expected: "hello\bworld",
},
{
input: "\\a",
expected: "\a",
},
{
input: "hello\\aworld",
expected: "hello\aworld",
},
{
input: "\\\"\\n\\t\\r\\f\\v\\b\\a",
expected: "\"\n\t\r\f\v\b\a",
},
{
input: "multiple\\nlines\\twith\\ttabs",
expected: "multiple\nlines\twith\ttabs",
},
{
input: "quote\\\"here",
expected: "quote\"here",
},
{
input: "\\\\",
expected: "\\", // Backslash is processed: "\\\\" becomes "\\"
},
{
input: "\\\"test\\\"",
expected: "\"test\"",
},
{
input: "a\\\\b",
expected: "a\\b", // Tests roundtrip: "a\\\\b" should become "a\\b"
},
{
input: "Hi \\\\(.value)",
expected: "Hi \\\\(.value)",
},
{
input: `a\\b`,
expected: "a\\b",
},
}
// TestProcessEscapeCharacters verifies every escape-translation scenario:
// the processed output of each input must match its expected value.
func TestProcessEscapeCharacters(t *testing.T) {
	for _, scenario := range processEscapeCharactersScenarios {
		context := fmt.Sprintf("Input: %q", scenario.input)
		result := processEscapeCharacters(scenario.input)
		test.AssertResultComplexWithContext(t, scenario.expected, result, context)
	}
}

11
pkg/yqlib/no_hcl.go Normal file
View File

@ -0,0 +1,11 @@
//go:build yq_nohcl

package yqlib

// NewHclDecoder returns nil when yq is built with the yq_nohcl tag;
// a nil decoder signals that HCL input support is unavailable in this build.
func NewHclDecoder() Decoder {
	return nil
}

// NewHclEncoder returns nil when yq is built with the yq_nohcl tag;
// the preferences argument is ignored since no encoder is constructed.
func NewHclEncoder(_ HclPreferences) Encoder {
	return nil
}

7
pkg/yqlib/no_kyaml.go Normal file
View File

@ -0,0 +1,7 @@
//go:build yq_nokyaml
package yqlib
func NewKYamlEncoder(_ KYamlPreferences) Encoder {
return nil
}

View File

@ -5,3 +5,11 @@ package yqlib
// NewTomlDecoder returns nil in builds without TOML support;
// callers treat a nil decoder as "format unavailable".
func NewTomlDecoder() Decoder {
	return nil
}

// NewTomlEncoder returns nil in builds without TOML support.
func NewTomlEncoder() Encoder {
	return nil
}

// NewTomlEncoderWithPrefs returns nil in builds without TOML support;
// the preferences are ignored since no encoder is constructed.
func NewTomlEncoderWithPrefs(prefs TomlPreferences) Encoder {
	return nil
}

View File

@ -4,7 +4,7 @@ import (
"testing"
)
var specDocument = `- &CENTER { x: 1, y: 2 }
var specDocument = `- &CENTRE { x: 1, y: 2 }
- &LEFT { x: 0, y: 2 }
- &BIG { r: 10 }
- &SMALL { r: 1 }
@ -139,7 +139,7 @@ var fixedAnchorOperatorScenarios = []expressionScenario{
{
description: "FIXED: Merge multiple maps",
subdescription: "Taken from https://yaml.org/type/merge.html. Same values as legacy, but with the correct key order.",
document: specDocument + "- << : [ *CENTER, *BIG ]\n",
document: specDocument + "- << : [ *CENTRE, *BIG ]\n",
expression: ".[4] | explode(.)",
expected: []string{"D0, P[4], (!!map)::x: 1\ny: 2\nr: 10\n"},
},
@ -171,7 +171,7 @@ var fixedAnchorOperatorScenarios = []expressionScenario{
},
{
skipDoc: true,
description: "Exploding merge anchor should not explode neighbors",
description: "Exploding merge anchor should not explode neighbours",
subdescription: "b must not be exploded, as `r: *a` will become invalid",
document: `{b: &b {a: &a 42}, r: *a, c: {<<: *b}}`,
expression: `explode(.c)`,
@ -181,7 +181,7 @@ var fixedAnchorOperatorScenarios = []expressionScenario{
},
{
skipDoc: true,
description: "Exploding sequence merge anchor should not explode neighbors",
description: "Exploding sequence merge anchor should not explode neighbours",
subdescription: "b must not be exploded, as `r: *a` will become invalid",
document: `{b: &b {a: &a 42}, r: *a, c: {<<: [*b]}}`,
expression: `explode(.c)`,
@ -265,7 +265,7 @@ var badAnchorOperatorScenarios = []expressionScenario{
{
description: "LEGACY: Merge multiple maps",
subdescription: "see https://yaml.org/type/merge.html. This has the correct data, but the wrong key order; set --yaml-fix-merge-anchor-to-spec=true to fix the key order.",
document: specDocument + "- << : [ *CENTER, *BIG ]\n",
document: specDocument + "- << : [ *CENTRE, *BIG ]\n",
expression: ".[4] | explode(.)",
expected: []string{"D0, P[4], (!!map)::r: 10\nx: 1\ny: 2\n"},
},
@ -297,7 +297,7 @@ var anchorOperatorScenarios = []expressionScenario{
{
description: "Merge one map",
subdescription: "see https://yaml.org/type/merge.html",
document: specDocument + "- << : *CENTER\n r: 10\n",
document: specDocument + "- << : *CENTRE\n r: 10\n",
expression: ".[4] | explode(.)",
expected: []string{expectedSpecResult},
},

View File

@ -32,7 +32,7 @@ func envOperator(_ *dataTreeNavigator, context Context, expressionNode *Expressi
node = &CandidateNode{
Kind: ScalarNode,
Tag: "!!str",
Value: processEscapeCharacters(rawValue),
Value: rawValue,
}
} else if rawValue == "" {
return Context{}, fmt.Errorf("value for env variable '%v' not provided in env()", envName)

View File

@ -63,78 +63,6 @@ var envOperatorScenarios = []expressionScenario{
"D0, P[], ()::a: \"12\"\n",
},
},
{
description: "strenv with newline escape",
skipDoc: true,
environmentVariables: map[string]string{"myenv": "string with a\\n"},
expression: `.a = strenv(myenv)`,
expected: []string{
"D0, P[], ()::a: |\n string with a\n",
},
},
{
description: "strenv with tab escape",
skipDoc: true,
environmentVariables: map[string]string{"myenv": "string with a\\t"},
expression: `.a = strenv(myenv)`,
expected: []string{
"D0, P[], ()::a: \"string with a\\t\"\n",
},
},
{
description: "strenv with carriage return escape",
skipDoc: true,
environmentVariables: map[string]string{"myenv": "string with a\\r"},
expression: `.a = strenv(myenv)`,
expected: []string{
"D0, P[], ()::a: \"string with a\\r\"\n",
},
},
{
description: "strenv with form feed escape",
skipDoc: true,
environmentVariables: map[string]string{"myenv": "string with a\\f"},
expression: `.a = strenv(myenv)`,
expected: []string{
"D0, P[], ()::a: \"string with a\\f\"\n",
},
},
{
description: "strenv with vertical tab escape",
skipDoc: true,
environmentVariables: map[string]string{"myenv": "string with a\\v"},
expression: `.a = strenv(myenv)`,
expected: []string{
"D0, P[], ()::a: \"string with a\\v\"\n",
},
},
{
description: "strenv with backspace escape",
skipDoc: true,
environmentVariables: map[string]string{"myenv": "string with a\\b"},
expression: `.a = strenv(myenv)`,
expected: []string{
"D0, P[], ()::a: \"string with a\\b\"\n",
},
},
{
description: "strenv with alert/bell escape",
skipDoc: true,
environmentVariables: map[string]string{"myenv": "string with a\\a"},
expression: `.a = strenv(myenv)`,
expected: []string{
"D0, P[], ()::a: \"string with a\\a\"\n",
},
},
{
description: "strenv with double quote escape",
skipDoc: true,
environmentVariables: map[string]string{"myenv": "string with a\\\""},
expression: `.a = strenv(myenv)`,
expected: []string{
"D0, P[], ()::a: string with a\"\n",
},
},
{
description: "Dynamically update a path from an environment variable",
subdescription: "The env variable can be any valid yq expression.",

View File

@ -30,7 +30,7 @@ func multiplyAssignOperator(d *dataTreeNavigator, context Context, expressionNod
func multiplyOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
log.Debugf("MultiplyOperator")
return crossFunction(d, context, expressionNode, multiply(expressionNode.Operation.Preferences.(multiplyPreferences)), false)
return crossFunction(d, context.ReadOnlyClone(), expressionNode, multiply(expressionNode.Operation.Preferences.(multiplyPreferences)), false)
}
func getComments(lhs *CandidateNode, rhs *CandidateNode) (leadingContent string, headComment string, footComment string) {
@ -168,7 +168,7 @@ func mergeObjects(d *dataTreeNavigator, context Context, lhs *CandidateNode, rhs
// only need to recurse the array if we are doing a deep merge
prefs := recursiveDescentPreferences{RecurseArray: preferences.DeepMergeArrays,
TraversePreferences: traversePreferences{DontFollowAlias: true, IncludeMapKeys: true}}
TraversePreferences: traversePreferences{DontFollowAlias: true, IncludeMapKeys: true, ExactKeyMatch: true}}
log.Debugf("merge - preferences.DeepMergeArrays %v", preferences.DeepMergeArrays)
log.Debugf("merge - preferences.AppendArrays %v", preferences.AppendArrays)
err := recursiveDecent(results, context.SingleChildContext(rhs), prefs)

View File

@ -86,7 +86,35 @@ c:
<<: *cat
`
// Fixtures verifying that keys containing '*' are treated as literal strings
// (not glob patterns) when two documents are merged.
var mergeWithGlobA = `
"**cat": things,
"meow**cat": stuff
`

var mergeWithGlobB = `
"**cat": newThings,
`
var multiplyOperatorScenarios = []expressionScenario{
{
description: "multiple should be readonly",
skipDoc: true,
document: "",
expression: ".x |= (root | (.a * .b))",
expected: []string{
"D0, P[], ()::x: null\n",
},
},
{
description: "glob keys are treated as literals when merging",
skipDoc: true,
document: mergeWithGlobA,
document2: mergeWithGlobB,
expression: `select(fi == 0) * select(fi == 1)`,
expected: []string{
"D0, P[], (!!map)::\n\"**cat\": newThings,\n\"meow**cat\": stuff\n",
},
},
{
skipDoc: true,
document: mergeArrayWithAnchors,

View File

@ -35,9 +35,28 @@ func getParentOperator(_ *dataTreeNavigator, context Context, expressionNode *Ex
for el := context.MatchingNodes.Front(); el != nil; el = el.Next() {
candidate := el.Value.(*CandidateNode)
// Handle negative levels: count total parents first
levelsToGoUp := prefs.Level
if prefs.Level < 0 {
// Count all parents
totalParents := 0
temp := candidate.Parent
for temp != nil {
totalParents++
temp = temp.Parent
}
// Convert negative index to positive
// -1 means last parent (root), -2 means second to last, etc.
levelsToGoUp = totalParents + prefs.Level + 1
if levelsToGoUp < 0 {
levelsToGoUp = 0
}
}
currentLevel := 0
for currentLevel < prefs.Level && candidate != nil {
log.Debugf("currentLevel: %v, desired: %v", currentLevel, prefs.Level)
for currentLevel < levelsToGoUp && candidate != nil {
log.Debugf("currentLevel: %v, desired: %v", currentLevel, levelsToGoUp)
log.Debugf("candidate: %v", NodeToString(candidate))
candidate = candidate.Parent
currentLevel++

View File

@ -38,6 +38,58 @@ var parentOperatorScenarios = []expressionScenario{
"D0, P[], (!!seq)::- {c: cat}\n- {b: {c: cat}}\n- {a: {b: {c: cat}}}\n",
},
},
{
description: "Get the top (root) parent",
subdescription: "Use negative numbers to get the top parents. You can think of this as indexing into the 'parents' array above",
document: "a:\n b:\n c: cat\n",
expression: `.a.b.c | parent(-1)`,
expected: []string{
"D0, P[], (!!map)::a:\n b:\n c: cat\n",
},
},
{
description: "Root",
subdescription: "Alias for parent(-1), returns the top level parent. This is usually the document node.",
document: "a:\n b:\n c: cat\n",
expression: `.a.b.c | root`,
expected: []string{
"D0, P[], (!!map)::a:\n b:\n c: cat\n",
},
},
{
description: "boundary negative",
skipDoc: true,
document: "a:\n b:\n c: cat\n",
expression: `.a.b.c | parent(-3)`,
expected: []string{
"D0, P[a b], (!!map)::c: cat\n",
},
},
{
description: "large negative",
skipDoc: true,
document: "a:\n b:\n c: cat\n",
expression: `.a.b.c | parent(-10)`,
expected: []string{
"D0, P[a b c], (!!str)::cat\n",
},
},
{
description: "parent zero",
skipDoc: true,
document: "a:\n b:\n c: cat\n",
expression: `.a.b.c | parent(0)`,
expected: []string{
"D0, P[a b c], (!!str)::cat\n",
},
},
{
description: "large positive",
skipDoc: true,
document: "a:\n b:\n c: cat\n",
expression: `.a.b.c | parent(10)`,
expected: []string{},
},
{
description: "N-th parent",
subdescription: "You can optionally supply the number of levels to go up for the parent, the default being 1.",
@ -55,6 +107,15 @@ var parentOperatorScenarios = []expressionScenario{
"D0, P[], (!!map)::a:\n b:\n c: cat\n",
},
},
{
description: "N-th negative",
subdescription: "Similarly, use negative numbers to index backwards from the parents array",
document: "a:\n b:\n c: cat\n",
expression: `.a.b.c | parent(-2)`,
expected: []string{
"D0, P[a], (!!map)::b:\n c: cat\n",
},
},
{
description: "No parent",
document: `{}`,

View File

@ -14,6 +14,7 @@ type traversePreferences struct {
DontAutoCreate bool // by default, we automatically create entries on the fly.
DontIncludeMapValues bool
OptionalTraverse bool // e.g. .adf?
ExactKeyMatch bool // by default keys are matched with wild/glob patterns; merge sets this to match keys literally instead.
}
func splat(context Context, prefs traversePreferences) (Context, error) {
@ -216,7 +217,11 @@ func traverseArrayWithIndices(node *CandidateNode, indices []*CandidateNode, pre
return newMatches, nil
}
func keyMatches(key *CandidateNode, wantedKey string) bool {
// keyMatches reports whether a map key matches wantedKey. When exactKeyMatch
// is true (used by merge) the comparison is literal string equality; otherwise
// matchKey is used, which honours wild/glob patterns.
func keyMatches(key *CandidateNode, wantedKey string, exactKeyMatch bool) bool {
	if exactKeyMatch {
		// this is used for merge: glob characters in keys are literals here
		return key.Value == wantedKey
	}
	return matchKey(key.Value, wantedKey)
}
@ -303,7 +308,7 @@ func doTraverseMap(newMatches *orderedmap.OrderedMap, node *CandidateNode, wante
return err
}
}
} else if splat || keyMatches(key, wantedKey) {
} else if splat || keyMatches(key, wantedKey, prefs.ExactKeyMatch) {
log.Debug("MATCHED")
if prefs.IncludeMapKeys {
log.Debug("including key")

View File

@ -132,7 +132,7 @@ func (p *resultsPrinter) PrintResults(matchingNodes *list.List) error {
tempBufferBytes := tempBuffer.Bytes()
if bytes.IndexByte(tempBufferBytes, 0) != -1 {
return fmt.Errorf(
"can't serialize value because it contains NUL char and you are using NUL separated output",
"can't serialise value because it contains NUL char and you are using NUL separated output",
)
}
if _, err := writer.Write(tempBufferBytes); err != nil {

View File

@ -49,3 +49,179 @@ func TestNodeInfoPrinter_PrintResults(t *testing.T) {
test.AssertResult(t, true, strings.Contains(outStr, "footComment: foot"))
test.AssertResult(t, true, strings.Contains(outStr, "anchor: anchor"))
}
// TestNodeInfoPrinter_PrintedAnything_True checks that PrintedAnything flips
// from false to true once at least one node has been printed.
func TestNodeInfoPrinter_PrintedAnything_True(t *testing.T) {
	node := &CandidateNode{
		Kind:  ScalarNode,
		Tag:   "!!str",
		Value: "test",
	}
	listNodes := list.New()
	listNodes.PushBack(node)

	var output bytes.Buffer
	writer := bufio.NewWriter(&output)
	printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))

	// Before printing, should be false
	test.AssertResult(t, false, printer.PrintedAnything())

	err := printer.PrintResults(listNodes)
	writer.Flush()
	if err != nil {
		t.Fatalf("PrintResults error: %v", err)
	}

	// After printing, should be true
	test.AssertResult(t, true, printer.PrintedAnything())
}
// TestNodeInfoPrinter_PrintedAnything_False checks that printing an empty
// node list leaves PrintedAnything false.
func TestNodeInfoPrinter_PrintedAnything_False(t *testing.T) {
	listNodes := list.New()

	var output bytes.Buffer
	writer := bufio.NewWriter(&output)
	printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))

	err := printer.PrintResults(listNodes)
	writer.Flush()
	if err != nil {
		t.Fatalf("PrintResults error: %v", err)
	}

	// No nodes printed, should still be false
	test.AssertResult(t, false, printer.PrintedAnything())
}
// TestNodeInfoPrinter_SetNulSepOutput is a smoke test: toggling NUL-separated
// output must not panic. No assertions, hence the ignored *testing.T.
func TestNodeInfoPrinter_SetNulSepOutput(_ *testing.T) {
	var output bytes.Buffer
	writer := bufio.NewWriter(&output)
	printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))

	// Should not panic or error
	printer.SetNulSepOutput(true)
	printer.SetNulSepOutput(false)
}
// TestNodeInfoPrinter_SetAppendix checks that appendix content supplied via
// SetAppendix is written to the output after the printed node info.
func TestNodeInfoPrinter_SetAppendix(t *testing.T) {
	node := &CandidateNode{
		Kind:  ScalarNode,
		Tag:   "!!str",
		Value: "test",
	}
	listNodes := list.New()
	listNodes.PushBack(node)

	var output bytes.Buffer
	writer := bufio.NewWriter(&output)
	printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))

	appendixText := "This is appendix text\n"
	appendixReader := strings.NewReader(appendixText)
	printer.SetAppendix(appendixReader)

	err := printer.PrintResults(listNodes)
	writer.Flush()
	if err != nil {
		t.Fatalf("PrintResults error: %v", err)
	}

	outStr := output.String()
	// Both the node value and the appendix must appear in the output.
	test.AssertResult(t, true, strings.Contains(outStr, "test"))
	test.AssertResult(t, true, strings.Contains(outStr, appendixText))
}
// TestNodeInfoPrinter_MultipleNodes checks that every node in the list is
// printed, not just the first.
func TestNodeInfoPrinter_MultipleNodes(t *testing.T) {
	node1 := &CandidateNode{
		Kind:  ScalarNode,
		Tag:   "!!str",
		Value: "first",
	}
	node2 := &CandidateNode{
		Kind:  ScalarNode,
		Tag:   "!!str",
		Value: "second",
	}
	listNodes := list.New()
	listNodes.PushBack(node1)
	listNodes.PushBack(node2)

	var output bytes.Buffer
	writer := bufio.NewWriter(&output)
	printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))

	err := printer.PrintResults(listNodes)
	writer.Flush()
	if err != nil {
		t.Fatalf("PrintResults error: %v", err)
	}

	outStr := output.String()
	test.AssertResult(t, true, strings.Contains(outStr, "value: first"))
	test.AssertResult(t, true, strings.Contains(outStr, "value: second"))
}
// TestNodeInfoPrinter_SequenceNode checks that kind, tag and style of a
// flow-style sequence node are all reported in the node info output.
func TestNodeInfoPrinter_SequenceNode(t *testing.T) {
	node := &CandidateNode{
		Kind:  SequenceNode,
		Tag:   "!!seq",
		Style: FlowStyle,
	}
	listNodes := list.New()
	listNodes.PushBack(node)

	var output bytes.Buffer
	writer := bufio.NewWriter(&output)
	printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))

	err := printer.PrintResults(listNodes)
	writer.Flush()
	if err != nil {
		t.Fatalf("PrintResults error: %v", err)
	}

	outStr := output.String()
	test.AssertResult(t, true, strings.Contains(outStr, "kind: SequenceNode"))
	test.AssertResult(t, true, strings.Contains(outStr, "tag: '!!seq'"))
	test.AssertResult(t, true, strings.Contains(outStr, "style: FlowStyle"))
}
// TestNodeInfoPrinter_MappingNode checks that kind and tag of a mapping node
// are reported in the node info output.
func TestNodeInfoPrinter_MappingNode(t *testing.T) {
	node := &CandidateNode{
		Kind: MappingNode,
		Tag:  "!!map",
	}
	listNodes := list.New()
	listNodes.PushBack(node)

	var output bytes.Buffer
	writer := bufio.NewWriter(&output)
	printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))

	err := printer.PrintResults(listNodes)
	writer.Flush()
	if err != nil {
		t.Fatalf("PrintResults error: %v", err)
	}

	outStr := output.String()
	test.AssertResult(t, true, strings.Contains(outStr, "kind: MappingNode"))
	test.AssertResult(t, true, strings.Contains(outStr, "tag: '!!map'"))
}
// TestNodeInfoPrinter_EmptyList checks that an empty node list produces no
// output at all and leaves PrintedAnything false.
func TestNodeInfoPrinter_EmptyList(t *testing.T) {
	listNodes := list.New()

	var output bytes.Buffer
	writer := bufio.NewWriter(&output)
	printer := NewNodeInfoPrinter(NewSinglePrinterWriter(writer))

	err := printer.PrintResults(listNodes)
	writer.Flush()
	if err != nil {
		t.Fatalf("PrintResults error: %v", err)
	}

	test.AssertResult(t, "", output.String())
	test.AssertResult(t, false, printer.PrintedAnything())
}

View File

@ -481,7 +481,7 @@ func TestPrinterNulSeparatorWithNullChar(t *testing.T) {
t.Fatal("Expected error for null character in NUL separated output")
}
expectedError := "can't serialize value because it contains NUL char and you are using NUL separated output"
expectedError := "can't serialise value because it contains NUL char and you are using NUL separated output"
if err.Error() != expectedError {
t.Fatalf("Expected error '%s', got '%s'", expectedError, err.Error())
}

View File

@ -2,11 +2,13 @@ package yqlib
// ShellVariablesPreferences controls the shell-variables output format.
type ShellVariablesPreferences struct {
	KeySeparator string // string used to join nested keys into one variable name
	UnwrapScalar bool   // when true, emit bare scalar values without wrapping
}

// NewDefaultShellVariablesPreferences returns the defaults: keys joined
// with "_" and scalars left wrapped.
func NewDefaultShellVariablesPreferences() ShellVariablesPreferences {
	return ShellVariablesPreferences{
		KeySeparator: "_",
		UnwrapScalar: false,
	}
}

15
pkg/yqlib/toml.go Normal file
View File

@ -0,0 +1,15 @@
package yqlib

// TomlPreferences controls TOML encoding behaviour.
type TomlPreferences struct {
	ColorsEnabled bool // when true, output is colourised with ANSI escape codes
}

// NewDefaultTomlPreferences returns the default preferences (colours off).
func NewDefaultTomlPreferences() TomlPreferences {
	return TomlPreferences{ColorsEnabled: false}
}

// Copy returns an independent copy of the preferences.
func (p *TomlPreferences) Copy() TomlPreferences {
	return TomlPreferences{ColorsEnabled: p.ColorsEnabled}
}

// ConfiguredTomlPreferences holds the package-wide TOML configuration,
// initialised to the defaults.
var ConfiguredTomlPreferences = NewDefaultTomlPreferences()

View File

@ -2,9 +2,12 @@ package yqlib
import (
"bufio"
"bytes"
"fmt"
"strings"
"testing"
"github.com/fatih/color"
"github.com/mikefarah/yq/v4/test"
)
@ -175,6 +178,115 @@ var expectedSampleWithHeader = `servers:
ip: 10.0.0.1
`
// Roundtrip fixtures: each TOML document below should survive a decode/encode
// cycle unchanged (see the "roundtrip" scenarios).
var rtInlineTableAttr = `name = { first = "Tom", last = "Preston-Werner" }
`

var rtTableSection = `[owner.contact]
name = "Tom"
age = 36
`

var rtArrayOfTables = `[[fruits]]
name = "apple"
[[fruits.varieties]]
name = "red delicious"
`

var rtArraysAndScalars = `A = ["hello", ["world", "again"]]
B = 12
`

var rtSimple = `A = "hello"
B = 12
`

var rtDeepPaths = `[person]
name = "hello"
address = "12 cat st"
`

var rtEmptyArray = `A = []
`

var rtSampleTable = `var = "x"
[owner.contact]
name = "Tom Preston-Werner"
age = 36
`

var rtEmptyTable = `[dependencies]
`

var rtComments = `# This is a comment
A = "hello" # inline comment
B = 12
# Table comment
[person]
name = "Tom" # name comment
`

// Reproduce bug for https://github.com/mikefarah/yq/issues/2588
// Bug: standalone comments inside a table cause subsequent key-values to be assigned at root.
var issue2588RustToolchainWithComments = `[owner]
# comment
name = "Tomer"
`

var tableWithComment = `[owner]
# comment
[things]
`

var sampleFromWeb = `# This is a TOML document
title = "TOML Example"
[owner]
name = "Tom Preston-Werner"
dob = 1979-05-27T07:32:00-08:00
[database]
enabled = true
ports = [8000, 8001, 8002]
data = [["delta", "phi"], [3.14]]
temp_targets = { cpu = 79.5, case = 72.0 }
# [servers] yq can't do this one yet
[servers.alpha]
ip = "10.0.0.1"
role = "frontend"
[servers.beta]
ip = "10.0.0.2"
role = "backend"
`

var subArrays = `
[[array]]
[[array.subarray]]
[[array.subarray.subsubarray]]
`

var tomlTableWithComments = `[section]
the_array = [
# comment
"value 1",
# comment
"value 2",
]
`

// expectedSubArrays is the YAML rendering of subArrays after decoding.
var expectedSubArrays = `array:
- subarray:
- subsubarray:
- {}
`
var tomlScenarios = []formatScenario{
{
skipDoc: true,
@ -382,6 +494,126 @@ var tomlScenarios = []formatScenario{
expected: expectedMultipleEmptyTables,
scenarioType: "decode",
},
{
description: "subArrays",
skipDoc: true,
input: subArrays,
expected: expectedSubArrays,
scenarioType: "decode",
},
// Roundtrip scenarios
{
description: "Roundtrip: inline table attribute",
input: rtInlineTableAttr,
expression: ".",
expected: rtInlineTableAttr,
scenarioType: "roundtrip",
},
{
description: "Roundtrip: table section",
input: rtTableSection,
expression: ".",
expected: rtTableSection,
scenarioType: "roundtrip",
},
{
description: "Roundtrip: array of tables",
input: rtArrayOfTables,
expression: ".",
expected: rtArrayOfTables,
scenarioType: "roundtrip",
},
{
description: "Roundtrip: arrays and scalars",
input: rtArraysAndScalars,
expression: ".",
expected: rtArraysAndScalars,
scenarioType: "roundtrip",
},
{
description: "Roundtrip: simple",
input: rtSimple,
expression: ".",
expected: rtSimple,
scenarioType: "roundtrip",
},
{
description: "Roundtrip: deep paths",
input: rtDeepPaths,
expression: ".",
expected: rtDeepPaths,
scenarioType: "roundtrip",
},
{
description: "Roundtrip: empty array",
input: rtEmptyArray,
expression: ".",
expected: rtEmptyArray,
scenarioType: "roundtrip",
},
{
description: "Roundtrip: sample table",
input: rtSampleTable,
expression: ".",
expected: rtSampleTable,
scenarioType: "roundtrip",
},
{
description: "Roundtrip: empty table",
input: rtEmptyTable,
expression: ".",
expected: rtEmptyTable,
scenarioType: "roundtrip",
},
{
description: "Roundtrip: comments",
input: rtComments,
expression: ".",
expected: rtComments,
scenarioType: "roundtrip",
},
{
skipDoc: true,
description: "Issue #2588: comments inside table must not flatten (.owner.name)",
input: issue2588RustToolchainWithComments,
expression: ".owner.name",
expected: "Tomer\n",
scenarioType: "decode",
},
{
skipDoc: true,
description: "Issue #2588: comments inside table must not flatten (.name)",
input: issue2588RustToolchainWithComments,
expression: ".name",
expected: "null\n",
scenarioType: "decode",
},
{
skipDoc: true,
input: issue2588RustToolchainWithComments,
expected: issue2588RustToolchainWithComments,
scenarioType: "roundtrip",
},
{
skipDoc: true,
input: tableWithComment,
expression: ".owner | headComment",
expected: "comment\n",
scenarioType: "roundtrip",
},
{
description: "Roundtrip: sample from web",
input: sampleFromWeb,
expression: ".",
expected: sampleFromWeb,
scenarioType: "roundtrip",
},
{
skipDoc: true,
input: tomlTableWithComments,
expected: tomlTableWithComments,
scenarioType: "roundtrip",
},
}
func testTomlScenario(t *testing.T, s formatScenario) {
@ -471,3 +703,295 @@ func TestTomlScenarios(t *testing.T) {
}
documentScenarios(t, "usage", "toml", genericScenarios, documentTomlScenario)
}
// TestTomlColourization tests that colourization correctly distinguishes
// between table section headers and inline arrays: the section colour
// (Yellow + Bold) must be applied only to [database] and [servers], never
// to the inline array [8000, 8001, 8002].
func TestTomlColourization(t *testing.T) {
	// Save and restore color state
	oldNoColor := color.NoColor
	color.NoColor = false
	defer func() { color.NoColor = oldNoColor }()

	// Test that inline arrays are not coloured as table sections
	encoder := &tomlEncoder{prefs: TomlPreferences{ColorsEnabled: true}}

	// Create TOML with both table sections and inline arrays
	input := []byte(`[database]
enabled = true
ports = [8000, 8001, 8002]
[servers]
alpha = "test"
`)

	result := encoder.colorizeToml(input)
	resultStr := string(result)

	// The bug would cause the inline array [8000, 8001, 8002] to be
	// coloured with the section colour (Yellow + Bold) instead of being
	// left uncoloured or coloured differently.
	//
	// To test this, we check that the section colour codes appear only
	// for actual table sections, not for inline arrays.

	// Get the ANSI codes for section colour (Yellow + Bold)
	sectionColourObj := color.New(color.FgYellow, color.Bold)
	sectionColourObj.EnableColor()
	sampleSection := sectionColourObj.Sprint("[database]")

	// Extract just the ANSI codes from the sample
	// ANSI codes start with \x1b[
	var ansiStart string
	for i := 0; i < len(sampleSection); i++ {
		if sampleSection[i] == '\x1b' {
			// Find the end of the ANSI sequence (ends with 'm')
			end := i
			for end < len(sampleSection) && sampleSection[end] != 'm' {
				end++
			}
			if end < len(sampleSection) {
				ansiStart = sampleSection[i : end+1]
				break
			}
		}
	}

	// Guard against an empty prefix: strings.Count(s, "") returns len(s)+1,
	// which would turn a failed extraction into a misleading count below.
	if ansiStart == "" {
		t.Fatal("could not extract ANSI colour prefix from sample section")
	}

	// Count how many times the section colour appears in the output
	// It should appear exactly twice: once for [database] and once for [servers]
	// If it appears more times (e.g., for [8000, 8001, 8002]), that's the bug
	sectionColourCount := strings.Count(resultStr, ansiStart)

	// We expect exactly 2 occurrences (for [database] and [servers])
	// The bug would cause more occurrences (e.g., also for [8000)
	if sectionColourCount != 2 {
		t.Errorf("Expected section colour to appear exactly 2 times (for [database] and [servers]), but it appeared %d times.\nOutput: %s", sectionColourCount, resultStr)
	}
}
// TestTomlColorisationNumberBug checks that invalid numeric sequences such as
// "123-+-45" are not colourised as a single number token, while valid
// scientific notation (1.23e+45 etc.) still colourises without error.
// Fix: the redundant t.Fail() after t.Errorf was removed — Errorf already
// marks the test as failed.
func TestTomlColorisationNumberBug(t *testing.T) {
	// Save and restore color state
	oldNoColor := color.NoColor
	color.NoColor = false
	defer func() { color.NoColor = oldNoColor }()

	encoder := NewTomlEncoder()
	tomlEncoder := encoder.(*tomlEncoder)

	// Test case that exposes the bug: "123-+-45" should NOT be colourised as a single number
	input := "A = 123-+-45\n"
	result := string(tomlEncoder.colorizeToml([]byte(input)))

	// The bug causes "123-+-45" to be colourised as one token
	// It should stop at "123" because the next character '-' is not valid in this position
	if strings.Contains(result, "123-+-45") {
		// Check if it's colourised as a single token (no color codes in the middle)
		idx := strings.Index(result, "123-+-45")
		// Look backwards for color code
		beforeIdx := idx - 1
		for beforeIdx >= 0 && result[beforeIdx] != '\x1b' {
			beforeIdx--
		}
		// Look forward for reset code
		afterIdx := idx + 8 // length of "123-+-45"
		hasResetAfter := false
		for afterIdx < len(result) && afterIdx < idx+20 {
			if result[afterIdx] == '\x1b' {
				hasResetAfter = true
				break
			}
			afterIdx++
		}
		if beforeIdx >= 0 && hasResetAfter {
			// The entire "123-+-45" is wrapped in color codes - this is the bug!
			t.Errorf("BUG DETECTED: '123-+-45' is incorrectly colourised as a single number")
			t.Errorf("Expected only '123' to be colourised as a number, but got the entire '123-+-45'")
			t.Logf("Full output: %q", result)
		}
	}

	// Additional test cases for the bug
	bugTests := []struct {
		name            string
		input           string
		invalidSequence string
		description     string
	}{
		{
			name:            "consecutive minuses",
			input:           "A = 123--45\n",
			invalidSequence: "123--45",
			description:     "'123--45' should not be colourised as a single number",
		},
		{
			name:            "plus in middle",
			input:           "A = 123+45\n",
			invalidSequence: "123+45",
			description:     "'123+45' should not be colourised as a single number",
		},
	}

	for _, tt := range bugTests {
		t.Run(tt.name, func(t *testing.T) {
			result := string(tomlEncoder.colorizeToml([]byte(tt.input)))
			if strings.Contains(result, tt.invalidSequence) {
				idx := strings.Index(result, tt.invalidSequence)
				beforeIdx := idx - 1
				for beforeIdx >= 0 && result[beforeIdx] != '\x1b' {
					beforeIdx--
				}
				afterIdx := idx + len(tt.invalidSequence)
				hasResetAfter := false
				for afterIdx < len(result) && afterIdx < idx+20 {
					if result[afterIdx] == '\x1b' {
						hasResetAfter = true
						break
					}
					afterIdx++
				}
				if beforeIdx >= 0 && hasResetAfter {
					t.Errorf("BUG: %s", tt.description)
					t.Logf("Full output: %q", result)
				}
			}
		})
	}

	// Test that valid scientific notation still works
	validTests := []struct {
		name  string
		input string
	}{
		{"scientific positive", "A = 1.23e+45\n"},
		{"scientific negative", "A = 6.626e-34\n"},
		{"scientific uppercase", "A = 1.23E+10\n"},
	}

	for _, tt := range validTests {
		t.Run(tt.name, func(t *testing.T) {
			result := tomlEncoder.colorizeToml([]byte(tt.input))
			if len(result) == 0 {
				t.Error("Expected non-empty colourised output")
			}
		})
	}
}
// Tests that the encoder handles empty path slices gracefully
func TestTomlEmptyPathPanic(t *testing.T) {
encoder := NewTomlEncoder()
tomlEncoder := encoder.(*tomlEncoder)
var buf bytes.Buffer
// Create a simple scalar node
scalarNode := &CandidateNode{
Kind: ScalarNode,
Tag: "!!str",
Value: "test",
}
// Test with empty path - this should not panic
err := tomlEncoder.encodeTopLevelEntry(&buf, []string{}, scalarNode)
if err == nil {
t.Error("Expected error when encoding with empty path, got nil")
}
}
// TestTomlStringEscapeColourization tests that string colourization correctly
// handles escape sequences, particularly escaped quotes at the end of strings
func TestTomlStringEscapeColourization(t *testing.T) {
// Save and restore color state
oldNoColor := color.NoColor
color.NoColor = false
defer func() { color.NoColor = oldNoColor }()
encoder := NewTomlEncoder()
tomlEncoder := encoder.(*tomlEncoder)
testCases := []struct {
name string
input string
description string
}{
{
name: "escaped quote at end",
input: `A = "test\""` + "\n",
description: "String ending with escaped quote should be colourised correctly",
},
{
name: "escaped backslash then quote",
input: `A = "test\\\""` + "\n",
description: "String with escaped backslash followed by escaped quote",
},
{
name: "escaped quote in middle",
input: `A = "test\"middle"` + "\n",
description: "String with escaped quote in the middle should be colourised correctly",
},
{
name: "multiple escaped quotes",
input: `A = "\"test\""` + "\n",
description: "String with escaped quotes at start and end",
},
{
name: "escaped newline",
input: `A = "test\n"` + "\n",
description: "String with escaped newline should be colourised correctly",
},
{
name: "single quote with escaped single quote",
input: `A = 'test\''` + "\n",
description: "Single-quoted string with escaped single quote",
},
}
for _, tt := range testCases {
t.Run(tt.name, func(t *testing.T) {
// The test should not panic and should return some output
result := tomlEncoder.colorizeToml([]byte(tt.input))
if len(result) == 0 {
t.Error("Expected non-empty colourised output")
}
// Check that the result contains the input string (with color codes)
// At minimum, it should contain "A" and "="
resultStr := string(result)
if !strings.Contains(resultStr, "A") || !strings.Contains(resultStr, "=") {
t.Errorf("Expected output to contain 'A' and '=', got: %q", resultStr)
}
})
}
}
// TestTomlEncoderPrintDocumentSeparator checks that the TOML encoder emits
// nothing for a document separator (TOML has no multi-document syntax).
func TestTomlEncoderPrintDocumentSeparator(t *testing.T) {
	encoder := NewTomlEncoder()
	var buf bytes.Buffer
	writer := bufio.NewWriter(&buf)

	err := encoder.PrintDocumentSeparator(writer)
	writer.Flush()

	test.AssertResult(t, nil, err)
	test.AssertResult(t, "", buf.String())
}
// TestTomlEncoderPrintLeadingContent checks that the TOML encoder discards
// leading content (writes nothing and returns no error).
func TestTomlEncoderPrintLeadingContent(t *testing.T) {
	encoder := NewTomlEncoder()
	var buf bytes.Buffer
	writer := bufio.NewWriter(&buf)

	err := encoder.PrintLeadingContent(writer, "some content")
	writer.Flush()

	test.AssertResult(t, nil, err)
	test.AssertResult(t, "", buf.String())
}
// TestTomlEncoderCanHandleAliases checks that the TOML encoder reports no
// alias support.
func TestTomlEncoderCanHandleAliases(t *testing.T) {
	encoder := NewTomlEncoder()
	test.AssertResult(t, false, encoder.CanHandleAliases())
}

View File

@ -139,6 +139,66 @@ func TestWriteInPlaceHandlerImpl_FinishWriteInPlace_Failure(t *testing.T) {
}
}
// TestWriteInPlaceHandlerImpl_FinishWriteInPlace_Symlink_Success verifies that
// writing in place through a symlink updates the target file while leaving the
// symlink itself intact.
// Fix: the error from the explicit temp-file Close is now checked — on some
// filesystems write errors only surface at close time, and ignoring it could
// hide a truncated write before FinishWriteInPlace replaces the target.
func TestWriteInPlaceHandlerImpl_FinishWriteInPlace_Symlink_Success(t *testing.T) {
	// Create a temporary directory and file for testing
	tempDir := t.TempDir()
	inputFile := filepath.Join(tempDir, "input.yaml")
	symlinkFile := filepath.Join(tempDir, "symlink.yaml")

	// Create input file with some content
	content := []byte("test: value\n")
	err := os.WriteFile(inputFile, content, 0600)
	if err != nil {
		t.Fatalf("Failed to create input file: %v", err)
	}

	err = os.Symlink(inputFile, symlinkFile)
	if err != nil {
		t.Fatalf("Failed to symlink to input file: %v", err)
	}

	handler := NewWriteInPlaceHandler(symlinkFile)
	tempFile, err := handler.CreateTempFile()
	if err != nil {
		t.Fatalf("CreateTempFile failed: %v", err)
	}
	// Safety net for early Fatalf exits; the checked Close below is the one
	// that matters (a second Close merely returns an error we discard).
	defer tempFile.Close()

	// Write some content to temp file
	tempContent := []byte("updated: content\n")
	_, err = tempFile.Write(tempContent)
	if err != nil {
		t.Fatalf("Failed to write to temp file: %v", err)
	}
	if err := tempFile.Close(); err != nil {
		t.Fatalf("Failed to close temp file: %v", err)
	}

	// Test successful finish
	err = handler.FinishWriteInPlace(true)
	if err != nil {
		t.Fatalf("FinishWriteInPlace failed: %v", err)
	}

	// Verify that the symlink is still present (Lstat does not follow links)
	info, err := os.Lstat(symlinkFile)
	if err != nil {
		t.Fatalf("Failed to lstat input file: %v", err)
	}
	if info.Mode()&os.ModeSymlink == 0 {
		t.Errorf("Input file symlink is no longer present")
	}

	// Verify the original file was updated
	updatedContent, err := os.ReadFile(inputFile)
	if err != nil {
		t.Fatalf("Failed to read updated file: %v", err)
	}
	if string(updatedContent) != string(tempContent) {
		t.Errorf("File content not updated correctly. Expected %q, got %q",
			string(tempContent), string(updatedContent))
	}
}
func TestWriteInPlaceHandlerImpl_CreateTempFile_Permissions(t *testing.T) {
// Create a temporary directory and file for testing
tempDir := t.TempDir()

View File

@ -713,10 +713,10 @@ func documentXMLDecodeScenario(w *bufio.Writer, s formatScenario) {
writeOrPanic(w, "then\n")
expression := s.expression
if expression == "" {
expression = "."
if s.expression != "" {
expression = fmt.Sprintf(" '%v'", s.expression)
}
writeOrPanic(w, fmt.Sprintf("```bash\nyq -oy '%v' sample.xml\n```\n", expression))
writeOrPanic(w, fmt.Sprintf("```bash\nyq -oy%v sample.xml\n```\n", expression))
writeOrPanic(w, "will output\n")
writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", mustProcessFormatScenario(s, NewXMLDecoder(ConfiguredXMLPreferences), NewYamlEncoder(ConfiguredYamlPreferences))))
@ -734,7 +734,7 @@ func documentXMLDecodeKeepNsScenario(w *bufio.Writer, s formatScenario) {
writeOrPanic(w, fmt.Sprintf("```xml\n%v\n```\n", s.input))
writeOrPanic(w, "then\n")
writeOrPanic(w, "```bash\nyq --xml-keep-namespace=false '.' sample.xml\n```\n")
writeOrPanic(w, "```bash\nyq --xml-keep-namespace=false sample.xml\n```\n")
writeOrPanic(w, "will output\n")
prefs := NewDefaultXmlPreferences()
prefs.KeepNamespace = false
@ -758,7 +758,7 @@ func documentXMLDecodeKeepNsRawTokenScenario(w *bufio.Writer, s formatScenario)
writeOrPanic(w, fmt.Sprintf("```xml\n%v\n```\n", s.input))
writeOrPanic(w, "then\n")
writeOrPanic(w, "```bash\nyq --xml-raw-token=false '.' sample.xml\n```\n")
writeOrPanic(w, "```bash\nyq --xml-raw-token=false sample.xml\n```\n")
writeOrPanic(w, "will output\n")
prefs := NewDefaultXmlPreferences()
@ -803,7 +803,7 @@ func documentXMLRoundTripScenario(w *bufio.Writer, s formatScenario) {
writeOrPanic(w, fmt.Sprintf("```xml\n%v\n```\n", s.input))
writeOrPanic(w, "then\n")
writeOrPanic(w, "```bash\nyq '.' sample.xml\n```\n")
writeOrPanic(w, "```bash\nyq sample.xml\n```\n")
writeOrPanic(w, "will output\n")
writeOrPanic(w, fmt.Sprintf("```xml\n%v```\n\n", mustProcessFormatScenario(s, NewXMLDecoder(ConfiguredXMLPreferences), NewXMLEncoder(ConfiguredXMLPreferences))))
@ -821,7 +821,7 @@ func documentXMLSkipDirectivesScenario(w *bufio.Writer, s formatScenario) {
writeOrPanic(w, fmt.Sprintf("```xml\n%v\n```\n", s.input))
writeOrPanic(w, "then\n")
writeOrPanic(w, "```bash\nyq --xml-skip-directives '.' sample.xml\n```\n")
writeOrPanic(w, "```bash\nyq --xml-skip-directives sample.xml\n```\n")
writeOrPanic(w, "will output\n")
prefs := NewDefaultXmlPreferences()
prefs.SkipDirectives = true

View File

@ -8,6 +8,7 @@ type YamlPreferences struct {
UnwrapScalar bool
EvaluateTogether bool
FixMergeAnchorToSpec bool
CompactSequenceIndent bool
}
func NewDefaultYamlPreferences() YamlPreferences {
@ -19,6 +20,7 @@ func NewDefaultYamlPreferences() YamlPreferences {
UnwrapScalar: true,
EvaluateTogether: false,
FixMergeAnchorToSpec: false,
CompactSequenceIndent: false,
}
}
@ -31,6 +33,7 @@ func (p *YamlPreferences) Copy() YamlPreferences {
UnwrapScalar: p.UnwrapScalar,
EvaluateTogether: p.EvaluateTogether,
FixMergeAnchorToSpec: p.FixMergeAnchorToSpec,
CompactSequenceIndent: p.CompactSequenceIndent,
}
}

View File

@ -13,6 +13,31 @@ var yamlFormatScenarios = []formatScenario{
input: "--- cat",
expected: "---\ncat\n",
},
{
description: "CRLF doc separator",
skipDoc: true,
input: "---\r\ncat\r\n",
expected: "---\r\ncat\r\n",
},
{
description: "yaml directive preserved (LF)",
skipDoc: true,
input: "%YAML 1.1\n---\ncat\n",
expected: "%YAML 1.1\n---\ncat\n",
},
{
description: "yaml directive preserved (CRLF)",
skipDoc: true,
input: "%YAML 1.1\r\n---\r\ncat\r\n",
expected: "%YAML 1.1\r\n---\r\ncat\r\n",
},
{
description: "comment only no trailing newline",
skipDoc: true,
input: "# hello",
expected: "# hello\n",
},
{
description: "scalar with doc separator",
skipDoc: true,

View File

@ -38,6 +38,8 @@ cleanup
cmlu
colorise
colors
Colors
colourize
compinit
coolioo
coverprofile
@ -189,8 +191,11 @@ risentveber
rmescandon
Rosey
roundtrip
roundtrips
Roundtrip
roundtripping
Interp
interp
runningvms
sadface
selfupdate
@ -265,4 +270,31 @@ noprops
nosh
noshell
tinygo
nonexistent
hclsyntax
hclwrite
nohcl
zclconf
cty
go-cty
Colorisation
goimports
errorlint
RDBMS
expeñded
bananabananabananabanana
edwinjhlee
flox
unlabelled
kyaml
KYAML
nokyaml
buildvcs
behaviour
GOFLAGS
gocache
subsubarray
Ffile
Fquery
coverpkg
gsub

View File

@ -1,3 +1,41 @@
4.52.4:
- Dropping windows/arm - no longer supported in cross-compile
4.52.3:
- Fixing comments in TOML arrays (#2592)
- Bumped dependencies
4.52.2:
- Fixed bad instructions file breaking go-install (#2587) Thanks @theyoprst
- Fixed TOML table scope after comments (#2588) Thanks @tomers
- Multiply uses a readonly context (#2558)
- Fixed merge globbing wildcards in keys (#2564)
- Fixing TOML subarray parsing issue (#2581)
4.52.1:
- TOML encoder support - you can now roundtrip! #1364
- Parent now supports negative indices, and added a 'root' command for referencing the top level document
- Fixed scalar encoding for HCL
- Add --yaml-compact-seq-indent / -c flag for compact sequence indentation (#2583) Thanks @jfenal
- Add symlink check to file rename util (#2576) Thanks @Elias-elastisys
- Powershell fixed default command used for __completeNoDesc alias (#2568) Thanks @teejaded
- Unwrap scalars in shell output mode. (#2548) Thanks @flintwinters
- Added K8S KYAML output format support (#2560) Thanks @robbat2
- Bumped dependencies
- Special shout out to @ccoVeille for reviewing my PRs!
4.50.1:
- Added HCL support!
- Fixing handling of CRLF #2352
- Bumped dependencies
4.49.2:
- Fixing escape character bugs :sweat: #2517
- Fixing snap release pipeline #2518 Thanks @aalexjo
4.49.1:
- Added `--security` flags to disable env and file ops #2515
- Fixing TOML ArrayTable parsing issues #1758

View File

@ -1,2 +1,2 @@
#!/bin/bash
go build -tags "yq_nolua yq_noini yq_notoml yq_noxml yq_nojson" -ldflags "-s -w" .
go build -tags "yq_nolua yq_noini yq_notoml yq_noxml yq_nojson yq_nohcl yq_nokyaml" -ldflags "-s -w" .

View File

@ -1,4 +1,4 @@
#!/bin/bash
# Currently, the `yq_nojson` feature must be enabled when using TinyGo.
tinygo build -no-debug -tags "yq_nolua yq_noini yq_notoml yq_noxml yq_nojson yq_nocsv yq_nobase64 yq_nouri yq_noprops yq_nosh yq_noshell" .
tinygo build -no-debug -tags "yq_nolua yq_noini yq_notoml yq_noxml yq_nojson yq_nocsv yq_nobase64 yq_nouri yq_noprops yq_nosh yq_noshell yq_nohcl yq_nokyaml" .

View File

@ -3,8 +3,6 @@
set -o errexit
set -o pipefail
# TODO: Check if the found golangci-lint version matches the expected version (e.g., v1.62.0), especially if falling back to PATH version.
GOPATH_LINT="$(go env GOPATH)/bin/golangci-lint"
BIN_LINT="./bin/golangci-lint"
LINT_CMD=""
@ -22,4 +20,4 @@ else
exit 1
fi
"$LINT_CMD" run --verbose
GOFLAGS="${GOFLAGS}" "$LINT_CMD" run --verbose

View File

@ -3,7 +3,9 @@
set -e
echo "Running tests and generating coverage..."
go test -coverprofile=coverage.out -v $(go list ./... | grep -v -E 'examples' | grep -v -E 'test')
packages=$(go list ./... | grep -v -E 'examples' | grep -v -E 'test' | tr '\n' ',' | sed 's/,$//')
test_packages=$(go list ./... | grep -v -E 'examples' | grep -v -E 'test' | grep -v '^github.com/mikefarah/yq/v4$')
go test -coverprofile=coverage.out -coverpkg="$packages" -v $test_packages
echo "Generating HTML coverage report..."
go tool cover -html=coverage.out -o coverage.html
@ -58,11 +60,31 @@ tail -n +1 coverage_sorted.txt | while read percent file; do
done
echo ""
echo "Top 10 files needing attention (lowest coverage):"
echo "Top 10 files by uncovered statements:"
echo "================================================="
grep -v "TOTAL:" coverage_sorted.txt | tail -10 | while read percent file; do
# Calculate uncovered statements for each file and sort by that
go tool cover -func=coverage.out | grep -E "\.go:[0-9]+:" | \
awk '{
# Extract filename and percentage
split($1, parts, ":")
file = parts[1]
pct = $NF
gsub(/%/, "", pct)
# Track stats per file
total[file]++
covered[file] += pct
}
END {
for (file in total) {
avg_pct = covered[file] / total[file]
uncovered = total[file] * (100 - avg_pct) / 100
covered_count = total[file] - uncovered
printf "%.0f %d %.0f %.1f %s\n", uncovered, total[file], covered_count, avg_pct, file
}
}' | sort -rn | head -10 | while read uncovered total covered pct file; do
filename=$(basename "$file")
printf "%-60s %8.1f%%\n" "$filename" "$percent"
printf "%-60s %4d uncovered (%4d/%4d, %5.1f%%)\n" "$filename" "$uncovered" "$covered" "$total" "$pct"
done
echo ""

Some files were not shown because too many files have changed in this diff Show More