mirror of
https://github.com/mikefarah/yq.git
synced 2026-03-10 15:54:26 +00:00
Compare commits
154 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
16e4df2304 | ||
|
|
79a92d0478 | ||
|
|
88a31ae8c6 | ||
|
|
5a7e72a743 | ||
|
|
562531d936 | ||
|
|
2c471b6498 | ||
|
|
f4ef6ef3cf | ||
|
|
f49f2bd2d8 | ||
|
|
6ccc7b7797 | ||
|
|
b3e1fbb7d1 | ||
|
|
288ca2d114 | ||
|
|
eb04fa87af | ||
|
|
2be0094729 | ||
|
|
3c18d5b035 | ||
|
|
2dcc2293da | ||
|
|
eb4fde4ef8 | ||
|
|
06ea4cf62e | ||
|
|
37089d24af | ||
|
|
7cf88a0291 | ||
|
|
41adc1ad18 | ||
|
|
b4b96f2a68 | ||
|
|
2824d66a65 | ||
|
|
4bbffa9022 | ||
|
|
bdeedbd275 | ||
|
|
3d918acc2a | ||
|
|
01005cc8fd | ||
|
|
c4468165f2 | ||
|
|
e35d32a0b6 | ||
|
|
78192a915b | ||
|
|
c4f4e6d416 | ||
|
|
5f90039bdc | ||
|
|
c6fa371d8d | ||
|
|
3a27e39778 | ||
|
|
414a085563 | ||
|
|
542801926f | ||
|
|
1bcc44ff9b | ||
|
|
a6f1b02340 | ||
|
|
f98028c925 | ||
|
|
c6029376a5 | ||
|
|
23abf50fef | ||
|
|
64ec1f4aa7 | ||
|
|
4973c355e6 | ||
|
|
ecbdcada9f | ||
|
|
029ba68014 | ||
|
|
4a06cce376 | ||
|
|
37e48cea44 | ||
|
|
207bec6b29 | ||
|
|
7198d16575 | ||
|
|
5d6c2047cf | ||
|
|
7f60daad20 | ||
|
|
b7cbe59fd7 | ||
|
|
9fa353b123 | ||
|
|
c6ecad1546 | ||
|
|
56eb3655b8 | ||
|
|
1de4ec59f2 | ||
|
|
c132c32731 | ||
|
|
0914121d29 | ||
|
|
aa5134e645 | ||
|
|
4d620bfa26 | ||
|
|
b8d90fd574 | ||
|
|
c1b81f1a03 | ||
|
|
ea40e14fb1 | ||
|
|
b974d973ee | ||
|
|
66ec487792 | ||
|
|
161be10791 | ||
|
|
aa858520a8 | ||
|
|
ac2889c296 | ||
|
|
626624af7b | ||
|
|
b0d2522f80 | ||
|
|
2ee38e15b6 | ||
|
|
4e9d5e8e48 | ||
|
|
1338b521ff | ||
|
|
3a5323824f | ||
|
|
8780172b33 | ||
|
|
5f9bf8d241 | ||
|
|
065b200af9 | ||
|
|
745a7ffb3c | ||
|
|
a305d706d4 | ||
|
|
0671ccd2cc | ||
|
|
4d8cd450bd | ||
|
|
d2d657eacc | ||
|
|
f4fd8c585a | ||
|
|
e4bf8a1e0a | ||
|
|
fd405749f9 | ||
|
|
51ddf8d357 | ||
|
|
77eccfd3db | ||
|
|
554bf5a2f2 | ||
|
|
8162f3a100 | ||
|
|
48707369a0 | ||
|
|
4f72c37de7 | ||
|
|
795f9c954c | ||
|
|
3d35386ad9 | ||
|
|
154a4ace01 | ||
|
|
effdfe1221 | ||
|
|
8af768a015 | ||
|
|
5f3dcb1ccf | ||
|
|
6270c29f54 | ||
|
|
df3101ce53 | ||
|
|
65e79845d4 | ||
|
|
b4d8131197 | ||
|
|
c75a2fad86 | ||
|
|
8d430cf41c | ||
|
|
2e96a28270 | ||
|
|
656f07d0c2 | ||
|
|
1852073f29 | ||
|
|
7d2c774e8f | ||
|
|
69076dfe81 | ||
|
|
9e17cd683f | ||
|
|
eb3d0e63e3 | ||
|
|
2072808def | ||
|
|
7d47b36b69 | ||
|
|
53f10ae360 | ||
|
|
22510ab8d5 | ||
|
|
588d0bb3dd | ||
|
|
7ccaf8e700 | ||
|
|
a1a27b8536 | ||
|
|
1b91fc63ea | ||
|
|
9e0c5fd3c9 | ||
|
|
5d0481c0d2 | ||
|
|
f91176a204 | ||
|
|
8e86bdb876 | ||
|
|
fc164ca9c3 | ||
|
|
810e9d921e | ||
|
|
45be35c063 | ||
|
|
39fbf01fa8 | ||
|
|
306dc931a5 | ||
|
|
f00852bc6c | ||
|
|
c716d157f2 | ||
|
|
e49e588ab5 | ||
|
|
389486829d | ||
|
|
d32e71f25b | ||
|
|
796317b885 | ||
|
|
258b84a05e | ||
|
|
e056b91a00 | ||
|
|
85b0985a60 | ||
|
|
874cbc4d3c | ||
|
|
f6c780e793 | ||
|
|
8c25f33df4 | ||
|
|
2869919cb4 | ||
|
|
458d02f3ab | ||
|
|
877a47cb19 | ||
|
|
3050ca5303 | ||
|
|
49b6477c49 | ||
|
|
78bc9baffd | ||
|
|
1f2b0fe76b | ||
|
|
1228bcfa75 | ||
|
|
7f72595a12 | ||
|
|
ff2c1c930c | ||
|
|
36d410b348 | ||
|
|
6dfe002058 | ||
|
|
ed4f468c97 | ||
|
|
8b2ba41c6c | ||
|
|
02b28073bf | ||
|
|
6957399dc0 |
4
.github/ISSUE_TEMPLATE/bug_report_v4.md
vendored
4
.github/ISSUE_TEMPLATE/bug_report_v4.md
vendored
@ -34,13 +34,13 @@ The command you ran:
|
||||
yq eval-all 'select(fileIndex==0) | .a.b.c' data1.yml data2.yml
|
||||
```
|
||||
|
||||
**Actual behavior**
|
||||
**Actual behaviour**
|
||||
|
||||
```yaml
|
||||
cat: meow
|
||||
```
|
||||
|
||||
**Expected behavior**
|
||||
**Expected behaviour**
|
||||
|
||||
```yaml
|
||||
this: should really work
|
||||
|
||||
1
.github/instructions/instructions.md
vendored
Normal file
1
.github/instructions/instructions.md
vendored
Normal file
@ -0,0 +1 @@
|
||||
When you find a bug - make sure to include a new test that exposes the bug, as well as the fix for the bug itself.
|
||||
2
.github/workflows/codeql.yml
vendored
2
.github/workflows/codeql.yml
vendored
@ -38,7 +38,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
|
||||
8
.github/workflows/docker-release.yml
vendored
8
.github/workflows/docker-release.yml
vendored
@ -14,10 +14,10 @@ jobs:
|
||||
IMAGE_NAME: mikefarah/yq
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
uses: docker/setup-qemu-action@v4
|
||||
with:
|
||||
platforms: all
|
||||
|
||||
@ -31,13 +31,13 @@ jobs:
|
||||
run: echo ${{ steps.buildx.outputs.platforms }} && docker version
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@v4
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@v4
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
|
||||
4
.github/workflows/go.yml
vendored
4
.github/workflows/go.yml
vendored
@ -11,13 +11,13 @@ jobs:
|
||||
steps:
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v5
|
||||
uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: '^1.20'
|
||||
id: go
|
||||
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Get dependencies
|
||||
run: |
|
||||
|
||||
4
.github/workflows/release.yml
vendored
4
.github/workflows/release.yml
vendored
@ -9,8 +9,8 @@ jobs:
|
||||
publishGitRelease:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/setup-go@v5
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: '^1.20'
|
||||
check-latest: true
|
||||
|
||||
8
.github/workflows/snap-release.yml
vendored
8
.github/workflows/snap-release.yml
vendored
@ -12,12 +12,16 @@ jobs:
|
||||
environment: snap
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v6
|
||||
- uses: snapcore/action-build@v1
|
||||
id: build
|
||||
env:
|
||||
SNAPCRAFT_STORE_CREDENTIALS: ${{ secrets.STORE_LOGIN }}
|
||||
with:
|
||||
snapcraft-args: "remote-build --launchpad-accept-public-upload"
|
||||
- uses: snapcore/action-publish@v1
|
||||
env:
|
||||
SNAPCRAFT_STORE_CREDENTIALS: ${{ secrets.STORE_LOGIN }}
|
||||
with:
|
||||
snap: ${{ steps.build.outputs.snap }}
|
||||
release: stable
|
||||
release: stable
|
||||
|
||||
2
.github/workflows/test-yq.yml
vendored
2
.github/workflows/test-yq.yml
vendored
@ -13,7 +13,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v6
|
||||
- name: Get test
|
||||
id: get_value
|
||||
uses: mikefarah/yq@master
|
||||
|
||||
7
.gitignore
vendored
7
.gitignore
vendored
@ -25,6 +25,7 @@ _testmain.go
|
||||
cover.out
|
||||
coverage.out
|
||||
coverage.html
|
||||
coverage_sorted.txt
|
||||
*.exe
|
||||
*.test
|
||||
*.prof
|
||||
@ -42,9 +43,11 @@ yq*.snap
|
||||
|
||||
test.yml
|
||||
test*.yml
|
||||
test*.tf
|
||||
test*.xml
|
||||
test*.toml
|
||||
test*.yaml
|
||||
*.kyaml
|
||||
test_dir1/
|
||||
test_dir2/
|
||||
0.yml
|
||||
@ -67,3 +70,7 @@ debian/files
|
||||
.vscode
|
||||
|
||||
yq3
|
||||
|
||||
# Golang
|
||||
.gomodcache/
|
||||
.gocache/
|
||||
|
||||
@ -14,6 +14,11 @@ linters:
|
||||
- unconvert
|
||||
- unparam
|
||||
settings:
|
||||
misspell:
|
||||
locale: UK
|
||||
ignore-rules:
|
||||
- color
|
||||
- colors
|
||||
depguard:
|
||||
rules:
|
||||
prevent_unmaintained_packages:
|
||||
|
||||
@ -23,6 +23,7 @@ builds:
|
||||
- linux_amd64
|
||||
- linux_arm
|
||||
- linux_arm64
|
||||
- linux_loong64
|
||||
- linux_mips
|
||||
- linux_mips64
|
||||
- linux_mips64le
|
||||
@ -38,7 +39,6 @@ builds:
|
||||
- openbsd_amd64
|
||||
- windows_386
|
||||
- windows_amd64
|
||||
- windows_arm
|
||||
- windows_arm64
|
||||
|
||||
no_unique_dist_dir: true
|
||||
|
||||
@ -11,7 +11,7 @@ appearance, race, religion, or sexual identity and orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment
|
||||
Examples of behaviour that contributes to creating a positive environment
|
||||
include:
|
||||
|
||||
* Using welcoming and inclusive language
|
||||
@ -20,7 +20,7 @@ include:
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
Examples of unacceptable behaviour by participants include:
|
||||
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
advances
|
||||
@ -34,13 +34,13 @@ Examples of unacceptable behavior by participants include:
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable
|
||||
behavior and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behavior.
|
||||
behaviour and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behaviour.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or
|
||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||
permanently any contributor for other behaviors that they deem inappropriate,
|
||||
permanently any contributor for other behaviours that they deem inappropriate,
|
||||
threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
@ -54,7 +54,7 @@ further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
Instances of abusive, harassing, or otherwise unacceptable behaviour may be
|
||||
reported by contacting the project team at mikefarah@gmail.com. All
|
||||
complaints will be reviewed and investigated and will result in a response that
|
||||
is deemed necessary and appropriate to the circumstances. The project team is
|
||||
|
||||
240
CONTRIBUTING.md
240
CONTRIBUTING.md
@ -3,55 +3,227 @@ Not all new PRs will be merged in
|
||||
|
||||
It's recommended to check with the owner first (e.g. raise an issue) to discuss a new feature before developing, to ensure your hard efforts don't go to waste.
|
||||
|
||||
PRs to fix bugs and issues are almost always be welcome, just make sure you write tests as well.
|
||||
PRs to fix bugs and issues are almost always welcome :pray: please ensure you write tests as well.
|
||||
|
||||
** PRs that significantly refactor code and release pipelines PRs will generally _NOT_ be accepted **
|
||||
|
||||
Significant refactors take a lot of time to understand and can have all sorts of unintended side effects.
|
||||
|
||||
Release pipeline PRs are a security risk - it's too easy for a serious vulnerability to sneak in (either intended or not). If there is a new cool way of releasing things, raise an issue for discussion first - it will need to be gone over with a fine tooth comb.
|
||||
|
||||
At this stage, yq is not going to maintain any other release platforms other than GitHub and Docker - that said, I'm more than happy to put in other community maintained methods in the README for visibility :heart:
|
||||
The following types of PRs will _not_ be accepted:
|
||||
- **Significant refactors** take a lot of time to understand and can have all sorts of unintended side effects. If you think there's a better way to do things (that requires significant changes) raise an issue for discussion first :)
|
||||
- **Release pipeline PRs** are a security risk - it's too easy for a serious vulnerability to sneak in (either intended or not). If there is a new cool way of releasing things, raise an issue for discussion first - it will need to be gone over with a fine tooth comb.
|
||||
- **Version bumps** are handled by dependabot, the bot will auto-raise PRs and they will be regularly merged in.
|
||||
- **New release platforms** At this stage, yq is not going to maintain any other release platforms other than GitHub and Docker - that said, I'm more than happy to put in other community maintained methods in the README for visibility :heart:
|
||||
|
||||
|
||||
# Development
|
||||
|
||||
1. Install (Golang)[https://golang.org/]
|
||||
1. Run `scripts/devtools.sh` to install the required devtools
|
||||
2. Run `make [local] vendor` to install the vendor dependencies
|
||||
2. Run `make [local] test` to ensure you can run the existing tests
|
||||
3. Write unit tests - (see existing examples). Changes will not be accepted without corresponding unit tests.
|
||||
4. Make the code changes.
|
||||
5. `make [local] test` to lint code and run tests
|
||||
6. Profit! ok no profit, but raise a PR and get kudos :)
|
||||
## Initial Setup
|
||||
|
||||
1. Install [Golang](https://golang.org/) (version 1.24.0 or later)
|
||||
2. Run `scripts/devtools.sh` to install required development tools:
|
||||
- golangci-lint for code linting
|
||||
- gosec for security analysis
|
||||
3. Run `make [local] vendor` to install vendor dependencies
|
||||
4. Run `make [local] test` to ensure you can run the existing tests
|
||||
|
||||
## Development Workflow
|
||||
|
||||
1. **Write unit tests first** - Changes will not be accepted without corresponding unit tests (see Testing section below)
|
||||
2. **Make your code changes**
|
||||
3. **Run tests and linting**: `make [local] test` (this runs formatting, linting, security checks, and tests)
|
||||
4. **Create your PR** and get kudos! :)
|
||||
|
||||
## Make Commands
|
||||
|
||||
- Use `make [local] <command>` for local development (runs in Docker container)
|
||||
- Use `make <command>` for CI/CD environments
|
||||
- Common commands:
|
||||
- `make [local] vendor` - Install dependencies
|
||||
- `make [local] test` - Run all checks and tests
|
||||
- `make [local] build` - Build the yq binary
|
||||
- `make [local] format` - Format code
|
||||
- `make [local] check` - Run linting and security checks
|
||||
|
||||
# Code Quality
|
||||
|
||||
## Linting and Formatting
|
||||
|
||||
The project uses strict linting rules defined in `.golangci.yml`. All code must pass:
|
||||
|
||||
- **Code formatting**: gofmt, goimports, gci
|
||||
- **Linting**: revive, errorlint, gosec, misspell, and others
|
||||
- **Security checks**: gosec security analysis
|
||||
- **Spelling checks**: misspell detection
|
||||
|
||||
Run `make [local] check` to verify your code meets all quality standards.
|
||||
|
||||
## Code Style Guidelines
|
||||
|
||||
- Follow standard Go conventions
|
||||
- Use meaningful variable names
|
||||
- Add comments for public functions and complex logic
|
||||
- Keep functions focused and reasonably sized
|
||||
- Use the project's existing patterns and conventions
|
||||
|
||||
# Testing
|
||||
|
||||
## Test Structure
|
||||
|
||||
Tests in yq use the `expressionScenario` pattern. Each test scenario includes:
|
||||
- `expression`: The yq expression to test
|
||||
- `document`: Input YAML/JSON (optional)
|
||||
- `expected`: Expected output
|
||||
- `skipDoc`: Whether to skip documentation generation
|
||||
|
||||
## Writing Tests
|
||||
|
||||
1. **Find the appropriate test file** (e.g., `operator_add_test.go` for addition operations)
|
||||
2. **Add your test scenario** to the `*OperatorScenarios` slice
|
||||
3. **Run the specific test**: `go test -run TestAddOperatorScenarios` (replace with appropriate test name)
|
||||
4. **Verify documentation generation** (see Documentation section)
|
||||
|
||||
## Test Examples
|
||||
|
||||
```go
|
||||
var addOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
expression: `"foo" + "bar"`,
|
||||
expected: []string{
|
||||
"D0, P[], (!!str)::foobar\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
document: "apples: 3",
|
||||
expression: `.apples + 3`,
|
||||
expected: []string{
|
||||
"D0, P[apples], (!!int)::6\n",
|
||||
},
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Running Tests
|
||||
|
||||
- **All tests**: `make [local] test`
|
||||
- **Specific test**: `go test -run TestName`
|
||||
- **With coverage**: `make [local] cover`
|
||||
|
||||
# Documentation
|
||||
|
||||
The various operator documentation (e.g. 'strings') are generated from the 'master' branch, and have a statically defined header (e.g. `pkg/yqlib/doc/operators/headers/add.md`) and the bulk of the docs are generated from the unit tests e.g. `pkg/yqlib/operator_add_test.go`.
|
||||
## Documentation Generation
|
||||
|
||||
The pipeline will run the tests and automatically concatenate the files together, and put them under
|
||||
`pkg/qylib/doc/add.md`. These files are checked in the master branch (and are copied to the gitbook branch as part of the release process).
|
||||
The project uses a documentation system that combines static headers with dynamically generated content from tests.
|
||||
|
||||
### How It Works
|
||||
|
||||
Remaining static documentation is in the 'githook' branch (where the generated docs are copied across into)
|
||||
1. **Static headers** are defined in `pkg/yqlib/doc/operators/headers/*.md`
|
||||
2. **Dynamic content** is generated from test scenarios in `*_test.go` files
|
||||
3. **Generated docs** are created in `pkg/yqlib/doc/*.md` by concatenating headers with test-generated content
|
||||
4. **Documentation is synced** to the gitbook branch for the website
|
||||
|
||||
## How to contribute
|
||||
### Updating Operator Documentation
|
||||
|
||||
The first step is to find if what you want is automatically generated or not - start by looking in the master branch.
|
||||
#### For Test-Generated Documentation
|
||||
|
||||
Note that PRs with small changes (e.g. minor typos) may not be merged (see https://joel.net/how-one-guy-ruined-hacktoberfest2020-drama).
|
||||
Most operator documentation is generated from tests. To update:
|
||||
|
||||
### Updating dynamic documentation from master
|
||||
- Search for the documentation you want to update. If you find matches in a `*_test.go` file - update that, as that will automatically update the matching `*.md` file
|
||||
- Assuming you are updating a `*_test.go` file, once updated, run the test to regenerated the docs. E.g. for the 'Add' test generated docs, from the pkg/yqlib folder run:
|
||||
`go test -run TestAddOperatorScenarios` which will run that test defined in the `operator_add_test.go` file.
|
||||
- Ensure the tests still pass, and check the generated documentation have your update.
|
||||
- Note: If the documentation is only in a `headers/*.md` file, then just update that directly
|
||||
- Raise a PR to merge the changes into master!
|
||||
1. **Find the test file** (e.g., `operator_add_test.go`)
|
||||
2. **Update test scenarios** - each `expressionScenario` with `skipDoc: false` becomes documentation
|
||||
3. **Run the test** to regenerate docs:
|
||||
```bash
|
||||
cd pkg/yqlib
|
||||
go test -run TestAddOperatorScenarios
|
||||
```
|
||||
4. **Verify the generated documentation** in `pkg/yqlib/doc/add.md`
|
||||
5. **Create a PR** with your changes
|
||||
|
||||
### Updating static documentation from the gitbook branch
|
||||
If you haven't found what you want to update in the master branch, then check the gitbook branch directly as there are a few pages in there that are not in master.
|
||||
#### For Header-Only Documentation
|
||||
|
||||
- Update the `*.md` files
|
||||
- Raise a PR to merge the changes into gitbook.
|
||||
If documentation exists only in `headers/*.md` files:
|
||||
1. **Update the header file directly** (e.g., `pkg/yqlib/doc/operators/headers/add.md`)
|
||||
2. **Create a PR** with your changes
|
||||
|
||||
### Updating Static Documentation
|
||||
|
||||
For documentation not in the master branch:
|
||||
|
||||
1. **Check the gitbook branch** for additional pages
|
||||
2. **Update the `*.md` files** directly
|
||||
3. **Create a PR** to the gitbook branch
|
||||
|
||||
### Documentation Best Practices
|
||||
|
||||
- **Write clear, concise examples** in test scenarios
|
||||
- **Use meaningful variable names** in examples
|
||||
- **Include edge cases** and error conditions
|
||||
- **Test your documentation changes** by running the specific test
|
||||
- **Verify generated output** matches expectations
|
||||
|
||||
Note: PRs with small changes (e.g. minor typos) may not be merged (see https://joel.net/how-one-guy-ruined-hacktoberfest2020-drama).
|
||||
|
||||
# Troubleshooting
|
||||
|
||||
## Common Setup Issues
|
||||
|
||||
### Docker/Podman Issues
|
||||
- **Problem**: `make` commands fail with Docker errors
|
||||
- **Solution**: Ensure Docker or Podman is running and accessible
|
||||
- **Alternative**: Use `make local <command>` to run in containers
|
||||
|
||||
### Go Version Issues
|
||||
- **Problem**: Build fails with Go version errors
|
||||
- **Solution**: Ensure you have Go 1.24.0 or later installed
|
||||
- **Check**: Run `go version` to verify
|
||||
|
||||
### Vendor Dependencies
|
||||
- **Problem**: `make vendor` fails or dependencies are outdated
|
||||
- **Solution**:
|
||||
```bash
|
||||
go mod tidy
|
||||
make [local] vendor
|
||||
```
|
||||
|
||||
### Linting Failures
|
||||
- **Problem**: `make check` fails with linting errors
|
||||
- **Solution**:
|
||||
```bash
|
||||
make [local] format # Auto-fix formatting
|
||||
# Manually fix remaining linting issues
|
||||
make [local] check # Verify fixes
|
||||
```
|
||||
|
||||
### Test Failures
|
||||
- **Problem**: Tests fail locally but pass in CI
|
||||
- **Solution**:
|
||||
```bash
|
||||
make [local] test # Run in Docker container
|
||||
```
|
||||
|
||||
- **Problem**: Tests fail with a VCS error:
|
||||
```bash
|
||||
error obtaining VCS status: exit status 128
|
||||
Use -buildvcs=false to disable VCS stamping.
|
||||
```
|
||||
- **Solution**:
|
||||
Git security mechanisms prevent Golang from detecting the Git details inside
|
||||
the container; either build with the `local` option, or pass GOFLAGS to
|
||||
disable Golang buildvcs behaviour.
|
||||
```bash
|
||||
make local test
|
||||
# OR
|
||||
make test GOFLAGS='-buildvcs=true'
|
||||
```
|
||||
|
||||
### Documentation Generation Issues
|
||||
- **Problem**: Generated docs don't update after test changes
|
||||
- **Solution**:
|
||||
```bash
|
||||
cd pkg/yqlib
|
||||
go test -run TestSpecificOperatorScenarios
|
||||
# Check if generated file updated in pkg/yqlib/doc/
|
||||
```
|
||||
|
||||
## Getting Help
|
||||
|
||||
- **Check existing issues**: Search GitHub issues for similar problems
|
||||
- **Create an issue**: If you can't find a solution, create a detailed issue
|
||||
- **Ask questions**: Use GitHub Discussions for general questions
|
||||
- **Join the community**: Check the project's community channels
|
||||
@ -1,4 +1,4 @@
|
||||
FROM golang:1.25.2 AS builder
|
||||
FROM golang:1.26.0 AS builder
|
||||
|
||||
WORKDIR /go/src/mikefarah/yq
|
||||
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
FROM golang:1.25.2
|
||||
FROM golang:1.26.0
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y npm && \
|
||||
|
||||
1
Makefile
1
Makefile
@ -35,6 +35,7 @@ clean:
|
||||
## prefix before other make targets to run in your local dev environment
|
||||
local: | quiet
|
||||
@$(eval ENGINERUN= )
|
||||
@$(eval GOFLAGS="$(GOFLAGS)" )
|
||||
@mkdir -p tmp
|
||||
@touch tmp/dev_image_id
|
||||
quiet: # this is silly but shuts up 'Nothing to be done for `local`'
|
||||
|
||||
@ -4,6 +4,7 @@ IMPORT_PATH := github.com/mikefarah/${PROJECT}
|
||||
export GIT_COMMIT = $(shell git rev-parse --short HEAD)
|
||||
export GIT_DIRTY = $(shell test -n "$$(git status --porcelain)" && echo "+CHANGES" || true)
|
||||
export GIT_DESCRIBE = $(shell git describe --tags --always)
|
||||
GOFLAGS :=
|
||||
LDFLAGS :=
|
||||
LDFLAGS += -X main.GitCommit=${GIT_COMMIT}${GIT_DIRTY}
|
||||
LDFLAGS += -X main.GitDescribe=${GIT_DESCRIBE}
|
||||
@ -26,13 +27,15 @@ ifeq ($(CYG_CHECK),1)
|
||||
else
|
||||
# all non-windows environments
|
||||
ROOT := $(shell pwd)
|
||||
SELINUX := $(shell which getenforce 2>&1 >/dev/null && echo :z)
|
||||
# Deliberately use `command -v` instead of `which` to be POSIX compliant
|
||||
SELINUX := $(shell command -v getenforce >/dev/null 2>&1 && echo :z)
|
||||
endif
|
||||
|
||||
DEV_IMAGE := ${PROJECT}_dev
|
||||
|
||||
ENGINERUN := ${ENGINE} run --rm \
|
||||
-e LDFLAGS="${LDFLAGS}" \
|
||||
-e GOFLAGS="${GOFLAGS}" \
|
||||
-e GITHUB_TOKEN="${GITHUB_TOKEN}" \
|
||||
-v ${ROOT}/vendor:/go/src${SELINUX} \
|
||||
-v ${ROOT}:/${PROJECT}/src/${IMPORT_PATH}${SELINUX} \
|
||||
|
||||
211
README.md
211
README.md
@ -3,44 +3,46 @@
|
||||
    
|
||||
|
||||
|
||||
a lightweight and portable command-line YAML, JSON, INI and XML processor. `yq` uses [jq](https://github.com/stedolan/jq) like syntax but works with yaml files as well as json, xml, ini, properties, csv and tsv. It doesn't yet support everything `jq` does - but it does support the most common operations and functions, and more is being added continuously.
|
||||
A lightweight and portable command-line YAML, JSON, INI and XML processor. `yq` uses [jq](https://github.com/stedolan/jq) (a popular JSON processor) like syntax but works with yaml files as well as json, kyaml, xml, ini, properties, csv and tsv. It doesn't yet support everything `jq` does - but it does support the most common operations and functions, and more is being added continuously.
|
||||
|
||||
yq is written in go - so you can download a dependency free binary for your platform and you are good to go! If you prefer there are a variety of package managers that can be used as well as Docker and Podman, all listed below.
|
||||
yq is written in Go - so you can download a dependency free binary for your platform and you are good to go! If you prefer there are a variety of package managers that can be used as well as Docker and Podman, all listed below.
|
||||
|
||||
## Quick Usage Guide
|
||||
|
||||
Read a value:
|
||||
### Basic Operations
|
||||
|
||||
**Read a value:**
|
||||
```bash
|
||||
yq '.a.b[0].c' file.yaml
|
||||
```
|
||||
|
||||
Pipe from STDIN:
|
||||
**Pipe from STDIN:**
|
||||
```bash
|
||||
yq '.a.b[0].c' < file.yaml
|
||||
```
|
||||
|
||||
Update a yaml file, in place
|
||||
**Update a yaml file in place:**
|
||||
```bash
|
||||
yq -i '.a.b[0].c = "cool"' file.yaml
|
||||
```
|
||||
|
||||
Update using environment variables
|
||||
**Update using environment variables:**
|
||||
```bash
|
||||
NAME=mike yq -i '.a.b[0].c = strenv(NAME)' file.yaml
|
||||
```
|
||||
|
||||
Merge multiple files
|
||||
### Advanced Operations
|
||||
|
||||
**Merge multiple files:**
|
||||
```bash
|
||||
# merge two files
|
||||
yq -n 'load("file1.yaml") * load("file2.yaml")'
|
||||
|
||||
# merge using globs:
|
||||
# note the use of `ea` to evaluate all the files at once
|
||||
# instead of in sequence
|
||||
# merge using globs (note: `ea` evaluates all files at once instead of in sequence)
|
||||
yq ea '. as $item ireduce ({}; . * $item )' path/to/*.yml
|
||||
```
|
||||
|
||||
Multiple updates to a yaml file
|
||||
**Multiple updates to a yaml file:**
|
||||
```bash
|
||||
yq -i '
|
||||
.a.b[0].c = "cool" |
|
||||
@ -49,14 +51,22 @@ yq -i '
|
||||
' file.yaml
|
||||
```
|
||||
|
||||
Find and update an item in an array:
|
||||
**Find and update an item in an array:**
|
||||
```bash
|
||||
yq '(.[] | select(.name == "foo") | .address) = "12 cat st"'
|
||||
# Note: requires input file - add your file at the end
|
||||
yq -i '(.[] | select(.name == "foo") | .address) = "12 cat st"' data.yaml
|
||||
```
|
||||
|
||||
Convert JSON to YAML
|
||||
**Convert between formats:**
|
||||
```bash
|
||||
# Convert JSON to YAML (pretty print)
|
||||
yq -Poy sample.json
|
||||
|
||||
# Convert YAML to JSON
|
||||
yq -o json file.yaml
|
||||
|
||||
# Convert XML to YAML
|
||||
yq -o yaml file.xml
|
||||
```
|
||||
|
||||
See [recipes](https://mikefarah.gitbook.io/yq/recipes) for more examples and the [documentation](https://mikefarah.gitbook.io/yq/) for more information.
|
||||
@ -68,31 +78,31 @@ Take a look at the discussions for [common questions](https://github.com/mikefar
|
||||
### [Download the latest binary](https://github.com/mikefarah/yq/releases/latest)
|
||||
|
||||
### wget
|
||||
Use wget to download, gzipped pre-compiled binaries:
|
||||
Use wget to download pre-compiled binaries. Choose your platform and architecture:
|
||||
|
||||
|
||||
For instance, VERSION=v4.2.0 and BINARY=yq_linux_amd64
|
||||
|
||||
#### Compressed via tar.gz
|
||||
**For Linux (example):**
|
||||
```bash
|
||||
wget https://github.com/mikefarah/yq/releases/download/${VERSION}/${BINARY}.tar.gz -O - |\
|
||||
tar xz && mv ${BINARY} /usr/local/bin/yq
|
||||
```
|
||||
# Set your platform variables (adjust as needed)
|
||||
VERSION=v4.2.0
|
||||
PLATFORM=linux_amd64
|
||||
|
||||
#### Plain binary
|
||||
# Download compressed binary
|
||||
wget https://github.com/mikefarah/yq/releases/download/${VERSION}/yq_${PLATFORM}.tar.gz -O - |\
|
||||
tar xz && sudo mv yq_${PLATFORM} /usr/local/bin/yq
|
||||
|
||||
```bash
|
||||
wget https://github.com/mikefarah/yq/releases/download/${VERSION}/${BINARY} -O /usr/local/bin/yq &&\
|
||||
# Or download plain binary
|
||||
wget https://github.com/mikefarah/yq/releases/download/${VERSION}/yq_${PLATFORM} -O /usr/local/bin/yq &&\
|
||||
chmod +x /usr/local/bin/yq
|
||||
```
|
||||
|
||||
#### Latest version
|
||||
|
||||
**Latest version (Linux AMD64):**
|
||||
```bash
|
||||
wget https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64 -O /usr/local/bin/yq &&\
|
||||
chmod +x /usr/local/bin/yq
|
||||
```
|
||||
|
||||
**Available platforms:** `linux_amd64`, `linux_arm64`, `linux_arm`, `linux_386`, `darwin_amd64`, `darwin_arm64`, `windows_amd64`, `windows_386`, etc.
|
||||
|
||||
### MacOS / Linux via Homebrew:
|
||||
Using [Homebrew](https://brew.sh/)
|
||||
```
|
||||
@ -123,28 +133,31 @@ rm /etc/myfile.tmp
|
||||
```
|
||||
|
||||
### Run with Docker or Podman
|
||||
#### Oneshot use:
|
||||
|
||||
#### One-time use:
|
||||
```bash
|
||||
docker run --rm -v "${PWD}":/workdir mikefarah/yq [command] [flags] [expression ]FILE...
|
||||
# Docker - process files in current directory
|
||||
docker run --rm -v "${PWD}":/workdir mikefarah/yq '.a.b[0].c' file.yaml
|
||||
|
||||
# Podman - same usage as Docker
|
||||
podman run --rm -v "${PWD}":/workdir mikefarah/yq '.a.b[0].c' file.yaml
|
||||
```
|
||||
|
||||
Note that you can run `yq` in docker without network access and other privileges if you desire,
|
||||
namely `--security-opt=no-new-privileges --cap-drop all --network none`.
|
||||
|
||||
**Security note:** You can run `yq` in Docker with restricted privileges:
|
||||
```bash
|
||||
podman run --rm -v "${PWD}":/workdir mikefarah/yq [command] [flags] [expression ]FILE...
|
||||
docker run --rm --security-opt=no-new-privileges --cap-drop all --network none \
|
||||
-v "${PWD}":/workdir mikefarah/yq '.a.b[0].c' file.yaml
|
||||
```
|
||||
|
||||
#### Pipe in via STDIN:
|
||||
#### Pipe data via STDIN:
|
||||
|
||||
You'll need to pass the `-i\--interactive` flag to docker:
|
||||
You'll need to pass the `-i --interactive` flag to Docker/Podman:
|
||||
|
||||
```bash
|
||||
# Process piped data
|
||||
docker run -i --rm mikefarah/yq '.this.thing' < myfile.yml
|
||||
```
|
||||
|
||||
```bash
|
||||
# Same with Podman
|
||||
podman run -i --rm mikefarah/yq '.this.thing' < myfile.yml
|
||||
```
|
||||
|
||||
@ -340,7 +353,7 @@ gah install yq
|
||||
- Supports yaml [front matter](https://mikefarah.gitbook.io/yq/usage/front-matter) blocks (e.g. jekyll/assemble)
|
||||
- Colorized yaml output
|
||||
- [Date/Time manipulation and formatting with TZ](https://mikefarah.gitbook.io/yq/operators/datetime)
|
||||
- [Deeply data structures](https://mikefarah.gitbook.io/yq/operators/traverse-read)
|
||||
- [Deep data structures](https://mikefarah.gitbook.io/yq/operators/traverse-read)
|
||||
- [Sort keys](https://mikefarah.gitbook.io/yq/operators/sort-keys)
|
||||
- Manipulate yaml [comments](https://mikefarah.gitbook.io/yq/operators/comment-operators), [styling](https://mikefarah.gitbook.io/yq/operators/style), [tags](https://mikefarah.gitbook.io/yq/operators/tag) and [anchors and aliases](https://mikefarah.gitbook.io/yq/operators/anchor-and-alias-operators).
|
||||
- [Update in place](https://mikefarah.gitbook.io/yq/v/v4.x/commands/evaluate#flags)
|
||||
@ -350,6 +363,8 @@ gah install yq
|
||||
- [Load content from other files](https://mikefarah.gitbook.io/yq/operators/load)
|
||||
- [Convert to/from json/ndjson](https://mikefarah.gitbook.io/yq/v/v4.x/usage/convert)
|
||||
- [Convert to/from xml](https://mikefarah.gitbook.io/yq/v/v4.x/usage/xml)
|
||||
- [Convert to/from hcl (terraform)](https://mikefarah.gitbook.io/yq/v/v4.x/usage/hcl)
|
||||
- [Convert to/from toml](https://mikefarah.gitbook.io/yq/v/v4.x/usage/toml)
|
||||
- [Convert to/from properties](https://mikefarah.gitbook.io/yq/v/v4.x/usage/properties)
|
||||
- [Convert to/from csv/tsv](https://mikefarah.gitbook.io/yq/usage/csv-tsv)
|
||||
- [General shell completion scripts (bash/zsh/fish/powershell)](https://mikefarah.gitbook.io/yq/v/v4.x/commands/shell-completion)
|
||||
@ -367,10 +382,18 @@ Usage:
|
||||
|
||||
Examples:
|
||||
|
||||
# yq defaults to 'eval' command if no command is specified. See "yq eval --help" for more examples.
|
||||
yq '.stuff' < myfile.yml # outputs the data at the "stuff" node from "myfile.yml"
|
||||
# yq tries to auto-detect the file format based off the extension, and defaults to YAML if it's unknown (or piping through STDIN)
|
||||
# Use the '-p/--input-format' flag to specify a format type.
|
||||
cat file.xml | yq -p xml
|
||||
|
||||
yq -i '.stuff = "foo"' myfile.yml # update myfile.yml in place
|
||||
# read the "stuff" node from "myfile.yml"
|
||||
yq '.stuff' < myfile.yml
|
||||
|
||||
# update myfile.yml in place
|
||||
yq -i '.stuff = "foo"' myfile.yml
|
||||
|
||||
# print contents of sample.json as idiomatic YAML
|
||||
yq -P -oy sample.json
|
||||
|
||||
|
||||
Available Commands:
|
||||
@ -380,49 +403,75 @@ Available Commands:
|
||||
help Help about any command
|
||||
|
||||
Flags:
|
||||
-C, --colors force print with colors
|
||||
--csv-auto-parse parse CSV YAML/JSON values (default true)
|
||||
--csv-separator char CSV Separator character (default ,)
|
||||
-e, --exit-status set exit status if there are no matches or null or false is returned
|
||||
--expression string forcibly set the expression argument. Useful when yq argument detection thinks your expression is a file.
|
||||
--from-file string Load expression from specified file.
|
||||
-f, --front-matter string (extract|process) first input as yaml front-matter. Extract will pull out the yaml content, process will run the expression against the yaml content, leaving the remaining data intact
|
||||
--header-preprocess Slurp any header comments and separators before processing expression. (default true)
|
||||
-h, --help help for yq
|
||||
-I, --indent int sets indent level for output (default 2)
|
||||
-i, --inplace update the file in place of first file given.
|
||||
-p, --input-format string [auto|a|yaml|y|json|j|props|p|csv|c|tsv|t|xml|x|base64|uri|toml|lua|l|ini|i] parse format for input. (default "auto")
|
||||
--lua-globals output keys as top-level global variables
|
||||
--lua-prefix string prefix (default "return ")
|
||||
--lua-suffix string suffix (default ";\n")
|
||||
--lua-unquoted output unquoted string keys (e.g. {foo="bar"})
|
||||
-M, --no-colors force print with no colors
|
||||
-N, --no-doc Don't print document separators (---)
|
||||
-0, --nul-output Use NUL char to separate values. If unwrap scalar is also set, fail if unwrapped scalar contains NUL char.
|
||||
-n, --null-input Don't read input, simply evaluate the expression given. Useful for creating docs from scratch.
|
||||
-o, --output-format string [auto|a|yaml|y|json|j|props|p|csv|c|tsv|t|xml|x|base64|uri|toml|shell|s|lua|l|ini|i] output format type. (default "auto")
|
||||
-P, --prettyPrint pretty print, shorthand for '... style = ""'
|
||||
--properties-array-brackets use [x] in array paths (e.g. for SpringBoot)
|
||||
--properties-separator string separator to use between keys and values (default " = ")
|
||||
-s, --split-exp string print each result (or doc) into a file named (exp). [exp] argument must return a string. You can use $index in the expression as the result counter. The necessary directories will be created.
|
||||
--split-exp-file string Use a file to specify the split-exp expression.
|
||||
--string-interpolation Toggles strings interpolation of \(exp) (default true)
|
||||
--tsv-auto-parse parse TSV YAML/JSON values (default true)
|
||||
-r, --unwrapScalar unwrap scalar, print the value with no quotes, colors or comments. Defaults to true for yaml (default true)
|
||||
-v, --verbose verbose mode
|
||||
-V, --version Print version information and quit
|
||||
--xml-attribute-prefix string prefix for xml attributes (default "+@")
|
||||
--xml-content-name string name for xml content (if no attribute name is present). (default "+content")
|
||||
--xml-directive-name string name for xml directives (e.g. <!DOCTYPE thing cat>) (default "+directive")
|
||||
--xml-keep-namespace enables keeping namespace after parsing attributes (default true)
|
||||
--xml-proc-inst-prefix string prefix for xml processing instructions (e.g. <?xml version="1"?>) (default "+p_")
|
||||
--xml-raw-token enables using RawToken method instead Token. Commonly disables namespace translations. See https://pkg.go.dev/encoding/xml#Decoder.RawToken for details. (default true)
|
||||
--xml-skip-directives skip over directives (e.g. <!DOCTYPE thing cat>)
|
||||
--xml-skip-proc-inst skip over process instructions (e.g. <?xml version="1"?>)
|
||||
--xml-strict-mode enables strict parsing of XML. See https://pkg.go.dev/encoding/xml for more details.
|
||||
-C, --colors force print with colors
|
||||
--csv-auto-parse parse CSV YAML/JSON values (default true)
|
||||
--csv-separator char CSV Separator character (default ,)
|
||||
--debug-node-info debug node info
|
||||
-e, --exit-status set exit status if there are no matches or null or false is returned
|
||||
--expression string forcibly set the expression argument. Useful when yq argument detection thinks your expression is a file.
|
||||
--from-file string Load expression from specified file.
|
||||
-f, --front-matter string (extract|process) first input as yaml front-matter. Extract will pull out the yaml content, process will run the expression against the yaml content, leaving the remaining data intact
|
||||
--header-preprocess Slurp any header comments and separators before processing expression. (default true)
|
||||
-h, --help help for yq
|
||||
-I, --indent int sets indent level for output (default 2)
|
||||
-i, --inplace update the file in place of first file given.
|
||||
-p, --input-format string [auto|a|yaml|y|json|j|kyaml|ky|props|p|csv|c|tsv|t|xml|x|base64|uri|toml|hcl|h|lua|l|ini|i] parse format for input. (default "auto")
|
||||
--lua-globals output keys as top-level global variables
|
||||
--lua-prefix string prefix (default "return ")
|
||||
--lua-suffix string suffix (default ";\n")
|
||||
--lua-unquoted output unquoted string keys (e.g. {foo="bar"})
|
||||
-M, --no-colors force print with no colors
|
||||
-N, --no-doc Don't print document separators (---)
|
||||
-0, --nul-output Use NUL char to separate values. If unwrap scalar is also set, fail if unwrapped scalar contains NUL char.
|
||||
-n, --null-input Don't read input, simply evaluate the expression given. Useful for creating docs from scratch.
|
||||
-o, --output-format string [auto|a|yaml|y|json|j|kyaml|ky|props|p|csv|c|tsv|t|xml|x|base64|uri|toml|hcl|h|shell|s|lua|l|ini|i] output format type. (default "auto")
|
||||
-P, --prettyPrint pretty print, shorthand for '... style = ""'
|
||||
--properties-array-brackets use [x] in array paths (e.g. for SpringBoot)
|
||||
--properties-separator string separator to use between keys and values (default " = ")
|
||||
--security-disable-env-ops Disable env related operations.
|
||||
--security-disable-file-ops Disable file related operations (e.g. load)
|
||||
--shell-key-separator string separator for shell variable key paths (default "_")
|
||||
-s, --split-exp string print each result (or doc) into a file named (exp). [exp] argument must return a string. You can use $index in the expression as the result counter. The necessary directories will be created.
|
||||
--split-exp-file string Use a file to specify the split-exp expression.
|
||||
--string-interpolation Toggles strings interpolation of \(exp) (default true)
|
||||
--tsv-auto-parse parse TSV YAML/JSON values (default true)
|
||||
-r, --unwrapScalar unwrap scalar, print the value with no quotes, colors or comments. Defaults to true for yaml (default true)
|
||||
-v, --verbose verbose mode
|
||||
-V, --version Print version information and quit
|
||||
--xml-attribute-prefix string prefix for xml attributes (default "+@")
|
||||
--xml-content-name string name for xml content (if no attribute name is present). (default "+content")
|
||||
--xml-directive-name string name for xml directives (e.g. <!DOCTYPE thing cat>) (default "+directive")
|
||||
--xml-keep-namespace enables keeping namespace after parsing attributes (default true)
|
||||
--xml-proc-inst-prefix string prefix for xml processing instructions (e.g. <?xml version="1"?>) (default "+p_")
|
||||
--xml-raw-token enables using RawToken method instead Token. Commonly disables namespace translations. See https://pkg.go.dev/encoding/xml#Decoder.RawToken for details. (default true)
|
||||
--xml-skip-directives skip over directives (e.g. <!DOCTYPE thing cat>)
|
||||
--xml-skip-proc-inst skip over process instructions (e.g. <?xml version="1"?>)
|
||||
--xml-strict-mode enables strict parsing of XML. See https://pkg.go.dev/encoding/xml for more details.
|
||||
--yaml-fix-merge-anchor-to-spec Fix merge anchor to match YAML spec. Will default to true in late 2025
|
||||
|
||||
Use "yq [command] --help" for more information about a command.
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
**PowerShell quoting issues:**
|
||||
```powershell
|
||||
# Use single quotes for expressions
|
||||
yq '.a.b[0].c' file.yaml
|
||||
|
||||
# Or escape double quotes
|
||||
yq ".a.b[0].c = \"value\"" file.yaml
|
||||
```
|
||||
|
||||
### Getting Help
|
||||
|
||||
- **Check existing issues**: [GitHub Issues](https://github.com/mikefarah/yq/issues)
|
||||
- **Ask questions**: [GitHub Discussions](https://github.com/mikefarah/yq/discussions)
|
||||
- **Documentation**: [Complete documentation](https://mikefarah.gitbook.io/yq/)
|
||||
- **Examples**: [Recipes and examples](https://mikefarah.gitbook.io/yq/recipes)
|
||||
|
||||
## Known Issues / Missing Features
|
||||
- `yq` attempts to preserve comment positions and whitespace as much as possible, but it does not handle all scenarios (see https://github.com/go-yaml/yaml/tree/v3 for details)
|
||||
- Powershell has its own...[opinions on quoting yq](https://mikefarah.gitbook.io/yq/usage/tips-and-tricks#quotes-in-windows-powershell)
|
||||
|
||||
@ -6,6 +6,7 @@ setUp() {
|
||||
rm test*.csv 2>/dev/null || true
|
||||
rm test*.tsv 2>/dev/null || true
|
||||
rm test*.xml 2>/dev/null || true
|
||||
rm test*.tf 2>/dev/null || true
|
||||
}
|
||||
|
||||
testInputProperties() {
|
||||
@ -153,6 +154,37 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testInputKYaml() {
|
||||
cat >test.kyaml <<'EOL'
|
||||
# leading
|
||||
{
|
||||
a: 1, # a line
|
||||
# head b
|
||||
b: 2,
|
||||
c: [
|
||||
# head d
|
||||
"d", # d line
|
||||
],
|
||||
}
|
||||
EOL
|
||||
|
||||
read -r -d '' expected <<'EOM'
|
||||
# leading
|
||||
a: 1 # a line
|
||||
# head b
|
||||
b: 2
|
||||
c:
|
||||
# head d
|
||||
- d # d line
|
||||
EOM
|
||||
|
||||
X=$(./yq e -p=kyaml -P test.kyaml)
|
||||
assertEquals "$expected" "$X"
|
||||
|
||||
X=$(./yq ea -p=kyaml -P test.kyaml)
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@ -255,4 +287,61 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
source ./scripts/shunit2
|
||||
testInputTerraform() {
|
||||
cat >test.tf <<EOL
|
||||
resource "aws_s3_bucket" "example" {
|
||||
bucket = "my-bucket"
|
||||
tags = {
|
||||
Environment = "Dev"
|
||||
Project = "Test"
|
||||
}
|
||||
}
|
||||
EOL
|
||||
|
||||
read -r -d '' expected << EOM
|
||||
resource "aws_s3_bucket" "example" {
|
||||
bucket = "my-bucket"
|
||||
tags = {
|
||||
Environment = "Dev"
|
||||
Project = "Test"
|
||||
}
|
||||
}
|
||||
EOM
|
||||
|
||||
X=$(./yq test.tf)
|
||||
assertEquals "$expected" "$X"
|
||||
|
||||
X=$(./yq ea test.tf)
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testInputTerraformGithubAction() {
|
||||
cat >test.tf <<EOL
|
||||
resource "aws_s3_bucket" "example" {
|
||||
bucket = "my-bucket"
|
||||
|
||||
tags = {
|
||||
Environment = "Dev"
|
||||
Project = "Test"
|
||||
}
|
||||
}
|
||||
EOL
|
||||
|
||||
read -r -d '' expected << EOM
|
||||
resource "aws_s3_bucket" "example" {
|
||||
bucket = "my-bucket"
|
||||
tags = {
|
||||
Environment = "Dev"
|
||||
Project = "Test"
|
||||
}
|
||||
}
|
||||
EOM
|
||||
|
||||
X=$(cat /dev/null | ./yq test.tf)
|
||||
assertEquals "$expected" "$X"
|
||||
|
||||
X=$(cat /dev/null | ./yq ea test.tf)
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
source ./scripts/shunit2
|
||||
|
||||
@ -280,6 +280,55 @@ EOM
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testOutputKYaml() {
|
||||
cat >test.yml <<'EOL'
|
||||
# leading
|
||||
a: 1 # a line
|
||||
# head b
|
||||
b: 2
|
||||
c:
|
||||
# head d
|
||||
- d # d line
|
||||
EOL
|
||||
|
||||
read -r -d '' expected <<'EOM'
|
||||
# leading
|
||||
{
|
||||
a: 1, # a line
|
||||
# head b
|
||||
b: 2,
|
||||
c: [
|
||||
# head d
|
||||
"d", # d line
|
||||
],
|
||||
}
|
||||
EOM
|
||||
|
||||
X=$(./yq e --output-format=kyaml test.yml)
|
||||
assertEquals "$expected" "$X"
|
||||
|
||||
X=$(./yq ea --output-format=kyaml test.yml)
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testOutputKYamlShort() {
|
||||
cat >test.yml <<EOL
|
||||
a: b
|
||||
EOL
|
||||
|
||||
read -r -d '' expected <<'EOM'
|
||||
{
|
||||
a: "b",
|
||||
}
|
||||
EOM
|
||||
|
||||
X=$(./yq e -o=ky test.yml)
|
||||
assertEquals "$expected" "$X"
|
||||
|
||||
X=$(./yq ea -o=ky test.yml)
|
||||
assertEquals "$expected" "$X"
|
||||
}
|
||||
|
||||
testOutputXmComplex() {
|
||||
cat >test.yml <<EOL
|
||||
a: {b: {c: ["cat", "dog"], +@f: meow}}
|
||||
|
||||
422
agents.md
Normal file
422
agents.md
Normal file
@ -0,0 +1,422 @@
|
||||
# General rules
|
||||
✅ **DO:**
|
||||
- You can use ./yq with the `--debug-node-info` flag to get a deeper understanding of the ast.
|
||||
- run ./scripts/format.sh to format the code; then ./scripts/check.sh to lint, and finally ./scripts/spelling.sh to check spelling.
|
||||
- Add comprehensive tests to cover the changes
|
||||
- Run test suite to ensure there is no regression
|
||||
- Use UK English spelling
|
||||
|
||||
❌ **DON'T:**
|
||||
- Git add or commit
|
||||
- Add comments to functions that are self-explanatory
|
||||
|
||||
|
||||
|
||||
# Adding a New Encoder/Decoder
|
||||
|
||||
This guide explains how to add support for a new format (encoder/decoder) to yq without modifying `candidate_node.go`.
|
||||
|
||||
## Overview
|
||||
|
||||
The encoder/decoder architecture in yq is based on two main interfaces:
|
||||
|
||||
- **Encoder**: Converts a `CandidateNode` to output in a specific format
|
||||
- **Decoder**: Reads input in a specific format and creates a `CandidateNode`
|
||||
|
||||
Each format is registered in `pkg/yqlib/format.go` and made available through factory functions.
|
||||
|
||||
## Architecture
|
||||
|
||||
### Key Files
|
||||
|
||||
- `pkg/yqlib/encoder.go` - Defines the `Encoder` interface
|
||||
- `pkg/yqlib/decoder.go` - Defines the `Decoder` interface
|
||||
- `pkg/yqlib/format.go` - Format registry and factory functions
|
||||
- `pkg/yqlib/operator_encoder_decoder.go` - Encode/decode operators
|
||||
- `pkg/yqlib/encoder_*.go` - Encoder implementations
|
||||
- `pkg/yqlib/decoder_*.go` - Decoder implementations
|
||||
|
||||
### Interfaces
|
||||
|
||||
**Encoder Interface:**
|
||||
```go
|
||||
type Encoder interface {
|
||||
Encode(writer io.Writer, node *CandidateNode) error
|
||||
PrintDocumentSeparator(writer io.Writer) error
|
||||
PrintLeadingContent(writer io.Writer, content string) error
|
||||
CanHandleAliases() bool
|
||||
}
|
||||
```
|
||||
|
||||
**Decoder Interface:**
|
||||
```go
|
||||
type Decoder interface {
|
||||
Init(reader io.Reader) error
|
||||
Decode() (*CandidateNode, error)
|
||||
}
|
||||
```
|
||||
|
||||
## Step-by-Step: Adding a New Encoder/Decoder
|
||||
|
||||
### Step 1: Create the Encoder File
|
||||
|
||||
Create `pkg/yqlib/encoder_<format>.go` implementing the `Encoder` interface:
|
||||
- `Encode()` - Convert a `CandidateNode` to your format and write to the output writer
|
||||
- `PrintDocumentSeparator()` - Handle document separators if your format requires them
|
||||
- `PrintLeadingContent()` - Handle leading content/comments if supported
|
||||
- `CanHandleAliases()` - Return whether your format supports YAML aliases
|
||||
|
||||
See `encoder_json.go` or `encoder_base64.go` for examples.
|
||||
|
||||
### Step 2: Create the Decoder File
|
||||
|
||||
Create `pkg/yqlib/decoder_<format>.go` implementing the `Decoder` interface:
|
||||
- `Init()` - Initialize the decoder with the input reader and set up any needed state
|
||||
- `Decode()` - Decode one document from the input and return a `CandidateNode`, or `io.EOF` when finished
|
||||
|
||||
See `decoder_json.go` or `decoder_base64.go` for examples.
|
||||
|
||||
### Step 3: Create Tests (Mandatory)
|
||||
|
||||
Create a test file `pkg/yqlib/<format>_test.go` using the `formatScenario` pattern:
|
||||
- Define test scenarios as `formatScenario` structs with fields: `description`, `input`, `expected`, `scenarioType`
|
||||
- `scenarioType` can be `"decode"` (test decoding to YAML) or `"roundtrip"` (encode/decode preservation)
|
||||
- Create a helper function `test<Format>Scenario()` that switches on `scenarioType`
|
||||
- Create main test function `Test<Format>FormatScenarios()` that iterates over scenarios
|
||||
- The main test function should use `documentScenarios` to ensure testcase documentation is generated.
|
||||
|
||||
Test coverage must include:
|
||||
- Basic data types (scalars, arrays, objects/maps)
|
||||
- Nested structures
|
||||
- Edge cases (empty inputs, special characters, escape sequences)
|
||||
- Format-specific features or syntax
|
||||
- Round-trip tests: decode → encode → decode should preserve data
|
||||
|
||||
See `hcl_test.go` for a complete example.
|
||||
|
||||
### Step 4: Register the Format in format.go
|
||||
|
||||
Edit `pkg/yqlib/format.go`:
|
||||
|
||||
1. Add a new format variable:
|
||||
- `"<format>"` is the formal name (e.g., "json", "yaml")
|
||||
- `[]string{...}` contains short aliases (can be empty)
|
||||
- The first function creates an encoder (can be nil for encode-only formats)
|
||||
- The second function creates a decoder (can be nil for decode-only formats)
|
||||
|
||||
2. Add the format to the `Formats` slice in the same file
|
||||
|
||||
See existing formats in `format.go` for the exact structure.
|
||||
|
||||
### Step 5: Handle Encoder Configuration (if needed)
|
||||
|
||||
If your format has preferences/configuration options:
|
||||
|
||||
1. Create a preferences struct with your configuration fields
|
||||
2. Update the encoder to accept preferences in its factory function
|
||||
3. Update `format.go` to pass the configured preferences
|
||||
4. Update `operator_encoder_decoder.go` if special indent handling is needed (see existing formats like JSON and YAML for the pattern)
|
||||
|
||||
This pattern is optional and only needed if your format has user-configurable options.
|
||||
|
||||
## Build Tags
|
||||
|
||||
Use build tags to allow optional compilation of formats:
|
||||
- Add `//go:build !yq_no<format>` at the top of your encoder and decoder files
|
||||
- Create a no-build version in `pkg/yqlib/no_<format>.go` that returns nil for encoder/decoder factories
|
||||
|
||||
This allows users to compile yq without certain formats using: `go build -tags yq_no<format>`
|
||||
|
||||
## Working with CandidateNode
|
||||
|
||||
The `CandidateNode` struct represents a YAML node with:
|
||||
- `Kind`: The node type (ScalarNode, SequenceNode, MappingNode)
|
||||
- `Tag`: The YAML tag (e.g., "!!str", "!!int", "!!map")
|
||||
- `Value`: The scalar value (for ScalarNode only)
|
||||
- `Content`: Child nodes (for SequenceNode and MappingNode)
|
||||
|
||||
Key methods:
|
||||
- `node.guessTagFromCustomType()` - Infer the tag from Go type
|
||||
- `node.AsList()` - Convert to a list for processing
|
||||
- `node.CreateReplacement()` - Create a new replacement node
|
||||
- `NewCandidate()` - Create a new CandidateNode
|
||||
|
||||
## Key Points
|
||||
|
||||
✅ **DO:**
|
||||
- Implement only the `Encoder` and `Decoder` interfaces
|
||||
- Register your format in `format.go` only
|
||||
- Keep format-specific logic in your encoder/decoder files
|
||||
- Use the candidate_node style attribute to store style information for round-trip. Ask if this needs to be updated with new styles.
|
||||
- Use build tags for optional compilation
|
||||
- Add comprehensive tests
|
||||
- Run the specific encoder/decoder test (e.g. <format>_test.go) whenever you make any changes to the encoder_<format> or decoder_<format>
|
||||
- Handle errors gracefully
|
||||
- Add the no-build directive, like the xml encoder and decoder, that enables minimal yq builds, e.g. `//go:build !yq_no<format>`. Be sure to also update the build_small-yq.sh and build-tinygo-yq.sh to not include the new format.
|
||||
|
||||
❌ **DON'T:**
|
||||
- Modify `candidate_node.go` to add format-specific logic
|
||||
- Add format-specific fields to `CandidateNode`
|
||||
- Create special cases in core navigation or evaluation logic
|
||||
- Bypass the encoder/decoder interfaces
|
||||
- Use the candidate_node tag attribute for anything other than indicating the data type
|
||||
|
||||
## Examples
|
||||
|
||||
Refer to existing format implementations for patterns:
|
||||
|
||||
- **Simple encoder/decoder**: `encoder_json.go`, `decoder_json.go`
|
||||
- **Complex with preferences**: `encoder_yaml.go`, `decoder_yaml.go`
|
||||
- **Encoder-only**: `encoder_sh.go` (ShFormat has nil decoder)
|
||||
- **String-only operations**: `encoder_base64.go`, `decoder_base64.go`
|
||||
|
||||
## Testing Your Implementation (Mandatory)
|
||||
|
||||
Tests must be implemented in `<format>_test.go` following the `formatScenario` pattern:
|
||||
|
||||
1. **Create test scenarios** using the `formatScenario` struct with fields:
|
||||
- `description`: Brief description of what's being tested
|
||||
- `input`: Sample input in your format
|
||||
- `expected`: Expected output (typically in YAML for decode tests)
|
||||
- `scenarioType`: Either `"decode"` or `"roundtrip"`
|
||||
|
||||
2. **Test coverage must include:**
|
||||
- Basic data types (scalars, arrays, objects/maps)
|
||||
- Nested structures
|
||||
- Edge cases (empty inputs, special characters, escape sequences)
|
||||
- Format-specific features or syntax
|
||||
- Round-trip tests: decode → encode → decode should preserve data
|
||||
|
||||
3. **Test function pattern:**
|
||||
- `test<Format>Scenario()`: Helper function that switches on `scenarioType`
|
||||
- `Test<Format>FormatScenarios()`: Main test function that iterates over scenarios
|
||||
|
||||
4. **Example from existing formats:**
|
||||
- See `hcl_test.go` for a complete example
|
||||
- See `yaml_test.go` for YAML-specific patterns
|
||||
- See `json_test.go` for more complex scenarios
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Format with Indentation
|
||||
Use preferences to control output formatting:
|
||||
```go
|
||||
type <format>Preferences struct {
|
||||
Indent int
|
||||
}
|
||||
|
||||
func (prefs *<format>Preferences) Copy() <format>Preferences {
|
||||
return *prefs
|
||||
}
|
||||
```
|
||||
|
||||
### Multiple Documents
|
||||
Decoders should support reading multiple documents:
|
||||
```go
|
||||
func (dec *<format>Decoder) Decode() (*CandidateNode, error) {
|
||||
if dec.finished {
|
||||
return nil, io.EOF
|
||||
}
|
||||
// ... decode next document ...
|
||||
if noMoreDocuments {
|
||||
dec.finished = true
|
||||
}
|
||||
return candidate, nil
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
# Adding a New Operator
|
||||
|
||||
This guide explains how to add a new operator to yq. Operators are the core of yq's expression language and process `CandidateNode` objects without requiring modifications to `candidate_node.go` itself.
|
||||
|
||||
## Overview
|
||||
|
||||
Operators transform data by implementing a handler function that processes a `Context` containing `CandidateNode` objects. Each operator is:
|
||||
|
||||
1. Defined as an `operationType` in `operation.go`
|
||||
2. Registered in the lexer in `lexer_participle.go`
|
||||
3. Implemented in its own `operator_<type>.go` file
|
||||
4. Tested in `operator_<type>_test.go`
|
||||
5. Documented in `pkg/yqlib/doc/operators/headers/<type>.md`
|
||||
|
||||
## Architecture
|
||||
|
||||
### Key Files
|
||||
|
||||
- `pkg/yqlib/operation.go` - Defines `operationType` and operator registry
|
||||
- `pkg/yqlib/lexer_participle.go` - Registers operators with their syntax patterns
|
||||
- `pkg/yqlib/operator_<type>.go` - Operator implementation
|
||||
- `pkg/yqlib/operator_<type>_test.go` - Operator tests using `expressionScenario`
|
||||
- `pkg/yqlib/doc/operators/headers/<type>.md` - Documentation header
|
||||
|
||||
### Core Types
|
||||
|
||||
**operationType:**
|
||||
```go
|
||||
type operationType struct {
|
||||
Type string // Unique operator name (e.g., "REVERSE")
|
||||
NumArgs uint // Number of arguments (0 for no args)
|
||||
Precedence uint // Operator precedence (higher = higher precedence)
|
||||
Handler operatorHandler // The function that executes the operator
|
||||
CheckForPostTraverse bool // Whether to apply post-traversal logic
|
||||
ToString func(*Operation) string // Custom string representation
|
||||
}
|
||||
```
|
||||
|
||||
**operatorHandler signature:**
|
||||
```go
|
||||
type operatorHandler func(*dataTreeNavigator, Context, *ExpressionNode) (Context, error)
|
||||
```
|
||||
|
||||
**expressionScenario for tests:**
|
||||
```go
|
||||
type expressionScenario struct {
|
||||
description string
|
||||
subdescription string
|
||||
document string
|
||||
expression string
|
||||
expected []string
|
||||
skipDoc bool
|
||||
expectedError string
|
||||
}
|
||||
```
|
||||
|
||||
## Step-by-Step: Adding a New Operator
|
||||
|
||||
### Step 1: Create the Operator Implementation File
|
||||
|
||||
Create `pkg/yqlib/operator_<type>.go` implementing the operator handler function:
|
||||
- Implement the `operatorHandler` function signature
|
||||
- Process nodes from `context.MatchingNodes`
|
||||
- Return a new `Context` with results using `context.ChildContext()`
|
||||
- Use `candidate.CreateReplacement()` or `candidate.CreateReplacementWithComments()` to create new nodes
|
||||
- Handle errors gracefully with meaningful error messages
|
||||
|
||||
See `operator_reverse.go` or `operator_keys.go` for examples.
|
||||
|
||||
### Step 2: Register the Operator in operation.go
|
||||
|
||||
Add the operator type definition to `pkg/yqlib/operation.go`:
|
||||
|
||||
```go
|
||||
var <type>OpType = &operationType{
|
||||
Type: "<TYPE>", // All caps, matches pattern in lexer
|
||||
NumArgs: 0, // 0 for no args, 1+ for args
|
||||
Precedence: 50, // Typical range: 40-55
|
||||
Handler: <type>Operator, // Reference to handler function
|
||||
}
|
||||
```
|
||||
|
||||
**Precedence guidelines:**
|
||||
- 10-20: Logical operators (OR, AND, UNION)
|
||||
- 30: Pipe operator
|
||||
- 40: Assignment and comparison operators
|
||||
- 42: Arithmetic operators (ADD, SUBTRACT, MULTIPLY, DIVIDE)
|
||||
- 50-52: Most other operators
|
||||
- 55: High precedence (e.g., GET_VARIABLE)
|
||||
|
||||
**Optional fields:**
|
||||
- `CheckForPostTraverse: true` - Set this if your operator can have another operator directly after it without the pipe character. Most of the time this is false.
|
||||
- `ToString: customToString` - Custom string representation (rarely needed)
|
||||
|
||||
### Step 3: Register the Operator in lexer_participle.go
|
||||
|
||||
Edit `pkg/yqlib/lexer_participle.go` to add the operator to the lexer rules:
|
||||
- Use `simpleOp()` for simple keyword patterns
|
||||
- Use object syntax for regex patterns or complex syntax
|
||||
- Support optional characters with `_?` and aliases with `|`
|
||||
|
||||
See existing operators in `lexer_participle.go` for pattern examples.
|
||||
|
||||
### Step 4: Create Tests (Mandatory)
|
||||
|
||||
Create `pkg/yqlib/operator_<type>_test.go` using the `expressionScenario` pattern:
|
||||
- Define test scenarios with `description`, `document`, `expression`, and `expected` fields
|
||||
- `expected` is a slice of strings showing output format: `"D<doc>, P[<path>], (<tag>)::<value>\n"`
|
||||
- Set `skipDoc: true` for edge cases you don't want in generated documentation
|
||||
- Include `subdescription` for longer test names
|
||||
- Set `expectedError` if testing error cases
|
||||
- Create main test function that iterates over scenarios
|
||||
- The main test function should use `documentScenarios` to ensure testcase documentation is generated.
|
||||
|
||||
Test coverage must include:
|
||||
- Basic data types and nested structures
|
||||
- Edge cases (empty inputs, special characters, type errors)
|
||||
- Multiple outputs if applicable
|
||||
- Format-specific features
|
||||
|
||||
See `operator_reverse_test.go` for a simple example and `operator_keys_test.go` for complex cases.
|
||||
|
||||
### Step 5: Create Documentation Header
|
||||
|
||||
Create `pkg/yqlib/doc/operators/headers/<type>.md`:
|
||||
- Use the exact operator name as the title
|
||||
- Include a concise 1-2 sentence summary
|
||||
- Add additional context or examples if the operator is complex
|
||||
|
||||
See existing headers in `doc/operators/headers/` for examples.
|
||||
|
||||
## Working with Context and CandidateNode
|
||||
|
||||
### Context Management
|
||||
- `context.ChildContext(results)` - Create child context with results
|
||||
- `context.GetVariable("varName")` - Get variables stored in context
|
||||
- `context.SetVariable("varName", value)` - Set variables in context
|
||||
|
||||
### CandidateNode Operations
|
||||
- `candidate.CreateReplacement(ScalarNode, "!!str", stringValue)` - Create a replacement node
|
||||
- `candidate.CreateReplacementWithComments(SequenceNode, "!!seq", candidate.Style)` - With style preserved
|
||||
- `candidate.Kind` - The node type (ScalarNode, SequenceNode, MappingNode)
|
||||
- `candidate.Tag` - The YAML tag (!!str, !!int, etc.)
|
||||
- `candidate.Value` - The scalar value (for ScalarNode only)
|
||||
- `candidate.Content` - Child nodes (for SequenceNode and MappingNode)
|
||||
- `candidate.guessTagFromCustomType()` - Infer the tag from Go type
|
||||
- `candidate.AsList()` - Convert to a list representation
|
||||
|
||||
## Key Points
|
||||
|
||||
✅ **DO:**
|
||||
- Implement the operator handler with the correct signature
|
||||
- Register in `operation.go` with appropriate precedence
|
||||
- Add the lexer pattern in `lexer_participle.go`
|
||||
- Write comprehensive tests covering normal and edge cases
|
||||
- Create a documentation header in `doc/operators/headers/`
|
||||
- Use `Context.ChildContext()` for proper context threading
|
||||
- Handle all node types gracefully
|
||||
- Return meaningful error messages
|
||||
|
||||
❌ **DON'T:**
|
||||
- Modify `candidate_node.go` (operators shouldn't need this)
|
||||
- Modify core navigation or evaluation logic
|
||||
- Bypass the handler function pattern
|
||||
- Add format-specific or operator-specific fields to `CandidateNode`
|
||||
- Skip tests or documentation
|
||||
|
||||
## Examples
|
||||
|
||||
Refer to existing operator implementations for patterns:
|
||||
|
||||
- **No-argument operator**: `operator_reverse.go` - Processes arrays/sequences
|
||||
- **Single-argument operator**: `operator_map.go` - Takes an expression argument
|
||||
- **Complex multi-output**: `operator_keys.go` - Produces multiple results
|
||||
- **With preferences**: `operator_to_number.go` - Configuration options
|
||||
- **Error handling**: `operator_error.go` - Control flow with errors
|
||||
- **String operations**: `operator_strings.go` - Multiple related operators
|
||||
|
||||
## Testing Patterns
|
||||
|
||||
Refer to existing test files for specific patterns:
|
||||
- Basic expression tests in `operator_reverse_test.go`
|
||||
- Multi-output tests in `operator_keys_test.go`
|
||||
- Error handling tests in `operator_error_test.go`
|
||||
- Tests with `skipDoc` flag to exclude from generated documentation
|
||||
|
||||
## Common Patterns
|
||||
|
||||
Refer to existing operator implementations for these patterns:
|
||||
- Simple transformation: see `operator_reverse.go`
|
||||
- Type checking: see `operator_error.go`
|
||||
- Working with arguments: see `operator_map.go`
|
||||
- Post-traversal operators: see `operator_with.go`
|
||||
@ -60,7 +60,7 @@ func evaluateAll(cmd *cobra.Command, args []string) (cmdError error) {
|
||||
out := cmd.OutOrStdout()
|
||||
|
||||
if writeInplace {
|
||||
// only use colors if its forced
|
||||
// only use colours if its forced
|
||||
colorsEnabled = forceColor
|
||||
writeInPlaceHandler := yqlib.NewWriteInPlaceHandler(args[0])
|
||||
out, err = writeInPlaceHandler.CreateTempFile()
|
||||
|
||||
328
cmd/evaluate_all_command_test.go
Normal file
328
cmd/evaluate_all_command_test.go
Normal file
@ -0,0 +1,328 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCreateEvaluateAllCommand(t *testing.T) {
|
||||
cmd := createEvaluateAllCommand()
|
||||
|
||||
if cmd == nil {
|
||||
t.Fatal("createEvaluateAllCommand returned nil")
|
||||
}
|
||||
|
||||
// Test basic command properties
|
||||
if cmd.Use != "eval-all [expression] [yaml_file1]..." {
|
||||
t.Errorf("Expected Use to be 'eval-all [expression] [yaml_file1]...', got %q", cmd.Use)
|
||||
}
|
||||
|
||||
if cmd.Short == "" {
|
||||
t.Error("Expected Short description to be non-empty")
|
||||
}
|
||||
|
||||
if cmd.Long == "" {
|
||||
t.Error("Expected Long description to be non-empty")
|
||||
}
|
||||
|
||||
// Test aliases
|
||||
expectedAliases := []string{"ea"}
|
||||
if len(cmd.Aliases) != len(expectedAliases) {
|
||||
t.Errorf("Expected %d aliases, got %d", len(expectedAliases), len(cmd.Aliases))
|
||||
}
|
||||
|
||||
for i, expected := range expectedAliases {
|
||||
if i >= len(cmd.Aliases) || cmd.Aliases[i] != expected {
|
||||
t.Errorf("Expected alias %d to be %q, got %q", i, expected, cmd.Aliases[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateAll_NoArgs(t *testing.T) {
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateAllCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Test with no arguments and no null input
|
||||
nullInput = false
|
||||
defer func() { nullInput = false }()
|
||||
|
||||
err := evaluateAll(cmd, []string{})
|
||||
|
||||
// Should not error, but should print usage
|
||||
if err != nil {
|
||||
t.Errorf("evaluateAll with no args should not error, got: %v", err)
|
||||
}
|
||||
|
||||
// Should have printed usage information
|
||||
if output.Len() == 0 {
|
||||
t.Error("Expected usage information to be printed")
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateAll_NullInput(t *testing.T) {
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateAllCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Test with null input
|
||||
nullInput = true
|
||||
defer func() { nullInput = false }()
|
||||
|
||||
err := evaluateAll(cmd, []string{})
|
||||
|
||||
// Should not error when using null input
|
||||
if err != nil {
|
||||
t.Errorf("evaluateAll with null input should not error, got: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateAll_WithSingleFile(t *testing.T) {
|
||||
// Create a temporary YAML file
|
||||
tempDir := t.TempDir()
|
||||
yamlFile := filepath.Join(tempDir, "test.yaml")
|
||||
yamlContent := []byte("name: test\nage: 25\n")
|
||||
err := os.WriteFile(yamlFile, yamlContent, 0600)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test YAML file: %v", err)
|
||||
}
|
||||
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateAllCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Test with a single file
|
||||
err = evaluateAll(cmd, []string{yamlFile})
|
||||
|
||||
// Should not error
|
||||
if err != nil {
|
||||
t.Errorf("evaluateAll with single file should not error, got: %v", err)
|
||||
}
|
||||
|
||||
// Should have some output
|
||||
if output.Len() == 0 {
|
||||
t.Error("Expected output from evaluateAll with single file")
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateAll_WithMultipleFiles(t *testing.T) {
|
||||
// Create temporary YAML files
|
||||
tempDir := t.TempDir()
|
||||
|
||||
yamlFile1 := filepath.Join(tempDir, "test1.yaml")
|
||||
yamlContent1 := []byte("name: test1\nage: 25\n")
|
||||
err := os.WriteFile(yamlFile1, yamlContent1, 0600)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test YAML file 1: %v", err)
|
||||
}
|
||||
|
||||
yamlFile2 := filepath.Join(tempDir, "test2.yaml")
|
||||
yamlContent2 := []byte("name: test2\nage: 30\n")
|
||||
err = os.WriteFile(yamlFile2, yamlContent2, 0600)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test YAML file 2: %v", err)
|
||||
}
|
||||
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateAllCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Test with multiple files
|
||||
err = evaluateAll(cmd, []string{yamlFile1, yamlFile2})
|
||||
|
||||
// Should not error
|
||||
if err != nil {
|
||||
t.Errorf("evaluateAll with multiple files should not error, got: %v", err)
|
||||
}
|
||||
|
||||
// Should have output
|
||||
if output.Len() == 0 {
|
||||
t.Error("Expected output from evaluateAll with multiple files")
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateAll_WithExpression(t *testing.T) {
|
||||
// Create a temporary YAML file
|
||||
tempDir := t.TempDir()
|
||||
yamlFile := filepath.Join(tempDir, "test.yaml")
|
||||
yamlContent := []byte("name: test\nage: 25\n")
|
||||
err := os.WriteFile(yamlFile, yamlContent, 0600)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test YAML file: %v", err)
|
||||
}
|
||||
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateAllCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Test with expression
|
||||
err = evaluateAll(cmd, []string{".name", yamlFile})
|
||||
|
||||
// Should not error
|
||||
if err != nil {
|
||||
t.Errorf("evaluateAll with expression should not error, got: %v", err)
|
||||
}
|
||||
|
||||
// Should have output
|
||||
if output.Len() == 0 {
|
||||
t.Error("Expected output from evaluateAll with expression")
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateAll_WriteInPlace(t *testing.T) {
|
||||
// Create a temporary YAML file
|
||||
tempDir := t.TempDir()
|
||||
yamlFile := filepath.Join(tempDir, "test.yaml")
|
||||
yamlContent := []byte("name: test\nage: 25\n")
|
||||
err := os.WriteFile(yamlFile, yamlContent, 0600)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test YAML file: %v", err)
|
||||
}
|
||||
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateAllCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Enable write in place
|
||||
originalWriteInplace := writeInplace
|
||||
writeInplace = true
|
||||
defer func() { writeInplace = originalWriteInplace }()
|
||||
|
||||
// Test with write in place
|
||||
err = evaluateAll(cmd, []string{".name = \"updated\"", yamlFile})
|
||||
|
||||
// Should not error
|
||||
if err != nil {
|
||||
t.Errorf("evaluateAll with write in place should not error, got: %v", err)
|
||||
}
|
||||
|
||||
// Verify the file was updated
|
||||
updatedContent, err := os.ReadFile(yamlFile)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to read updated file: %v", err)
|
||||
}
|
||||
|
||||
// Should contain the updated content
|
||||
if !strings.Contains(string(updatedContent), "updated") {
|
||||
t.Errorf("Expected file to contain 'updated', got: %s", string(updatedContent))
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateAll_ExitStatus(t *testing.T) {
|
||||
// Create a temporary YAML file
|
||||
tempDir := t.TempDir()
|
||||
yamlFile := filepath.Join(tempDir, "test.yaml")
|
||||
yamlContent := []byte("name: test\nage: 25\n")
|
||||
err := os.WriteFile(yamlFile, yamlContent, 0600)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test YAML file: %v", err)
|
||||
}
|
||||
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateAllCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Enable exit status
|
||||
originalExitStatus := exitStatus
|
||||
exitStatus = true
|
||||
defer func() { exitStatus = originalExitStatus }()
|
||||
|
||||
// Test with expression that should find no matches
|
||||
err = evaluateAll(cmd, []string{".nonexistent", yamlFile})
|
||||
|
||||
// Should error when no matches found and exit status is enabled
|
||||
if err == nil {
|
||||
t.Error("Expected error when no matches found and exit status is enabled")
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateAll_WithMultipleDocuments(t *testing.T) {
|
||||
// Create a temporary YAML file with multiple documents
|
||||
tempDir := t.TempDir()
|
||||
yamlFile := filepath.Join(tempDir, "test.yaml")
|
||||
yamlContent := []byte("---\nname: doc1\nage: 25\n---\nname: doc2\nage: 30\n")
|
||||
err := os.WriteFile(yamlFile, yamlContent, 0600)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test YAML file: %v", err)
|
||||
}
|
||||
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateAllCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Test with multiple documents
|
||||
err = evaluateAll(cmd, []string{".", yamlFile})
|
||||
|
||||
// Should not error
|
||||
if err != nil {
|
||||
t.Errorf("evaluateAll with multiple documents should not error, got: %v", err)
|
||||
}
|
||||
|
||||
// Should have output
|
||||
if output.Len() == 0 {
|
||||
t.Error("Expected output from evaluateAll with multiple documents")
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateAll_NulSepOutput(t *testing.T) {
|
||||
// Create a temporary YAML file
|
||||
tempDir := t.TempDir()
|
||||
yamlFile := filepath.Join(tempDir, "test.yaml")
|
||||
yamlContent := []byte("name: test\nage: 25\n")
|
||||
err := os.WriteFile(yamlFile, yamlContent, 0600)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test YAML file: %v", err)
|
||||
}
|
||||
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateAllCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Enable nul separator output
|
||||
originalNulSepOutput := nulSepOutput
|
||||
nulSepOutput = true
|
||||
defer func() { nulSepOutput = originalNulSepOutput }()
|
||||
|
||||
// Test with nul separator output
|
||||
err = evaluateAll(cmd, []string{".name", yamlFile})
|
||||
|
||||
// Should not error
|
||||
if err != nil {
|
||||
t.Errorf("evaluateAll with nul separator output should not error, got: %v", err)
|
||||
}
|
||||
|
||||
// Should have output
|
||||
if output.Len() == 0 {
|
||||
t.Error("Expected output from evaluateAll with nul separator output")
|
||||
}
|
||||
}
|
||||
@ -74,7 +74,7 @@ func evaluateSequence(cmd *cobra.Command, args []string) (cmdError error) {
|
||||
}
|
||||
|
||||
if writeInplace {
|
||||
// only use colors if its forced
|
||||
// only use colours if its forced
|
||||
colorsEnabled = forceColor
|
||||
writeInPlaceHandler := yqlib.NewWriteInPlaceHandler(args[0])
|
||||
out, err = writeInPlaceHandler.CreateTempFile()
|
||||
|
||||
276
cmd/evaluate_sequence_command_test.go
Normal file
276
cmd/evaluate_sequence_command_test.go
Normal file
@ -0,0 +1,276 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCreateEvaluateSequenceCommand(t *testing.T) {
|
||||
cmd := createEvaluateSequenceCommand()
|
||||
|
||||
if cmd == nil {
|
||||
t.Fatal("createEvaluateSequenceCommand returned nil")
|
||||
}
|
||||
|
||||
// Test basic command properties
|
||||
if cmd.Use != "eval [expression] [yaml_file1]..." {
|
||||
t.Errorf("Expected Use to be 'eval [expression] [yaml_file1]...', got %q", cmd.Use)
|
||||
}
|
||||
|
||||
if cmd.Short == "" {
|
||||
t.Error("Expected Short description to be non-empty")
|
||||
}
|
||||
|
||||
if cmd.Long == "" {
|
||||
t.Error("Expected Long description to be non-empty")
|
||||
}
|
||||
|
||||
// Test aliases
|
||||
expectedAliases := []string{"e"}
|
||||
if len(cmd.Aliases) != len(expectedAliases) {
|
||||
t.Errorf("Expected %d aliases, got %d", len(expectedAliases), len(cmd.Aliases))
|
||||
}
|
||||
|
||||
for i, expected := range expectedAliases {
|
||||
if i >= len(cmd.Aliases) || cmd.Aliases[i] != expected {
|
||||
t.Errorf("Expected alias %d to be %q, got %q", i, expected, cmd.Aliases[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestProcessExpression(t *testing.T) {
|
||||
// Reset global variables
|
||||
originalPrettyPrint := prettyPrint
|
||||
defer func() { prettyPrint = originalPrettyPrint }()
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
prettyPrint bool
|
||||
expression string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "empty expression without pretty print",
|
||||
prettyPrint: false,
|
||||
expression: "",
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "empty expression with pretty print",
|
||||
prettyPrint: true,
|
||||
expression: "",
|
||||
expected: `(... | (select(tag != "!!str"), select(tag == "!!str") | select(test("(?i)^(y|yes|n|no|on|off)$") | not)) ) style=""`,
|
||||
},
|
||||
{
|
||||
name: "simple expression without pretty print",
|
||||
prettyPrint: false,
|
||||
expression: ".a.b",
|
||||
expected: ".a.b",
|
||||
},
|
||||
{
|
||||
name: "simple expression with pretty print",
|
||||
prettyPrint: true,
|
||||
expression: ".a.b",
|
||||
expected: `.a.b | (... | (select(tag != "!!str"), select(tag == "!!str") | select(test("(?i)^(y|yes|n|no|on|off)$") | not)) ) style=""`,
|
||||
},
|
||||
{
|
||||
name: "complex expression with pretty print",
|
||||
prettyPrint: true,
|
||||
expression: ".items[] | select(.active == true)",
|
||||
expected: `.items[] | select(.active == true) | (... | (select(tag != "!!str"), select(tag == "!!str") | select(test("(?i)^(y|yes|n|no|on|off)$") | not)) ) style=""`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
prettyPrint = tt.prettyPrint
|
||||
result := processExpression(tt.expression)
|
||||
if result != tt.expected {
|
||||
t.Errorf("processExpression(%q) = %q, want %q", tt.expression, result, tt.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateSequence_NoArgs(t *testing.T) {
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateSequenceCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Test with no arguments and no null input
|
||||
nullInput = false
|
||||
defer func() { nullInput = false }()
|
||||
|
||||
err := evaluateSequence(cmd, []string{})
|
||||
|
||||
// Should not error, but should print usage
|
||||
if err != nil {
|
||||
t.Errorf("evaluateSequence with no args should not error, got: %v", err)
|
||||
}
|
||||
|
||||
// Should have printed usage information
|
||||
if output.Len() == 0 {
|
||||
t.Error("Expected usage information to be printed")
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateSequence_NullInput(t *testing.T) {
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateSequenceCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Test with null input
|
||||
nullInput = true
|
||||
defer func() { nullInput = false }()
|
||||
|
||||
err := evaluateSequence(cmd, []string{})
|
||||
|
||||
// Should not error when using null input
|
||||
if err != nil {
|
||||
t.Errorf("evaluateSequence with null input should not error, got: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateSequence_WithFile(t *testing.T) {
|
||||
// Create a temporary YAML file
|
||||
tempDir := t.TempDir()
|
||||
yamlFile := filepath.Join(tempDir, "test.yaml")
|
||||
yamlContent := []byte("name: test\nage: 25\n")
|
||||
err := os.WriteFile(yamlFile, yamlContent, 0600)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test YAML file: %v", err)
|
||||
}
|
||||
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateSequenceCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Test with a file
|
||||
err = evaluateSequence(cmd, []string{yamlFile})
|
||||
|
||||
// Should not error
|
||||
if err != nil {
|
||||
t.Errorf("evaluateSequence with file should not error, got: %v", err)
|
||||
}
|
||||
|
||||
// Should have some output
|
||||
if output.Len() == 0 {
|
||||
t.Error("Expected output from evaluateSequence with file")
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateSequence_WithExpressionAndFile(t *testing.T) {
|
||||
// Create a temporary YAML file
|
||||
tempDir := t.TempDir()
|
||||
yamlFile := filepath.Join(tempDir, "test.yaml")
|
||||
yamlContent := []byte("name: test\nage: 25\n")
|
||||
err := os.WriteFile(yamlFile, yamlContent, 0600)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test YAML file: %v", err)
|
||||
}
|
||||
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateSequenceCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Test with expression and file
|
||||
err = evaluateSequence(cmd, []string{".name", yamlFile})
|
||||
|
||||
// Should not error
|
||||
if err != nil {
|
||||
t.Errorf("evaluateSequence with expression and file should not error, got: %v", err)
|
||||
}
|
||||
|
||||
// Should have output
|
||||
if output.Len() == 0 {
|
||||
t.Error("Expected output from evaluateSequence with expression and file")
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateSequence_WriteInPlace(t *testing.T) {
|
||||
// Create a temporary YAML file
|
||||
tempDir := t.TempDir()
|
||||
yamlFile := filepath.Join(tempDir, "test.yaml")
|
||||
yamlContent := []byte("name: test\nage: 25\n")
|
||||
err := os.WriteFile(yamlFile, yamlContent, 0600)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test YAML file: %v", err)
|
||||
}
|
||||
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateSequenceCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Enable write in place
|
||||
originalWriteInplace := writeInplace
|
||||
writeInplace = true
|
||||
defer func() { writeInplace = originalWriteInplace }()
|
||||
|
||||
// Test with write in place
|
||||
err = evaluateSequence(cmd, []string{".name = \"updated\"", yamlFile})
|
||||
|
||||
// Should not error
|
||||
if err != nil {
|
||||
t.Errorf("evaluateSequence with write in place should not error, got: %v", err)
|
||||
}
|
||||
|
||||
// Verify the file was updated
|
||||
updatedContent, err := os.ReadFile(yamlFile)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to read updated file: %v", err)
|
||||
}
|
||||
|
||||
// Should contain the updated content
|
||||
if !strings.Contains(string(updatedContent), "updated") {
|
||||
t.Errorf("Expected file to contain 'updated', got: %s", string(updatedContent))
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluateSequence_ExitStatus(t *testing.T) {
|
||||
// Create a temporary YAML file
|
||||
tempDir := t.TempDir()
|
||||
yamlFile := filepath.Join(tempDir, "test.yaml")
|
||||
yamlContent := []byte("name: test\nage: 25\n")
|
||||
err := os.WriteFile(yamlFile, yamlContent, 0600)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test YAML file: %v", err)
|
||||
}
|
||||
|
||||
// Create a temporary command
|
||||
cmd := createEvaluateSequenceCommand()
|
||||
|
||||
// Set up command to capture output
|
||||
var output bytes.Buffer
|
||||
cmd.SetOut(&output)
|
||||
|
||||
// Enable exit status
|
||||
originalExitStatus := exitStatus
|
||||
exitStatus = true
|
||||
defer func() { exitStatus = originalExitStatus }()
|
||||
|
||||
// Test with expression that should find no matches
|
||||
err = evaluateSequence(cmd, []string{".nonexistent", yamlFile})
|
||||
|
||||
// Should error when no matches found and exit status is enabled
|
||||
if err == nil {
|
||||
t.Error("Expected error when no matches found and exit status is enabled")
|
||||
}
|
||||
}
|
||||
11
cmd/root.go
11
cmd/root.go
@ -168,6 +168,11 @@ yq -P -oy sample.json
|
||||
rootCmd.PersistentFlags().StringVar(&yqlib.ConfiguredPropertiesPreferences.KeyValueSeparator, "properties-separator", yqlib.ConfiguredPropertiesPreferences.KeyValueSeparator, "separator to use between keys and values")
|
||||
rootCmd.PersistentFlags().BoolVar(&yqlib.ConfiguredPropertiesPreferences.UseArrayBrackets, "properties-array-brackets", yqlib.ConfiguredPropertiesPreferences.UseArrayBrackets, "use [x] in array paths (e.g. for SpringBoot)")
|
||||
|
||||
rootCmd.PersistentFlags().StringVar(&yqlib.ConfiguredShellVariablesPreferences.KeySeparator, "shell-key-separator", yqlib.ConfiguredShellVariablesPreferences.KeySeparator, "separator for shell variable key paths")
|
||||
if err = rootCmd.RegisterFlagCompletionFunc("shell-key-separator", cobra.NoFileCompletions); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
rootCmd.PersistentFlags().BoolVar(&yqlib.StringInterpolationEnabled, "string-interpolation", yqlib.StringInterpolationEnabled, "Toggles strings interpolation of \\(exp)")
|
||||
|
||||
rootCmd.PersistentFlags().BoolVarP(&nullInput, "null-input", "n", false, "Don't read input, simply evaluate the expression given. Useful for creating docs from scratch.")
|
||||
@ -179,7 +184,7 @@ yq -P -oy sample.json
|
||||
}
|
||||
rootCmd.Flags().BoolVarP(&version, "version", "V", false, "Print version information and quit")
|
||||
rootCmd.PersistentFlags().BoolVarP(&writeInplace, "inplace", "i", false, "update the file in place of first file given.")
|
||||
rootCmd.PersistentFlags().VarP(unwrapScalarFlag, "unwrapScalar", "r", "unwrap scalar, print the value with no quotes, colors or comments. Defaults to true for yaml")
|
||||
rootCmd.PersistentFlags().VarP(unwrapScalarFlag, "unwrapScalar", "r", "unwrap scalar, print the value with no quotes, colours or comments. Defaults to true for yaml")
|
||||
rootCmd.PersistentFlags().Lookup("unwrapScalar").NoOptDefVal = "true"
|
||||
rootCmd.PersistentFlags().BoolVarP(&nulSepOutput, "nul-output", "0", false, "Use NUL char to separate values. If unwrap scalar is also set, fail if unwrapped scalar contains NUL char.")
|
||||
|
||||
@ -198,6 +203,7 @@ yq -P -oy sample.json
|
||||
}
|
||||
rootCmd.PersistentFlags().BoolVarP(&yqlib.ConfiguredYamlPreferences.LeadingContentPreProcessing, "header-preprocess", "", true, "Slurp any header comments and separators before processing expression.")
|
||||
rootCmd.PersistentFlags().BoolVarP(&yqlib.ConfiguredYamlPreferences.FixMergeAnchorToSpec, "yaml-fix-merge-anchor-to-spec", "", false, "Fix merge anchor to match YAML spec. Will default to true in late 2025")
|
||||
rootCmd.PersistentFlags().BoolVarP(&yqlib.ConfiguredYamlPreferences.CompactSequenceIndent, "yaml-compact-seq-indent", "c", false, "Use compact sequence indentation where '- ' is considered part of the indentation.")
|
||||
|
||||
rootCmd.PersistentFlags().StringVarP(&splitFileExp, "split-exp", "s", "", "print each result (or doc) into a file named (exp). [exp] argument must return a string. You can use $index in the expression as the result counter. The necessary directories will be created.")
|
||||
if err = rootCmd.RegisterFlagCompletionFunc("split-exp", cobra.NoFileCompletions); err != nil {
|
||||
@ -213,6 +219,9 @@ yq -P -oy sample.json
|
||||
panic(err)
|
||||
}
|
||||
|
||||
rootCmd.PersistentFlags().BoolVarP(&yqlib.ConfiguredSecurityPreferences.DisableEnvOps, "security-disable-env-ops", "", false, "Disable env related operations.")
|
||||
rootCmd.PersistentFlags().BoolVarP(&yqlib.ConfiguredSecurityPreferences.DisableFileOps, "security-disable-file-ops", "", false, "Disable file related operations (e.g. load)")
|
||||
|
||||
rootCmd.AddCommand(
|
||||
createEvaluateSequenceCommand(),
|
||||
createEvaluateAllCommand(),
|
||||
|
||||
264
cmd/root_test.go
Normal file
264
cmd/root_test.go
Normal file
@ -0,0 +1,264 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestNewRuneVar(t *testing.T) {
|
||||
var r rune
|
||||
runeVar := newRuneVar(&r)
|
||||
|
||||
if runeVar == nil {
|
||||
t.Fatal("newRuneVar returned nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRuneValue_String(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
runeVal rune
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple character",
|
||||
runeVal: 'a',
|
||||
expected: "a",
|
||||
},
|
||||
{
|
||||
name: "special character",
|
||||
runeVal: '\n',
|
||||
expected: "\n",
|
||||
},
|
||||
{
|
||||
name: "unicode character",
|
||||
runeVal: 'ñ',
|
||||
expected: "ñ",
|
||||
},
|
||||
{
|
||||
name: "zero rune",
|
||||
runeVal: 0,
|
||||
expected: string(rune(0)),
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
runeVal := runeValue(tt.runeVal)
|
||||
result := runeVal.String()
|
||||
if result != tt.expected {
|
||||
t.Errorf("runeValue.String() = %q, want %q", result, tt.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestRuneValue_Set(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
expected rune
|
||||
expectError bool
|
||||
}{
|
||||
{
|
||||
name: "simple character",
|
||||
input: "a",
|
||||
expected: 'a',
|
||||
expectError: false,
|
||||
},
|
||||
{
|
||||
name: "newline escape",
|
||||
input: "\\n",
|
||||
expected: '\n',
|
||||
expectError: false,
|
||||
},
|
||||
{
|
||||
name: "tab escape",
|
||||
input: "\\t",
|
||||
expected: '\t',
|
||||
expectError: false,
|
||||
},
|
||||
{
|
||||
name: "carriage return escape",
|
||||
input: "\\r",
|
||||
expected: '\r',
|
||||
expectError: false,
|
||||
},
|
||||
{
|
||||
name: "form feed escape",
|
||||
input: "\\f",
|
||||
expected: '\f',
|
||||
expectError: false,
|
||||
},
|
||||
{
|
||||
name: "vertical tab escape",
|
||||
input: "\\v",
|
||||
expected: '\v',
|
||||
expectError: false,
|
||||
},
|
||||
{
|
||||
name: "empty string",
|
||||
input: "",
|
||||
expected: 0,
|
||||
expectError: true,
|
||||
},
|
||||
{
|
||||
name: "multiple characters",
|
||||
input: "ab",
|
||||
expected: 0,
|
||||
expectError: true,
|
||||
},
|
||||
{
|
||||
name: "special character",
|
||||
input: "ñ",
|
||||
expected: 'ñ',
|
||||
expectError: true, // This will fail because the Set function checks len(val) != 1
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
var r rune
|
||||
runeVal := newRuneVar(&r)
|
||||
|
||||
err := runeVal.Set(tt.input)
|
||||
|
||||
if tt.expectError {
|
||||
if err == nil {
|
||||
t.Errorf("Expected error for input %q, but got none", tt.input)
|
||||
}
|
||||
} else {
|
||||
if err != nil {
|
||||
t.Errorf("Unexpected error for input %q: %v", tt.input, err)
|
||||
}
|
||||
if r != tt.expected {
|
||||
t.Errorf("Expected rune %q (%d), got %q (%d)",
|
||||
string(tt.expected), tt.expected, string(r), r)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestRuneValue_Set_ErrorMessages(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
expectedError string
|
||||
}{
|
||||
{
|
||||
name: "empty string error",
|
||||
input: "",
|
||||
expectedError: "[] is not a valid character. Must be length 1 was 0",
|
||||
},
|
||||
{
|
||||
name: "multiple characters error",
|
||||
input: "abc",
|
||||
expectedError: "[abc] is not a valid character. Must be length 1 was 3",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
var r rune
|
||||
runeVal := newRuneVar(&r)
|
||||
|
||||
err := runeVal.Set(tt.input)
|
||||
|
||||
if err == nil {
|
||||
t.Errorf("Expected error for input %q, but got none", tt.input)
|
||||
return
|
||||
}
|
||||
|
||||
if !strings.Contains(err.Error(), tt.expectedError) {
|
||||
t.Errorf("Expected error message to contain %q, got %q",
|
||||
tt.expectedError, err.Error())
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestRuneValue_Type(t *testing.T) {
|
||||
var r rune
|
||||
runeVal := newRuneVar(&r)
|
||||
|
||||
result := runeVal.Type()
|
||||
expected := "char"
|
||||
|
||||
if result != expected {
|
||||
t.Errorf("runeValue.Type() = %q, want %q", result, expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNew(t *testing.T) {
|
||||
rootCmd := New()
|
||||
|
||||
if rootCmd == nil {
|
||||
t.Fatal("New() returned nil")
|
||||
}
|
||||
|
||||
// Test basic command properties
|
||||
if rootCmd.Use != "yq" {
|
||||
t.Errorf("Expected Use to be 'yq', got %q", rootCmd.Use)
|
||||
}
|
||||
|
||||
if rootCmd.Short == "" {
|
||||
t.Error("Expected Short description to be non-empty")
|
||||
}
|
||||
|
||||
if rootCmd.Long == "" {
|
||||
t.Error("Expected Long description to be non-empty")
|
||||
}
|
||||
|
||||
// Test that the command has the expected subcommands
|
||||
expectedCommands := []string{"eval", "eval-all", "completion"}
|
||||
actualCommands := make([]string, 0, len(rootCmd.Commands()))
|
||||
|
||||
for _, cmd := range rootCmd.Commands() {
|
||||
actualCommands = append(actualCommands, cmd.Name())
|
||||
}
|
||||
|
||||
for _, expected := range expectedCommands {
|
||||
found := false
|
||||
for _, actual := range actualCommands {
|
||||
if actual == expected {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("Expected command %q not found in actual commands: %v",
|
||||
expected, actualCommands)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestNew_FlagCompletions(t *testing.T) {
|
||||
rootCmd := New()
|
||||
|
||||
// Test that flag completion functions are registered
|
||||
// This is a basic smoke test - we can't easily test the actual completion logic
|
||||
// without more complex setup
|
||||
flags := []string{
|
||||
"output-format",
|
||||
"input-format",
|
||||
"xml-attribute-prefix",
|
||||
"xml-content-name",
|
||||
"xml-proc-inst-prefix",
|
||||
"xml-directive-name",
|
||||
"lua-prefix",
|
||||
"lua-suffix",
|
||||
"properties-separator",
|
||||
"indent",
|
||||
"front-matter",
|
||||
"expression",
|
||||
"split-exp",
|
||||
}
|
||||
|
||||
for _, flagName := range flags {
|
||||
flag := rootCmd.PersistentFlags().Lookup(flagName)
|
||||
if flag == nil {
|
||||
t.Errorf("Expected flag %q to exist", flagName)
|
||||
}
|
||||
}
|
||||
}
|
||||
10
cmd/utils.go
10
cmd/utils.go
@ -166,6 +166,9 @@ func configureDecoder(evaluateTogether bool) (yqlib.Decoder, error) {
|
||||
}
|
||||
yqlib.ConfiguredYamlPreferences.EvaluateTogether = evaluateTogether
|
||||
|
||||
if format.DecoderFactory == nil {
|
||||
return nil, fmt.Errorf("no support for %s input format", inputFormat)
|
||||
}
|
||||
yqlibDecoder := format.DecoderFactory()
|
||||
if yqlibDecoder == nil {
|
||||
return nil, fmt.Errorf("no support for %s input format", inputFormat)
|
||||
@ -197,16 +200,23 @@ func configureEncoder() (yqlib.Encoder, error) {
|
||||
}
|
||||
yqlib.ConfiguredXMLPreferences.Indent = indent
|
||||
yqlib.ConfiguredYamlPreferences.Indent = indent
|
||||
yqlib.ConfiguredKYamlPreferences.Indent = indent
|
||||
yqlib.ConfiguredJSONPreferences.Indent = indent
|
||||
|
||||
yqlib.ConfiguredYamlPreferences.UnwrapScalar = unwrapScalar
|
||||
yqlib.ConfiguredKYamlPreferences.UnwrapScalar = unwrapScalar
|
||||
yqlib.ConfiguredPropertiesPreferences.UnwrapScalar = unwrapScalar
|
||||
yqlib.ConfiguredJSONPreferences.UnwrapScalar = unwrapScalar
|
||||
yqlib.ConfiguredShellVariablesPreferences.UnwrapScalar = unwrapScalar
|
||||
|
||||
yqlib.ConfiguredYamlPreferences.ColorsEnabled = colorsEnabled
|
||||
yqlib.ConfiguredKYamlPreferences.ColorsEnabled = colorsEnabled
|
||||
yqlib.ConfiguredJSONPreferences.ColorsEnabled = colorsEnabled
|
||||
yqlib.ConfiguredHclPreferences.ColorsEnabled = colorsEnabled
|
||||
yqlib.ConfiguredTomlPreferences.ColorsEnabled = colorsEnabled
|
||||
|
||||
yqlib.ConfiguredYamlPreferences.PrintDocSeparators = !noDocSeparators
|
||||
yqlib.ConfiguredKYamlPreferences.PrintDocSeparators = !noDocSeparators
|
||||
|
||||
encoder := yqlibOutputFormat.EncoderFactory()
|
||||
|
||||
|
||||
@ -926,13 +926,13 @@ func TestSetupColors(t *testing.T) {
|
||||
expectColors bool
|
||||
}{
|
||||
{
|
||||
name: "force color enabled",
|
||||
name: "force colour enabled",
|
||||
forceColor: true,
|
||||
forceNoColor: false,
|
||||
expectColors: true,
|
||||
},
|
||||
{
|
||||
name: "force no color enabled",
|
||||
name: "force no colour enabled",
|
||||
forceColor: false,
|
||||
forceNoColor: true,
|
||||
expectColors: false,
|
||||
|
||||
@ -11,7 +11,7 @@ var (
|
||||
GitDescribe string
|
||||
|
||||
// Version is main version number that is being run at the moment.
|
||||
Version = "v4.48.1"
|
||||
Version = "v4.52.4"
|
||||
|
||||
// VersionPrerelease is a pre-release marker for the version. If this is "" (empty string)
|
||||
// then it means that it is a final release. Otherwise, this is a pre-release
|
||||
|
||||
@ -1,6 +1,9 @@
|
||||
package cmd
|
||||
|
||||
import "testing"
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestGetVersionDisplay(t *testing.T) {
|
||||
var expectedVersion = ProductName + " (https://github.com/mikefarah/yq/) version " + Version
|
||||
@ -25,6 +28,18 @@ func TestGetVersionDisplay(t *testing.T) {
|
||||
}
|
||||
|
||||
func Test_getHumanVersion(t *testing.T) {
|
||||
// Save original values
|
||||
origGitDescribe := GitDescribe
|
||||
origGitCommit := GitCommit
|
||||
origVersionPrerelease := VersionPrerelease
|
||||
|
||||
// Restore after test
|
||||
defer func() {
|
||||
GitDescribe = origGitDescribe
|
||||
GitCommit = origGitCommit
|
||||
VersionPrerelease = origVersionPrerelease
|
||||
}()
|
||||
|
||||
GitDescribe = "e42813d"
|
||||
GitCommit = "e42813d+CHANGES"
|
||||
var wanted string
|
||||
@ -49,3 +64,118 @@ func Test_getHumanVersion(t *testing.T) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Test_getHumanVersion_NoGitDescribe(t *testing.T) {
|
||||
// Save original values
|
||||
origGitDescribe := GitDescribe
|
||||
origGitCommit := GitCommit
|
||||
origVersionPrerelease := VersionPrerelease
|
||||
|
||||
// Restore after test
|
||||
defer func() {
|
||||
GitDescribe = origGitDescribe
|
||||
GitCommit = origGitCommit
|
||||
VersionPrerelease = origVersionPrerelease
|
||||
}()
|
||||
|
||||
GitDescribe = ""
|
||||
GitCommit = ""
|
||||
VersionPrerelease = ""
|
||||
|
||||
got := getHumanVersion()
|
||||
if got != Version {
|
||||
t.Errorf("getHumanVersion() = %v, want %v", got, Version)
|
||||
}
|
||||
}
|
||||
|
||||
func Test_getHumanVersion_WithPrerelease(t *testing.T) {
|
||||
// Save original values
|
||||
origGitDescribe := GitDescribe
|
||||
origGitCommit := GitCommit
|
||||
origVersionPrerelease := VersionPrerelease
|
||||
|
||||
// Restore after test
|
||||
defer func() {
|
||||
GitDescribe = origGitDescribe
|
||||
GitCommit = origGitCommit
|
||||
VersionPrerelease = origVersionPrerelease
|
||||
}()
|
||||
|
||||
GitDescribe = ""
|
||||
GitCommit = "abc123"
|
||||
VersionPrerelease = "beta"
|
||||
|
||||
got := getHumanVersion()
|
||||
expected := Version + "-beta (abc123)"
|
||||
if got != expected {
|
||||
t.Errorf("getHumanVersion() = %v, want %v", got, expected)
|
||||
}
|
||||
}
|
||||
|
||||
func Test_getHumanVersion_PrereleaseInVersion(t *testing.T) {
|
||||
// Save original values
|
||||
origGitDescribe := GitDescribe
|
||||
origGitCommit := GitCommit
|
||||
origVersionPrerelease := VersionPrerelease
|
||||
|
||||
// Restore after test
|
||||
defer func() {
|
||||
GitDescribe = origGitDescribe
|
||||
GitCommit = origGitCommit
|
||||
VersionPrerelease = origVersionPrerelease
|
||||
}()
|
||||
|
||||
GitDescribe = "v1.2.3-rc1"
|
||||
GitCommit = "xyz789"
|
||||
VersionPrerelease = "rc1"
|
||||
|
||||
got := getHumanVersion()
|
||||
// Should not duplicate "rc1" since it's already in GitDescribe
|
||||
expected := "v1.2.3-rc1 (xyz789)"
|
||||
if got != expected {
|
||||
t.Errorf("getHumanVersion() = %v, want %v", got, expected)
|
||||
}
|
||||
}
|
||||
|
||||
func Test_getHumanVersion_StripSingleQuotes(t *testing.T) {
|
||||
// Save original values
|
||||
origGitDescribe := GitDescribe
|
||||
origGitCommit := GitCommit
|
||||
origVersionPrerelease := VersionPrerelease
|
||||
|
||||
// Restore after test
|
||||
defer func() {
|
||||
GitDescribe = origGitDescribe
|
||||
GitCommit = origGitCommit
|
||||
VersionPrerelease = origVersionPrerelease
|
||||
}()
|
||||
|
||||
GitDescribe = "'v1.2.3'"
|
||||
GitCommit = "'commit123'"
|
||||
VersionPrerelease = ""
|
||||
|
||||
got := getHumanVersion()
|
||||
// Should strip single quotes
|
||||
if strings.Contains(got, "'") {
|
||||
t.Errorf("getHumanVersion() = %v, should not contain single quotes", got)
|
||||
}
|
||||
expected := "v1.2.3"
|
||||
if got != expected {
|
||||
t.Errorf("getHumanVersion() = %v, want %v", got, expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestProductName(t *testing.T) {
|
||||
if ProductName != "yq" {
|
||||
t.Errorf("ProductName = %v, want yq", ProductName)
|
||||
}
|
||||
}
|
||||
|
||||
func TestVersionIsSet(t *testing.T) {
|
||||
if Version == "" {
|
||||
t.Error("Version should not be empty")
|
||||
}
|
||||
if !strings.HasPrefix(Version, "v") {
|
||||
t.Errorf("Version %v should start with 'v'", Version)
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,8 +1 @@
|
||||
# 001
|
||||
---
|
||||
abc: # 001
|
||||
- 1 # one
|
||||
- 2 # two
|
||||
|
||||
---
|
||||
def # 002
|
||||
a: apple
|
||||
10
examples/kyaml.kyaml
Normal file
10
examples/kyaml.kyaml
Normal file
@ -0,0 +1,10 @@
|
||||
# leading
|
||||
{
|
||||
a: 1, # a line
|
||||
# head b
|
||||
b: 2,
|
||||
c: [
|
||||
# head d
|
||||
"d", # d line
|
||||
],
|
||||
}
|
||||
7
examples/kyaml.yml
Normal file
7
examples/kyaml.yml
Normal file
@ -0,0 +1,7 @@
|
||||
# leading
|
||||
a: 1 # a line
|
||||
# head b
|
||||
b: 2
|
||||
c:
|
||||
# head d
|
||||
- d # d line
|
||||
8
examples/sample.hcl
Normal file
8
examples/sample.hcl
Normal file
@ -0,0 +1,8 @@
|
||||
# Arithmetic with literals and application-provided variables
|
||||
sum = 1 + addend
|
||||
|
||||
# String interpolation and templates
|
||||
message = "Hello, ${name}!"
|
||||
|
||||
# Application-provided functions
|
||||
shouty_message = upper(message)
|
||||
27
examples/sample.tf
Normal file
27
examples/sample.tf
Normal file
@ -0,0 +1,27 @@
|
||||
# main.tf
|
||||
|
||||
# Define required providers and minimum Terraform version
|
||||
terraform {
|
||||
required_providers {
|
||||
aws = {
|
||||
source = "hashicorp/aws"
|
||||
version = "~> 5.0"
|
||||
}
|
||||
}
|
||||
required_version = ">= 1.2"
|
||||
}
|
||||
|
||||
# Configure the AWS provider
|
||||
provider "aws" {
|
||||
region = var.aws_region
|
||||
}
|
||||
|
||||
# Define an S3 bucket resource
|
||||
resource "aws_s3_bucket" "example_bucket" {
|
||||
bucket = var.bucket_name
|
||||
|
||||
tags = {
|
||||
Environment = "Development"
|
||||
Project = "TerraformExample"
|
||||
}
|
||||
}
|
||||
8
examples/sample2.hcl
Normal file
8
examples/sample2.hcl
Normal file
@ -0,0 +1,8 @@
|
||||
# Arithmetic with literals and application-provided variables
|
||||
sum = 1 + addend
|
||||
|
||||
# String interpolation and templates
|
||||
message = "Hello, ${name}!"
|
||||
|
||||
# Application-provided functions
|
||||
shouty_message = upper(message)
|
||||
21
go.mod
21
go.mod
@ -9,25 +9,34 @@ require (
|
||||
github.com/fatih/color v1.18.0
|
||||
github.com/go-ini/ini v1.67.0
|
||||
github.com/goccy/go-json v0.10.5
|
||||
github.com/goccy/go-yaml v1.18.0
|
||||
github.com/goccy/go-yaml v1.19.2
|
||||
github.com/hashicorp/hcl/v2 v2.24.0
|
||||
github.com/jinzhu/copier v0.4.0
|
||||
github.com/magiconair/properties v1.8.10
|
||||
github.com/pelletier/go-toml/v2 v2.2.4
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e
|
||||
github.com/spf13/cobra v1.10.1
|
||||
github.com/spf13/cobra v1.10.2
|
||||
github.com/spf13/pflag v1.0.10
|
||||
github.com/yuin/gopher-lua v1.1.1
|
||||
go.yaml.in/yaml/v3 v3.0.4
|
||||
golang.org/x/net v0.46.0
|
||||
golang.org/x/text v0.30.0
|
||||
github.com/zclconf/go-cty v1.17.0
|
||||
go.yaml.in/yaml/v4 v4.0.0-rc.3
|
||||
golang.org/x/mod v0.33.0
|
||||
golang.org/x/net v0.50.0
|
||||
golang.org/x/text v0.34.0
|
||||
gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/agext/levenshtein v1.2.1 // indirect
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
|
||||
github.com/google/go-cmp v0.6.0 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
golang.org/x/sys v0.37.0 // indirect
|
||||
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
|
||||
golang.org/x/sync v0.19.0 // indirect
|
||||
golang.org/x/sys v0.41.0 // indirect
|
||||
golang.org/x/tools v0.41.0 // indirect
|
||||
)
|
||||
|
||||
go 1.24.0
|
||||
|
||||
52
go.sum
52
go.sum
@ -1,14 +1,19 @@
|
||||
github.com/a8m/envsubst v1.4.3 h1:kDF7paGK8QACWYaQo6KtyYBozY2jhQrTuNNuUxQkhJY=
|
||||
github.com/a8m/envsubst v1.4.3/go.mod h1:4jjHWQlZoaXPoLQUb7H2qT4iLkZDdmEQiOUogdUmqVU=
|
||||
github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8=
|
||||
github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
|
||||
github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0=
|
||||
github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
|
||||
github.com/alecthomas/participle/v2 v2.1.4 h1:W/H79S8Sat/krZ3el6sQMvMaahJ+XcM9WSI2naI7w2U=
|
||||
github.com/alecthomas/participle/v2 v2.1.4/go.mod h1:8tqVbpTX20Ru4NfYQgZf4mP18eXPTBViyMWiArNEgGI=
|
||||
github.com/alecthomas/repr v0.5.2 h1:SU73FTI9D1P5UNtvseffFSGmdNci/O6RsqzeXJtP0Qs=
|
||||
github.com/alecthomas/repr v0.5.2/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U=
|
||||
github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE=
|
||||
github.com/elliotchance/orderedmap v1.8.0 h1:TrOREecvh3JbS+NCgwposXG5ZTFHtEsQiCGOhPElnMw=
|
||||
@ -17,10 +22,16 @@ github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
|
||||
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
|
||||
github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A=
|
||||
github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
|
||||
github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
|
||||
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
|
||||
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
|
||||
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
|
||||
github.com/goccy/go-yaml v1.19.2 h1:PmFC1S6h8ljIz6gMRBopkjP1TVT7xuwrButHID66PoM=
|
||||
github.com/goccy/go-yaml v1.19.2/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/hashicorp/hcl/v2 v2.24.0 h1:2QJdZ454DSsYGoaE6QheQZjtKZSUs9Nh2izTWiwQxvE=
|
||||
github.com/hashicorp/hcl/v2 v2.24.0/go.mod h1:oGoO1FIQYfn/AgyOhlg9qLC6/nOJPX3qGbkZpYAcqfM=
|
||||
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
|
||||
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
@ -33,6 +44,8 @@ github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHP
|
||||
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
|
||||
github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgcC4zIxODThtZNPirFr42+A=
|
||||
@ -40,8 +53,8 @@ github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsK
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
|
||||
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
|
||||
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
|
||||
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
|
||||
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
|
||||
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
@ -50,19 +63,28 @@ github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5Cc
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M=
|
||||
github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
|
||||
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
|
||||
github.com/zclconf/go-cty v1.17.0 h1:seZvECve6XX4tmnvRzWtJNHdscMtYEx5R7bnnVyd/d0=
|
||||
github.com/zclconf/go-cty v1.17.0/go.mod h1:wqFzcImaLTI6A5HfsRwB0nj5n0MRZFwmey8YoFPPs3U=
|
||||
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo=
|
||||
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM=
|
||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
|
||||
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
|
||||
go.yaml.in/yaml/v4 v4.0.0-rc.3 h1:3h1fjsh1CTAPjW7q/EMe+C8shx5d8ctzZTrLcs/j8Go=
|
||||
go.yaml.in/yaml/v4 v4.0.0-rc.3/go.mod h1:aZqd9kCMsGL7AuUv/m/PvWLdg5sjJsZ4oHDEnfPPfY0=
|
||||
golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8=
|
||||
golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w=
|
||||
golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
|
||||
golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
|
||||
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
|
||||
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
|
||||
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
|
||||
golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
|
||||
golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
|
||||
golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473 h1:6D+BvnJ/j6e222UW8s2qTSe3wGBtvo0MbVQG/c5k8RE=
|
||||
gopkg.in/op/go-logging.v1 v1.0.0-20160211212156-b2cb9fa56473/go.mod h1:N1eN2tsCx0Ydtgjl4cqmbRCsY4/+z4cYDeqwZTk6zog=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
||||
24
go_install_test.go
Normal file
24
go_install_test.go
Normal file
@ -0,0 +1,24 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"io"
|
||||
"testing"
|
||||
|
||||
"golang.org/x/mod/module"
|
||||
"golang.org/x/mod/zip"
|
||||
)
|
||||
|
||||
// TestGoInstallCompatibility ensures the module can be zipped for go install.
|
||||
// This is an integration test that uses the same zip.CreateFromDir function
|
||||
// that go install uses internally. If this test fails, go install will fail.
|
||||
// See: https://github.com/mikefarah/yq/issues/2587
|
||||
func TestGoInstallCompatibility(t *testing.T) {
|
||||
mod := module.Version{
|
||||
Path: "github.com/mikefarah/yq/v4",
|
||||
Version: "v4.0.0", // the actual version doesn't matter for validation
|
||||
}
|
||||
|
||||
if err := zip.CreateFromDir(io.Discard, mod, "."); err != nil {
|
||||
t.Fatalf("Module cannot be zipped for go install: %v", err)
|
||||
}
|
||||
}
|
||||
@ -1,8 +1,63 @@
|
||||
# How it works
|
||||
# Expression Syntax: A Visual Guide
|
||||
In `yq`, expressions are made up of operators and pipes. A context of nodes is passed through the expression, and each operation takes the context as input and returns a new context as output. That output is piped in as input for the next operation in the expression.
|
||||
|
||||
In `yq` expressions are made up of operators and pipes. A context of nodes is passed through the expression and each operation takes the context as input and returns a new context as output. That output is piped in as input for the next operation in the expression. To begin with, the context is set to the first yaml document of the first yaml file (if processing in sequence using eval).
|
||||
Let's break down the process step by step using a diagram. We'll start with a single YAML document, apply an expression, and observe how the context changes at each step.
|
||||
|
||||
Lets look at a couple of examples.
|
||||
Given a document like:
|
||||
|
||||
```yaml
|
||||
root:
|
||||
items:
|
||||
- name: apple
|
||||
type: fruit
|
||||
- name: carrot
|
||||
type: vegetable
|
||||
- name: banana
|
||||
type: fruit
|
||||
```
|
||||
|
||||
You can use dot notation to access nested structures. For example, to access the `name` of the first item, you would use the expression `.root.items[0].name`, which would return `apple`.
|
||||
|
||||
But lets see how we could find all the fruit under `items`
|
||||
|
||||
## Step 1: Initial Context
|
||||
The context starts at the root of the YAML document. In this case, the entire document is the initial context.
|
||||
|
||||
```
|
||||
root
|
||||
└── items
|
||||
├── name: apple
|
||||
│ type: fruit
|
||||
├── name: carrot
|
||||
│ type: vegetable
|
||||
└── name: banana
|
||||
type: fruit
|
||||
```
|
||||
|
||||
## Step 2: Splatting the Array
|
||||
Using the expression `.root.items[]`, we "splat" the items array. This means each element of the array becomes its own node in the context:
|
||||
|
||||
```
|
||||
Node 1: { name: apple, type: fruit }
|
||||
Node 2: { name: carrot, type: vegetable }
|
||||
Node 3: { name: banana, type: fruit }
|
||||
```
|
||||
|
||||
## Step 3: Filtering the Nodes
|
||||
Next, we apply a filter to select only the nodes where type is fruit. The expression `.root.items[] | select(.type == "fruit")` filters the nodes:
|
||||
|
||||
```
|
||||
Filtered Node 1: { name: apple, type: fruit }
|
||||
Filtered Node 2: { name: banana, type: fruit }
|
||||
```
|
||||
|
||||
## Step 4: Extracting a Field
|
||||
Finally, we extract the name field from the filtered nodes using `.root.items[] | select(.type == "fruit") | .name` This results in:
|
||||
|
||||
```
|
||||
apple
|
||||
banana
|
||||
```
|
||||
|
||||
## Simple assignment example
|
||||
|
||||
@ -44,7 +99,6 @@ a: dog
|
||||
b: dog
|
||||
```
|
||||
|
||||
|
||||
## Complex assignment, operator precedence rules
|
||||
|
||||
Just like math expressions - `yq` expressions have an order of precedence. The pipe `|` operator has a low order of precedence, so operators with higher precedence will get evaluated first.
|
||||
@ -73,7 +127,7 @@ name: sally
|
||||
fruit: mango
|
||||
```
|
||||
|
||||
To properly update this yaml, you will need to use brackets (think BODMAS from maths) and wrap the entire LHS:
|
||||
**Important**: To properly update this YAML, you must wrap the entire LHS in parentheses. Think of it like using brackets in math to ensure the correct order of operations.
|
||||
`(.[] | select(.name == "sally") | .fruit) = "mango"`
|
||||
|
||||
|
||||
@ -126,4 +180,4 @@ The assignment operator then copies across the value from the RHS to the value o
|
||||
```yaml
|
||||
a: 2
|
||||
b: thing
|
||||
```
|
||||
```
|
||||
269
pkg/yqlib/base64_test.go
Normal file
269
pkg/yqlib/base64_test.go
Normal file
@ -0,0 +1,269 @@
|
||||
//go:build !yq_nobase64
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
)
|
||||
|
||||
const base64EncodedSimple = "YSBzcGVjaWFsIHN0cmluZw=="
|
||||
const base64DecodedSimpleExtraSpaces = "\n " + base64EncodedSimple + " \n"
|
||||
const base64DecodedSimple = "a special string"
|
||||
|
||||
const base64EncodedUTF8 = "V29ya3Mgd2l0aCBVVEYtMTYg8J+Yig=="
|
||||
const base64DecodedUTF8 = "Works with UTF-16 😊"
|
||||
|
||||
const base64EncodedYaml = "YTogYXBwbGUK"
|
||||
const base64DecodedYaml = "a: apple\n"
|
||||
|
||||
const base64EncodedEmpty = ""
|
||||
const base64DecodedEmpty = ""
|
||||
|
||||
const base64MissingPadding = "Y2F0cw"
|
||||
const base64DecodedMissingPadding = "cats"
|
||||
|
||||
const base64EncodedCats = "Y2F0cw=="
|
||||
const base64DecodedCats = "cats"
|
||||
|
||||
var base64Scenarios = []formatScenario{
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "empty decode",
|
||||
input: base64EncodedEmpty,
|
||||
expected: base64DecodedEmpty + "\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "simple decode",
|
||||
input: base64EncodedSimple,
|
||||
expected: base64DecodedSimple + "\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "Decode base64: simple",
|
||||
subdescription: "Decoded data is assumed to be a string.",
|
||||
input: base64EncodedSimple,
|
||||
expected: base64DecodedSimple + "\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "Decode base64: UTF-8",
|
||||
subdescription: "Base64 decoding supports UTF-8 encoded strings.",
|
||||
input: base64EncodedUTF8,
|
||||
expected: base64DecodedUTF8 + "\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "decode missing padding",
|
||||
input: base64MissingPadding,
|
||||
expected: base64DecodedMissingPadding + "\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
|
||||
description: "Decode with extra spaces",
|
||||
subdescription: "Extra leading/trailing whitespace is stripped",
|
||||
input: base64DecodedSimpleExtraSpaces,
|
||||
expected: base64DecodedSimple + "\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "decode with padding",
|
||||
input: base64EncodedCats,
|
||||
expected: base64DecodedCats + "\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "decode yaml document",
|
||||
input: base64EncodedYaml,
|
||||
expected: base64DecodedYaml + "\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "Encode base64: string",
|
||||
input: "\"" + base64DecodedSimple + "\"",
|
||||
expected: base64EncodedSimple,
|
||||
scenarioType: "encode",
|
||||
},
|
||||
{
|
||||
description: "Encode base64: string from document",
|
||||
subdescription: "Extract a string field and encode it to base64.",
|
||||
input: "coolData: \"" + base64DecodedSimple + "\"",
|
||||
expression: ".coolData",
|
||||
expected: base64EncodedSimple,
|
||||
scenarioType: "encode",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "encode empty string",
|
||||
input: "\"\"",
|
||||
expected: "",
|
||||
scenarioType: "encode",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "encode UTF-8 string",
|
||||
input: "\"" + base64DecodedUTF8 + "\"",
|
||||
expected: base64EncodedUTF8,
|
||||
scenarioType: "encode",
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "encode cats",
|
||||
input: "\"" + base64DecodedCats + "\"",
|
||||
expected: base64EncodedCats,
|
||||
scenarioType: "encode",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: simple",
|
||||
skipDoc: true,
|
||||
input: base64EncodedSimple,
|
||||
expected: base64EncodedSimple,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: UTF-8",
|
||||
skipDoc: true,
|
||||
input: base64EncodedUTF8,
|
||||
expected: base64EncodedUTF8,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: missing padding",
|
||||
skipDoc: true,
|
||||
input: base64MissingPadding,
|
||||
expected: base64EncodedCats,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: empty",
|
||||
skipDoc: true,
|
||||
input: base64EncodedEmpty,
|
||||
expected: base64EncodedEmpty,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Encode error: non-string",
|
||||
skipDoc: true,
|
||||
input: "123",
|
||||
expectedError: "cannot encode !!int as base64, can only operate on strings",
|
||||
scenarioType: "encode-error",
|
||||
},
|
||||
{
|
||||
description: "Encode error: array",
|
||||
skipDoc: true,
|
||||
input: "[1, 2, 3]",
|
||||
expectedError: "cannot encode !!seq as base64, can only operate on strings",
|
||||
scenarioType: "encode-error",
|
||||
},
|
||||
{
|
||||
description: "Encode error: map",
|
||||
skipDoc: true,
|
||||
input: "{b: c}",
|
||||
expectedError: "cannot encode !!map as base64, can only operate on strings",
|
||||
scenarioType: "encode-error",
|
||||
},
|
||||
}
|
||||
|
||||
func testBase64Scenario(t *testing.T, s formatScenario) {
|
||||
switch s.scenarioType {
|
||||
case "", "decode":
|
||||
yamlPrefs := ConfiguredYamlPreferences.Copy()
|
||||
yamlPrefs.Indent = 4
|
||||
test.AssertResultWithContext(t, s.expected, mustProcessFormatScenario(s, NewBase64Decoder(), NewYamlEncoder(yamlPrefs)), s.description)
|
||||
case "encode":
|
||||
test.AssertResultWithContext(t, s.expected, mustProcessFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewBase64Encoder()), s.description)
|
||||
case "roundtrip":
|
||||
test.AssertResultWithContext(t, s.expected, mustProcessFormatScenario(s, NewBase64Decoder(), NewBase64Encoder()), s.description)
|
||||
case "encode-error":
|
||||
result, err := processFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewBase64Encoder())
|
||||
if err == nil {
|
||||
t.Errorf("Expected error '%v' but it worked: %v", s.expectedError, result)
|
||||
} else {
|
||||
test.AssertResultComplexWithContext(t, s.expectedError, err.Error(), s.description)
|
||||
}
|
||||
|
||||
default:
|
||||
panic(fmt.Sprintf("unhandled scenario type %q", s.scenarioType))
|
||||
}
|
||||
}
|
||||
|
||||
func documentBase64Scenario(_ *testing.T, w *bufio.Writer, i interface{}) {
|
||||
s := i.(formatScenario)
|
||||
|
||||
if s.skipDoc {
|
||||
return
|
||||
}
|
||||
switch s.scenarioType {
|
||||
case "", "decode":
|
||||
documentBase64DecodeScenario(w, s)
|
||||
case "encode":
|
||||
documentBase64EncodeScenario(w, s)
|
||||
|
||||
default:
|
||||
panic(fmt.Sprintf("unhandled scenario type %q", s.scenarioType))
|
||||
}
|
||||
}
|
||||
|
||||
func documentBase64DecodeScenario(w *bufio.Writer, s formatScenario) {
|
||||
writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))
|
||||
|
||||
if s.subdescription != "" {
|
||||
writeOrPanic(w, s.subdescription)
|
||||
writeOrPanic(w, "\n\n")
|
||||
}
|
||||
|
||||
writeOrPanic(w, "Given a sample.txt file of:\n")
|
||||
writeOrPanic(w, fmt.Sprintf("```\n%v\n```\n", s.input))
|
||||
|
||||
writeOrPanic(w, "then\n")
|
||||
expression := s.expression
|
||||
if expression == "" {
|
||||
expression = "."
|
||||
}
|
||||
writeOrPanic(w, fmt.Sprintf("```bash\nyq -p=base64 -oy '%v' sample.txt\n```\n", expression))
|
||||
writeOrPanic(w, "will output\n")
|
||||
|
||||
writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", mustProcessFormatScenario(s, NewBase64Decoder(), NewYamlEncoder(ConfiguredYamlPreferences))))
|
||||
}
|
||||
|
||||
func documentBase64EncodeScenario(w *bufio.Writer, s formatScenario) {
|
||||
writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))
|
||||
|
||||
if s.subdescription != "" {
|
||||
writeOrPanic(w, s.subdescription)
|
||||
writeOrPanic(w, "\n\n")
|
||||
}
|
||||
|
||||
writeOrPanic(w, "Given a sample.yml file of:\n")
|
||||
writeOrPanic(w, fmt.Sprintf("```yaml\n%v\n```\n", s.input))
|
||||
|
||||
writeOrPanic(w, "then\n")
|
||||
expression := s.expression
|
||||
if expression == "" {
|
||||
expression = "."
|
||||
}
|
||||
writeOrPanic(w, fmt.Sprintf("```bash\nyq -o=base64 '%v' sample.yml\n```\n", expression))
|
||||
writeOrPanic(w, "will output\n")
|
||||
|
||||
writeOrPanic(w, fmt.Sprintf("```\n%v```\n\n", mustProcessFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewBase64Encoder())))
|
||||
}
|
||||
|
||||
func TestBase64Scenarios(t *testing.T) {
|
||||
for _, tt := range base64Scenarios {
|
||||
testBase64Scenario(t, tt)
|
||||
}
|
||||
genericScenarios := make([]interface{}, len(base64Scenarios))
|
||||
for i, s := range base64Scenarios {
|
||||
genericScenarios[i] = s
|
||||
}
|
||||
documentScenarios(t, "usage", "base64", genericScenarios, documentBase64Scenario)
|
||||
}
|
||||
@ -97,6 +97,9 @@ type CandidateNode struct {
|
||||
// (e.g. top level cross document merge). This property does not propagate to child nodes.
|
||||
EvaluateTogether bool
|
||||
IsMapKey bool
|
||||
// For formats like HCL and TOML: indicates that child entries should be emitted as separate blocks/tables
|
||||
// rather than consolidated into nested mappings (default behaviour)
|
||||
EncodeSeparate bool
|
||||
}
|
||||
|
||||
func (n *CandidateNode) CreateChild() *CandidateNode {
|
||||
@ -407,6 +410,8 @@ func (n *CandidateNode) doCopy(cloneContent bool) *CandidateNode {
|
||||
|
||||
EvaluateTogether: n.EvaluateTogether,
|
||||
IsMapKey: n.IsMapKey,
|
||||
|
||||
EncodeSeparate: n.EncodeSeparate,
|
||||
}
|
||||
|
||||
if cloneContent {
|
||||
@ -460,6 +465,9 @@ func (n *CandidateNode) UpdateAttributesFrom(other *CandidateNode, prefs assignP
|
||||
n.Anchor = other.Anchor
|
||||
}
|
||||
|
||||
// Preserve EncodeSeparate flag for format-specific encoding hints
|
||||
n.EncodeSeparate = other.EncodeSeparate
|
||||
|
||||
// merge will pickup the style of the new thing
|
||||
// when autocreating nodes
|
||||
|
||||
|
||||
@ -152,8 +152,6 @@ func TestCandidateNodeAddKeyValueChild(t *testing.T) {
|
||||
key := CandidateNode{Value: "cool", IsMapKey: true}
|
||||
node := CandidateNode{}
|
||||
|
||||
// if we use a key in a new node as a value, it should no longer be marked as a key
|
||||
|
||||
_, keyIsValueNow := node.AddKeyValueChild(&CandidateNode{Value: "newKey"}, &key)
|
||||
|
||||
test.AssertResult(t, keyIsValueNow.IsMapKey, false)
|
||||
@ -204,3 +202,193 @@ func TestConvertToNodeInfo(t *testing.T) {
|
||||
test.AssertResult(t, 2, childInfo.Line)
|
||||
test.AssertResult(t, 3, childInfo.Column)
|
||||
}
|
||||
|
||||
func TestCandidateNodeGetPath(t *testing.T) {
|
||||
// Test root node with no parent
|
||||
root := CandidateNode{Value: "root"}
|
||||
path := root.GetPath()
|
||||
test.AssertResult(t, 0, len(path))
|
||||
|
||||
// Test node with key
|
||||
key := createStringScalarNode("myKey")
|
||||
node := CandidateNode{Key: key, Value: "myValue"}
|
||||
path = node.GetPath()
|
||||
test.AssertResult(t, 1, len(path))
|
||||
test.AssertResult(t, "myKey", path[0])
|
||||
|
||||
// Test nested path
|
||||
parent := CandidateNode{}
|
||||
parentKey := createStringScalarNode("parent")
|
||||
parent.Key = parentKey
|
||||
node.Parent = &parent
|
||||
path = node.GetPath()
|
||||
test.AssertResult(t, 2, len(path))
|
||||
test.AssertResult(t, "parent", path[0])
|
||||
test.AssertResult(t, "myKey", path[1])
|
||||
}
|
||||
|
||||
func TestCandidateNodeGetNicePath(t *testing.T) {
|
||||
// Test simple key
|
||||
key := createStringScalarNode("simple")
|
||||
node := CandidateNode{Key: key}
|
||||
nicePath := node.GetNicePath()
|
||||
test.AssertResult(t, "simple", nicePath)
|
||||
|
||||
// Test array index
|
||||
arrayKey := createScalarNode(0, "0")
|
||||
arrayNode := CandidateNode{Key: arrayKey}
|
||||
nicePath = arrayNode.GetNicePath()
|
||||
test.AssertResult(t, "[0]", nicePath)
|
||||
|
||||
dotKey := createStringScalarNode("key.with.dots")
|
||||
dotNode := CandidateNode{Key: dotKey}
|
||||
nicePath = dotNode.GetNicePath()
|
||||
test.AssertResult(t, "key.with.dots", nicePath)
|
||||
|
||||
// Test nested path
|
||||
parentKey := createStringScalarNode("parent")
|
||||
parent := CandidateNode{Key: parentKey}
|
||||
childKey := createStringScalarNode("child")
|
||||
child := CandidateNode{Key: childKey, Parent: &parent}
|
||||
nicePath = child.GetNicePath()
|
||||
test.AssertResult(t, "parent.child", nicePath)
|
||||
}
|
||||
|
||||
func TestCandidateNodeFilterMapContentByKey(t *testing.T) {
|
||||
// Create a map with multiple key-value pairs
|
||||
key1 := createStringScalarNode("key1")
|
||||
value1 := createStringScalarNode("value1")
|
||||
key2 := createStringScalarNode("key2")
|
||||
value2 := createStringScalarNode("value2")
|
||||
key3 := createStringScalarNode("key3")
|
||||
value3 := createStringScalarNode("value3")
|
||||
|
||||
mapNode := &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Content: []*CandidateNode{key1, value1, key2, value2, key3, value3},
|
||||
}
|
||||
|
||||
// Filter by key predicate that matches key1 and key3
|
||||
filtered := mapNode.FilterMapContentByKey(func(key *CandidateNode) bool {
|
||||
return key.Value == "key1" || key.Value == "key3"
|
||||
})
|
||||
|
||||
// Should return key1, value1, key3, value3
|
||||
test.AssertResult(t, 4, len(filtered))
|
||||
test.AssertResult(t, "key1", filtered[0].Value)
|
||||
test.AssertResult(t, "value1", filtered[1].Value)
|
||||
test.AssertResult(t, "key3", filtered[2].Value)
|
||||
test.AssertResult(t, "value3", filtered[3].Value)
|
||||
}
|
||||
|
||||
func TestCandidateNodeVisitValues(t *testing.T) {
|
||||
// Test mapping node
|
||||
key1 := createStringScalarNode("key1")
|
||||
value1 := createStringScalarNode("value1")
|
||||
key2 := createStringScalarNode("key2")
|
||||
value2 := createStringScalarNode("value2")
|
||||
|
||||
mapNode := &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Content: []*CandidateNode{key1, value1, key2, value2},
|
||||
}
|
||||
|
||||
var visited []string
|
||||
err := mapNode.VisitValues(func(node *CandidateNode) error {
|
||||
visited = append(visited, node.Value)
|
||||
return nil
|
||||
})
|
||||
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, 2, len(visited))
|
||||
test.AssertResult(t, "value1", visited[0])
|
||||
test.AssertResult(t, "value2", visited[1])
|
||||
|
||||
// Test sequence node
|
||||
item1 := createStringScalarNode("item1")
|
||||
item2 := createStringScalarNode("item2")
|
||||
|
||||
seqNode := &CandidateNode{
|
||||
Kind: SequenceNode,
|
||||
Content: []*CandidateNode{item1, item2},
|
||||
}
|
||||
|
||||
visited = []string{}
|
||||
err = seqNode.VisitValues(func(node *CandidateNode) error {
|
||||
visited = append(visited, node.Value)
|
||||
return nil
|
||||
})
|
||||
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, 2, len(visited))
|
||||
test.AssertResult(t, "item1", visited[0])
|
||||
test.AssertResult(t, "item2", visited[1])
|
||||
|
||||
// Test scalar node (should not visit anything)
|
||||
scalarNode := &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Value: "scalar",
|
||||
}
|
||||
|
||||
visited = []string{}
|
||||
err = scalarNode.VisitValues(func(node *CandidateNode) error {
|
||||
visited = append(visited, node.Value)
|
||||
return nil
|
||||
})
|
||||
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, 0, len(visited))
|
||||
}
|
||||
|
||||
func TestCandidateNodeCanVisitValues(t *testing.T) {
|
||||
mapNode := &CandidateNode{Kind: MappingNode}
|
||||
seqNode := &CandidateNode{Kind: SequenceNode}
|
||||
scalarNode := &CandidateNode{Kind: ScalarNode}
|
||||
|
||||
test.AssertResult(t, true, mapNode.CanVisitValues())
|
||||
test.AssertResult(t, true, seqNode.CanVisitValues())
|
||||
test.AssertResult(t, false, scalarNode.CanVisitValues())
|
||||
}
|
||||
|
||||
func TestCandidateNodeAddChild(t *testing.T) {
|
||||
parent := &CandidateNode{Kind: SequenceNode}
|
||||
child := createStringScalarNode("child")
|
||||
|
||||
parent.AddChild(child)
|
||||
|
||||
test.AssertResult(t, 1, len(parent.Content))
|
||||
test.AssertResult(t, false, parent.Content[0].IsMapKey)
|
||||
test.AssertResult(t, "0", parent.Content[0].Key.Value)
|
||||
// Check that parent is set correctly
|
||||
if parent.Content[0].Parent != parent {
|
||||
t.Errorf("Expected parent to be set correctly")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCandidateNodeAddChildren(t *testing.T) {
|
||||
// Test sequence node
|
||||
parent := &CandidateNode{Kind: SequenceNode}
|
||||
child1 := createStringScalarNode("child1")
|
||||
child2 := createStringScalarNode("child2")
|
||||
|
||||
parent.AddChildren([]*CandidateNode{child1, child2})
|
||||
|
||||
test.AssertResult(t, 2, len(parent.Content))
|
||||
test.AssertResult(t, "child1", parent.Content[0].Value)
|
||||
test.AssertResult(t, "child2", parent.Content[1].Value)
|
||||
|
||||
// Test mapping node
|
||||
mapParent := &CandidateNode{Kind: MappingNode}
|
||||
key1 := createStringScalarNode("key1")
|
||||
value1 := createStringScalarNode("value1")
|
||||
key2 := createStringScalarNode("key2")
|
||||
value2 := createStringScalarNode("value2")
|
||||
|
||||
mapParent.AddChildren([]*CandidateNode{key1, value1, key2, value2})
|
||||
|
||||
test.AssertResult(t, 4, len(mapParent.Content))
|
||||
test.AssertResult(t, true, mapParent.Content[0].IsMapKey) // key1
|
||||
test.AssertResult(t, false, mapParent.Content[1].IsMapKey) // value1
|
||||
test.AssertResult(t, true, mapParent.Content[2].IsMapKey) // key2
|
||||
test.AssertResult(t, false, mapParent.Content[3].IsMapKey) // value2
|
||||
}
|
||||
|
||||
@ -3,7 +3,7 @@ package yqlib
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
yaml "go.yaml.in/yaml/v3"
|
||||
yaml "go.yaml.in/yaml/v4"
|
||||
)
|
||||
|
||||
func MapYamlStyle(original yaml.Style) Style {
|
||||
|
||||
139
pkg/yqlib/chown_linux_test.go
Normal file
139
pkg/yqlib/chown_linux_test.go
Normal file
@ -0,0 +1,139 @@
|
||||
//go:build linux
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestChangeOwner(t *testing.T) {
|
||||
// Create a temporary file for testing
|
||||
tempDir := t.TempDir()
|
||||
testFile := filepath.Join(tempDir, "testfile.txt")
|
||||
|
||||
// Create a test file
|
||||
err := os.WriteFile(testFile, []byte("test content"), 0600)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test file: %v", err)
|
||||
}
|
||||
|
||||
// Get file info
|
||||
info, err := os.Stat(testFile)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to stat test file: %v", err)
|
||||
}
|
||||
|
||||
// Create another temporary file to change ownership of
|
||||
tempFile, err := os.CreateTemp(tempDir, "chown_test_*.txt")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp file: %v", err)
|
||||
}
|
||||
defer os.Remove(tempFile.Name())
|
||||
tempFile.Close()
|
||||
|
||||
// Test changeOwner function
|
||||
err = changeOwner(info, tempFile)
|
||||
if err != nil {
|
||||
t.Errorf("changeOwner failed: %v", err)
|
||||
}
|
||||
|
||||
// Verify that the function doesn't panic with valid input
|
||||
tempFile2, err := os.CreateTemp(tempDir, "chown_test2_*.txt")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create second temp file: %v", err)
|
||||
}
|
||||
defer os.Remove(tempFile2.Name())
|
||||
tempFile2.Close()
|
||||
|
||||
// Test with the second file
|
||||
err = changeOwner(info, tempFile2)
|
||||
if err != nil {
|
||||
t.Errorf("changeOwner failed on second file: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestChangeOwnerWithInvalidFileInfo(t *testing.T) {
|
||||
// Create a mock file info that doesn't have syscall.Stat_t
|
||||
mockInfo := &mockFileInfo{
|
||||
name: "mock",
|
||||
size: 0,
|
||||
mode: 0600,
|
||||
}
|
||||
|
||||
// Create a temporary file
|
||||
tempFile, err := os.CreateTemp(t.TempDir(), "chown_test_*.txt")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp file: %v", err)
|
||||
}
|
||||
defer os.Remove(tempFile.Name())
|
||||
tempFile.Close()
|
||||
|
||||
// Test changeOwner with mock file info (should not panic)
|
||||
err = changeOwner(mockInfo, tempFile)
|
||||
if err != nil {
|
||||
t.Errorf("changeOwner failed with mock file info: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestChangeOwnerWithNonExistentFile(t *testing.T) {
|
||||
// Create a temporary file
|
||||
tempFile, err := os.CreateTemp(t.TempDir(), "chown_test_*.txt")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp file: %v", err)
|
||||
}
|
||||
defer os.Remove(tempFile.Name())
|
||||
tempFile.Close()
|
||||
|
||||
// Get file info
|
||||
info, err := os.Stat(tempFile.Name())
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to stat temp file: %v", err)
|
||||
}
|
||||
|
||||
// Remove the file
|
||||
os.Remove(tempFile.Name())
|
||||
|
||||
err = changeOwner(info, tempFile)
|
||||
// The function should not panic even if the file doesn't exist
|
||||
if err != nil {
|
||||
t.Logf("Expected error when changing owner of non-existent file: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// mockFileInfo implements fs.FileInfo but doesn't have syscall.Stat_t
|
||||
type mockFileInfo struct {
|
||||
name string
|
||||
size int64
|
||||
mode os.FileMode
|
||||
}
|
||||
|
||||
func (m *mockFileInfo) Name() string { return m.name }
|
||||
func (m *mockFileInfo) Size() int64 { return m.size }
|
||||
func (m *mockFileInfo) Mode() os.FileMode { return m.mode }
|
||||
func (m *mockFileInfo) ModTime() time.Time { return time.Time{} }
|
||||
func (m *mockFileInfo) IsDir() bool { return false }
|
||||
func (m *mockFileInfo) Sys() interface{} { return nil } // This will cause the type assertion to fail
|
||||
|
||||
func TestChangeOwnerWithSyscallStatT(t *testing.T) {
|
||||
// Create a temporary file
|
||||
tempFile, err := os.CreateTemp(t.TempDir(), "chown_test_*.txt")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp file: %v", err)
|
||||
}
|
||||
defer os.Remove(tempFile.Name())
|
||||
tempFile.Close()
|
||||
|
||||
// Get file info
|
||||
info, err := os.Stat(tempFile.Name())
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to stat temp file: %v", err)
|
||||
}
|
||||
|
||||
err = changeOwner(info, tempFile)
|
||||
if err != nil {
|
||||
t.Logf("changeOwner returned error (this might be expected in some environments): %v", err)
|
||||
}
|
||||
}
|
||||
153
pkg/yqlib/color_print_test.go
Normal file
153
pkg/yqlib/color_print_test.go
Normal file
@ -0,0 +1,153 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/fatih/color"
|
||||
)
|
||||
|
||||
func TestFormat(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
attr color.Attribute
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "reset color",
|
||||
attr: color.Reset,
|
||||
expected: "\x1b[0m",
|
||||
},
|
||||
{
|
||||
name: "red color",
|
||||
attr: color.FgRed,
|
||||
expected: "\x1b[31m",
|
||||
},
|
||||
{
|
||||
name: "green color",
|
||||
attr: color.FgGreen,
|
||||
expected: "\x1b[32m",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := format(tt.attr)
|
||||
if result != tt.expected {
|
||||
t.Errorf("format(%d) = %q, want %q", tt.attr, result, tt.expected)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestColorizeAndPrint(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
yamlBytes []byte
|
||||
expectErr bool
|
||||
}{
|
||||
{
|
||||
name: "simple yaml",
|
||||
yamlBytes: []byte("name: test\nage: 25\n"),
|
||||
expectErr: false,
|
||||
},
|
||||
{
|
||||
name: "yaml with strings",
|
||||
yamlBytes: []byte("name: \"hello world\"\nactive: true\ncount: 42\n"),
|
||||
expectErr: false,
|
||||
},
|
||||
{
|
||||
name: "yaml with anchors and aliases",
|
||||
yamlBytes: []byte("default: &default\n name: test\nuser: *default\n"),
|
||||
expectErr: false,
|
||||
},
|
||||
{
|
||||
name: "yaml with comments",
|
||||
yamlBytes: []byte("# This is a comment\nname: test\n"),
|
||||
expectErr: false,
|
||||
},
|
||||
{
|
||||
name: "empty yaml",
|
||||
yamlBytes: []byte(""),
|
||||
expectErr: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := colorizeAndPrint(tt.yamlBytes, &buf)
|
||||
|
||||
if tt.expectErr && err == nil {
|
||||
t.Error("Expected error but got none")
|
||||
}
|
||||
if !tt.expectErr && err != nil {
|
||||
t.Errorf("Unexpected error: %v", err)
|
||||
}
|
||||
|
||||
// Check that output contains escape sequences (color codes)
|
||||
if !tt.expectErr && len(tt.yamlBytes) > 0 {
|
||||
output := buf.String()
|
||||
if !strings.Contains(output, "\x1b[") {
|
||||
t.Error("Expected output to contain color escape sequences")
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestColorizeAndPrintWithDifferentYamlTypes(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
yaml string
|
||||
expectErr bool
|
||||
}{
|
||||
{
|
||||
name: "boolean values",
|
||||
yaml: "active: true\ninactive: false\n",
|
||||
},
|
||||
{
|
||||
name: "numeric values",
|
||||
yaml: "integer: 42\nfloat: 3.14\nnegative: -10\n",
|
||||
},
|
||||
{
|
||||
name: "map keys",
|
||||
yaml: "user:\n name: john\n age: 30\n",
|
||||
},
|
||||
{
|
||||
name: "string values",
|
||||
yaml: "message: \"hello world\"\ndescription: 'single quotes'\n",
|
||||
},
|
||||
{
|
||||
name: "mixed types",
|
||||
yaml: "config:\n debug: true\n port: 8080\n host: \"localhost\"\n",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := colorizeAndPrint([]byte(tc.yaml), &buf)
|
||||
|
||||
if tc.expectErr && err == nil {
|
||||
t.Error("Expected error but got none")
|
||||
}
|
||||
if !tc.expectErr && err != nil {
|
||||
t.Errorf("Unexpected error: %v", err)
|
||||
}
|
||||
|
||||
// Verify output contains color codes
|
||||
if !tc.expectErr {
|
||||
output := buf.String()
|
||||
if !strings.Contains(output, "\x1b[") {
|
||||
t.Error("Expected output to contain color escape sequences")
|
||||
}
|
||||
// Should end with newline
|
||||
if !strings.HasSuffix(output, "\n") {
|
||||
t.Error("Expected output to end with newline")
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -312,7 +312,6 @@ func TestDeeplyAssign_ErrorHandling(t *testing.T) {
|
||||
Value: "value",
|
||||
}
|
||||
|
||||
// Try to assign to a path on a scalar (should fail)
|
||||
path := []interface{}{"key"}
|
||||
err := navigator.DeeplyAssign(context, path, assignNode)
|
||||
|
||||
@ -321,7 +320,6 @@ func TestDeeplyAssign_ErrorHandling(t *testing.T) {
|
||||
t.Logf("Actual error: %v", err)
|
||||
}
|
||||
|
||||
// This should fail because we can't assign to a scalar
|
||||
test.AssertResult(t, nil, err)
|
||||
}
|
||||
|
||||
|
||||
@ -9,28 +9,6 @@ import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
// base64Padder wraps a reader of raw base64 text and, when EOF arrives on a
// length that is not a multiple of 4, synthesizes trailing '=' characters so
// downstream base64 decoding sees correctly padded input.
type base64Padder struct {
	count int // total characters handed out so far (data plus padding)
	io.Reader
}

// pad writes the '=' characters needed to round count up to a multiple of 4
// into buf and advances count. If buf is too small for all of the padding,
// the next call recomputes the remainder from count, so padding resumes
// correctly.
func (c *base64Padder) pad(buf []byte) (int, error) {
	pad := strings.Repeat("=", (4 - c.count%4))
	n, err := strings.NewReader(pad).Read(buf)
	c.count += n
	return n, err
}

// Read proxies the underlying reader, tracking how many characters have been
// produced; on EOF with a non-multiple-of-4 count it emits padding instead of
// the EOF (the EOF is delivered on a subsequent call once count%4 == 0).
func (c *base64Padder) Read(buf []byte) (int, error) {
	n, err := c.Reader.Read(buf)
	c.count += n

	if err == io.EOF && c.count%4 != 0 {
		return c.pad(buf)
	}
	return n, err
}
|
||||
|
||||
type base64Decoder struct {
|
||||
reader io.Reader
|
||||
finished bool
|
||||
@ -43,7 +21,25 @@ func NewBase64Decoder() Decoder {
|
||||
}
|
||||
|
||||
func (dec *base64Decoder) Init(reader io.Reader) error {
|
||||
dec.reader = &base64Padder{Reader: reader}
|
||||
// Read all data from the reader and strip leading/trailing whitespace
|
||||
// This is necessary because base64 decoding needs to see the complete input
|
||||
// to handle padding correctly, and we need to strip whitespace before decoding.
|
||||
buf := new(bytes.Buffer)
|
||||
if _, err := buf.ReadFrom(reader); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Strip leading and trailing whitespace
|
||||
stripped := strings.TrimSpace(buf.String())
|
||||
|
||||
// Add padding if needed (base64 strings should be a multiple of 4 characters)
|
||||
padLen := len(stripped) % 4
|
||||
if padLen > 0 {
|
||||
stripped += strings.Repeat("=", 4-padLen)
|
||||
}
|
||||
|
||||
// Create a new reader from the stripped and padded data
|
||||
dec.reader = strings.NewReader(stripped)
|
||||
dec.readAnything = false
|
||||
dec.finished = false
|
||||
return nil
|
||||
|
||||
471
pkg/yqlib/decoder_hcl.go
Normal file
471
pkg/yqlib/decoder_hcl.go
Normal file
@ -0,0 +1,471 @@
|
||||
//go:build !yq_nohcl
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"math/big"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
"github.com/hashicorp/hcl/v2/hclsyntax"
|
||||
"github.com/zclconf/go-cty/cty"
|
||||
)
|
||||
|
||||
// hclDecoder decodes a single HCL document into a CandidateNode tree.
type hclDecoder struct {
	file          *hcl.File // parsed HCL file, set by Init
	fileBytes     []byte    // raw source bytes; used to extract comments and raw expression text
	readAnything  bool      // true once Decode has returned the document; later calls yield io.EOF
	documentIndex uint      // document counter used to stamp the decoded root node
}
|
||||
|
||||
// NewHclDecoder creates a Decoder that parses HCL input into CandidateNodes.
func NewHclDecoder() Decoder {
	return &hclDecoder{}
}
|
||||
|
||||
// sortedAttributes returns attributes in declaration order by source position
|
||||
func sortedAttributes(attrs hclsyntax.Attributes) []*attributeWithName {
|
||||
var sorted []*attributeWithName
|
||||
for name, attr := range attrs {
|
||||
sorted = append(sorted, &attributeWithName{Name: name, Attr: attr})
|
||||
}
|
||||
sort.Slice(sorted, func(i, j int) bool {
|
||||
return sorted[i].Attr.Range().Start.Byte < sorted[j].Attr.Range().Start.Byte
|
||||
})
|
||||
return sorted
|
||||
}
|
||||
|
||||
// attributeWithName pairs an HCL attribute with its name so attributes can be
// carried around (and sorted) outside their original map.
type attributeWithName struct {
	Name string                // attribute name (the map key in hclsyntax.Attributes)
	Attr *hclsyntax.Attribute  // the attribute definition itself
}
|
||||
|
||||
// extractLineComment returns the trailing "#" comment found on the same line
// as the token ending at endPos, or "" when the line carries no comment.
func extractLineComment(src []byte, endPos int) string {
	// Scan forward on the current line only; stop at the newline.
	for i := endPos; i < len(src) && src[i] != '\n'; i++ {
		if src[i] != '#' {
			continue
		}
		// Found the comment marker: take everything up to end of line.
		end := i
		for end < len(src) && src[end] != '\n' {
			end++
		}
		return strings.TrimSpace(string(src[i:end]))
	}
	return ""
}
|
||||
|
||||
// extractHeadComment extracts comments before a given start position
//
// It scans backwards from startPos, collecting consecutive full-line "#"
// comments directly above the token, and returns them joined with newlines
// ("" when there are none).
func extractHeadComment(src []byte, startPos int) string {
	var comments []string

	// Start just before the token and skip trailing whitespace
	i := startPos - 1
	for i >= 0 && (src[i] == ' ' || src[i] == '\t' || src[i] == '\n' || src[i] == '\r') {
		i--
	}

	for i >= 0 {
		// Find line boundaries
		lineEnd := i
		for i >= 0 && src[i] != '\n' {
			i--
		}
		lineStart := i + 1

		line := strings.TrimRight(string(src[lineStart:lineEnd+1]), " \t\r")
		trimmed := strings.TrimSpace(line)

		if trimmed == "" {
			break
		}

		// A non-comment line ends the comment group.
		if !strings.HasPrefix(trimmed, "#") {
			break
		}

		// Prepend so the earliest comment line ends up first in the result.
		comments = append([]string{trimmed}, comments...)

		// Move to previous line (skip any whitespace/newlines)
		// NOTE(review): newlines are skipped here too, so comment groups
		// separated by blank lines get merged into one — confirm intended.
		i = lineStart - 1
		for i >= 0 && (src[i] == ' ' || src[i] == '\t' || src[i] == '\n' || src[i] == '\r') {
			i--
		}
	}

	if len(comments) > 0 {
		return strings.Join(comments, "\n")
	}
	return ""
}
|
||||
|
||||
func (dec *hclDecoder) Init(reader io.Reader) error {
|
||||
data, err := io.ReadAll(reader)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
file, diags := hclsyntax.ParseConfig(data, "input.hcl", hcl.Pos{Line: 1, Column: 1})
|
||||
if diags != nil && diags.HasErrors() {
|
||||
return fmt.Errorf("hcl parse error: %w", diags)
|
||||
}
|
||||
dec.file = file
|
||||
dec.fileBytes = data
|
||||
dec.readAnything = false
|
||||
dec.documentIndex = 0
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode returns the parsed HCL document as a single MappingNode tree.
// Exactly one document is produced: the first call returns the tree (or an
// error when Init has not parsed a file); every later call returns io.EOF.
func (dec *hclDecoder) Decode() (*CandidateNode, error) {
	if dec.readAnything {
		return nil, io.EOF
	}
	dec.readAnything = true

	if dec.file == nil {
		return nil, fmt.Errorf("no hcl file parsed")
	}

	root := &CandidateNode{Kind: MappingNode}

	// process attributes in declaration order
	body := dec.file.Body.(*hclsyntax.Body)
	firstAttr := true
	for _, attrWithName := range sortedAttributes(body.Attributes) {
		keyNode := createStringScalarNode(attrWithName.Name)
		valNode := convertHclExprToNode(attrWithName.Attr.Expr, dec.fileBytes)

		// Attach comments if any
		attrRange := attrWithName.Attr.Range()
		headComment := extractHeadComment(dec.fileBytes, attrRange.Start.Byte)
		if firstAttr && headComment != "" {
			// For the first attribute, apply its head comment to the root
			// NOTE(review): firstAttr is only cleared when a head comment is
			// present, so if the first attribute has no comment, a later
			// attribute's head comment is promoted to the root — confirm
			// this is the intended behaviour.
			root.HeadComment = headComment
			firstAttr = false
		} else if headComment != "" {
			keyNode.HeadComment = headComment
		}
		if lineComment := extractLineComment(dec.fileBytes, attrRange.End.Byte); lineComment != "" {
			valNode.LineComment = lineComment
		}

		root.AddKeyValueChild(keyNode, valNode)
	}

	// process blocks
	// Count blocks by type at THIS level to detect multiple separate blocks
	blocksByType := make(map[string]int)
	for _, block := range body.Blocks {
		blocksByType[block.Type]++
	}

	for _, block := range body.Blocks {
		addBlockToMapping(root, block, dec.fileBytes, blocksByType[block.Type] > 1)
	}

	// Stamp the document index on the root (zero-based).
	dec.documentIndex++
	root.document = dec.documentIndex - 1
	return root, nil
}
|
||||
|
||||
func hclBodyToNode(body *hclsyntax.Body, src []byte) *CandidateNode {
|
||||
node := &CandidateNode{Kind: MappingNode}
|
||||
for _, attrWithName := range sortedAttributes(body.Attributes) {
|
||||
key := createStringScalarNode(attrWithName.Name)
|
||||
val := convertHclExprToNode(attrWithName.Attr.Expr, src)
|
||||
|
||||
// Attach comments if any
|
||||
attrRange := attrWithName.Attr.Range()
|
||||
if headComment := extractHeadComment(src, attrRange.Start.Byte); headComment != "" {
|
||||
key.HeadComment = headComment
|
||||
}
|
||||
if lineComment := extractLineComment(src, attrRange.End.Byte); lineComment != "" {
|
||||
val.LineComment = lineComment
|
||||
}
|
||||
|
||||
node.AddKeyValueChild(key, val)
|
||||
}
|
||||
|
||||
// Process nested blocks, counting blocks by type at THIS level
|
||||
// to detect which block types appear multiple times
|
||||
blocksByType := make(map[string]int)
|
||||
for _, block := range body.Blocks {
|
||||
blocksByType[block.Type]++
|
||||
}
|
||||
|
||||
for _, block := range body.Blocks {
|
||||
addBlockToMapping(node, block, src, blocksByType[block.Type] > 1)
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
// addBlockToMapping nests block type and labels into the parent mapping, merging children.
|
||||
// isMultipleBlocksOfType indicates if there are multiple blocks of this type at THIS level
|
||||
func addBlockToMapping(parent *CandidateNode, block *hclsyntax.Block, src []byte, isMultipleBlocksOfType bool) {
|
||||
bodyNode := hclBodyToNode(block.Body, src)
|
||||
current := parent
|
||||
|
||||
// ensure block type mapping exists
|
||||
var typeNode *CandidateNode
|
||||
for i := 0; i < len(current.Content); i += 2 {
|
||||
if current.Content[i].Value == block.Type {
|
||||
typeNode = current.Content[i+1]
|
||||
break
|
||||
}
|
||||
}
|
||||
if typeNode == nil {
|
||||
_, typeNode = current.AddKeyValueChild(createStringScalarNode(block.Type), &CandidateNode{Kind: MappingNode})
|
||||
// Mark the type node if there are multiple blocks of this type at this level
|
||||
// This tells the encoder to emit them as separate blocks rather than consolidating them
|
||||
if isMultipleBlocksOfType {
|
||||
typeNode.EncodeSeparate = true
|
||||
}
|
||||
}
|
||||
current = typeNode
|
||||
|
||||
// walk labels, creating/merging mappings
|
||||
for _, label := range block.Labels {
|
||||
var next *CandidateNode
|
||||
for i := 0; i < len(current.Content); i += 2 {
|
||||
if current.Content[i].Value == label {
|
||||
next = current.Content[i+1]
|
||||
break
|
||||
}
|
||||
}
|
||||
if next == nil {
|
||||
_, next = current.AddKeyValueChild(createStringScalarNode(label), &CandidateNode{Kind: MappingNode})
|
||||
}
|
||||
current = next
|
||||
}
|
||||
|
||||
// merge body attributes/blocks into the final mapping
|
||||
for i := 0; i < len(bodyNode.Content); i += 2 {
|
||||
current.AddKeyValueChild(bodyNode.Content[i], bodyNode.Content[i+1])
|
||||
}
|
||||
}
|
||||
|
||||
func convertHclExprToNode(expr hclsyntax.Expression, src []byte) *CandidateNode {
|
||||
// handle literal values directly
|
||||
switch e := expr.(type) {
|
||||
case *hclsyntax.LiteralValueExpr:
|
||||
v := e.Val
|
||||
if v.IsNull() {
|
||||
return createScalarNode(nil, "")
|
||||
}
|
||||
switch {
|
||||
case v.Type().Equals(cty.String):
|
||||
// prefer to extract exact source (to avoid extra quoting) when available
|
||||
// Prefer the actual cty string value
|
||||
s := v.AsString()
|
||||
node := createScalarNode(s, s)
|
||||
// Don't set style for regular quoted strings - let YAML handle naturally
|
||||
return node
|
||||
case v.Type().Equals(cty.Bool):
|
||||
b := v.True()
|
||||
return createScalarNode(b, strconv.FormatBool(b))
|
||||
case v.Type() == cty.Number:
|
||||
// prefer integers when the numeric value is integral
|
||||
bf := v.AsBigFloat()
|
||||
if bf == nil {
|
||||
// fallback to string
|
||||
return createStringScalarNode(v.GoString())
|
||||
}
|
||||
// check if bf represents an exact integer
|
||||
if intVal, acc := bf.Int(nil); acc == big.Exact {
|
||||
s := intVal.String()
|
||||
return createScalarNode(intVal.Int64(), s)
|
||||
}
|
||||
s := bf.Text('g', -1)
|
||||
return createScalarNode(0.0, s)
|
||||
case v.Type().IsTupleType() || v.Type().IsListType() || v.Type().IsSetType():
|
||||
seq := &CandidateNode{Kind: SequenceNode}
|
||||
it := v.ElementIterator()
|
||||
for it.Next() {
|
||||
_, val := it.Element()
|
||||
// convert cty.Value to a node by wrapping in literal expr via string representation
|
||||
child := convertCtyValueToNode(val)
|
||||
seq.AddChild(child)
|
||||
}
|
||||
return seq
|
||||
case v.Type().IsMapType() || v.Type().IsObjectType():
|
||||
m := &CandidateNode{Kind: MappingNode}
|
||||
it := v.ElementIterator()
|
||||
for it.Next() {
|
||||
key, val := it.Element()
|
||||
keyStr := key.AsString()
|
||||
keyNode := createStringScalarNode(keyStr)
|
||||
valNode := convertCtyValueToNode(val)
|
||||
m.AddKeyValueChild(keyNode, valNode)
|
||||
}
|
||||
return m
|
||||
default:
|
||||
// fallback to string
|
||||
s := v.GoString()
|
||||
return createStringScalarNode(s)
|
||||
}
|
||||
case *hclsyntax.TupleConsExpr:
|
||||
// parse tuple/list into YAML sequence
|
||||
seq := &CandidateNode{Kind: SequenceNode}
|
||||
for _, exprVal := range e.Exprs {
|
||||
child := convertHclExprToNode(exprVal, src)
|
||||
seq.AddChild(child)
|
||||
}
|
||||
return seq
|
||||
case *hclsyntax.ObjectConsExpr:
|
||||
// parse object into YAML mapping
|
||||
m := &CandidateNode{Kind: MappingNode}
|
||||
m.Style = FlowStyle // Mark as inline object (flow style) for encoder
|
||||
for _, item := range e.Items {
|
||||
// evaluate key expression to get the key string
|
||||
keyVal, keyDiags := item.KeyExpr.Value(nil)
|
||||
if keyDiags != nil && keyDiags.HasErrors() {
|
||||
// fallback: try to extract key from source
|
||||
r := item.KeyExpr.Range()
|
||||
start := r.Start.Byte
|
||||
end := r.End.Byte
|
||||
if start >= 0 && end >= start && end <= len(src) {
|
||||
keyNode := createStringScalarNode(strings.TrimSpace(string(src[start:end])))
|
||||
valNode := convertHclExprToNode(item.ValueExpr, src)
|
||||
m.AddKeyValueChild(keyNode, valNode)
|
||||
}
|
||||
continue
|
||||
}
|
||||
keyStr := keyVal.AsString()
|
||||
keyNode := createStringScalarNode(keyStr)
|
||||
valNode := convertHclExprToNode(item.ValueExpr, src)
|
||||
m.AddKeyValueChild(keyNode, valNode)
|
||||
}
|
||||
return m
|
||||
case *hclsyntax.TemplateExpr:
|
||||
// Reconstruct template string, preserving ${} syntax for interpolations
|
||||
var parts []string
|
||||
for _, p := range e.Parts {
|
||||
switch lp := p.(type) {
|
||||
case *hclsyntax.LiteralValueExpr:
|
||||
if lp.Val.Type().Equals(cty.String) {
|
||||
parts = append(parts, lp.Val.AsString())
|
||||
} else {
|
||||
parts = append(parts, lp.Val.GoString())
|
||||
}
|
||||
default:
|
||||
// Non-literal expression - reconstruct with ${} wrapper
|
||||
r := p.Range()
|
||||
start := r.Start.Byte
|
||||
end := r.End.Byte
|
||||
if start >= 0 && end >= start && end <= len(src) {
|
||||
exprText := string(src[start:end])
|
||||
parts = append(parts, "${"+exprText+"}")
|
||||
} else {
|
||||
parts = append(parts, fmt.Sprintf("${%v}", p))
|
||||
}
|
||||
}
|
||||
}
|
||||
combined := strings.Join(parts, "")
|
||||
node := createScalarNode(combined, combined)
|
||||
// Set DoubleQuotedStyle for all templates (which includes all quoted strings in HCL)
|
||||
// This ensures HCL roundtrips preserve quotes, and YAML properly quotes strings with ${}
|
||||
node.Style = DoubleQuotedStyle
|
||||
return node
|
||||
case *hclsyntax.ScopeTraversalExpr:
|
||||
// Simple identifier/traversal (e.g. unquoted string literal in HCL)
|
||||
r := e.Range()
|
||||
start := r.Start.Byte
|
||||
end := r.End.Byte
|
||||
if start >= 0 && end >= start && end <= len(src) {
|
||||
text := strings.TrimSpace(string(src[start:end]))
|
||||
return createStringScalarNode(text)
|
||||
}
|
||||
// Fallback to root name if source unavailable
|
||||
if len(e.Traversal) > 0 {
|
||||
if root, ok := e.Traversal[0].(hcl.TraverseRoot); ok {
|
||||
return createStringScalarNode(root.Name)
|
||||
}
|
||||
}
|
||||
return createStringScalarNode("")
|
||||
case *hclsyntax.FunctionCallExpr:
|
||||
// Preserve function calls as raw expressions for roundtrip
|
||||
r := e.Range()
|
||||
start := r.Start.Byte
|
||||
end := r.End.Byte
|
||||
if start >= 0 && end >= start && end <= len(src) {
|
||||
text := strings.TrimSpace(string(src[start:end]))
|
||||
node := createStringScalarNode(text)
|
||||
node.Style = 0
|
||||
return node
|
||||
}
|
||||
node := createStringScalarNode(e.Name)
|
||||
node.Style = 0
|
||||
return node
|
||||
default:
|
||||
// try to evaluate the expression (handles unary, binary ops, etc.)
|
||||
val, diags := expr.Value(nil)
|
||||
if diags == nil || !diags.HasErrors() {
|
||||
// successfully evaluated, convert cty.Value to node
|
||||
return convertCtyValueToNode(val)
|
||||
}
|
||||
// fallback: extract source text for the expression
|
||||
r := expr.Range()
|
||||
start := r.Start.Byte
|
||||
end := r.End.Byte
|
||||
if start >= 0 && end >= start && end <= len(src) {
|
||||
text := string(src[start:end])
|
||||
// Mark as unquoted expression so encoder emits without quoting
|
||||
node := createStringScalarNode(text)
|
||||
node.Style = 0
|
||||
return node
|
||||
}
|
||||
return createStringScalarNode(fmt.Sprintf("%v", expr))
|
||||
}
|
||||
}
|
||||
|
||||
func convertCtyValueToNode(v cty.Value) *CandidateNode {
|
||||
if v.IsNull() {
|
||||
return createScalarNode(nil, "")
|
||||
}
|
||||
switch {
|
||||
case v.Type().Equals(cty.String):
|
||||
return createScalarNode("", v.AsString())
|
||||
case v.Type().Equals(cty.Bool):
|
||||
b := v.True()
|
||||
return createScalarNode(b, strconv.FormatBool(b))
|
||||
case v.Type() == cty.Number:
|
||||
bf := v.AsBigFloat()
|
||||
if bf == nil {
|
||||
return createStringScalarNode(v.GoString())
|
||||
}
|
||||
if intVal, acc := bf.Int(nil); acc == big.Exact {
|
||||
s := intVal.String()
|
||||
return createScalarNode(intVal.Int64(), s)
|
||||
}
|
||||
s := bf.Text('g', -1)
|
||||
return createScalarNode(0.0, s)
|
||||
case v.Type().IsTupleType() || v.Type().IsListType() || v.Type().IsSetType():
|
||||
seq := &CandidateNode{Kind: SequenceNode}
|
||||
it := v.ElementIterator()
|
||||
for it.Next() {
|
||||
_, val := it.Element()
|
||||
seq.AddChild(convertCtyValueToNode(val))
|
||||
}
|
||||
return seq
|
||||
case v.Type().IsMapType() || v.Type().IsObjectType():
|
||||
m := &CandidateNode{Kind: MappingNode}
|
||||
it := v.ElementIterator()
|
||||
for it.Next() {
|
||||
key, val := it.Element()
|
||||
keyNode := createStringScalarNode(key.AsString())
|
||||
valNode := convertCtyValueToNode(val)
|
||||
m.AddKeyValueChild(keyNode, valNode)
|
||||
}
|
||||
return m
|
||||
default:
|
||||
return createStringScalarNode(v.GoString())
|
||||
}
|
||||
}
|
||||
@ -16,7 +16,7 @@ type iniDecoder struct {
|
||||
|
||||
func NewINIDecoder() Decoder {
|
||||
return &iniDecoder{
|
||||
finished: false, // Initialize the flag as false
|
||||
finished: false, // Initialise the flag as false
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -8,16 +8,19 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
toml "github.com/pelletier/go-toml/v2/unstable"
|
||||
)
|
||||
|
||||
type tomlDecoder struct {
|
||||
parser toml.Parser
|
||||
finished bool
|
||||
d DataTreeNavigator
|
||||
rootMap *CandidateNode
|
||||
parser toml.Parser
|
||||
finished bool
|
||||
d DataTreeNavigator
|
||||
rootMap *CandidateNode
|
||||
pendingComments []string // Head comments collected from Comment nodes
|
||||
firstContentSeen bool // Track if we've processed the first non-comment node
|
||||
}
|
||||
|
||||
func NewTomlDecoder() Decoder {
|
||||
@ -28,7 +31,7 @@ func NewTomlDecoder() Decoder {
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) Init(reader io.Reader) error {
|
||||
dec.parser = toml.Parser{}
|
||||
dec.parser = toml.Parser{KeepComments: true}
|
||||
buf := new(bytes.Buffer)
|
||||
_, err := buf.ReadFrom(reader)
|
||||
if err != nil {
|
||||
@ -39,9 +42,23 @@ func (dec *tomlDecoder) Init(reader io.Reader) error {
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
}
|
||||
dec.pendingComments = make([]string, 0)
|
||||
dec.firstContentSeen = false
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) attachOrphanedCommentsToNode(tableNodeValue *CandidateNode) {
|
||||
if len(dec.pendingComments) > 0 {
|
||||
comments := strings.Join(dec.pendingComments, "\n")
|
||||
if tableNodeValue.HeadComment == "" {
|
||||
tableNodeValue.HeadComment = comments
|
||||
} else {
|
||||
tableNodeValue.HeadComment = tableNodeValue.HeadComment + "\n" + comments
|
||||
}
|
||||
dec.pendingComments = make([]string, 0)
|
||||
}
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) getFullPath(tomlNode *toml.Node) []interface{} {
|
||||
path := make([]interface{}, 0)
|
||||
for {
|
||||
@ -56,13 +73,24 @@ func (dec *tomlDecoder) getFullPath(tomlNode *toml.Node) []interface{} {
|
||||
func (dec *tomlDecoder) processKeyValueIntoMap(rootMap *CandidateNode, tomlNode *toml.Node) error {
|
||||
value := tomlNode.Value()
|
||||
path := dec.getFullPath(value.Next())
|
||||
log.Debug("processKeyValueIntoMap: %v", path)
|
||||
|
||||
valueNode, err := dec.decodeNode(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Attach pending head comments
|
||||
if len(dec.pendingComments) > 0 {
|
||||
valueNode.HeadComment = strings.Join(dec.pendingComments, "\n")
|
||||
dec.pendingComments = make([]string, 0)
|
||||
}
|
||||
|
||||
// Check for inline comment chained to the KeyValue node
|
||||
nextNode := tomlNode.Next()
|
||||
if nextNode != nil && nextNode.Kind == toml.Comment {
|
||||
valueNode.LineComment = string(nextNode.Data)
|
||||
}
|
||||
|
||||
context := Context{}
|
||||
context = context.SingleChildContext(rootMap)
|
||||
|
||||
@ -79,11 +107,15 @@ func (dec *tomlDecoder) decodeKeyValuesIntoMap(rootMap *CandidateNode, tomlNode
|
||||
nextItem := dec.parser.Expression()
|
||||
log.Debug("decodeKeyValuesIntoMap -- next exp, its a %v", nextItem.Kind)
|
||||
|
||||
if nextItem.Kind == toml.KeyValue {
|
||||
switch nextItem.Kind {
|
||||
case toml.KeyValue:
|
||||
if err := dec.processKeyValueIntoMap(rootMap, nextItem); err != nil {
|
||||
return false, err
|
||||
}
|
||||
} else {
|
||||
case toml.Comment:
|
||||
// Standalone comment - add to pending for next element
|
||||
dec.pendingComments = append(dec.pendingComments, string(nextItem.Data))
|
||||
default:
|
||||
// run out of key values
|
||||
log.Debug("done in decodeKeyValuesIntoMap, gota a %v", nextItem.Kind)
|
||||
return true, nil
|
||||
@ -125,13 +157,30 @@ func (dec *tomlDecoder) createInlineTableMap(tomlNode *toml.Node) (*CandidateNod
|
||||
|
||||
func (dec *tomlDecoder) createArray(tomlNode *toml.Node) (*CandidateNode, error) {
|
||||
content := make([]*CandidateNode, 0)
|
||||
var pendingArrayComments []string
|
||||
|
||||
iterator := tomlNode.Children()
|
||||
for iterator.Next() {
|
||||
child := iterator.Node()
|
||||
|
||||
// Handle comments within arrays
|
||||
if child.Kind == toml.Comment {
|
||||
// Collect comments to attach to the next array element
|
||||
pendingArrayComments = append(pendingArrayComments, string(child.Data))
|
||||
continue
|
||||
}
|
||||
|
||||
yamlNode, err := dec.decodeNode(child)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Attach any pending comments to this array element
|
||||
if len(pendingArrayComments) > 0 {
|
||||
yamlNode.HeadComment = strings.Join(pendingArrayComments, "\n")
|
||||
pendingArrayComments = make([]string, 0)
|
||||
}
|
||||
|
||||
content = append(content, yamlNode)
|
||||
}
|
||||
|
||||
@ -250,11 +299,29 @@ func (dec *tomlDecoder) processTopLevelNode(currentNode *toml.Node) (bool, error
|
||||
var err error
|
||||
log.Debug("processTopLevelNode: Going to process %v state is current %v", currentNode.Kind, NodeToString(dec.rootMap))
|
||||
switch currentNode.Kind {
|
||||
case toml.Comment:
|
||||
// Collect comment to attach to next element
|
||||
commentText := string(currentNode.Data)
|
||||
// If we haven't seen any content yet, accumulate comments for root
|
||||
if !dec.firstContentSeen {
|
||||
if dec.rootMap.HeadComment == "" {
|
||||
dec.rootMap.HeadComment = commentText
|
||||
} else {
|
||||
dec.rootMap.HeadComment = dec.rootMap.HeadComment + "\n" + commentText
|
||||
}
|
||||
} else {
|
||||
// We've seen content, so these comments are for the next element
|
||||
dec.pendingComments = append(dec.pendingComments, commentText)
|
||||
}
|
||||
return false, nil
|
||||
case toml.Table:
|
||||
dec.firstContentSeen = true
|
||||
runAgainstCurrentExp, err = dec.processTable(currentNode)
|
||||
case toml.ArrayTable:
|
||||
dec.firstContentSeen = true
|
||||
runAgainstCurrentExp, err = dec.processArrayTable(currentNode)
|
||||
default:
|
||||
dec.firstContentSeen = true
|
||||
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(dec.rootMap, currentNode)
|
||||
}
|
||||
|
||||
@ -264,36 +331,68 @@ func (dec *tomlDecoder) processTopLevelNode(currentNode *toml.Node) (bool, error
|
||||
|
||||
func (dec *tomlDecoder) processTable(currentNode *toml.Node) (bool, error) {
|
||||
log.Debug("Enter processTable")
|
||||
fullPath := dec.getFullPath(currentNode.Child())
|
||||
child := currentNode.Child()
|
||||
fullPath := dec.getFullPath(child)
|
||||
log.Debug("fullpath: %v", fullPath)
|
||||
|
||||
c := Context{}
|
||||
c = c.SingleChildContext(dec.rootMap)
|
||||
|
||||
fullPath, err := getPathToUse(fullPath, dec, c)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
tableNodeValue := &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
Content: make([]*CandidateNode, 0),
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
Content: make([]*CandidateNode, 0),
|
||||
EncodeSeparate: true,
|
||||
}
|
||||
|
||||
// Attach pending head comments to the table
|
||||
if len(dec.pendingComments) > 0 {
|
||||
tableNodeValue.HeadComment = strings.Join(dec.pendingComments, "\n")
|
||||
dec.pendingComments = make([]string, 0)
|
||||
}
|
||||
|
||||
var tableValue *toml.Node
|
||||
runAgainstCurrentExp := false
|
||||
var err error
|
||||
hasValue := dec.parser.NextExpression()
|
||||
// check to see if there is any table data
|
||||
if hasValue {
|
||||
sawKeyValue := false
|
||||
for dec.parser.NextExpression() {
|
||||
tableValue = dec.parser.Expression()
|
||||
// next expression is not table data, so we are done
|
||||
if tableValue.Kind != toml.KeyValue {
|
||||
log.Debug("got an empty table")
|
||||
runAgainstCurrentExp = true
|
||||
} else {
|
||||
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(tableNodeValue, tableValue)
|
||||
if err != nil && !errors.Is(err, io.EOF) {
|
||||
return false, err
|
||||
}
|
||||
// Allow standalone comments inside the table before the first key-value.
|
||||
// These should be associated with the next element in the table (usually the first key-value),
|
||||
// not treated as "end of table" (which would cause subsequent key-values to be parsed at root).
|
||||
if tableValue.Kind == toml.Comment {
|
||||
dec.pendingComments = append(dec.pendingComments, string(tableValue.Data))
|
||||
continue
|
||||
}
|
||||
|
||||
// next expression is not table data, so we are done (but we need to re-process it at top-level)
|
||||
if tableValue.Kind != toml.KeyValue {
|
||||
log.Debug("got an empty table (or reached next section)")
|
||||
// If the table had only comments, attach them to the table itself so they don't leak to the next node.
|
||||
if !sawKeyValue {
|
||||
dec.attachOrphanedCommentsToNode(tableNodeValue)
|
||||
}
|
||||
runAgainstCurrentExp = true
|
||||
break
|
||||
}
|
||||
|
||||
sawKeyValue = true
|
||||
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(tableNodeValue, tableValue)
|
||||
if err != nil && !errors.Is(err, io.EOF) {
|
||||
return false, err
|
||||
}
|
||||
break
|
||||
}
|
||||
// If we hit EOF after only seeing comments inside this table, attach them to the table itself
|
||||
// so they don't leak to whatever comes next.
|
||||
if !sawKeyValue {
|
||||
dec.attachOrphanedCommentsToNode(tableNodeValue)
|
||||
}
|
||||
|
||||
c := Context{}
|
||||
c = c.SingleChildContext(dec.rootMap)
|
||||
err = dec.d.DeeplyAssign(c, fullPath, tableNodeValue)
|
||||
if err != nil {
|
||||
return false, err
|
||||
@ -324,35 +423,130 @@ func (dec *tomlDecoder) arrayAppend(context Context, path []interface{}, rhsNode
|
||||
}
|
||||
|
||||
func (dec *tomlDecoder) processArrayTable(currentNode *toml.Node) (bool, error) {
|
||||
log.Debug("Entering processArrayTable")
|
||||
fullPath := dec.getFullPath(currentNode.Child())
|
||||
log.Debug("Enter processArrayTable")
|
||||
child := currentNode.Child()
|
||||
fullPath := dec.getFullPath(child)
|
||||
log.Debug("Fullpath: %v", fullPath)
|
||||
|
||||
c := Context{}
|
||||
c = c.SingleChildContext(dec.rootMap)
|
||||
|
||||
fullPath, err := getPathToUse(fullPath, dec, c)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
// need to use the array append exp to add another entry to
|
||||
// this array: fullpath += [ thing ]
|
||||
|
||||
hasValue := dec.parser.NextExpression()
|
||||
if !hasValue {
|
||||
return false, fmt.Errorf("error retrieving table %v value: %w", fullPath, dec.parser.Error())
|
||||
}
|
||||
|
||||
tableNodeValue := &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
Kind: MappingNode,
|
||||
Tag: "!!map",
|
||||
EncodeSeparate: true,
|
||||
}
|
||||
|
||||
tableValue := dec.parser.Expression()
|
||||
runAgainstCurrentExp, err := dec.decodeKeyValuesIntoMap(tableNodeValue, tableValue)
|
||||
log.Debugf("table node err: %w", err)
|
||||
if err != nil && !errors.Is(err, io.EOF) {
|
||||
return false, err
|
||||
// Attach pending head comments to the array table
|
||||
if len(dec.pendingComments) > 0 {
|
||||
tableNodeValue.HeadComment = strings.Join(dec.pendingComments, "\n")
|
||||
dec.pendingComments = make([]string, 0)
|
||||
}
|
||||
c := Context{}
|
||||
|
||||
c = c.SingleChildContext(dec.rootMap)
|
||||
runAgainstCurrentExp := false
|
||||
sawKeyValue := false
|
||||
if hasValue {
|
||||
for {
|
||||
exp := dec.parser.Expression()
|
||||
// Allow standalone comments inside array tables before the first key-value.
|
||||
if exp.Kind == toml.Comment {
|
||||
dec.pendingComments = append(dec.pendingComments, string(exp.Data))
|
||||
hasValue = dec.parser.NextExpression()
|
||||
if !hasValue {
|
||||
break
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// if the next value is a ArrayTable or Table, then its not part of this declaration (not a key value pair)
|
||||
// so lets leave that expression for the next round of parsing
|
||||
if exp.Kind == toml.ArrayTable || exp.Kind == toml.Table {
|
||||
// If this array-table entry had only comments, attach them to the entry so they don't leak.
|
||||
if !sawKeyValue {
|
||||
dec.attachOrphanedCommentsToNode(tableNodeValue)
|
||||
}
|
||||
runAgainstCurrentExp = true
|
||||
break
|
||||
}
|
||||
|
||||
sawKeyValue = true
|
||||
// otherwise, if there is a value, it must be some key value pairs of the
|
||||
// first object in the array!
|
||||
runAgainstCurrentExp, err = dec.decodeKeyValuesIntoMap(tableNodeValue, exp)
|
||||
if err != nil && !errors.Is(err, io.EOF) {
|
||||
return false, err
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
// If we hit EOF after only seeing comments inside this array-table entry, attach them to the entry
|
||||
// so they don't leak to whatever comes next.
|
||||
if !sawKeyValue && len(dec.pendingComments) > 0 {
|
||||
comments := strings.Join(dec.pendingComments, "\n")
|
||||
if tableNodeValue.HeadComment == "" {
|
||||
tableNodeValue.HeadComment = comments
|
||||
} else {
|
||||
tableNodeValue.HeadComment = tableNodeValue.HeadComment + "\n" + comments
|
||||
}
|
||||
dec.pendingComments = make([]string, 0)
|
||||
}
|
||||
|
||||
// += function
|
||||
err = dec.arrayAppend(c, fullPath, tableNodeValue)
|
||||
|
||||
return runAgainstCurrentExp, err
|
||||
}
|
||||
|
||||
// if fullPath points to an array of maps rather than a map
|
||||
// then it should set this element into the _last_ element of that array.
|
||||
// Because TOML. So we'll inject the last index into the path.
|
||||
|
||||
func getPathToUse(fullPath []interface{}, dec *tomlDecoder, c Context) ([]interface{}, error) {
|
||||
// We need to check the entire path (except the last element), not just the immediate parent,
|
||||
// because we may have nested array tables like [[array.subarray.subsubarray]]
|
||||
// where both 'array' and 'subarray' are arrays that already exist.
|
||||
|
||||
if len(fullPath) == 0 {
|
||||
return fullPath, nil
|
||||
}
|
||||
|
||||
resultPath := make([]interface{}, 0, len(fullPath)*2) // preallocate with extra space for indices
|
||||
|
||||
// Process all segments except the last one
|
||||
for i := 0; i < len(fullPath)-1; i++ {
|
||||
resultPath = append(resultPath, fullPath[i])
|
||||
|
||||
// Check if the current path segment points to an array
|
||||
readOp := createTraversalTree(resultPath, traversePreferences{DontAutoCreate: true}, false)
|
||||
resultContext, err := dec.d.GetMatchingNodes(c, readOp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if resultContext.MatchingNodes.Len() >= 1 {
|
||||
match := resultContext.MatchingNodes.Front().Value.(*CandidateNode)
|
||||
// If this segment points to an array, we need to add the last index
|
||||
// before continuing with the rest of the path
|
||||
if match.Kind == SequenceNode && len(match.Content) > 0 {
|
||||
lastIndex := len(match.Content) - 1
|
||||
resultPath = append(resultPath, lastIndex)
|
||||
log.Debugf("Path segment %v is an array, injecting index %d", resultPath[:len(resultPath)-1], lastIndex)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add the last segment
|
||||
resultPath = append(resultPath, fullPath[len(fullPath)-1])
|
||||
|
||||
log.Debugf("getPathToUse: original path %v -> result path %v", fullPath, resultPath)
|
||||
return resultPath, nil
|
||||
}
|
||||
|
||||
160
pkg/yqlib/decoder_uri_test.go
Normal file
160
pkg/yqlib/decoder_uri_test.go
Normal file
@ -0,0 +1,160 @@
|
||||
//go:build !yq_nouri
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"io"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
)
|
||||
|
||||
func TestUriDecoder_Init(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("test")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeSimpleString(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("hello%20world")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "!!str", node.Tag)
|
||||
test.AssertResult(t, "hello world", node.Value)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeSpecialCharacters(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("hello%21%40%23%24%25")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "hello!@#$%", node.Value)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeUTF8(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("%E2%9C%93%20check")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "✓ check", node.Value)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodePlusSign(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("a+b")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
// Note: url.QueryUnescape does NOT convert + to space
|
||||
// That's only for form encoding (url.ParseQuery)
|
||||
test.AssertResult(t, "a b", node.Value)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeEmptyString(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "", node.Value)
|
||||
|
||||
// Second decode should return EOF
|
||||
node, err = decoder.Decode()
|
||||
test.AssertResult(t, io.EOF, err)
|
||||
test.AssertResult(t, (*CandidateNode)(nil), node)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeMultipleCalls(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("test")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
// First decode
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "test", node.Value)
|
||||
|
||||
// Second decode should return EOF since we've consumed all input
|
||||
node, err = decoder.Decode()
|
||||
test.AssertResult(t, io.EOF, err)
|
||||
test.AssertResult(t, (*CandidateNode)(nil), node)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeInvalidEscape(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("test%ZZ")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
_, err = decoder.Decode()
|
||||
// Should return an error for invalid escape sequence
|
||||
if err == nil {
|
||||
t.Error("Expected error for invalid escape sequence, got nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeSlashAndQuery(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("path%2Fto%2Ffile%3Fquery%3Dvalue")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "path/to/file?query=value", node.Value)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodePercent(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("100%25")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "100%", node.Value)
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeNoEscaping(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
reader := strings.NewReader("simple_text-123")
|
||||
err := decoder.Init(reader)
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
node, err := decoder.Decode()
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "simple_text-123", node.Value)
|
||||
}
|
||||
|
||||
// Mock reader that returns an error
|
||||
type errorReader struct{}
|
||||
|
||||
func (e *errorReader) Read(_ []byte) (n int, err error) {
|
||||
return 0, io.ErrUnexpectedEOF
|
||||
}
|
||||
|
||||
func TestUriDecoder_DecodeReadError(t *testing.T) {
|
||||
decoder := NewUriDecoder()
|
||||
err := decoder.Init(&errorReader{})
|
||||
test.AssertResult(t, nil, err)
|
||||
|
||||
_, err = decoder.Decode()
|
||||
test.AssertResult(t, io.ErrUnexpectedEOF, err)
|
||||
}
|
||||
@ -8,7 +8,14 @@ import (
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
yaml "go.yaml.in/yaml/v3"
|
||||
yaml "go.yaml.in/yaml/v4"
|
||||
)
|
||||
|
||||
var (
|
||||
commentLineRe = regexp.MustCompile(`^\s*#`)
|
||||
yamlDirectiveLineRe = regexp.MustCompile(`^\s*%YAML`)
|
||||
separatorLineRe = regexp.MustCompile(`^\s*---\s*$`)
|
||||
separatorPrefixRe = regexp.MustCompile(`^\s*---\s+`)
|
||||
)
|
||||
|
||||
type yamlDecoder struct {
|
||||
@ -33,51 +40,72 @@ func NewYamlDecoder(prefs YamlPreferences) Decoder {
|
||||
}
|
||||
|
||||
func (dec *yamlDecoder) processReadStream(reader *bufio.Reader) (io.Reader, string, error) {
|
||||
var commentLineRegEx = regexp.MustCompile(`^\s*#`)
|
||||
var yamlDirectiveLineRegEx = regexp.MustCompile(`^\s*%YA`)
|
||||
var sb strings.Builder
|
||||
|
||||
for {
|
||||
peekBytes, err := reader.Peek(4)
|
||||
if errors.Is(err, io.EOF) {
|
||||
// EOF are handled else where..
|
||||
return reader, sb.String(), nil
|
||||
} else if err != nil {
|
||||
return reader, sb.String(), err
|
||||
} else if string(peekBytes[0]) == "\n" {
|
||||
_, err := reader.ReadString('\n')
|
||||
sb.WriteString("\n")
|
||||
if errors.Is(err, io.EOF) {
|
||||
return reader, sb.String(), nil
|
||||
} else if err != nil {
|
||||
return reader, sb.String(), err
|
||||
}
|
||||
} else if string(peekBytes) == "--- " {
|
||||
_, err := reader.ReadString(' ')
|
||||
sb.WriteString("$yqDocSeparator$\n")
|
||||
if errors.Is(err, io.EOF) {
|
||||
return reader, sb.String(), nil
|
||||
} else if err != nil {
|
||||
return reader, sb.String(), err
|
||||
}
|
||||
} else if string(peekBytes) == "---\n" {
|
||||
_, err := reader.ReadString('\n')
|
||||
sb.WriteString("$yqDocSeparator$\n")
|
||||
if errors.Is(err, io.EOF) {
|
||||
return reader, sb.String(), nil
|
||||
} else if err != nil {
|
||||
return reader, sb.String(), err
|
||||
}
|
||||
} else if commentLineRegEx.MatchString(string(peekBytes)) || yamlDirectiveLineRegEx.MatchString(string(peekBytes)) {
|
||||
line, err := reader.ReadString('\n')
|
||||
sb.WriteString(line)
|
||||
if errors.Is(err, io.EOF) {
|
||||
return reader, sb.String(), nil
|
||||
} else if err != nil {
|
||||
return reader, sb.String(), err
|
||||
}
|
||||
} else {
|
||||
line, err := reader.ReadString('\n')
|
||||
if errors.Is(err, io.EOF) && line == "" {
|
||||
// no more data
|
||||
return reader, sb.String(), nil
|
||||
}
|
||||
if err != nil && !errors.Is(err, io.EOF) {
|
||||
return reader, sb.String(), err
|
||||
}
|
||||
|
||||
// Determine newline style and strip it for inspection
|
||||
newline := ""
|
||||
if strings.HasSuffix(line, "\r\n") {
|
||||
newline = "\r\n"
|
||||
line = strings.TrimSuffix(line, "\r\n")
|
||||
} else if strings.HasSuffix(line, "\n") {
|
||||
newline = "\n"
|
||||
line = strings.TrimSuffix(line, "\n")
|
||||
}
|
||||
|
||||
trimmed := strings.TrimSpace(line)
|
||||
|
||||
// Document separator: exact line '---' or a '--- ' prefix followed by content
|
||||
if separatorLineRe.MatchString(trimmed) {
|
||||
sb.WriteString("$yqDocSeparator$")
|
||||
sb.WriteString(newline)
|
||||
if errors.Is(err, io.EOF) {
|
||||
return reader, sb.String(), nil
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Handle lines that start with '--- ' followed by more content (e.g. '--- cat')
|
||||
if separatorPrefixRe.MatchString(line) {
|
||||
match := separatorPrefixRe.FindString(line)
|
||||
remainder := line[len(match):]
|
||||
// normalise separator newline: if original had none, default to LF
|
||||
sepNewline := newline
|
||||
if sepNewline == "" {
|
||||
sepNewline = "\n"
|
||||
}
|
||||
sb.WriteString("$yqDocSeparator$")
|
||||
sb.WriteString(sepNewline)
|
||||
// push the remainder back onto the reader and continue processing
|
||||
reader = bufio.NewReader(io.MultiReader(strings.NewReader(remainder), reader))
|
||||
if errors.Is(err, io.EOF) && remainder == "" {
|
||||
return reader, sb.String(), nil
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Comments, YAML directives, and blank lines are leading content
|
||||
if commentLineRe.MatchString(line) || yamlDirectiveLineRe.MatchString(line) || trimmed == "" {
|
||||
sb.WriteString(line)
|
||||
sb.WriteString(newline)
|
||||
if errors.Is(err, io.EOF) {
|
||||
return reader, sb.String(), nil
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// First non-leading line: push it back onto a reader and return
|
||||
originalLine := line + newline
|
||||
return io.MultiReader(strings.NewReader(originalLine), reader), sb.String(), nil
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -22,7 +22,7 @@ see https://yaml.org/type/merge.html
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- &CENTER
|
||||
- &CENTRE
|
||||
x: 1
|
||||
y: 2
|
||||
- &LEFT
|
||||
@ -32,7 +32,7 @@ Given a sample.yml file of:
|
||||
r: 10
|
||||
- &SMALL
|
||||
r: 1
|
||||
- !!merge <<: *CENTER
|
||||
- !!merge <<: *CENTRE
|
||||
r: 10
|
||||
```
|
||||
then
|
||||
@ -191,7 +191,7 @@ Given a sample.yml file of:
|
||||
```yaml
|
||||
f:
|
||||
a: &a cat
|
||||
*a: b
|
||||
*a : b
|
||||
```
|
||||
then
|
||||
```bash
|
||||
@ -288,7 +288,7 @@ see https://yaml.org/type/merge.html. This has the correct data, but the wrong k
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- &CENTER
|
||||
- &CENTRE
|
||||
x: 1
|
||||
y: 2
|
||||
- &LEFT
|
||||
@ -299,7 +299,7 @@ Given a sample.yml file of:
|
||||
- &SMALL
|
||||
r: 1
|
||||
- !!merge <<:
|
||||
- *CENTER
|
||||
- *CENTRE
|
||||
- *BIG
|
||||
```
|
||||
then
|
||||
@ -318,7 +318,7 @@ see https://yaml.org/type/merge.html. This has the correct data, but the wrong k
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- &CENTER
|
||||
- &CENTRE
|
||||
x: 1
|
||||
y: 2
|
||||
- &LEFT
|
||||
@ -401,7 +401,7 @@ Taken from https://yaml.org/type/merge.html. Same values as legacy, but with the
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- &CENTER
|
||||
- &CENTRE
|
||||
x: 1
|
||||
y: 2
|
||||
- &LEFT
|
||||
@ -412,7 +412,7 @@ Given a sample.yml file of:
|
||||
- &SMALL
|
||||
r: 1
|
||||
- !!merge <<:
|
||||
- *CENTER
|
||||
- *CENTRE
|
||||
- *BIG
|
||||
```
|
||||
then
|
||||
@ -432,7 +432,7 @@ Taken from https://yaml.org/type/merge.html. Same values as legacy, but with the
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- &CENTER
|
||||
- &CENTRE
|
||||
x: 1
|
||||
y: 2
|
||||
- &LEFT
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
|
||||
Various operators for parsing and manipulating dates.
|
||||
|
||||
## Date time formattings
|
||||
## Date time formatting
|
||||
This uses Golang's built in time library for parsing and formatting date times.
|
||||
|
||||
When not specified, the RFC3339 standard is assumed `2006-01-02T15:04:05Z07:00` for parsing.
|
||||
|
||||
@ -29,6 +29,9 @@ as follows:
|
||||
yq '(.. | select(tag == "!!str")) |= envsubst' file.yaml
|
||||
```
|
||||
|
||||
## Disabling env operators
|
||||
If required, you can use the `--security-disable-env-ops` to disable env operations.
|
||||
|
||||
|
||||
## Read string environment variable
|
||||
Running
|
||||
@ -254,3 +257,39 @@ will output
|
||||
Error: variable ${notThere} not set
|
||||
```
|
||||
|
||||
## env() operation fails when security is enabled
|
||||
Use `--security-disable-env-ops` to disable env operations for security.
|
||||
|
||||
Running
|
||||
```bash
|
||||
yq --null-input 'env("MYENV")'
|
||||
```
|
||||
will output
|
||||
```bash
|
||||
Error: env operations have been disabled
|
||||
```
|
||||
|
||||
## strenv() operation fails when security is enabled
|
||||
Use `--security-disable-env-ops` to disable env operations for security.
|
||||
|
||||
Running
|
||||
```bash
|
||||
yq --null-input 'strenv("MYENV")'
|
||||
```
|
||||
will output
|
||||
```bash
|
||||
Error: env operations have been disabled
|
||||
```
|
||||
|
||||
## envsubst() operation fails when security is enabled
|
||||
Use `--security-disable-env-ops` to disable env operations for security.
|
||||
|
||||
Running
|
||||
```bash
|
||||
yq --null-input '"value: ${MYENV}" | envsubst'
|
||||
```
|
||||
will output
|
||||
```bash
|
||||
Error: env operations have been disabled
|
||||
```
|
||||
|
||||
|
||||
@ -1,3 +1,8 @@
|
||||
# First
|
||||
|
||||
Returns the first matching element in an array, or first matching value in a map.
|
||||
|
||||
Can be given an expression to match with, otherwise will just return the first.
|
||||
|
||||
## First matching element from array
|
||||
Given a sample.yml file of:
|
||||
@ -20,8 +25,10 @@ Given a sample.yml file of:
|
||||
```yaml
|
||||
- a: banana
|
||||
- a: cat
|
||||
b: firstCat
|
||||
- a: apple
|
||||
- a: cat
|
||||
b: secondCat
|
||||
```
|
||||
then
|
||||
```bash
|
||||
@ -30,6 +37,7 @@ yq 'first(.a == "cat")' sample.yml
|
||||
will output
|
||||
```yaml
|
||||
a: cat
|
||||
b: firstCat
|
||||
```
|
||||
|
||||
## First matching element from array with numeric condition
|
||||
@ -38,6 +46,7 @@ Given a sample.yml file of:
|
||||
- a: 10
|
||||
- a: 100
|
||||
- a: 1
|
||||
- a: 101
|
||||
```
|
||||
then
|
||||
```bash
|
||||
@ -53,7 +62,10 @@ Given a sample.yml file of:
|
||||
```yaml
|
||||
- a: false
|
||||
- a: true
|
||||
b: firstTrue
|
||||
- a: false
|
||||
- a: true
|
||||
b: secondTrue
|
||||
```
|
||||
then
|
||||
```bash
|
||||
@ -62,6 +74,7 @@ yq 'first(.a == true)' sample.yml
|
||||
will output
|
||||
```yaml
|
||||
a: true
|
||||
b: firstTrue
|
||||
```
|
||||
|
||||
## First matching element from array with null values
|
||||
@ -84,19 +97,19 @@ a: cat
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- a: dog
|
||||
b: 5
|
||||
b: 7
|
||||
- a: cat
|
||||
b: 3
|
||||
- a: apple
|
||||
b: 7
|
||||
b: 5
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq 'first(.b > 4)' sample.yml
|
||||
yq 'first(.b > 4 and .b < 6)' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
a: dog
|
||||
a: apple
|
||||
b: 5
|
||||
```
|
||||
|
||||
@ -127,7 +140,7 @@ x:
|
||||
y:
|
||||
a: 100
|
||||
z:
|
||||
a: 1
|
||||
a: 101
|
||||
```
|
||||
then
|
||||
```bash
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
|
||||
Various operators for parsing and manipulating dates.
|
||||
|
||||
## Date time formattings
|
||||
## Date time formatting
|
||||
This uses Golang's built in time library for parsing and formatting date times.
|
||||
|
||||
When not specified, the RFC3339 standard is assumed `2006-01-02T15:04:05Z07:00` for parsing.
|
||||
|
||||
@ -29,3 +29,6 @@ as follows:
|
||||
yq '(.. | select(tag == "!!str")) |= envsubst' file.yaml
|
||||
```
|
||||
|
||||
## Disabling env operators
|
||||
If required, you can use the `--security-disable-env-ops` to disable env operations.
|
||||
|
||||
|
||||
5
pkg/yqlib/doc/operators/headers/first.md
Normal file
5
pkg/yqlib/doc/operators/headers/first.md
Normal file
@ -0,0 +1,5 @@
|
||||
# First
|
||||
|
||||
Returns the first matching element in an array, or first matching value in a map.
|
||||
|
||||
Can be given an expression to match with, otherwise will just return the first.
|
||||
@ -46,3 +46,7 @@ this.is = a properties file
|
||||
```
|
||||
bXkgc2VjcmV0IGNoaWxsaSByZWNpcGUgaXMuLi4u
|
||||
```
|
||||
|
||||
## Disabling file operators
|
||||
If required, you can use the `--security-disable-file-ops` to disable file operations.
|
||||
|
||||
|
||||
@ -47,6 +47,10 @@ this.is = a properties file
|
||||
bXkgc2VjcmV0IGNoaWxsaSByZWNpcGUgaXMuLi4u
|
||||
```
|
||||
|
||||
## Disabling file operators
|
||||
If required, you can use the `--security-disable-file-ops` to disable file operations.
|
||||
|
||||
|
||||
## Simple example
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
@ -194,3 +198,63 @@ cool: things
|
||||
more_stuff: my secret chilli recipe is....
|
||||
```
|
||||
|
||||
## load() operation fails when security is enabled
|
||||
Use `--security-disable-file-ops` to disable file operations for security.
|
||||
|
||||
Running
|
||||
```bash
|
||||
yq --null-input 'load("../../examples/thing.yml")'
|
||||
```
|
||||
will output
|
||||
```bash
|
||||
Error: file operations have been disabled
|
||||
```
|
||||
|
||||
## load_str() operation fails when security is enabled
|
||||
Use `--security-disable-file-ops` to disable file operations for security.
|
||||
|
||||
Running
|
||||
```bash
|
||||
yq --null-input 'load_str("../../examples/thing.yml")'
|
||||
```
|
||||
will output
|
||||
```bash
|
||||
Error: file operations have been disabled
|
||||
```
|
||||
|
||||
## load_xml() operation fails when security is enabled
|
||||
Use `--security-disable-file-ops` to disable file operations for security.
|
||||
|
||||
Running
|
||||
```bash
|
||||
yq --null-input 'load_xml("../../examples/small.xml")'
|
||||
```
|
||||
will output
|
||||
```bash
|
||||
Error: file operations have been disabled
|
||||
```
|
||||
|
||||
## load_props() operation fails when security is enabled
|
||||
Use `--security-disable-file-ops` to disable file operations for security.
|
||||
|
||||
Running
|
||||
```bash
|
||||
yq --null-input 'load_props("../../examples/small.properties")'
|
||||
```
|
||||
will output
|
||||
```bash
|
||||
Error: file operations have been disabled
|
||||
```
|
||||
|
||||
## load_base64() operation fails when security is enabled
|
||||
Use `--security-disable-file-ops` to disable file operations for security.
|
||||
|
||||
Running
|
||||
```bash
|
||||
yq --null-input 'load_base64("../../examples/base64.txt")'
|
||||
```
|
||||
will output
|
||||
```bash
|
||||
Error: file operations have been disabled
|
||||
```
|
||||
|
||||
|
||||
@ -79,6 +79,46 @@ will output
|
||||
c: cat
|
||||
```
|
||||
|
||||
## Get the top (root) parent
|
||||
Use negative numbers to get the top parents. You can think of this as indexing into the 'parents' array above
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a:
|
||||
b:
|
||||
c: cat
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.a.b.c | parent(-1)' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
a:
|
||||
b:
|
||||
c: cat
|
||||
```
|
||||
|
||||
## Root
|
||||
Alias for parent(-1), returns the top level parent. This is usually the document node.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a:
|
||||
b:
|
||||
c: cat
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.a.b.c | root' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
a:
|
||||
b:
|
||||
c: cat
|
||||
```
|
||||
|
||||
## N-th parent
|
||||
You can optionally supply the number of levels to go up for the parent, the default being 1.
|
||||
|
||||
@ -116,6 +156,25 @@ a:
|
||||
c: cat
|
||||
```
|
||||
|
||||
## N-th negative
|
||||
Similarly, use negative numbers to index backwards from the parents array
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a:
|
||||
b:
|
||||
c: cat
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.a.b.c | parent(-2)' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
b:
|
||||
c: cat
|
||||
```
|
||||
|
||||
## No parent
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
|
||||
88
pkg/yqlib/doc/usage/base64.md
Normal file
88
pkg/yqlib/doc/usage/base64.md
Normal file
@ -0,0 +1,88 @@
|
||||
# Base64
|
||||
|
||||
Encode and decode to and from Base64.
|
||||
|
||||
Base64 assumes [RFC4648](https://rfc-editor.org/rfc/rfc4648.html) encoding. Encoding and decoding both assume that the content is a UTF-8 string and not binary content.
|
||||
|
||||
|
||||
See below for examples
|
||||
|
||||
|
||||
## Decode base64: simple
|
||||
Decoded data is assumed to be a string.
|
||||
|
||||
Given a sample.txt file of:
|
||||
```
|
||||
YSBzcGVjaWFsIHN0cmluZw==
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=base64 -oy '.' sample.txt
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
a special string
|
||||
```
|
||||
|
||||
## Decode base64: UTF-8
|
||||
Base64 decoding supports UTF-8 encoded strings.
|
||||
|
||||
Given a sample.txt file of:
|
||||
```
|
||||
V29ya3Mgd2l0aCBVVEYtMTYg8J+Yig==
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=base64 -oy '.' sample.txt
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
Works with UTF-16 😊
|
||||
```
|
||||
|
||||
## Decode with extra spaces
|
||||
Extra leading/trailing whitespace is stripped
|
||||
|
||||
Given a sample.txt file of:
|
||||
```
|
||||
|
||||
YSBzcGVjaWFsIHN0cmluZw==
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -p=base64 -oy '.' sample.txt
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
a special string
|
||||
```
|
||||
|
||||
## Encode base64: string
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
"a special string"
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=base64 '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```
|
||||
YSBzcGVjaWFsIHN0cmluZw==```
|
||||
|
||||
## Encode base64: string from document
|
||||
Extract a string field and encode it to base64.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
coolData: "a special string"
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=base64 '.coolData' sample.yml
|
||||
```
|
||||
will output
|
||||
```
|
||||
YSBzcGVjaWFsIHN0cmluZw==```
|
||||
|
||||
201
pkg/yqlib/doc/usage/hcl.md
Normal file
201
pkg/yqlib/doc/usage/hcl.md
Normal file
@ -0,0 +1,201 @@
|
||||
# HCL
|
||||
|
||||
Encode and decode to and from [HashiCorp Configuration Language (HCL)](https://github.com/hashicorp/hcl).
|
||||
|
||||
HCL is commonly used in HashiCorp tools like Terraform for configuration files. The yq HCL encoder and decoder support:
|
||||
- Blocks and attributes
|
||||
- String interpolation and expressions (preserved without quotes)
|
||||
- Comments (leading, head, and line comments)
|
||||
- Nested structures (maps and lists)
|
||||
- Syntax colorisation when enabled
|
||||
|
||||
|
||||
## Parse HCL
|
||||
Given a sample.hcl file of:
|
||||
```hcl
|
||||
io_mode = "async"
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -oy sample.hcl
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
io_mode: "async"
|
||||
```
|
||||
|
||||
## Roundtrip: Sample Doc
|
||||
Given a sample.hcl file of:
|
||||
```hcl
|
||||
service "cat" {
|
||||
process "main" {
|
||||
command = ["/usr/local/bin/awesome-app", "server"]
|
||||
}
|
||||
|
||||
process "management" {
|
||||
command = ["/usr/local/bin/awesome-app", "management"]
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq sample.hcl
|
||||
```
|
||||
will output
|
||||
```hcl
|
||||
service "cat" {
|
||||
process "main" {
|
||||
command = ["/usr/local/bin/awesome-app", "server"]
|
||||
}
|
||||
process "management" {
|
||||
command = ["/usr/local/bin/awesome-app", "management"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Roundtrip: With an update
|
||||
Given a sample.hcl file of:
|
||||
```hcl
|
||||
service "cat" {
|
||||
process "main" {
|
||||
command = ["/usr/local/bin/awesome-app", "server"]
|
||||
}
|
||||
|
||||
process "management" {
|
||||
command = ["/usr/local/bin/awesome-app", "management"]
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.service.cat.process.main.command += "meow"' sample.hcl
|
||||
```
|
||||
will output
|
||||
```hcl
|
||||
service "cat" {
|
||||
process "main" {
|
||||
command = ["/usr/local/bin/awesome-app", "server", "meow"]
|
||||
}
|
||||
process "management" {
|
||||
command = ["/usr/local/bin/awesome-app", "management"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Parse HCL: Sample Doc
|
||||
Given a sample.hcl file of:
|
||||
```hcl
|
||||
service "cat" {
|
||||
process "main" {
|
||||
command = ["/usr/local/bin/awesome-app", "server"]
|
||||
}
|
||||
|
||||
process "management" {
|
||||
command = ["/usr/local/bin/awesome-app", "management"]
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -oy sample.hcl
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
service:
|
||||
cat:
|
||||
process:
|
||||
main:
|
||||
command:
|
||||
- "/usr/local/bin/awesome-app"
|
||||
- "server"
|
||||
management:
|
||||
command:
|
||||
- "/usr/local/bin/awesome-app"
|
||||
- "management"
|
||||
```
|
||||
|
||||
## Parse HCL: with comments
|
||||
Given a sample.hcl file of:
|
||||
```hcl
|
||||
# Configuration
|
||||
port = 8080 # server port
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -oy sample.hcl
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
# Configuration
|
||||
port: 8080 # server port
|
||||
```
|
||||
|
||||
## Roundtrip: with comments
|
||||
Given a sample.hcl file of:
|
||||
```hcl
|
||||
# Configuration
|
||||
port = 8080
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq sample.hcl
|
||||
```
|
||||
will output
|
||||
```hcl
|
||||
# Configuration
|
||||
port = 8080
|
||||
```
|
||||
|
||||
## Roundtrip: With templates, functions and arithmetic
|
||||
Given a sample.hcl file of:
|
||||
```hcl
|
||||
# Arithmetic with literals and application-provided variables
|
||||
sum = 1 + addend
|
||||
|
||||
# String interpolation and templates
|
||||
message = "Hello, ${name}!"
|
||||
|
||||
# Application-provided functions
|
||||
shouty_message = upper(message)
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq sample.hcl
|
||||
```
|
||||
will output
|
||||
```hcl
|
||||
# Arithmetic with literals and application-provided variables
|
||||
sum = 1 + addend
|
||||
# String interpolation and templates
|
||||
message = "Hello, ${name}!"
|
||||
# Application-provided functions
|
||||
shouty_message = upper(message)
|
||||
```
|
||||
|
||||
## Roundtrip: Separate blocks with same name.
|
||||
Given a sample.hcl file of:
|
||||
```hcl
|
||||
resource "aws_instance" "web" {
|
||||
ami = "ami-12345"
|
||||
}
|
||||
resource "aws_instance" "db" {
|
||||
ami = "ami-67890"
|
||||
}
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq sample.hcl
|
||||
```
|
||||
will output
|
||||
```hcl
|
||||
resource "aws_instance" "web" {
|
||||
ami = "ami-12345"
|
||||
}
|
||||
resource "aws_instance" "db" {
|
||||
ami = "ami-67890"
|
||||
}
|
||||
```
|
||||
|
||||
9
pkg/yqlib/doc/usage/headers/base64.md
Normal file
9
pkg/yqlib/doc/usage/headers/base64.md
Normal file
@ -0,0 +1,9 @@
|
||||
# Base64
|
||||
|
||||
Encode and decode to and from Base64.
|
||||
|
||||
Base64 assumes [RFC4648](https://rfc-editor.org/rfc/rfc4648.html) encoding. Encoding and decoding both assume that the content is a UTF-8 string and not binary content.
|
||||
|
||||
|
||||
See below for examples
|
||||
|
||||
11
pkg/yqlib/doc/usage/headers/hcl.md
Normal file
11
pkg/yqlib/doc/usage/headers/hcl.md
Normal file
@ -0,0 +1,11 @@
|
||||
# HCL
|
||||
|
||||
Encode and decode to and from [HashiCorp Configuration Language (HCL)](https://github.com/hashicorp/hcl).
|
||||
|
||||
HCL is commonly used in HashiCorp tools like Terraform for configuration files. The yq HCL encoder and decoder support:
|
||||
- Blocks and attributes
|
||||
- String interpolation and expressions (preserved without quotes)
|
||||
- Comments (leading, head, and line comments)
|
||||
- Nested structures (maps and lists)
|
||||
- Syntax colorisation when enabled
|
||||
|
||||
9
pkg/yqlib/doc/usage/headers/kyaml.md
Normal file
9
pkg/yqlib/doc/usage/headers/kyaml.md
Normal file
@ -0,0 +1,9 @@
|
||||
# KYaml
|
||||
|
||||
Encode and decode to and from KYaml (a restricted subset of YAML that uses flow-style collections).
|
||||
|
||||
KYaml is useful when you want YAML data rendered in a compact, JSON-like form while still supporting YAML features like comments.
|
||||
|
||||
Notes:
|
||||
- Strings are always double-quoted in KYaml output.
|
||||
- Anchors and aliases are expanded (KYaml output does not emit them).
|
||||
253
pkg/yqlib/doc/usage/kyaml.md
Normal file
253
pkg/yqlib/doc/usage/kyaml.md
Normal file
@ -0,0 +1,253 @@
|
||||
# KYaml
|
||||
|
||||
Encode and decode to and from KYaml (a restricted subset of YAML that uses flow-style collections).
|
||||
|
||||
KYaml is useful when you want YAML data rendered in a compact, JSON-like form while still supporting YAML features like comments.
|
||||
|
||||
Notes:
|
||||
- Strings are always double-quoted in KYaml output.
|
||||
- Anchors and aliases are expanded (KYaml output does not emit them).
|
||||
|
||||
## Encode kyaml: plain string scalar
|
||||
Strings are always double-quoted in KYaml output.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
cat
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
"cat"
|
||||
```
|
||||
|
||||
## encode flow mapping and sequence
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a: b
|
||||
c:
|
||||
- d
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
{
|
||||
a: "b",
|
||||
c: [
|
||||
"d",
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## encode non-string scalars
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a: 12
|
||||
b: true
|
||||
c: null
|
||||
d: "true"
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
{
|
||||
a: 12,
|
||||
b: true,
|
||||
c: null,
|
||||
d: "true",
|
||||
}
|
||||
```
|
||||
|
||||
## quote non-identifier keys
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
"1a": b
|
||||
"has space": c
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
{
|
||||
"1a": "b",
|
||||
"has space": "c",
|
||||
}
|
||||
```
|
||||
|
||||
## escape quoted strings
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
a: "line1\nline2\t\"q\""
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
{
|
||||
a: "line1\nline2\t\"q\"",
|
||||
}
|
||||
```
|
||||
|
||||
## preserve comments when encoding
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
# leading
|
||||
a: 1 # a line
|
||||
# head b
|
||||
b: 2
|
||||
c:
|
||||
# head d
|
||||
- d # d line
|
||||
- e
|
||||
# trailing
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
# leading
|
||||
{
|
||||
a: 1, # a line
|
||||
# head b
|
||||
b: 2,
|
||||
c: [
|
||||
# head d
|
||||
"d", # d line
|
||||
"e",
|
||||
],
|
||||
# trailing
|
||||
}
|
||||
```
|
||||
|
||||
## Encode kyaml: anchors and aliases
|
||||
KYaml output does not support anchors/aliases; they are expanded to concrete values.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
base: &base
|
||||
a: b
|
||||
copy: *base
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
{
|
||||
base: {
|
||||
a: "b",
|
||||
},
|
||||
copy: {
|
||||
a: "b",
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Encode kyaml: yaml to kyaml shows formatting differences
|
||||
KYaml uses flow-style collections (braces/brackets) and explicit commas.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
person:
|
||||
name: John
|
||||
pets:
|
||||
- cat
|
||||
- dog
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
{
|
||||
person: {
|
||||
name: "John",
|
||||
pets: [
|
||||
"cat",
|
||||
"dog",
|
||||
],
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Encode kyaml: nested lists of objects
|
||||
Lists and objects can be nested arbitrarily; KYaml always uses flow-style collections.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
- name: a
|
||||
items:
|
||||
- id: 1
|
||||
tags:
|
||||
- k: x
|
||||
v: y
|
||||
- k: x2
|
||||
v: y2
|
||||
- id: 2
|
||||
tags:
|
||||
- k: z
|
||||
v: w
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=kyaml '.' sample.yml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
[
|
||||
{
|
||||
name: "a",
|
||||
items: [
|
||||
{
|
||||
id: 1,
|
||||
tags: [
|
||||
{
|
||||
k: "x",
|
||||
v: "y",
|
||||
},
|
||||
{
|
||||
k: "x2",
|
||||
v: "y2",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
tags: [
|
||||
{
|
||||
k: "z",
|
||||
v: "w",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
```
|
||||
|
||||
@ -84,3 +84,23 @@ will output
|
||||
name='Miles O'"'"'Brien'
|
||||
```
|
||||
|
||||
## Encode shell variables: custom separator
|
||||
Use --shell-key-separator to specify a custom separator between keys. This is useful when the original keys contain underscores.
|
||||
|
||||
Given a sample.yml file of:
|
||||
```yaml
|
||||
my_app:
|
||||
db_config:
|
||||
host: localhost
|
||||
port: 5432
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -o=shell --shell-key-separator="__" sample.yml
|
||||
```
|
||||
will output
|
||||
```sh
|
||||
my_app__db_config__host=localhost
|
||||
my_app__db_config__port=5432
|
||||
```
|
||||
|
||||
|
||||
@ -104,6 +104,27 @@ owner:
|
||||
suburb: nice
|
||||
```
|
||||
|
||||
## Parse: Array of Array Table
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
|
||||
[[fruits]]
|
||||
name = "apple"
|
||||
[[fruits.varieties]] # nested array of tables
|
||||
name = "red delicious"
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -oy '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
fruits:
|
||||
- name: apple
|
||||
varieties:
|
||||
- name: red delicious
|
||||
```
|
||||
|
||||
## Parse: Empty Table
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
@ -120,3 +141,246 @@ will output
|
||||
dependencies: {}
|
||||
```
|
||||
|
||||
## Roundtrip: inline table attribute
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
name = { first = "Tom", last = "Preston-Werner" }
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
name = { first = "Tom", last = "Preston-Werner" }
|
||||
```
|
||||
|
||||
## Roundtrip: table section
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
[owner.contact]
|
||||
name = "Tom"
|
||||
age = 36
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
[owner.contact]
|
||||
name = "Tom"
|
||||
age = 36
|
||||
```
|
||||
|
||||
## Roundtrip: array of tables
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
[[fruits]]
|
||||
name = "apple"
|
||||
[[fruits.varieties]]
|
||||
name = "red delicious"
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
[[fruits]]
|
||||
name = "apple"
|
||||
[[fruits.varieties]]
|
||||
name = "red delicious"
|
||||
```
|
||||
|
||||
## Roundtrip: arrays and scalars
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
A = ["hello", ["world", "again"]]
|
||||
B = 12
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
A = ["hello", ["world", "again"]]
|
||||
B = 12
|
||||
```
|
||||
|
||||
## Roundtrip: simple
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
A = "hello"
|
||||
B = 12
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
A = "hello"
|
||||
B = 12
|
||||
```
|
||||
|
||||
## Roundtrip: deep paths
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
[person]
|
||||
name = "hello"
|
||||
address = "12 cat st"
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
[person]
|
||||
name = "hello"
|
||||
address = "12 cat st"
|
||||
```
|
||||
|
||||
## Roundtrip: empty array
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
A = []
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
A = []
|
||||
```
|
||||
|
||||
## Roundtrip: sample table
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
var = "x"
|
||||
|
||||
[owner.contact]
|
||||
name = "Tom Preston-Werner"
|
||||
age = 36
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
var = "x"
|
||||
|
||||
[owner.contact]
|
||||
name = "Tom Preston-Werner"
|
||||
age = 36
|
||||
```
|
||||
|
||||
## Roundtrip: empty table
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
[dependencies]
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
[dependencies]
|
||||
```
|
||||
|
||||
## Roundtrip: comments
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
# This is a comment
|
||||
A = "hello" # inline comment
|
||||
B = 12
|
||||
|
||||
# Table comment
|
||||
[person]
|
||||
name = "Tom" # name comment
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
# This is a comment
|
||||
A = "hello" # inline comment
|
||||
B = 12
|
||||
|
||||
# Table comment
|
||||
[person]
|
||||
name = "Tom" # name comment
|
||||
```
|
||||
|
||||
## Roundtrip: sample from web
|
||||
Given a sample.toml file of:
|
||||
```toml
|
||||
# This is a TOML document
|
||||
title = "TOML Example"
|
||||
|
||||
[owner]
|
||||
name = "Tom Preston-Werner"
|
||||
dob = 1979-05-27T07:32:00-08:00
|
||||
|
||||
[database]
|
||||
enabled = true
|
||||
ports = [8000, 8001, 8002]
|
||||
data = [["delta", "phi"], [3.14]]
|
||||
temp_targets = { cpu = 79.5, case = 72.0 }
|
||||
|
||||
# [servers] yq can't do this one yet
|
||||
[servers.alpha]
|
||||
ip = "10.0.0.1"
|
||||
role = "frontend"
|
||||
|
||||
[servers.beta]
|
||||
ip = "10.0.0.2"
|
||||
role = "backend"
|
||||
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.toml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
# This is a TOML document
|
||||
title = "TOML Example"
|
||||
|
||||
[owner]
|
||||
name = "Tom Preston-Werner"
|
||||
dob = 1979-05-27T07:32:00-08:00
|
||||
|
||||
[database]
|
||||
enabled = true
|
||||
ports = [8000, 8001, 8002]
|
||||
data = [["delta", "phi"], [3.14]]
|
||||
temp_targets = { cpu = 79.5, case = 72.0 }
|
||||
|
||||
# [servers] yq can't do this one yet
|
||||
[servers.alpha]
|
||||
ip = "10.0.0.1"
|
||||
role = "frontend"
|
||||
|
||||
[servers.beta]
|
||||
ip = "10.0.0.2"
|
||||
role = "backend"
|
||||
```
|
||||
|
||||
|
||||
@ -53,7 +53,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -oy '.' sample.xml
|
||||
yq -oy sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
@ -100,7 +100,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -oy '.' sample.xml
|
||||
yq -oy sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
@ -157,7 +157,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -oy '.' sample.xml
|
||||
yq -oy sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
@ -177,7 +177,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -oy '.' sample.xml
|
||||
yq -oy sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
@ -196,7 +196,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -oy '.' sample.xml
|
||||
yq -oy sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
@ -225,7 +225,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.xml
|
||||
yq sample.xml
|
||||
```
|
||||
will output
|
||||
```xml
|
||||
@ -256,7 +256,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq --xml-skip-directives '.' sample.xml
|
||||
yq --xml-skip-directives sample.xml
|
||||
```
|
||||
will output
|
||||
```xml
|
||||
@ -292,7 +292,7 @@ for x --></x>
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq -oy '.' sample.xml
|
||||
yq -oy sample.xml
|
||||
```
|
||||
will output
|
||||
```yaml
|
||||
@ -327,7 +327,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq --xml-keep-namespace=false '.' sample.xml
|
||||
yq --xml-keep-namespace=false sample.xml
|
||||
```
|
||||
will output
|
||||
```xml
|
||||
@ -361,7 +361,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq --xml-raw-token=false '.' sample.xml
|
||||
yq --xml-raw-token=false sample.xml
|
||||
```
|
||||
will output
|
||||
```xml
|
||||
@ -542,7 +542,7 @@ for x --></x>
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.xml
|
||||
yq sample.xml
|
||||
```
|
||||
will output
|
||||
```xml
|
||||
@ -575,7 +575,7 @@ Given a sample.xml file of:
|
||||
```
|
||||
then
|
||||
```bash
|
||||
yq '.' sample.xml
|
||||
yq sample.xml
|
||||
```
|
||||
will output
|
||||
```xml
|
||||
|
||||
@ -1,7 +1,12 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
"github.com/fatih/color"
|
||||
)
|
||||
|
||||
type Encoder interface {
|
||||
@ -25,3 +30,63 @@ func mapKeysToStrings(node *CandidateNode) {
|
||||
mapKeysToStrings(child)
|
||||
}
|
||||
}
|
||||
|
||||
// Some funcs are shared between encoder_yaml and encoder_kyaml
|
||||
func PrintYAMLDocumentSeparator(writer io.Writer, PrintDocSeparators bool) error {
|
||||
if PrintDocSeparators {
|
||||
log.Debug("writing doc sep")
|
||||
if err := writeString(writer, "---\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
func PrintYAMLLeadingContent(writer io.Writer, content string, PrintDocSeparators bool, ColorsEnabled bool) error {
|
||||
reader := bufio.NewReader(strings.NewReader(content))
|
||||
|
||||
// reuse precompiled package-level regex
|
||||
// (declared in decoder_yaml.go)
|
||||
|
||||
for {
|
||||
|
||||
readline, errReading := reader.ReadString('\n')
|
||||
if errReading != nil && !errors.Is(errReading, io.EOF) {
|
||||
return errReading
|
||||
}
|
||||
if strings.Contains(readline, "$yqDocSeparator$") {
|
||||
// Preserve the original line ending (CRLF or LF)
|
||||
lineEnding := "\n"
|
||||
if strings.HasSuffix(readline, "\r\n") {
|
||||
lineEnding = "\r\n"
|
||||
}
|
||||
if PrintDocSeparators {
|
||||
if err := writeString(writer, "---"+lineEnding); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
if len(readline) > 0 && readline != "\n" && readline[0] != '%' && !commentLineRe.MatchString(readline) {
|
||||
readline = "# " + readline
|
||||
}
|
||||
if ColorsEnabled && strings.TrimSpace(readline) != "" {
|
||||
readline = format(color.FgHiBlack) + readline + format(color.Reset)
|
||||
}
|
||||
if err := writeString(writer, readline); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if errors.Is(errReading, io.EOF) {
|
||||
if readline != "" {
|
||||
// the last comment we read didn't have a newline, put one in
|
||||
if err := writeString(writer, "\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
690
pkg/yqlib/encoder_hcl.go
Normal file
690
pkg/yqlib/encoder_hcl.go
Normal file
@ -0,0 +1,690 @@
|
||||
//go:build !yq_nohcl
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
	"fmt"
	"io"
	"regexp"
	"strconv"
	"strings"

	"github.com/fatih/color"
	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsyntax"
	hclwrite "github.com/hashicorp/hcl/v2/hclwrite"
	"github.com/zclconf/go-cty/cty"
)
|
||||
|
||||
// hclEncoder renders CandidateNodes as HCL documents using hclwrite.
type hclEncoder struct {
	prefs HclPreferences
}

// commentPathSep is used to join path segments when collecting comments.
// It uses a rarely used ASCII control character (record separator, 0x1E) to
// avoid collisions with normal key names (including dots).
const commentPathSep = "\x1e"

// NewHclEncoder creates a new HCL encoder with the given preferences.
func NewHclEncoder(prefs HclPreferences) Encoder {
	return &hclEncoder{prefs: prefs}
}
|
||||
|
||||
// CanHandleAliases reports whether this encoder supports YAML anchors/aliases.
// HCL has no alias concept, so aliases must be exploded before encoding.
func (he *hclEncoder) CanHandleAliases() bool {
	return false
}

// PrintDocumentSeparator is a no-op: HCL has no document separator syntax.
func (he *hclEncoder) PrintDocumentSeparator(_ io.Writer) error {
	return nil
}

// PrintLeadingContent is a no-op: leading content is not carried into HCL output.
func (he *hclEncoder) PrintLeadingContent(_ io.Writer, _ string) error {
	return nil
}
|
||||
|
||||
func (he *hclEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
log.Debugf("I need to encode %v", NodeToString(node))
|
||||
if node.Kind == ScalarNode {
|
||||
return writeString(writer, node.Value+"\n")
|
||||
}
|
||||
|
||||
f := hclwrite.NewEmptyFile()
|
||||
body := f.Body()
|
||||
|
||||
// Collect comments as we encode
|
||||
commentMap := make(map[string]string)
|
||||
he.collectComments(node, "", commentMap)
|
||||
|
||||
if err := he.encodeNode(body, node); err != nil {
|
||||
return fmt.Errorf("failed to encode HCL: %w", err)
|
||||
}
|
||||
|
||||
// Get the formatted output and remove extra spacing before '='
|
||||
output := f.Bytes()
|
||||
compactOutput := he.compactSpacing(output)
|
||||
|
||||
// Inject comments back into the output
|
||||
finalOutput := he.injectComments(compactOutput, commentMap)
|
||||
|
||||
if he.prefs.ColorsEnabled {
|
||||
colourized := he.colorizeHcl(finalOutput)
|
||||
_, err := writer.Write(colourized)
|
||||
return err
|
||||
}
|
||||
|
||||
_, err := writer.Write(finalOutput)
|
||||
return err
|
||||
}
|
||||
|
||||
// compactSpacing removes extra whitespace before '=' in attribute assignments
|
||||
func (he *hclEncoder) compactSpacing(input []byte) []byte {
|
||||
// Use regex to replace multiple spaces before = with single space
|
||||
re := regexp.MustCompile(`(\S)\s{2,}=`)
|
||||
return re.ReplaceAll(input, []byte("$1 ="))
|
||||
}
|
||||
|
||||
// collectComments recursively collects comments from nodes for later injection
|
||||
func (he *hclEncoder) collectComments(node *CandidateNode, prefix string, commentMap map[string]string) {
|
||||
if node == nil {
|
||||
return
|
||||
}
|
||||
|
||||
// For mapping nodes, collect comments from keys and values
|
||||
if node.Kind == MappingNode {
|
||||
// Collect root-level head comment if at root (prefix is empty)
|
||||
if prefix == "" && node.HeadComment != "" {
|
||||
commentMap[joinCommentPath("__root__", "head")] = node.HeadComment
|
||||
}
|
||||
|
||||
for i := 0; i < len(node.Content); i += 2 {
|
||||
keyNode := node.Content[i]
|
||||
valueNode := node.Content[i+1]
|
||||
key := keyNode.Value
|
||||
|
||||
// Create a path for this key
|
||||
path := joinCommentPath(prefix, key)
|
||||
|
||||
// Store comments from the key (head comments appear before the attribute)
|
||||
if keyNode.HeadComment != "" {
|
||||
commentMap[joinCommentPath(path, "head")] = keyNode.HeadComment
|
||||
}
|
||||
// Store comments from the value (line comments appear after the value)
|
||||
if valueNode.LineComment != "" {
|
||||
commentMap[joinCommentPath(path, "line")] = valueNode.LineComment
|
||||
}
|
||||
if valueNode.FootComment != "" {
|
||||
commentMap[joinCommentPath(path, "foot")] = valueNode.FootComment
|
||||
}
|
||||
|
||||
// Recurse into nested mappings
|
||||
if valueNode.Kind == MappingNode {
|
||||
he.collectComments(valueNode, path, commentMap)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// joinCommentPath concatenates path segments using commentPathSep, safely handling empty prefixes.
|
||||
func joinCommentPath(prefix, segment string) string {
|
||||
if prefix == "" {
|
||||
return segment
|
||||
}
|
||||
return prefix + commentPathSep + segment
|
||||
}
|
||||
|
||||
// injectComments adds collected comments back into the HCL output: the root
// head comment is prepended, and each attribute head comment is inserted
// above the matching `key =` assignment line.
func (he *hclEncoder) injectComments(output []byte, commentMap map[string]string) []byte {
	// Convert output to string for easier manipulation
	result := string(output)

	// Root-level head comment (stored on the synthetic __root__/head path)
	for path, comment := range commentMap {
		if path == joinCommentPath("__root__", "head") {
			trimmed := strings.TrimSpace(comment)
			// Skip prepending when the output already starts with the comment.
			if trimmed != "" && !strings.HasPrefix(result, trimmed) {
				result = trimmed + "\n" + result
			}
		}
	}

	// Attribute head comments: insert above matching assignment.
	// NOTE(review): map iteration order is random, and ReplaceAllString
	// inserts above EVERY `key =` occurrence — duplicate attribute names in
	// different blocks all receive the same comment. Confirm this is the
	// intended behaviour before tightening.
	for path, comment := range commentMap {
		parts := strings.Split(path, commentPathSep)
		if len(parts) < 2 {
			continue
		}

		// The last segment is the comment kind; the one before it is the key.
		commentType := parts[len(parts)-1]
		key := parts[len(parts)-2]
		if commentType != "head" || key == "" {
			continue
		}

		trimmed := strings.TrimSpace(comment)
		if trimmed == "" {
			continue
		}

		// Compiled per entry; comment maps are typically small.
		re := regexp.MustCompile(`(?m)^(\s*)` + regexp.QuoteMeta(key) + `\s*=`)
		if re.MatchString(result) {
			result = re.ReplaceAllString(result, "$1"+trimmed+"\n$0")
		}
	}

	return []byte(result)
}
|
||||
|
||||
func (he *hclEncoder) colorizeHcl(input []byte) []byte {
|
||||
hcl := string(input)
|
||||
result := strings.Builder{}
|
||||
|
||||
// Create colour functions for different token types
|
||||
commentColor := color.New(color.FgHiBlack).SprintFunc()
|
||||
stringColor := color.New(color.FgGreen).SprintFunc()
|
||||
numberColor := color.New(color.FgHiMagenta).SprintFunc()
|
||||
keyColor := color.New(color.FgCyan).SprintFunc()
|
||||
boolColor := color.New(color.FgHiMagenta).SprintFunc()
|
||||
|
||||
// Simple tokenization for HCL colouring
|
||||
i := 0
|
||||
for i < len(hcl) {
|
||||
ch := hcl[i]
|
||||
|
||||
// Comments - from # to end of line
|
||||
if ch == '#' {
|
||||
end := i
|
||||
for end < len(hcl) && hcl[end] != '\n' {
|
||||
end++
|
||||
}
|
||||
result.WriteString(commentColor(hcl[i:end]))
|
||||
i = end
|
||||
continue
|
||||
}
|
||||
|
||||
// Strings - quoted text
|
||||
if ch == '"' || ch == '\'' {
|
||||
quote := ch
|
||||
end := i + 1
|
||||
for end < len(hcl) && hcl[end] != quote {
|
||||
if hcl[end] == '\\' {
|
||||
end++ // skip escaped char
|
||||
}
|
||||
end++
|
||||
}
|
||||
if end < len(hcl) {
|
||||
end++ // include closing quote
|
||||
}
|
||||
result.WriteString(stringColor(hcl[i:end]))
|
||||
i = end
|
||||
continue
|
||||
}
|
||||
|
||||
// Numbers - sequences of digits, possibly with decimal point or minus
|
||||
if (ch >= '0' && ch <= '9') || (ch == '-' && i+1 < len(hcl) && hcl[i+1] >= '0' && hcl[i+1] <= '9') {
|
||||
end := i
|
||||
if ch == '-' {
|
||||
end++
|
||||
}
|
||||
for end < len(hcl) && ((hcl[end] >= '0' && hcl[end] <= '9') || hcl[end] == '.') {
|
||||
end++
|
||||
}
|
||||
result.WriteString(numberColor(hcl[i:end]))
|
||||
i = end
|
||||
continue
|
||||
}
|
||||
|
||||
// Identifiers/keys - alphanumeric + underscore
|
||||
if (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '_' {
|
||||
end := i
|
||||
for end < len(hcl) && ((hcl[end] >= 'a' && hcl[end] <= 'z') ||
|
||||
(hcl[end] >= 'A' && hcl[end] <= 'Z') ||
|
||||
(hcl[end] >= '0' && hcl[end] <= '9') ||
|
||||
hcl[end] == '_' || hcl[end] == '-') {
|
||||
end++
|
||||
}
|
||||
ident := hcl[i:end]
|
||||
|
||||
// Check if this is a keyword/reserved word
|
||||
switch ident {
|
||||
case "true", "false", "null":
|
||||
result.WriteString(boolColor(ident))
|
||||
default:
|
||||
// Check if followed by = (it's a key)
|
||||
j := end
|
||||
for j < len(hcl) && (hcl[j] == ' ' || hcl[j] == '\t') {
|
||||
j++
|
||||
}
|
||||
if j < len(hcl) && hcl[j] == '=' {
|
||||
result.WriteString(keyColor(ident))
|
||||
} else if j < len(hcl) && hcl[j] == '{' {
|
||||
// Block type
|
||||
result.WriteString(keyColor(ident))
|
||||
} else {
|
||||
result.WriteString(ident) // plain text for other identifiers
|
||||
}
|
||||
}
|
||||
i = end
|
||||
continue
|
||||
}
|
||||
|
||||
// Everything else (whitespace, operators, brackets) - no color
|
||||
result.WriteByte(ch)
|
||||
i++
|
||||
}
|
||||
|
||||
return []byte(result.String())
|
||||
}
|
||||
|
||||
// isHCLIdentifierStart reports whether r may begin an unquoted HCL
// identifier: an ASCII letter or underscore.
func isHCLIdentifierStart(r rune) bool {
	switch {
	case r >= 'a' && r <= 'z', r >= 'A' && r <= 'Z', r == '_':
		return true
	}
	return false
}

// isHCLIdentifierPart reports whether r may appear after the first character
// of an HCL identifier: letters, digits, underscores, and hyphens.
func isHCLIdentifierPart(r rune) bool {
	return isHCLIdentifierStart(r) || (r >= '0' && r <= '9') || r == '-'
}

// isValidHCLIdentifier reports whether s is a bare HCL identifier: it must be
// non-empty, start with a letter or underscore, and contain only letters,
// digits, underscores, and hyphens.
func isValidHCLIdentifier(s string) bool {
	for i, r := range s {
		ok := isHCLIdentifierPart(r)
		if i == 0 {
			ok = isHCLIdentifierStart(r)
		}
		if !ok {
			return false
		}
	}
	return s != ""
}
|
||||
|
||||
// tokensForRawHCLExpr produces a minimal token stream for a simple HCL expression so we can
|
||||
// write it without introducing quotes (e.g. function calls like upper(message)).
|
||||
func tokensForRawHCLExpr(expr string) (hclwrite.Tokens, error) {
|
||||
var tokens hclwrite.Tokens
|
||||
for i := 0; i < len(expr); {
|
||||
ch := expr[i]
|
||||
switch {
|
||||
case ch == ' ' || ch == '\t':
|
||||
i++
|
||||
continue
|
||||
case isHCLIdentifierStart(rune(ch)):
|
||||
start := i
|
||||
i++
|
||||
for i < len(expr) && isHCLIdentifierPart(rune(expr[i])) {
|
||||
i++
|
||||
}
|
||||
tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenIdent, Bytes: []byte(expr[start:i])})
|
||||
continue
|
||||
case ch >= '0' && ch <= '9':
|
||||
start := i
|
||||
i++
|
||||
for i < len(expr) && ((expr[i] >= '0' && expr[i] <= '9') || expr[i] == '.') {
|
||||
i++
|
||||
}
|
||||
tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenNumberLit, Bytes: []byte(expr[start:i])})
|
||||
continue
|
||||
case ch == '(':
|
||||
tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenOParen, Bytes: []byte{'('}})
|
||||
case ch == ')':
|
||||
tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenCParen, Bytes: []byte{')'}})
|
||||
case ch == ',':
|
||||
tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenComma, Bytes: []byte{','}})
|
||||
case ch == '.':
|
||||
tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenDot, Bytes: []byte{'.'}})
|
||||
case ch == '+':
|
||||
tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenPlus, Bytes: []byte{'+'}})
|
||||
case ch == '-':
|
||||
tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenMinus, Bytes: []byte{'-'}})
|
||||
case ch == '*':
|
||||
tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenStar, Bytes: []byte{'*'}})
|
||||
case ch == '/':
|
||||
tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenSlash, Bytes: []byte{'/'}})
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported character %q in raw HCL expression", ch)
|
||||
}
|
||||
i++
|
||||
}
|
||||
return tokens, nil
|
||||
}
|
||||
|
||||
// encodeAttribute encodes a value as an HCL attribute (`key = value`),
// choosing the representation from the node's tag and YAML style: plain
// (style 0) strings are emitted as raw expressions, literal-style strings as
// raw expressions, double-quoted strings containing "${" as templates, and
// everything else via cty values.
func (he *hclEncoder) encodeAttribute(body *hclwrite.Body, key string, valueNode *CandidateNode) error {
	if valueNode.Kind == ScalarNode && valueNode.Tag == "!!str" {
		// Handle unquoted expressions (as-is, without quotes)
		if valueNode.Style == 0 {
			tokens, err := tokensForRawHCLExpr(valueNode.Value)
			if err != nil {
				return err
			}
			body.SetAttributeRaw(key, tokens)
			return nil
		}
		// Literal-style (|) strings are also written raw, without quoting.
		if valueNode.Style&LiteralStyle != 0 {
			tokens, err := tokensForRawHCLExpr(valueNode.Value)
			if err != nil {
				return err
			}
			body.SetAttributeRaw(key, tokens)
			return nil
		}
		// Check if template with interpolation
		if valueNode.Style&DoubleQuotedStyle != 0 && strings.Contains(valueNode.Value, "${") {
			return he.encodeTemplateAttribute(body, key, valueNode.Value)
		}
		// Check if unquoted identifier.
		// NOTE(review): this branch looks unreachable — Style == 0 already
		// returned above, so Style is non-zero here and the condition can
		// never hold. Confirm intent before removing; it is the only use of
		// hcl.Traversal in this file, so deleting it would also require
		// dropping the "github.com/hashicorp/hcl/v2" import.
		if isValidHCLIdentifier(valueNode.Value) && valueNode.Style == 0 {
			traversal := hcl.Traversal{
				hcl.TraverseRoot{Name: valueNode.Value},
			}
			body.SetAttributeTraversal(key, traversal)
			return nil
		}
	}
	// Default: use cty.Value for quoted strings and all other types
	ctyValue, err := nodeToCtyValue(valueNode)
	if err != nil {
		return err
	}
	body.SetAttributeValue(key, ctyValue)
	return nil
}
|
||||
|
||||
// encodeTemplateAttribute encodes a template string with ${} interpolations
// as a raw quoted token stream, so hclwrite emits `"...${expr}..."` rather
// than a doubly-escaped string literal.
func (he *hclEncoder) encodeTemplateAttribute(body *hclwrite.Body, key string, templateStr string) error {
	tokens := hclwrite.Tokens{
		{Type: hclsyntax.TokenOQuote, Bytes: []byte{'"'}},
	}

	for i := 0; i < len(templateStr); i++ {
		if i < len(templateStr)-1 && templateStr[i] == '$' && templateStr[i+1] == '{' {
			// Start of template interpolation
			tokens = append(tokens, &hclwrite.Token{
				Type:  hclsyntax.TokenTemplateInterp,
				Bytes: []byte("${"),
			})
			i++ // skip the '{'
			// Find the matching '}', tracking nested brace depth.
			start := i + 1
			depth := 1
			for i++; i < len(templateStr) && depth > 0; i++ {
				switch templateStr[i] {
				case '{':
					depth++
				case '}':
					depth--
				}
			}
			i-- // back up to the '}'
			// The interpolation body is emitted verbatim as one identifier
			// token — it is not re-tokenized.
			// NOTE(review): if the interpolation is unterminated (no closing
			// '}'), a TokenTemplateSeqEnd is still appended — confirm that is
			// acceptable for malformed input.
			interpExpr := templateStr[start:i]
			tokens = append(tokens, &hclwrite.Token{
				Type:  hclsyntax.TokenIdent,
				Bytes: []byte(interpExpr),
			})
			tokens = append(tokens, &hclwrite.Token{
				Type:  hclsyntax.TokenTemplateSeqEnd,
				Bytes: []byte("}"),
			})
		} else {
			// Regular character: one quoted-literal token per byte.
			tokens = append(tokens, &hclwrite.Token{
				Type:  hclsyntax.TokenQuotedLit,
				Bytes: []byte{templateStr[i]},
			})
		}
	}
	tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenCQuote, Bytes: []byte{'"'}})
	body.SetAttributeRaw(key, tokens)
	return nil
}
|
||||
|
||||
// encodeBlockIfMapping attempts to encode a value as a block. Returns true if
// it was encoded as a block (non-flow mapping); flow mappings and non-mapping
// values return false so the caller renders them as attributes instead.
func (he *hclEncoder) encodeBlockIfMapping(body *hclwrite.Body, key string, valueNode *CandidateNode) bool {
	if valueNode.Kind != MappingNode || valueNode.Style == FlowStyle {
		return false
	}

	// If EncodeSeparate is set, emit children as separate blocks regardless of label extraction
	if valueNode.EncodeSeparate {
		if handled, _ := he.encodeMappingChildrenAsBlocks(body, key, valueNode); handled {
			return true
		}
	}

	// Try to extract block labels from a single-entry mapping chain
	if labels, bodyNode, ok := extractBlockLabels(valueNode); ok {
		// With multiple labels and an all-mapping body, the last label is
		// treated as a nested block type under the outer labelled block.
		if len(labels) > 1 && mappingChildrenAllMappings(bodyNode) {
			primaryLabels := labels[:len(labels)-1]
			nestedType := labels[len(labels)-1]
			block := body.AppendNewBlock(key, primaryLabels)
			if handled, err := he.encodeMappingChildrenAsBlocks(block.Body(), nestedType, bodyNode); err == nil && handled {
				return true
			}
			if err := he.encodeNodeAttributes(block.Body(), bodyNode); err == nil {
				return true
			}
		}
		block := body.AppendNewBlock(key, labels)
		if err := he.encodeNodeAttributes(block.Body(), bodyNode); err == nil {
			return true
		}
	}

	// If all child values are mappings, treat each child key as a labelled instance of this block type
	if handled, _ := he.encodeMappingChildrenAsBlocks(body, key, valueNode); handled {
		return true
	}

	// No labels detected, render as unlabelled block
	block := body.AppendNewBlock(key, nil)
	if err := he.encodeNodeAttributes(block.Body(), valueNode); err == nil {
		return true
	}

	// NOTE(review): failed attempts above may already have appended (now
	// abandoned) blocks to body before falling through — confirm that
	// partial output on error is acceptable.
	return false
}
|
||||
|
||||
// encodeNode encodes a CandidateNode directly to HCL, preserving style information
|
||||
func (he *hclEncoder) encodeNode(body *hclwrite.Body, node *CandidateNode) error {
|
||||
if node.Kind != MappingNode {
|
||||
return fmt.Errorf("HCL encoder expects a mapping at the root level, got %v", kindToString(node.Kind))
|
||||
}
|
||||
|
||||
for i := 0; i < len(node.Content); i += 2 {
|
||||
keyNode := node.Content[i]
|
||||
valueNode := node.Content[i+1]
|
||||
key := keyNode.Value
|
||||
|
||||
// Render as block or attribute depending on value type
|
||||
if he.encodeBlockIfMapping(body, key, valueNode) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Render as attribute: key = value
|
||||
if err := he.encodeAttribute(body, key, valueNode); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// mappingChildrenAllMappings reports whether all values in a mapping node are non-flow mappings.
|
||||
func mappingChildrenAllMappings(node *CandidateNode) bool {
|
||||
if node == nil || node.Kind != MappingNode || node.Style == FlowStyle {
|
||||
return false
|
||||
}
|
||||
if len(node.Content) == 0 {
|
||||
return false
|
||||
}
|
||||
for i := 0; i < len(node.Content); i += 2 {
|
||||
childVal := node.Content[i+1]
|
||||
if childVal.Kind != MappingNode || childVal.Style == FlowStyle {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// encodeMappingChildrenAsBlocks emits a block for each mapping child, treating
// the child key as a label. Returns handled=true when it emitted blocks; it
// declines (false, nil) unless every child value is a non-flow mapping AND the
// node is flagged EncodeSeparate by the decoder.
func (he *hclEncoder) encodeMappingChildrenAsBlocks(body *hclwrite.Body, blockType string, valueNode *CandidateNode) (bool, error) {
	if !mappingChildrenAllMappings(valueNode) {
		return false, nil
	}

	// Only emit as separate blocks if EncodeSeparate is true
	// This allows the encoder to respect the original block structure preserved by the decoder
	if !valueNode.EncodeSeparate {
		return false, nil
	}

	for i := 0; i < len(valueNode.Content); i += 2 {
		childKey := valueNode.Content[i].Value
		childVal := valueNode.Content[i+1]

		// Check if this child also represents multiple blocks (all children are mappings)
		if mappingChildrenAllMappings(childVal) {
			// Recursively emit each grandchild as a separate block with extended labels
			for j := 0; j < len(childVal.Content); j += 2 {
				grandchildKey := childVal.Content[j].Value
				grandchildVal := childVal.Content[j+1]
				labels := []string{childKey, grandchildKey}

				// Try to extract additional labels if this is a single-entry chain
				if extraLabels, bodyNode, ok := extractBlockLabels(grandchildVal); ok {
					labels = append(labels, extraLabels...)
					grandchildVal = bodyNode
				}

				block := body.AppendNewBlock(blockType, labels)
				if err := he.encodeNodeAttributes(block.Body(), grandchildVal); err != nil {
					return true, err
				}
			}
		} else {
			// Single block with this child as label(s)
			labels := []string{childKey}
			if extraLabels, bodyNode, ok := extractBlockLabels(childVal); ok {
				labels = append(labels, extraLabels...)
				childVal = bodyNode
			}
			block := body.AppendNewBlock(blockType, labels)
			if err := he.encodeNodeAttributes(block.Body(), childVal); err != nil {
				return true, err
			}
		}
	}

	return true, nil
}
|
||||
|
||||
// encodeNodeAttributes encodes the attributes of a mapping node (used for blocks)
|
||||
func (he *hclEncoder) encodeNodeAttributes(body *hclwrite.Body, node *CandidateNode) error {
|
||||
if node.Kind != MappingNode {
|
||||
return fmt.Errorf("expected mapping node for block body")
|
||||
}
|
||||
|
||||
for i := 0; i < len(node.Content); i += 2 {
|
||||
keyNode := node.Content[i]
|
||||
valueNode := node.Content[i+1]
|
||||
key := keyNode.Value
|
||||
|
||||
// Render as block or attribute depending on value type
|
||||
if he.encodeBlockIfMapping(body, key, valueNode) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Render attribute for non-block value
|
||||
if err := he.encodeAttribute(body, key, valueNode); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// extractBlockLabels detects a chain of single-entry mappings that encode block labels.
|
||||
// It returns the collected labels and the final mapping to be used as the block body.
|
||||
// Pattern: {label1: {label2: { ... {bodyMap} }}}
|
||||
func extractBlockLabels(node *CandidateNode) ([]string, *CandidateNode, bool) {
|
||||
var labels []string
|
||||
current := node
|
||||
for current != nil && current.Kind == MappingNode && len(current.Content) == 2 {
|
||||
keyNode := current.Content[0]
|
||||
valNode := current.Content[1]
|
||||
if valNode.Kind != MappingNode {
|
||||
break
|
||||
}
|
||||
labels = append(labels, keyNode.Value)
|
||||
// If the child is itself a single mapping entry with a mapping value, keep descending.
|
||||
if len(valNode.Content) == 2 && valNode.Content[1].Kind == MappingNode {
|
||||
current = valNode
|
||||
continue
|
||||
}
|
||||
// Otherwise, we have reached the body mapping.
|
||||
return labels, valNode, true
|
||||
}
|
||||
return nil, nil, false
|
||||
}
|
||||
|
||||
// nodeToCtyValue converts a CandidateNode directly to cty.Value, preserving order
|
||||
func nodeToCtyValue(node *CandidateNode) (cty.Value, error) {
|
||||
switch node.Kind {
|
||||
case ScalarNode:
|
||||
// Parse scalar value based on its tag
|
||||
switch node.Tag {
|
||||
case "!!bool":
|
||||
return cty.BoolVal(node.Value == "true"), nil
|
||||
case "!!int":
|
||||
var i int64
|
||||
_, err := fmt.Sscanf(node.Value, "%d", &i)
|
||||
if err != nil {
|
||||
return cty.NilVal, err
|
||||
}
|
||||
return cty.NumberIntVal(i), nil
|
||||
case "!!float":
|
||||
var f float64
|
||||
_, err := fmt.Sscanf(node.Value, "%f", &f)
|
||||
if err != nil {
|
||||
return cty.NilVal, err
|
||||
}
|
||||
return cty.NumberFloatVal(f), nil
|
||||
case "!!null":
|
||||
return cty.NullVal(cty.DynamicPseudoType), nil
|
||||
default:
|
||||
// Default to string
|
||||
return cty.StringVal(node.Value), nil
|
||||
}
|
||||
case MappingNode:
|
||||
// Preserve order by iterating Content directly
|
||||
m := make(map[string]cty.Value)
|
||||
for i := 0; i < len(node.Content); i += 2 {
|
||||
keyNode := node.Content[i]
|
||||
valueNode := node.Content[i+1]
|
||||
v, err := nodeToCtyValue(valueNode)
|
||||
if err != nil {
|
||||
return cty.NilVal, err
|
||||
}
|
||||
m[keyNode.Value] = v
|
||||
}
|
||||
return cty.ObjectVal(m), nil
|
||||
case SequenceNode:
|
||||
vals := make([]cty.Value, len(node.Content))
|
||||
for i, item := range node.Content {
|
||||
v, err := nodeToCtyValue(item)
|
||||
if err != nil {
|
||||
return cty.NilVal, err
|
||||
}
|
||||
vals[i] = v
|
||||
}
|
||||
return cty.TupleVal(vals), nil
|
||||
case AliasNode:
|
||||
return cty.NilVal, fmt.Errorf("HCL encoder does not support aliases")
|
||||
default:
|
||||
return cty.NilVal, fmt.Errorf("unsupported node kind: %v", node.Kind)
|
||||
}
|
||||
}
|
||||
318
pkg/yqlib/encoder_kyaml.go
Normal file
318
pkg/yqlib/encoder_kyaml.go
Normal file
@ -0,0 +1,318 @@
|
||||
//go:build !yq_nokyaml
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// kyamlEncoder renders documents in KYAML, a restricted flow-style subset of
// YAML using explicit braces/brackets and trailing commas.
type kyamlEncoder struct {
	prefs KYamlPreferences
}

// NewKYamlEncoder creates a new KYAML encoder with the given preferences.
func NewKYamlEncoder(prefs KYamlPreferences) Encoder {
	return &kyamlEncoder{prefs: prefs}
}
|
||||
|
||||
// CanHandleAliases reports whether this encoder supports anchors/aliases.
func (ke *kyamlEncoder) CanHandleAliases() bool {
	// KYAML is a restricted subset; avoid emitting anchors/aliases.
	return false
}

// PrintDocumentSeparator writes a `---` separator when enabled, delegating to
// the shared YAML helper.
func (ke *kyamlEncoder) PrintDocumentSeparator(writer io.Writer) error {
	return PrintYAMLDocumentSeparator(writer, ke.prefs.PrintDocSeparators)
}

// PrintLeadingContent writes leading comments/doc separators, delegating to
// the shared YAML helper.
func (ke *kyamlEncoder) PrintLeadingContent(writer io.Writer, content string) error {
	return PrintYAMLLeadingContent(writer, content, ke.prefs.PrintDocSeparators, ke.prefs.ColorsEnabled)
}
|
||||
|
||||
func (ke *kyamlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
log.Debug("encoderKYaml - going to print %v", NodeToString(node))
|
||||
if node.Kind == ScalarNode && ke.prefs.UnwrapScalar {
|
||||
return writeString(writer, node.Value+"\n")
|
||||
}
|
||||
|
||||
destination := writer
|
||||
tempBuffer := bytes.NewBuffer(nil)
|
||||
if ke.prefs.ColorsEnabled {
|
||||
destination = tempBuffer
|
||||
}
|
||||
|
||||
// Mirror the YAML encoder behaviour: trailing comments on the document root
|
||||
// are stored in FootComment and need to be printed after the document.
|
||||
trailingContent := node.FootComment
|
||||
|
||||
if err := ke.writeCommentBlock(destination, node.HeadComment, 0); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.writeNode(destination, node, 0); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.writeInlineComment(destination, node.LineComment); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := writeString(destination, "\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.PrintLeadingContent(destination, trailingContent); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if ke.prefs.ColorsEnabled {
|
||||
return colorizeAndPrint(tempBuffer.Bytes(), writer)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) writeNode(writer io.Writer, node *CandidateNode, indent int) error {
|
||||
switch node.Kind {
|
||||
case MappingNode:
|
||||
return ke.writeMapping(writer, node, indent)
|
||||
case SequenceNode:
|
||||
return ke.writeSequence(writer, node, indent)
|
||||
case ScalarNode:
|
||||
return writeString(writer, ke.formatScalar(node))
|
||||
case AliasNode:
|
||||
// Should have been exploded by the printer, but handle defensively.
|
||||
if node.Alias == nil {
|
||||
return writeString(writer, "null")
|
||||
}
|
||||
return ke.writeNode(writer, node.Alias, indent)
|
||||
default:
|
||||
return writeString(writer, "null")
|
||||
}
|
||||
}
|
||||
|
||||
// writeMapping renders a mapping as a KYAML flow map: `{` ... `}` with one
// `key: value,` entry per line, preserving head, line and foot comments.
func (ke *kyamlEncoder) writeMapping(writer io.Writer, node *CandidateNode, indent int) error {
	if len(node.Content) == 0 {
		return writeString(writer, "{}")
	}
	if err := writeString(writer, "{\n"); err != nil {
		return err
	}

	// Content alternates key/value nodes; i+1 < len guards a malformed
	// odd-length Content slice.
	for i := 0; i+1 < len(node.Content); i += 2 {
		keyNode := node.Content[i]
		valueNode := node.Content[i+1]

		entryIndent := indent + ke.prefs.Indent
		if err := ke.writeCommentBlock(writer, keyNode.HeadComment, entryIndent); err != nil {
			return err
		}
		// The value may carry its own head comment; skip it when it merely
		// duplicates the key's.
		if valueNode.HeadComment != "" && valueNode.HeadComment != keyNode.HeadComment {
			if err := ke.writeCommentBlock(writer, valueNode.HeadComment, entryIndent); err != nil {
				return err
			}
		}

		if err := ke.writeIndent(writer, entryIndent); err != nil {
			return err
		}
		if err := writeString(writer, ke.formatKey(keyNode)); err != nil {
			return err
		}
		if err := writeString(writer, ": "); err != nil {
			return err
		}
		if err := ke.writeNode(writer, valueNode, entryIndent); err != nil {
			return err
		}

		// Always emit a trailing comma; KYAML encourages explicit separators,
		// and this ensures all quoted strings have a trailing `",` as requested.
		if err := writeString(writer, ","); err != nil {
			return err
		}
		// Prefer the value's line comment, falling back to the key's.
		inline := valueNode.LineComment
		if inline == "" {
			inline = keyNode.LineComment
		}
		if err := ke.writeInlineComment(writer, inline); err != nil {
			return err
		}
		if err := writeString(writer, "\n"); err != nil {
			return err
		}

		// Foot comments may live on either node; prefer the value's.
		foot := valueNode.FootComment
		if foot == "" {
			foot = keyNode.FootComment
		}
		if err := ke.writeCommentBlock(writer, foot, entryIndent); err != nil {
			return err
		}
	}

	if err := ke.writeIndent(writer, indent); err != nil {
		return err
	}
	return writeString(writer, "}")
}
|
||||
|
||||
func (ke *kyamlEncoder) writeSequence(writer io.Writer, node *CandidateNode, indent int) error {
|
||||
if len(node.Content) == 0 {
|
||||
return writeString(writer, "[]")
|
||||
}
|
||||
if err := writeString(writer, "[\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, child := range node.Content {
|
||||
itemIndent := indent + ke.prefs.Indent
|
||||
if err := ke.writeCommentBlock(writer, child.HeadComment, itemIndent); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.writeIndent(writer, itemIndent); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.writeNode(writer, child, itemIndent); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := writeString(writer, ","); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.writeInlineComment(writer, child.LineComment); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := writeString(writer, "\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := ke.writeCommentBlock(writer, child.FootComment, itemIndent); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err := ke.writeIndent(writer, indent); err != nil {
|
||||
return err
|
||||
}
|
||||
return writeString(writer, "]")
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) writeIndent(writer io.Writer, indent int) error {
|
||||
if indent <= 0 {
|
||||
return nil
|
||||
}
|
||||
return writeString(writer, strings.Repeat(" ", indent))
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) formatKey(keyNode *CandidateNode) string {
|
||||
// KYAML examples use bare keys. Quote keys only when needed.
|
||||
key := keyNode.Value
|
||||
if isValidKYamlBareKey(key) {
|
||||
return key
|
||||
}
|
||||
return `"` + escapeDoubleQuotedString(key) + `"`
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) formatScalar(node *CandidateNode) string {
|
||||
switch node.Tag {
|
||||
case "!!null":
|
||||
return "null"
|
||||
case "!!bool":
|
||||
return strings.ToLower(node.Value)
|
||||
case "!!int", "!!float":
|
||||
return node.Value
|
||||
case "!!str":
|
||||
return `"` + escapeDoubleQuotedString(node.Value) + `"`
|
||||
default:
|
||||
// Fall back to a string representation to avoid implicit typing surprises.
|
||||
return `"` + escapeDoubleQuotedString(node.Value) + `"`
|
||||
}
|
||||
}
|
||||
|
||||
var kyamlBareKeyRe = regexp.MustCompile(`^[A-Za-z_][A-Za-z0-9_-]*$`)
|
||||
|
||||
func isValidKYamlBareKey(s string) bool {
|
||||
// Conservative: require an identifier-like key; otherwise quote.
|
||||
if s == "" {
|
||||
return false
|
||||
}
|
||||
return kyamlBareKeyRe.MatchString(s)
|
||||
}
|
||||
|
||||
func escapeDoubleQuotedString(s string) string {
|
||||
var b strings.Builder
|
||||
b.Grow(len(s) + 2)
|
||||
|
||||
for _, r := range s {
|
||||
switch r {
|
||||
case '\\':
|
||||
b.WriteString(`\\`)
|
||||
case '"':
|
||||
b.WriteString(`\"`)
|
||||
case '\n':
|
||||
b.WriteString(`\n`)
|
||||
case '\r':
|
||||
b.WriteString(`\r`)
|
||||
case '\t':
|
||||
b.WriteString(`\t`)
|
||||
default:
|
||||
if r < 0x20 {
|
||||
// YAML double-quoted strings support \uXXXX escapes.
|
||||
b.WriteString(`\u`)
|
||||
hex := "0000" + strings.ToUpper(strconv.FormatInt(int64(r), 16))
|
||||
b.WriteString(hex[len(hex)-4:])
|
||||
} else {
|
||||
b.WriteRune(r)
|
||||
}
|
||||
}
|
||||
}
|
||||
return b.String()
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) writeCommentBlock(writer io.Writer, comment string, indent int) error {
|
||||
if strings.TrimSpace(comment) == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
lines := strings.Split(strings.ReplaceAll(comment, "\r\n", "\n"), "\n")
|
||||
for _, line := range lines {
|
||||
trimmed := strings.TrimSpace(line)
|
||||
if trimmed == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
if err := ke.writeIndent(writer, indent); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
toWrite := line
|
||||
if !commentLineRe.MatchString(toWrite) {
|
||||
toWrite = "# " + toWrite
|
||||
}
|
||||
if err := writeString(writer, toWrite); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := writeString(writer, "\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ke *kyamlEncoder) writeInlineComment(writer io.Writer, comment string) error {
|
||||
comment = strings.TrimSpace(strings.ReplaceAll(comment, "\r\n", "\n"))
|
||||
if comment == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
lines := strings.Split(comment, "\n")
|
||||
first := strings.TrimSpace(lines[0])
|
||||
if first == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
if !strings.HasPrefix(first, "#") {
|
||||
first = "# " + first
|
||||
}
|
||||
|
||||
if err := writeString(writer, " "); err != nil {
|
||||
return err
|
||||
}
|
||||
return writeString(writer, first)
|
||||
}
|
||||
@ -12,10 +12,13 @@ import (
|
||||
)
|
||||
|
||||
type shellVariablesEncoder struct {
|
||||
prefs ShellVariablesPreferences
|
||||
}
|
||||
|
||||
func NewShellVariablesEncoder() Encoder {
|
||||
return &shellVariablesEncoder{}
|
||||
return &shellVariablesEncoder{
|
||||
prefs: ConfiguredShellVariablesPreferences,
|
||||
}
|
||||
}
|
||||
|
||||
func (pe *shellVariablesEncoder) CanHandleAliases() bool {
|
||||
@ -54,11 +57,17 @@ func (pe *shellVariablesEncoder) doEncode(w *io.Writer, node *CandidateNode, pat
|
||||
// let's just pick a fallback key to use if we are encoding a single scalar
|
||||
nonemptyPath = "value"
|
||||
}
|
||||
_, err := io.WriteString(*w, nonemptyPath+"="+quoteValue(node.Value)+"\n")
|
||||
var valueString string
|
||||
if pe.prefs.UnwrapScalar {
|
||||
valueString = node.Value
|
||||
} else {
|
||||
valueString = quoteValue(node.Value)
|
||||
}
|
||||
_, err := io.WriteString(*w, nonemptyPath+"="+valueString+"\n")
|
||||
return err
|
||||
case SequenceNode:
|
||||
for index, child := range node.Content {
|
||||
err := pe.doEncode(w, child, appendPath(path, index))
|
||||
err := pe.doEncode(w, child, pe.appendPath(path, index))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -68,7 +77,7 @@ func (pe *shellVariablesEncoder) doEncode(w *io.Writer, node *CandidateNode, pat
|
||||
for index := 0; index < len(node.Content); index = index + 2 {
|
||||
key := node.Content[index]
|
||||
value := node.Content[index+1]
|
||||
err := pe.doEncode(w, value, appendPath(path, key.Value))
|
||||
err := pe.doEncode(w, value, pe.appendPath(path, key.Value))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -81,7 +90,7 @@ func (pe *shellVariablesEncoder) doEncode(w *io.Writer, node *CandidateNode, pat
|
||||
}
|
||||
}
|
||||
|
||||
func appendPath(cookedPath string, rawKey interface{}) string {
|
||||
func (pe *shellVariablesEncoder) appendPath(cookedPath string, rawKey interface{}) string {
|
||||
|
||||
// Shell variable names must match
|
||||
// [a-zA-Z_]+[a-zA-Z0-9_]*
|
||||
@ -126,7 +135,7 @@ func appendPath(cookedPath string, rawKey interface{}) string {
|
||||
}
|
||||
return key
|
||||
}
|
||||
return cookedPath + "_" + key
|
||||
return cookedPath + pe.prefs.KeySeparator + key
|
||||
}
|
||||
|
||||
func quoteValue(value string) string {
|
||||
|
||||
@ -91,3 +91,80 @@ func TestShellVariablesEncoderEmptyMap(t *testing.T) {
|
||||
func TestShellVariablesEncoderScalarNode(t *testing.T) {
|
||||
assertEncodesTo(t, "some string", "value='some string'")
|
||||
}
|
||||
|
||||
func assertEncodesToWithSeparator(t *testing.T, yaml string, shellvars string, separator string) {
|
||||
var output bytes.Buffer
|
||||
writer := bufio.NewWriter(&output)
|
||||
|
||||
// Save the original separator
|
||||
originalSeparator := ConfiguredShellVariablesPreferences.KeySeparator
|
||||
defer func() {
|
||||
ConfiguredShellVariablesPreferences.KeySeparator = originalSeparator
|
||||
}()
|
||||
|
||||
// Set the custom separator
|
||||
ConfiguredShellVariablesPreferences.KeySeparator = separator
|
||||
|
||||
var encoder = NewShellVariablesEncoder()
|
||||
inputs, err := readDocuments(strings.NewReader(yaml), "test.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
node := inputs.Front().Value.(*CandidateNode)
|
||||
err = encoder.Encode(writer, node)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
writer.Flush()
|
||||
|
||||
test.AssertResult(t, shellvars, strings.TrimSuffix(output.String(), "\n"))
|
||||
}
|
||||
|
||||
func TestShellVariablesEncoderCustomSeparator(t *testing.T) {
|
||||
assertEncodesToWithSeparator(t, "a:\n b: Lewis\n c: Carroll", "a__b=Lewis\na__c=Carroll", "__")
|
||||
}
|
||||
|
||||
func TestShellVariablesEncoderCustomSeparatorNested(t *testing.T) {
|
||||
assertEncodesToWithSeparator(t, "my_app:\n db_config:\n host: localhost", "my_app__db_config__host=localhost", "__")
|
||||
}
|
||||
|
||||
func TestShellVariablesEncoderCustomSeparatorArray(t *testing.T) {
|
||||
assertEncodesToWithSeparator(t, "a: [{n: Alice}, {n: Bob}]", "a__0__n=Alice\na__1__n=Bob", "__")
|
||||
}
|
||||
|
||||
func TestShellVariablesEncoderCustomSeparatorSingleChar(t *testing.T) {
|
||||
assertEncodesToWithSeparator(t, "a:\n b: value", "aXb=value", "X")
|
||||
}
|
||||
|
||||
func assertEncodesToUnwrapped(t *testing.T, yaml string, shellvars string) {
|
||||
var output bytes.Buffer
|
||||
writer := bufio.NewWriter(&output)
|
||||
|
||||
originalUnwrapScalar := ConfiguredShellVariablesPreferences.UnwrapScalar
|
||||
defer func() {
|
||||
ConfiguredShellVariablesPreferences.UnwrapScalar = originalUnwrapScalar
|
||||
}()
|
||||
|
||||
ConfiguredShellVariablesPreferences.UnwrapScalar = true
|
||||
|
||||
var encoder = NewShellVariablesEncoder()
|
||||
inputs, err := readDocuments(strings.NewReader(yaml), "test.yml", 0, NewYamlDecoder(ConfiguredYamlPreferences))
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
node := inputs.Front().Value.(*CandidateNode)
|
||||
err = encoder.Encode(writer, node)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
writer.Flush()
|
||||
|
||||
test.AssertResult(t, shellvars, strings.TrimSuffix(output.String(), "\n"))
|
||||
}
|
||||
|
||||
func TestShellVariablesEncoderUnwrapScalar(t *testing.T) {
|
||||
assertEncodesToUnwrapped(t, "a: Lewis Carroll", "a=Lewis Carroll")
|
||||
assertEncodesToUnwrapped(t, "b: 123", "b=123")
|
||||
assertEncodesToUnwrapped(t, "c: true", "c=true")
|
||||
assertEncodesToUnwrapped(t, "d: value with spaces", "d=value with spaces")
|
||||
}
|
||||
|
||||
@ -1,22 +1,58 @@
|
||||
//go:build !yq_notoml
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
"github.com/fatih/color"
|
||||
)
|
||||
|
||||
type tomlEncoder struct {
|
||||
wroteRootAttr bool // Track if we wrote root-level attributes before tables
|
||||
prefs TomlPreferences
|
||||
}
|
||||
|
||||
func NewTomlEncoder() Encoder {
|
||||
return &tomlEncoder{}
|
||||
return NewTomlEncoderWithPrefs(ConfiguredTomlPreferences)
|
||||
}
|
||||
|
||||
func NewTomlEncoderWithPrefs(prefs TomlPreferences) Encoder {
|
||||
return &tomlEncoder{prefs: prefs}
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
if node.Kind == ScalarNode {
|
||||
return writeString(writer, node.Value+"\n")
|
||||
if node.Kind != MappingNode {
|
||||
// For standalone selections, TOML tests expect raw value for scalars
|
||||
if node.Kind == ScalarNode {
|
||||
return writeString(writer, node.Value+"\n")
|
||||
}
|
||||
return fmt.Errorf("TOML encoder expects a mapping at the root level")
|
||||
}
|
||||
return fmt.Errorf("only scalars (e.g. strings, numbers, booleans) are supported for TOML output at the moment. Please use yaml output format (-oy) until the encoder has been fully implemented")
|
||||
|
||||
// Encode to a buffer first if colors are enabled
|
||||
var buf bytes.Buffer
|
||||
var targetWriter io.Writer
|
||||
targetWriter = writer
|
||||
if te.prefs.ColorsEnabled {
|
||||
targetWriter = &buf
|
||||
}
|
||||
|
||||
// Encode a root mapping as a sequence of attributes, tables, and arrays of tables
|
||||
if err := te.encodeRootMapping(targetWriter, node); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if te.prefs.ColorsEnabled {
|
||||
colourised := te.colorizeToml(buf.Bytes())
|
||||
_, err := writer.Write(colourised)
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) PrintDocumentSeparator(_ io.Writer) error {
|
||||
@ -30,3 +66,725 @@ func (te *tomlEncoder) PrintLeadingContent(_ io.Writer, _ string) error {
|
||||
func (te *tomlEncoder) CanHandleAliases() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// ---- helpers ----
|
||||
|
||||
func (te *tomlEncoder) writeComment(w io.Writer, comment string) error {
|
||||
if comment == "" {
|
||||
return nil
|
||||
}
|
||||
lines := strings.Split(comment, "\n")
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
if !strings.HasPrefix(line, "#") {
|
||||
line = "# " + line
|
||||
}
|
||||
if _, err := w.Write([]byte(line + "\n")); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) formatScalar(node *CandidateNode) string {
|
||||
switch node.Tag {
|
||||
case "!!str":
|
||||
// Quote strings per TOML spec
|
||||
return fmt.Sprintf("%q", node.Value)
|
||||
case "!!bool", "!!int", "!!float":
|
||||
return node.Value
|
||||
case "!!null":
|
||||
// TOML does not have null; encode as empty string
|
||||
return `""`
|
||||
default:
|
||||
return node.Value
|
||||
}
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) encodeRootMapping(w io.Writer, node *CandidateNode) error {
|
||||
te.wroteRootAttr = false // Reset state
|
||||
|
||||
// Write root head comment if present (at the very beginning, no leading blank line)
|
||||
if node.HeadComment != "" {
|
||||
if err := te.writeComment(w, node.HeadComment); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Preserve existing order by iterating Content
|
||||
for i := 0; i < len(node.Content); i += 2 {
|
||||
keyNode := node.Content[i]
|
||||
valNode := node.Content[i+1]
|
||||
if err := te.encodeTopLevelEntry(w, []string{keyNode.Value}, valNode); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// encodeTopLevelEntry encodes a key/value at the root, dispatching to attribute, table, or array-of-tables
|
||||
func (te *tomlEncoder) encodeTopLevelEntry(w io.Writer, path []string, node *CandidateNode) error {
|
||||
if len(path) == 0 {
|
||||
return fmt.Errorf("cannot encode TOML entry with empty path")
|
||||
}
|
||||
|
||||
switch node.Kind {
|
||||
case ScalarNode:
|
||||
// key = value
|
||||
return te.writeAttribute(w, path[len(path)-1], node)
|
||||
case SequenceNode:
|
||||
// Empty arrays should be encoded as [] attributes
|
||||
if len(node.Content) == 0 {
|
||||
return te.writeArrayAttribute(w, path[len(path)-1], node)
|
||||
}
|
||||
|
||||
// If all items are mappings => array of tables; else => array attribute
|
||||
allMaps := true
|
||||
for _, it := range node.Content {
|
||||
if it.Kind != MappingNode {
|
||||
allMaps = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if allMaps {
|
||||
key := path[len(path)-1]
|
||||
for _, it := range node.Content {
|
||||
// [[key]] then body
|
||||
if _, err := w.Write([]byte("[[" + key + "]]\n")); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := te.encodeMappingBodyWithPath(w, []string{key}, it); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
// Regular array attribute
|
||||
return te.writeArrayAttribute(w, path[len(path)-1], node)
|
||||
case MappingNode:
|
||||
// Inline table if not EncodeSeparate, else emit separate tables/arrays of tables for children under this path
|
||||
if !node.EncodeSeparate {
|
||||
// If children contain mappings or arrays of mappings, prefer separate sections
|
||||
if te.hasEncodeSeparateChild(node) || te.hasStructuralChildren(node) {
|
||||
return te.encodeSeparateMapping(w, path, node)
|
||||
}
|
||||
return te.writeInlineTableAttribute(w, path[len(path)-1], node)
|
||||
}
|
||||
return te.encodeSeparateMapping(w, path, node)
|
||||
default:
|
||||
return fmt.Errorf("unsupported node kind for TOML: %v", node.Kind)
|
||||
}
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) writeAttribute(w io.Writer, key string, value *CandidateNode) error {
|
||||
te.wroteRootAttr = true // Mark that we wrote a root attribute
|
||||
|
||||
// Write head comment before the attribute
|
||||
if err := te.writeComment(w, value.HeadComment); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Write the attribute
|
||||
line := key + " = " + te.formatScalar(value)
|
||||
|
||||
// Add line comment if present
|
||||
if value.LineComment != "" {
|
||||
lineComment := strings.TrimSpace(value.LineComment)
|
||||
if !strings.HasPrefix(lineComment, "#") {
|
||||
lineComment = "# " + lineComment
|
||||
}
|
||||
line += " " + lineComment
|
||||
}
|
||||
|
||||
_, err := w.Write([]byte(line + "\n"))
|
||||
return err
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) writeArrayAttribute(w io.Writer, key string, seq *CandidateNode) error {
|
||||
te.wroteRootAttr = true // Mark that we wrote a root attribute
|
||||
|
||||
// Write head comment before the array
|
||||
if err := te.writeComment(w, seq.HeadComment); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Handle empty arrays
|
||||
if len(seq.Content) == 0 {
|
||||
line := key + " = []"
|
||||
if seq.LineComment != "" {
|
||||
lineComment := strings.TrimSpace(seq.LineComment)
|
||||
if !strings.HasPrefix(lineComment, "#") {
|
||||
lineComment = "# " + lineComment
|
||||
}
|
||||
line += " " + lineComment
|
||||
}
|
||||
_, err := w.Write([]byte(line + "\n"))
|
||||
return err
|
||||
}
|
||||
|
||||
// Check if any array elements have head comments - if so, use multiline format
|
||||
hasElementComments := false
|
||||
for _, it := range seq.Content {
|
||||
if it.HeadComment != "" {
|
||||
hasElementComments = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if hasElementComments {
|
||||
// Write multiline array format with comments
|
||||
if _, err := w.Write([]byte(key + " = [\n")); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for i, it := range seq.Content {
|
||||
// Write head comment for this element
|
||||
if it.HeadComment != "" {
|
||||
commentLines := strings.Split(it.HeadComment, "\n")
|
||||
for _, commentLine := range commentLines {
|
||||
if strings.TrimSpace(commentLine) != "" {
|
||||
if !strings.HasPrefix(strings.TrimSpace(commentLine), "#") {
|
||||
commentLine = "# " + commentLine
|
||||
}
|
||||
if _, err := w.Write([]byte(" " + commentLine + "\n")); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Write the element value
|
||||
var itemStr string
|
||||
switch it.Kind {
|
||||
case ScalarNode:
|
||||
itemStr = te.formatScalar(it)
|
||||
case SequenceNode:
|
||||
nested, err := te.sequenceToInlineArray(it)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
itemStr = nested
|
||||
case MappingNode:
|
||||
inline, err := te.mappingToInlineTable(it)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
itemStr = inline
|
||||
case AliasNode:
|
||||
return fmt.Errorf("aliases are not supported in TOML")
|
||||
default:
|
||||
return fmt.Errorf("unsupported array item kind: %v", it.Kind)
|
||||
}
|
||||
|
||||
// Always add trailing comma in multiline arrays
|
||||
itemStr += ","
|
||||
|
||||
if _, err := w.Write([]byte(" " + itemStr + "\n")); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Add blank line between elements (except after the last one)
|
||||
if i < len(seq.Content)-1 {
|
||||
if _, err := w.Write([]byte("\n")); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if _, err := w.Write([]byte("]\n")); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Join scalars or nested arrays recursively into TOML array syntax
|
||||
items := make([]string, 0, len(seq.Content))
|
||||
for _, it := range seq.Content {
|
||||
switch it.Kind {
|
||||
case ScalarNode:
|
||||
items = append(items, te.formatScalar(it))
|
||||
case SequenceNode:
|
||||
// Nested arrays: encode inline
|
||||
nested, err := te.sequenceToInlineArray(it)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
items = append(items, nested)
|
||||
case MappingNode:
|
||||
// Inline table inside array
|
||||
inline, err := te.mappingToInlineTable(it)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
items = append(items, inline)
|
||||
case AliasNode:
|
||||
return fmt.Errorf("aliases are not supported in TOML")
|
||||
default:
|
||||
return fmt.Errorf("unsupported array item kind: %v", it.Kind)
|
||||
}
|
||||
}
|
||||
|
||||
line := key + " = [" + strings.Join(items, ", ") + "]"
|
||||
|
||||
// Add line comment if present
|
||||
if seq.LineComment != "" {
|
||||
lineComment := strings.TrimSpace(seq.LineComment)
|
||||
if !strings.HasPrefix(lineComment, "#") {
|
||||
lineComment = "# " + lineComment
|
||||
}
|
||||
line += " " + lineComment
|
||||
}
|
||||
|
||||
_, err := w.Write([]byte(line + "\n"))
|
||||
return err
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) sequenceToInlineArray(seq *CandidateNode) (string, error) {
|
||||
items := make([]string, 0, len(seq.Content))
|
||||
for _, it := range seq.Content {
|
||||
switch it.Kind {
|
||||
case ScalarNode:
|
||||
items = append(items, te.formatScalar(it))
|
||||
case SequenceNode:
|
||||
nested, err := te.sequenceToInlineArray(it)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
items = append(items, nested)
|
||||
case MappingNode:
|
||||
inline, err := te.mappingToInlineTable(it)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
items = append(items, inline)
|
||||
default:
|
||||
return "", fmt.Errorf("unsupported array item kind: %v", it.Kind)
|
||||
}
|
||||
}
|
||||
return "[" + strings.Join(items, ", ") + "]", nil
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) mappingToInlineTable(m *CandidateNode) (string, error) {
|
||||
// key = { a = 1, b = "x" }
|
||||
parts := make([]string, 0, len(m.Content)/2)
|
||||
for i := 0; i < len(m.Content); i += 2 {
|
||||
k := m.Content[i].Value
|
||||
v := m.Content[i+1]
|
||||
switch v.Kind {
|
||||
case ScalarNode:
|
||||
parts = append(parts, fmt.Sprintf("%s = %s", k, te.formatScalar(v)))
|
||||
case SequenceNode:
|
||||
// inline array in inline table
|
||||
arr, err := te.sequenceToInlineArray(v)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
parts = append(parts, fmt.Sprintf("%s = %s", k, arr))
|
||||
case MappingNode:
|
||||
// nested inline table
|
||||
inline, err := te.mappingToInlineTable(v)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
parts = append(parts, fmt.Sprintf("%s = %s", k, inline))
|
||||
default:
|
||||
return "", fmt.Errorf("unsupported inline table value kind: %v", v.Kind)
|
||||
}
|
||||
}
|
||||
return "{ " + strings.Join(parts, ", ") + " }", nil
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) writeInlineTableAttribute(w io.Writer, key string, m *CandidateNode) error {
|
||||
inline, err := te.mappingToInlineTable(m)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = w.Write([]byte(key + " = " + inline + "\n"))
|
||||
return err
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) writeTableHeader(w io.Writer, path []string, m *CandidateNode) error {
|
||||
// Add blank line before table header (or before comment if present) if we wrote root attributes
|
||||
needsBlankLine := te.wroteRootAttr
|
||||
if needsBlankLine {
|
||||
if _, err := w.Write([]byte("\n")); err != nil {
|
||||
return err
|
||||
}
|
||||
te.wroteRootAttr = false // Only add once
|
||||
}
|
||||
|
||||
// Write head comment before the table header
|
||||
if m.HeadComment != "" {
|
||||
if err := te.writeComment(w, m.HeadComment); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Write table header [a.b.c]
|
||||
header := "[" + strings.Join(path, ".") + "]\n"
|
||||
_, err := w.Write([]byte(header))
|
||||
return err
|
||||
}
|
||||
|
||||
// encodeSeparateMapping handles a mapping that should be encoded as table sections.
|
||||
// It emits the table header for this mapping if it has any content, then processes children.
|
||||
func (te *tomlEncoder) encodeSeparateMapping(w io.Writer, path []string, m *CandidateNode) error {
|
||||
// Check if this mapping has any non-mapping, non-array-of-tables children (i.e., attributes)
|
||||
hasAttrs := false
|
||||
for i := 0; i < len(m.Content); i += 2 {
|
||||
v := m.Content[i+1]
|
||||
if v.Kind == ScalarNode {
|
||||
hasAttrs = true
|
||||
break
|
||||
}
|
||||
if v.Kind == SequenceNode {
|
||||
// Check if it's NOT an array of tables
|
||||
allMaps := true
|
||||
for _, it := range v.Content {
|
||||
if it.Kind != MappingNode {
|
||||
allMaps = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if !allMaps {
|
||||
hasAttrs = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If there are attributes or if the mapping is empty, emit the table header
|
||||
if hasAttrs || len(m.Content) == 0 {
|
||||
if err := te.writeTableHeader(w, path, m); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := te.encodeMappingBodyWithPath(w, path, m); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// No attributes, just nested structures - process children
|
||||
for i := 0; i < len(m.Content); i += 2 {
|
||||
k := m.Content[i].Value
|
||||
v := m.Content[i+1]
|
||||
switch v.Kind {
|
||||
case MappingNode:
|
||||
// Emit [path.k]
|
||||
newPath := append(append([]string{}, path...), k)
|
||||
if err := te.writeTableHeader(w, newPath, v); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := te.encodeMappingBodyWithPath(w, newPath, v); err != nil {
|
||||
return err
|
||||
}
|
||||
case SequenceNode:
|
||||
// If sequence of maps, emit [[path.k]] per element
|
||||
allMaps := true
|
||||
for _, it := range v.Content {
|
||||
if it.Kind != MappingNode {
|
||||
allMaps = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if allMaps {
|
||||
key := strings.Join(append(append([]string{}, path...), k), ".")
|
||||
for _, it := range v.Content {
|
||||
if _, err := w.Write([]byte("[[" + key + "]]\n")); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := te.encodeMappingBodyWithPath(w, append(append([]string{}, path...), k), it); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Regular array attribute under the current table path
|
||||
if err := te.writeArrayAttribute(w, k, v); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
case ScalarNode:
|
||||
// Attributes directly under the current table path
|
||||
if err := te.writeAttribute(w, k, v); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) hasEncodeSeparateChild(m *CandidateNode) bool {
|
||||
for i := 0; i < len(m.Content); i += 2 {
|
||||
v := m.Content[i+1]
|
||||
if v.Kind == MappingNode && v.EncodeSeparate {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (te *tomlEncoder) hasStructuralChildren(m *CandidateNode) bool {
|
||||
for i := 0; i < len(m.Content); i += 2 {
|
||||
v := m.Content[i+1]
|
||||
// Only consider it structural if mapping has EncodeSeparate or is non-empty
|
||||
if v.Kind == MappingNode && v.EncodeSeparate {
|
||||
return true
|
||||
}
|
||||
if v.Kind == SequenceNode {
|
||||
allMaps := true
|
||||
for _, it := range v.Content {
|
||||
if it.Kind != MappingNode {
|
||||
allMaps = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if allMaps {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// encodeMappingBodyWithPath encodes attributes and nested arrays of tables using full dotted path context
|
||||
func (te *tomlEncoder) encodeMappingBodyWithPath(w io.Writer, path []string, m *CandidateNode) error {
|
||||
// First, attributes (scalars and non-map arrays)
|
||||
for i := 0; i < len(m.Content); i += 2 {
|
||||
k := m.Content[i].Value
|
||||
v := m.Content[i+1]
|
||||
switch v.Kind {
|
||||
case ScalarNode:
|
||||
if err := te.writeAttribute(w, k, v); err != nil {
|
||||
return err
|
||||
}
|
||||
case SequenceNode:
|
||||
allMaps := true
|
||||
for _, it := range v.Content {
|
||||
if it.Kind != MappingNode {
|
||||
allMaps = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if !allMaps {
|
||||
if err := te.writeArrayAttribute(w, k, v); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Then, nested arrays of tables with full path
|
||||
for i := 0; i < len(m.Content); i += 2 {
|
||||
k := m.Content[i].Value
|
||||
v := m.Content[i+1]
|
||||
if v.Kind == SequenceNode {
|
||||
allMaps := true
|
||||
for _, it := range v.Content {
|
||||
if it.Kind != MappingNode {
|
||||
allMaps = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if allMaps {
|
||||
dotted := strings.Join(append(append([]string{}, path...), k), ".")
|
||||
for _, it := range v.Content {
|
||||
if _, err := w.Write([]byte("[[" + dotted + "]]\n")); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := te.encodeMappingBodyWithPath(w, append(append([]string{}, path...), k), it); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Finally, child mappings that are not marked EncodeSeparate get inlined as attributes
|
||||
for i := 0; i < len(m.Content); i += 2 {
|
||||
k := m.Content[i].Value
|
||||
v := m.Content[i+1]
|
||||
if v.Kind == MappingNode && !v.EncodeSeparate {
|
||||
if err := te.writeInlineTableAttribute(w, k, v); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// colorizeToml applies syntax highlighting to TOML output using fatih/color
|
||||
func (te *tomlEncoder) colorizeToml(input []byte) []byte {
|
||||
toml := string(input)
|
||||
result := strings.Builder{}
|
||||
|
||||
// Force color output (don't check for TTY)
|
||||
color.NoColor = false
|
||||
|
||||
// Create color functions for different token types
|
||||
// Use EnableColor() to ensure colors work even when NO_COLOR env is set
|
||||
commentColorObj := color.New(color.FgHiBlack)
|
||||
commentColorObj.EnableColor()
|
||||
stringColorObj := color.New(color.FgGreen)
|
||||
stringColorObj.EnableColor()
|
||||
numberColorObj := color.New(color.FgHiMagenta)
|
||||
numberColorObj.EnableColor()
|
||||
keyColorObj := color.New(color.FgCyan)
|
||||
keyColorObj.EnableColor()
|
||||
boolColorObj := color.New(color.FgHiMagenta)
|
||||
boolColorObj.EnableColor()
|
||||
sectionColorObj := color.New(color.FgYellow, color.Bold)
|
||||
sectionColorObj.EnableColor()
|
||||
|
||||
commentColor := commentColorObj.SprintFunc()
|
||||
stringColor := stringColorObj.SprintFunc()
|
||||
numberColor := numberColorObj.SprintFunc()
|
||||
keyColor := keyColorObj.SprintFunc()
|
||||
boolColor := boolColorObj.SprintFunc()
|
||||
sectionColor := sectionColorObj.SprintFunc()
|
||||
|
||||
// Simple tokenization for TOML colouring
|
||||
i := 0
|
||||
for i < len(toml) {
|
||||
ch := toml[i]
|
||||
|
||||
// Comments - from # to end of line
|
||||
if ch == '#' {
|
||||
end := i
|
||||
for end < len(toml) && toml[end] != '\n' {
|
||||
end++
|
||||
}
|
||||
result.WriteString(commentColor(toml[i:end]))
|
||||
i = end
|
||||
continue
|
||||
}
|
||||
|
||||
// Table sections - [section] or [[array]]
|
||||
// Only treat '[' as a table section if it appears at the start of the line
|
||||
// (possibly after whitespace). This avoids mis-colouring inline arrays like
|
||||
// "ports = [8000, 8001]" as table sections.
|
||||
if ch == '[' {
|
||||
isSectionHeader := true
|
||||
if i > 0 {
|
||||
isSectionHeader = false
|
||||
j := i - 1
|
||||
for j >= 0 && toml[j] != '\n' {
|
||||
if toml[j] != ' ' && toml[j] != '\t' && toml[j] != '\r' {
|
||||
// Found a non-whitespace character before this '[' on the same line,
|
||||
// so this is not a table header.
|
||||
break
|
||||
}
|
||||
j--
|
||||
}
|
||||
if j < 0 || toml[j] == '\n' {
|
||||
// Reached the start of the string or a newline without encountering
|
||||
// any non-whitespace, so '[' is at the logical start of the line.
|
||||
isSectionHeader = true
|
||||
}
|
||||
}
|
||||
if isSectionHeader {
|
||||
end := i + 1
|
||||
// Check for [[
|
||||
if end < len(toml) && toml[end] == '[' {
|
||||
end++
|
||||
}
|
||||
// Find closing ]
|
||||
for end < len(toml) && toml[end] != ']' {
|
||||
end++
|
||||
}
|
||||
// Include closing ]
|
||||
if end < len(toml) {
|
||||
end++
|
||||
// Check for ]]
|
||||
if end < len(toml) && toml[end] == ']' {
|
||||
end++
|
||||
}
|
||||
}
|
||||
result.WriteString(sectionColor(toml[i:end]))
|
||||
i = end
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Strings - quoted text (double or single quotes)
|
||||
if ch == '"' || ch == '\'' {
|
||||
quote := ch
|
||||
end := i + 1
|
||||
for end < len(toml) {
|
||||
if toml[end] == quote {
|
||||
break
|
||||
}
|
||||
if toml[end] == '\\' && end+1 < len(toml) {
|
||||
// Skip the backslash and the escaped character
|
||||
end += 2
|
||||
continue
|
||||
}
|
||||
end++
|
||||
}
|
||||
if end < len(toml) {
|
||||
end++ // include closing quote
|
||||
}
|
||||
result.WriteString(stringColor(toml[i:end]))
|
||||
i = end
|
||||
continue
|
||||
}
|
||||
|
||||
// Numbers - sequences of digits, possibly with decimal point or minus
|
||||
if (ch >= '0' && ch <= '9') || (ch == '-' && i+1 < len(toml) && toml[i+1] >= '0' && toml[i+1] <= '9') {
|
||||
end := i
|
||||
if ch == '-' {
|
||||
end++
|
||||
}
|
||||
for end < len(toml) {
|
||||
c := toml[end]
|
||||
if (c >= '0' && c <= '9') || c == '.' || c == 'e' || c == 'E' {
|
||||
end++
|
||||
} else if (c == '+' || c == '-') && end > 0 && (toml[end-1] == 'e' || toml[end-1] == 'E') {
|
||||
// Only allow + or - immediately after 'e' or 'E' for scientific notation
|
||||
end++
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
result.WriteString(numberColor(toml[i:end]))
|
||||
i = end
|
||||
continue
|
||||
}
|
||||
|
||||
// Identifiers/keys - alphanumeric + underscore + dash
|
||||
if (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '_' {
|
||||
end := i
|
||||
for end < len(toml) && ((toml[end] >= 'a' && toml[end] <= 'z') ||
|
||||
(toml[end] >= 'A' && toml[end] <= 'Z') ||
|
||||
(toml[end] >= '0' && toml[end] <= '9') ||
|
||||
toml[end] == '_' || toml[end] == '-') {
|
||||
end++
|
||||
}
|
||||
ident := toml[i:end]
|
||||
|
||||
// Check if this is a boolean/null keyword
|
||||
switch ident {
|
||||
case "true", "false":
|
||||
result.WriteString(boolColor(ident))
|
||||
default:
|
||||
// Check if followed by = or whitespace then = (it's a key)
|
||||
j := end
|
||||
for j < len(toml) && (toml[j] == ' ' || toml[j] == '\t') {
|
||||
j++
|
||||
}
|
||||
if j < len(toml) && toml[j] == '=' {
|
||||
result.WriteString(keyColor(ident))
|
||||
} else {
|
||||
result.WriteString(ident) // plain text for other identifiers
|
||||
}
|
||||
}
|
||||
i = end
|
||||
continue
|
||||
}
|
||||
|
||||
// Everything else (whitespace, operators, brackets) - no color
|
||||
result.WriteByte(ch)
|
||||
i++
|
||||
}
|
||||
|
||||
return []byte(result.String())
|
||||
}
|
||||
|
||||
@ -1,15 +1,11 @@
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"errors"
|
||||
"io"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/fatih/color"
|
||||
"go.yaml.in/yaml/v3"
|
||||
"go.yaml.in/yaml/v4"
|
||||
)
|
||||
|
||||
type yamlEncoder struct {
|
||||
@ -25,64 +21,24 @@ func (ye *yamlEncoder) CanHandleAliases() bool {
|
||||
}
|
||||
|
||||
func (ye *yamlEncoder) PrintDocumentSeparator(writer io.Writer) error {
|
||||
if ye.prefs.PrintDocSeparators {
|
||||
log.Debug("writing doc sep")
|
||||
if err := writeString(writer, "---\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
return PrintYAMLDocumentSeparator(writer, ye.prefs.PrintDocSeparators)
|
||||
}
|
||||
|
||||
func (ye *yamlEncoder) PrintLeadingContent(writer io.Writer, content string) error {
|
||||
reader := bufio.NewReader(strings.NewReader(content))
|
||||
|
||||
var commentLineRegEx = regexp.MustCompile(`^\s*#`)
|
||||
|
||||
for {
|
||||
|
||||
readline, errReading := reader.ReadString('\n')
|
||||
if errReading != nil && !errors.Is(errReading, io.EOF) {
|
||||
return errReading
|
||||
}
|
||||
if strings.Contains(readline, "$yqDocSeparator$") {
|
||||
|
||||
if err := ye.PrintDocumentSeparator(writer); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
} else {
|
||||
if len(readline) > 0 && readline != "\n" && readline[0] != '%' && !commentLineRegEx.MatchString(readline) {
|
||||
readline = "# " + readline
|
||||
}
|
||||
if ye.prefs.ColorsEnabled && strings.TrimSpace(readline) != "" {
|
||||
readline = format(color.FgHiBlack) + readline + format(color.Reset)
|
||||
}
|
||||
if err := writeString(writer, readline); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if errors.Is(errReading, io.EOF) {
|
||||
if readline != "" {
|
||||
// the last comment we read didn't have a newline, put one in
|
||||
if err := writeString(writer, "\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
return PrintYAMLLeadingContent(writer, content, ye.prefs.PrintDocSeparators, ye.prefs.ColorsEnabled)
|
||||
}
|
||||
|
||||
func (ye *yamlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
log.Debug("encoderYaml - going to print %v", NodeToString(node))
|
||||
// Detect line ending style from LeadingContent
|
||||
lineEnding := "\n"
|
||||
if strings.Contains(node.LeadingContent, "\r\n") {
|
||||
lineEnding = "\r\n"
|
||||
}
|
||||
if node.Kind == ScalarNode && ye.prefs.UnwrapScalar {
|
||||
valueToPrint := node.Value
|
||||
if node.LeadingContent == "" || valueToPrint != "" {
|
||||
valueToPrint = valueToPrint + "\n"
|
||||
valueToPrint = valueToPrint + lineEnding
|
||||
}
|
||||
return writeString(writer, valueToPrint)
|
||||
}
|
||||
@ -96,6 +52,9 @@ func (ye *yamlEncoder) Encode(writer io.Writer, node *CandidateNode) error {
|
||||
var encoder = yaml.NewEncoder(destination)
|
||||
|
||||
encoder.SetIndent(ye.prefs.Indent)
|
||||
if ye.prefs.CompactSequenceIndent {
|
||||
encoder.CompactSeqIndent()
|
||||
}
|
||||
|
||||
target, err := node.MarshalYAML()
|
||||
|
||||
|
||||
@ -84,3 +84,42 @@ func TestParserExtraArgs(t *testing.T) {
|
||||
_, err := getExpressionParser().ParseExpression("sortKeys(.) explode(.)")
|
||||
test.AssertResultComplex(t, "bad expression, please check expression syntax", err.Error())
|
||||
}
|
||||
|
||||
func TestParserEmptyExpression(t *testing.T) {
|
||||
_, err := getExpressionParser().ParseExpression("")
|
||||
test.AssertResultComplex(t, nil, err)
|
||||
}
|
||||
|
||||
func TestParserSingleOperation(t *testing.T) {
|
||||
result, err := getExpressionParser().ParseExpression(".")
|
||||
test.AssertResultComplex(t, nil, err)
|
||||
if result == nil {
|
||||
t.Fatal("Expected non-nil result for single operation")
|
||||
}
|
||||
if result.Operation == nil {
|
||||
t.Fatal("Expected operation to be set")
|
||||
}
|
||||
}
|
||||
|
||||
func TestParserFirstOpWithZeroArgs(t *testing.T) {
|
||||
// Test the special case where firstOpType can accept zero args
|
||||
result, err := getExpressionParser().ParseExpression("first")
|
||||
test.AssertResultComplex(t, nil, err)
|
||||
if result == nil {
|
||||
t.Fatal("Expected non-nil result for first operation with zero args")
|
||||
}
|
||||
}
|
||||
|
||||
func TestParserInvalidExpressionTree(t *testing.T) {
|
||||
// This tests the createExpressionTree function with malformed postfix
|
||||
parser := getExpressionParser().(*expressionParserImpl)
|
||||
|
||||
// Create invalid postfix operations that would leave more than one item on stack
|
||||
invalidOps := []*Operation{
|
||||
{OperationType: &operationType{NumArgs: 0}},
|
||||
{OperationType: &operationType{NumArgs: 0}},
|
||||
}
|
||||
|
||||
_, err := parser.createExpressionTree(invalidOps)
|
||||
test.AssertResultComplex(t, "bad expression, please check expression syntax", err.Error())
|
||||
}
|
||||
|
||||
@ -7,7 +7,15 @@ import (
|
||||
)
|
||||
|
||||
func tryRenameFile(from string, to string) error {
|
||||
if renameError := os.Rename(from, to); renameError != nil {
|
||||
if info, err := os.Lstat(to); err == nil && info.Mode()&os.ModeSymlink != 0 {
|
||||
log.Debug("Target file is symlink, skipping rename and attempting to copy contents")
|
||||
|
||||
if copyError := copyFileContents(from, to); copyError != nil {
|
||||
return fmt.Errorf("failed copying from %v to %v: %w", from, to, copyError)
|
||||
}
|
||||
tryRemoveTempFile(from)
|
||||
return nil
|
||||
} else if renameError := os.Rename(from, to); renameError != nil {
|
||||
log.Debugf("Error renaming from %v to %v, attempting to copy contents", from, to)
|
||||
log.Debug(renameError.Error())
|
||||
log.Debug("going to try copying instead")
|
||||
|
||||
@ -22,6 +22,12 @@ var YamlFormat = &Format{"yaml", []string{"y", "yml"},
|
||||
func() Decoder { return NewYamlDecoder(ConfiguredYamlPreferences) },
|
||||
}
|
||||
|
||||
var KYamlFormat = &Format{"kyaml", []string{"ky"},
|
||||
func() Encoder { return NewKYamlEncoder(ConfiguredKYamlPreferences) },
|
||||
// KYaml is stricter YAML
|
||||
func() Decoder { return NewYamlDecoder(ConfiguredYamlPreferences) },
|
||||
}
|
||||
|
||||
var JSONFormat = &Format{"json", []string{"j"},
|
||||
func() Encoder { return NewJSONEncoder(ConfiguredJSONPreferences) },
|
||||
func() Decoder { return NewJSONDecoder() },
|
||||
@ -63,10 +69,15 @@ var ShFormat = &Format{"", nil,
|
||||
}
|
||||
|
||||
var TomlFormat = &Format{"toml", []string{},
|
||||
func() Encoder { return NewTomlEncoder() },
|
||||
func() Encoder { return NewTomlEncoderWithPrefs(ConfiguredTomlPreferences) },
|
||||
func() Decoder { return NewTomlDecoder() },
|
||||
}
|
||||
|
||||
var HclFormat = &Format{"hcl", []string{"h", "tf"},
|
||||
func() Encoder { return NewHclEncoder(ConfiguredHclPreferences) },
|
||||
func() Decoder { return NewHclDecoder() },
|
||||
}
|
||||
|
||||
var ShellVariablesFormat = &Format{"shell", []string{"s", "sh"},
|
||||
func() Encoder { return NewShellVariablesEncoder() },
|
||||
nil,
|
||||
@ -84,6 +95,7 @@ var INIFormat = &Format{"ini", []string{"i"},
|
||||
|
||||
var Formats = []*Format{
|
||||
YamlFormat,
|
||||
KYamlFormat,
|
||||
JSONFormat,
|
||||
PropertiesFormat,
|
||||
CSVFormat,
|
||||
@ -93,6 +105,7 @@ var Formats = []*Format{
|
||||
UriFormat,
|
||||
ShFormat,
|
||||
TomlFormat,
|
||||
HclFormat,
|
||||
ShellVariablesFormat,
|
||||
LuaFormat,
|
||||
INIFormat,
|
||||
|
||||
15
pkg/yqlib/hcl.go
Normal file
15
pkg/yqlib/hcl.go
Normal file
@ -0,0 +1,15 @@
|
||||
package yqlib
|
||||
|
||||
type HclPreferences struct {
|
||||
ColorsEnabled bool
|
||||
}
|
||||
|
||||
func NewDefaultHclPreferences() HclPreferences {
|
||||
return HclPreferences{ColorsEnabled: false}
|
||||
}
|
||||
|
||||
func (p *HclPreferences) Copy() HclPreferences {
|
||||
return HclPreferences{ColorsEnabled: p.ColorsEnabled}
|
||||
}
|
||||
|
||||
var ConfiguredHclPreferences = NewDefaultHclPreferences()
|
||||
585
pkg/yqlib/hcl_test.go
Normal file
585
pkg/yqlib/hcl_test.go
Normal file
@ -0,0 +1,585 @@
|
||||
//go:build !yq_nohcl
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
)
|
||||
|
||||
var nestedExample = `service "http" "web_proxy" {
|
||||
listen_addr = "127.0.0.1:8080"
|
||||
}`
|
||||
|
||||
var nestedExampleYaml = "service:\n http:\n web_proxy:\n listen_addr: \"127.0.0.1:8080\"\n"
|
||||
|
||||
var multipleBlockLabelKeys = `service "cat" {
|
||||
process "main" {
|
||||
command = ["/usr/local/bin/awesome-app", "server"]
|
||||
}
|
||||
|
||||
process "management" {
|
||||
command = ["/usr/local/bin/awesome-app", "management"]
|
||||
}
|
||||
}
|
||||
`
|
||||
var multipleBlockLabelKeysExpected = `service "cat" {
|
||||
process "main" {
|
||||
command = ["/usr/local/bin/awesome-app", "server"]
|
||||
}
|
||||
process "management" {
|
||||
command = ["/usr/local/bin/awesome-app", "management"]
|
||||
}
|
||||
}
|
||||
`
|
||||
|
||||
var multipleBlockLabelKeysExpectedUpdate = `service "cat" {
|
||||
process "main" {
|
||||
command = ["/usr/local/bin/awesome-app", "server", "meow"]
|
||||
}
|
||||
process "management" {
|
||||
command = ["/usr/local/bin/awesome-app", "management"]
|
||||
}
|
||||
}
|
||||
`
|
||||
|
||||
var multipleBlockLabelKeysExpectedYaml = `service:
|
||||
cat:
|
||||
process:
|
||||
main:
|
||||
command:
|
||||
- "/usr/local/bin/awesome-app"
|
||||
- "server"
|
||||
management:
|
||||
command:
|
||||
- "/usr/local/bin/awesome-app"
|
||||
- "management"
|
||||
`
|
||||
|
||||
var simpleSample = `# Arithmetic with literals and application-provided variables
|
||||
sum = 1 + addend
|
||||
|
||||
# String interpolation and templates
|
||||
message = "Hello, ${name}!"
|
||||
|
||||
# Application-provided functions
|
||||
shouty_message = upper(message)`
|
||||
|
||||
var simpleSampleExpected = `# Arithmetic with literals and application-provided variables
|
||||
sum = 1 + addend
|
||||
# String interpolation and templates
|
||||
message = "Hello, ${name}!"
|
||||
# Application-provided functions
|
||||
shouty_message = upper(message)
|
||||
`
|
||||
|
||||
var simpleSampleExpectedYaml = `# Arithmetic with literals and application-provided variables
|
||||
sum: 1 + addend
|
||||
# String interpolation and templates
|
||||
message: "Hello, ${name}!"
|
||||
# Application-provided functions
|
||||
shouty_message: upper(message)
|
||||
`
|
||||
|
||||
var hclFormatScenarios = []formatScenario{
|
||||
{
|
||||
description: "Parse HCL",
|
||||
input: `io_mode = "async"`,
|
||||
expected: "io_mode: \"async\"\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "Simple decode, no quotes",
|
||||
skipDoc: true,
|
||||
input: `io_mode = async`,
|
||||
expected: "io_mode: async\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "Simple roundtrip, no quotes",
|
||||
skipDoc: true,
|
||||
input: `io_mode = async`,
|
||||
expected: "io_mode = async\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Nested decode",
|
||||
skipDoc: true,
|
||||
input: nestedExample,
|
||||
expected: nestedExampleYaml,
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "Template decode",
|
||||
skipDoc: true,
|
||||
input: `message = "Hello, ${name}!"`,
|
||||
expected: "message: \"Hello, ${name}!\"\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: with template",
|
||||
skipDoc: true,
|
||||
input: `message = "Hello, ${name}!"`,
|
||||
expected: "message = \"Hello, ${name}!\"\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: with function",
|
||||
skipDoc: true,
|
||||
input: `shouty_message = upper(message)`,
|
||||
expected: "shouty_message = upper(message)\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: with arithmetic",
|
||||
skipDoc: true,
|
||||
input: `sum = 1 + addend`,
|
||||
expected: "sum = 1 + addend\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Arithmetic decode",
|
||||
skipDoc: true,
|
||||
input: `sum = 1 + addend`,
|
||||
expected: "sum: 1 + addend\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "number attribute",
|
||||
skipDoc: true,
|
||||
input: `port = 8080`,
|
||||
expected: "port: 8080\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "float attribute",
|
||||
skipDoc: true,
|
||||
input: `pi = 3.14`,
|
||||
expected: "pi: 3.14\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "boolean attribute",
|
||||
skipDoc: true,
|
||||
input: `enabled = true`,
|
||||
expected: "enabled: true\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "object/map attribute",
|
||||
skipDoc: true,
|
||||
input: `obj = { a = 1, b = "two" }`,
|
||||
expected: "obj: {a: 1, b: \"two\"}\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "nested block",
|
||||
skipDoc: true,
|
||||
input: `server { port = 8080 }`,
|
||||
expected: "server:\n port: 8080\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "multiple attributes",
|
||||
skipDoc: true,
|
||||
input: "name = \"app\"\nversion = 1\nenabled = true",
|
||||
expected: "name: \"app\"\nversion: 1\nenabled: true\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "binary expression",
|
||||
skipDoc: true,
|
||||
input: `count = 0 - 42`,
|
||||
expected: "count: -42\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "negative number",
|
||||
skipDoc: true,
|
||||
input: `count = -42`,
|
||||
expected: "count: -42\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "scientific notation",
|
||||
skipDoc: true,
|
||||
input: `value = 1e-3`,
|
||||
expected: "value: 0.001\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "nested object",
|
||||
skipDoc: true,
|
||||
input: `config = { db = { host = "localhost", port = 5432 } }`,
|
||||
expected: "config: {db: {host: \"localhost\", port: 5432}}\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "mixed list",
|
||||
skipDoc: true,
|
||||
input: `values = [1, "two", true]`,
|
||||
expected: "values:\n - 1\n - \"two\"\n - true\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: Sample Doc",
|
||||
input: multipleBlockLabelKeys,
|
||||
expected: multipleBlockLabelKeysExpected,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: With an update",
|
||||
input: multipleBlockLabelKeys,
|
||||
expression: `.service.cat.process.main.command += "meow"`,
|
||||
expected: multipleBlockLabelKeysExpectedUpdate,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Parse HCL: Sample Doc",
|
||||
input: multipleBlockLabelKeys,
|
||||
expected: multipleBlockLabelKeysExpectedYaml,
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "block with labels",
|
||||
skipDoc: true,
|
||||
input: `resource "aws_instance" "example" { ami = "ami-12345" }`,
|
||||
expected: "resource:\n aws_instance:\n example:\n ami: \"ami-12345\"\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "block with labels roundtrip",
|
||||
skipDoc: true,
|
||||
input: `resource "aws_instance" "example" { ami = "ami-12345" }`,
|
||||
expected: "resource \"aws_instance\" \"example\" {\n ami = \"ami-12345\"\n}\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip simple attribute",
|
||||
skipDoc: true,
|
||||
input: `io_mode = "async"`,
|
||||
expected: `io_mode = "async"` + "\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip number attribute",
|
||||
skipDoc: true,
|
||||
input: `port = 8080`,
|
||||
expected: "port = 8080\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip float attribute",
|
||||
skipDoc: true,
|
||||
input: `pi = 3.14`,
|
||||
expected: "pi = 3.14\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip boolean attribute",
|
||||
skipDoc: true,
|
||||
input: `enabled = true`,
|
||||
expected: "enabled = true\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip list of strings",
|
||||
skipDoc: true,
|
||||
input: `tags = ["a", "b"]`,
|
||||
expected: "tags = [\"a\", \"b\"]\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip object/map attribute",
|
||||
skipDoc: true,
|
||||
input: `obj = { a = 1, b = "two" }`,
|
||||
expected: "obj = {\n a = 1\n b = \"two\"\n}\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip nested block",
|
||||
skipDoc: true,
|
||||
input: `server { port = 8080 }`,
|
||||
expected: "server {\n port = 8080\n}\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip multiple attributes",
|
||||
skipDoc: true,
|
||||
input: "name = \"app\"\nversion = 1\nenabled = true",
|
||||
expected: "name = \"app\"\nversion = 1\nenabled = true\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Parse HCL: with comments",
|
||||
input: "# Configuration\nport = 8080 # server port",
|
||||
expected: "# Configuration\nport: 8080 # server port\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: with comments",
|
||||
input: "# Configuration\nport = 8080",
|
||||
expected: "# Configuration\nport = 8080\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: extraction",
|
||||
skipDoc: true,
|
||||
input: simpleSample,
|
||||
expression: ".shouty_message",
|
||||
expected: "upper(message)\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: With templates, functions and arithmetic",
|
||||
input: simpleSample,
|
||||
expected: simpleSampleExpected,
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip example",
|
||||
skipDoc: true,
|
||||
input: simpleSample,
|
||||
expected: simpleSampleExpectedYaml,
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "Parse HCL: List of strings",
|
||||
skipDoc: true,
|
||||
input: `tags = ["a", "b"]`,
|
||||
expected: "tags:\n - \"a\"\n - \"b\"\n",
|
||||
scenarioType: "decode",
|
||||
},
|
||||
{
|
||||
description: "roundtrip list of objects",
|
||||
skipDoc: true,
|
||||
input: `items = [{ name = "a", value = 1 }, { name = "b", value = 2 }]`,
|
||||
expected: "items = [{\n name = \"a\"\n value = 1\n }, {\n name = \"b\"\n value = 2\n}]\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip nested blocks with same name",
|
||||
skipDoc: true,
|
||||
input: "database \"primary\" {\n host = \"localhost\"\n port = 5432\n}\ndatabase \"replica\" {\n host = \"replica.local\"\n port = 5433\n}",
|
||||
expected: "database \"primary\" {\n host = \"localhost\"\n port = 5432\n}\ndatabase \"replica\" {\n host = \"replica.local\"\n port = 5433\n}\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip mixed nested structure",
|
||||
skipDoc: true,
|
||||
input: "servers \"web\" {\n addresses = [\"10.0.1.1\", \"10.0.1.2\"]\n port = 8080\n}",
|
||||
expected: "servers \"web\" {\n addresses = [\"10.0.1.1\", \"10.0.1.2\"]\n port = 8080\n}\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip null value",
|
||||
skipDoc: true,
|
||||
input: `value = null`,
|
||||
expected: "value = null\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip empty list",
|
||||
skipDoc: true,
|
||||
input: `items = []`,
|
||||
expected: "items = []\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip empty object",
|
||||
skipDoc: true,
|
||||
input: `config = {}`,
|
||||
expected: "config = {}\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Roundtrip: Separate blocks with same name.",
|
||||
input: "resource \"aws_instance\" \"web\" {\n ami = \"ami-12345\"\n}\nresource \"aws_instance\" \"db\" {\n ami = \"ami-67890\"\n}",
|
||||
expected: "resource \"aws_instance\" \"web\" {\n ami = \"ami-12345\"\n}\nresource \"aws_instance\" \"db\" {\n ami = \"ami-67890\"\n}\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip deeply nested structure",
|
||||
skipDoc: true,
|
||||
input: "app \"database\" \"primary\" \"connection\" {\n host = \"db.local\"\n port = 5432\n}",
|
||||
expected: "app \"database\" \"primary\" \"connection\" {\n host = \"db.local\"\n port = 5432\n}\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "roundtrip with leading comments",
|
||||
skipDoc: true,
|
||||
input: "# Main config\nenabled = true\nport = 8080",
|
||||
expected: "# Main config\nenabled = true\nport = 8080\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Multiple attributes with comments (comment safety with safe path separator)",
|
||||
skipDoc: true,
|
||||
input: "# Database config\ndb_host = \"localhost\"\n# Connection pool\ndb_pool = 10",
|
||||
expected: "# Database config\ndb_host = \"localhost\"\n# Connection pool\ndb_pool = 10\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Nested blocks with head comments",
|
||||
skipDoc: true,
|
||||
input: "service \"api\" {\n # Listen address\n listen = \"0.0.0.0:8080\"\n # TLS enabled\n tls = true\n}",
|
||||
expected: "service \"api\" {\n # Listen address\n listen = \"0.0.0.0:8080\"\n # TLS enabled\n tls = true\n}\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Multiple blocks with EncodeSeparate preservation",
|
||||
skipDoc: true,
|
||||
input: "resource \"aws_s3_bucket\" \"bucket1\" {\n bucket = \"my-bucket-1\"\n}\nresource \"aws_s3_bucket\" \"bucket2\" {\n bucket = \"my-bucket-2\"\n}",
|
||||
expected: "resource \"aws_s3_bucket\" \"bucket1\" {\n bucket = \"my-bucket-1\"\n}\nresource \"aws_s3_bucket\" \"bucket2\" {\n bucket = \"my-bucket-2\"\n}\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Blocks with same name handled separately",
|
||||
skipDoc: true,
|
||||
input: "server \"primary\" { port = 8080 }\nserver \"backup\" { port = 8081 }",
|
||||
expected: "server \"primary\" {\n port = 8080\n}\nserver \"backup\" {\n port = 8081\n}\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Block label with dot roundtrip (commentPathSep)",
|
||||
skipDoc: true,
|
||||
input: "service \"api.service\" {\n port = 8080\n}",
|
||||
expected: "service \"api.service\" {\n port = 8080\n}\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Nested template expression",
|
||||
skipDoc: true,
|
||||
input: `message = "User: ${username}, Role: ${user_role}"`,
|
||||
expected: "message = \"User: ${username}, Role: ${user_role}\"\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Empty object roundtrip",
|
||||
skipDoc: true,
|
||||
input: `obj = {}`,
|
||||
expected: "obj = {}\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
{
|
||||
description: "Null value in block",
|
||||
skipDoc: true,
|
||||
input: `service { optional_field = null }`,
|
||||
expected: "service {\n optional_field = null\n}\n",
|
||||
scenarioType: "roundtrip",
|
||||
},
|
||||
}
|
||||
|
||||
func testHclScenario(t *testing.T, s formatScenario) {
|
||||
switch s.scenarioType {
|
||||
case "decode":
|
||||
result := mustProcessFormatScenario(s, NewHclDecoder(), NewYamlEncoder(ConfiguredYamlPreferences))
|
||||
test.AssertResultWithContext(t, s.expected, result, s.description)
|
||||
case "roundtrip":
|
||||
test.AssertResultWithContext(t, s.expected, mustProcessFormatScenario(s, NewHclDecoder(), NewHclEncoder(ConfiguredHclPreferences)), s.description)
|
||||
}
|
||||
}
|
||||
|
||||
func documentHclScenario(_ *testing.T, w *bufio.Writer, i interface{}) {
|
||||
s := i.(formatScenario)
|
||||
|
||||
if s.skipDoc {
|
||||
return
|
||||
}
|
||||
switch s.scenarioType {
|
||||
case "", "decode":
|
||||
documentHclDecodeScenario(w, s)
|
||||
case "roundtrip":
|
||||
documentHclRoundTripScenario(w, s)
|
||||
default:
|
||||
panic(fmt.Sprintf("unhandled scenario type %q", s.scenarioType))
|
||||
}
|
||||
}
|
||||
|
||||
func documentHclDecodeScenario(w *bufio.Writer, s formatScenario) {
|
||||
writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))
|
||||
|
||||
if s.subdescription != "" {
|
||||
writeOrPanic(w, s.subdescription)
|
||||
writeOrPanic(w, "\n\n")
|
||||
}
|
||||
|
||||
writeOrPanic(w, "Given a sample.hcl file of:\n")
|
||||
writeOrPanic(w, fmt.Sprintf("```hcl\n%v\n```\n", s.input))
|
||||
|
||||
writeOrPanic(w, "then\n")
|
||||
expression := s.expression
|
||||
if s.expression != "" {
|
||||
expression = fmt.Sprintf(" '%v'", s.expression)
|
||||
}
|
||||
writeOrPanic(w, fmt.Sprintf("```bash\nyq -oy%v sample.hcl\n```\n", expression))
|
||||
writeOrPanic(w, "will output\n")
|
||||
|
||||
writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", mustProcessFormatScenario(s, NewHclDecoder(), NewYamlEncoder(ConfiguredYamlPreferences))))
|
||||
}
|
||||
|
||||
func documentHclRoundTripScenario(w *bufio.Writer, s formatScenario) {
|
||||
writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))
|
||||
|
||||
if s.subdescription != "" {
|
||||
writeOrPanic(w, s.subdescription)
|
||||
writeOrPanic(w, "\n\n")
|
||||
}
|
||||
|
||||
writeOrPanic(w, "Given a sample.hcl file of:\n")
|
||||
writeOrPanic(w, fmt.Sprintf("```hcl\n%v\n```\n", s.input))
|
||||
|
||||
writeOrPanic(w, "then\n")
|
||||
expression := s.expression
|
||||
if s.expression != "" {
|
||||
expression = fmt.Sprintf(" '%v'", s.expression)
|
||||
}
|
||||
writeOrPanic(w, fmt.Sprintf("```bash\nyq%v sample.hcl\n```\n", expression))
|
||||
writeOrPanic(w, "will output\n")
|
||||
|
||||
writeOrPanic(w, fmt.Sprintf("```hcl\n%v```\n\n", mustProcessFormatScenario(s, NewHclDecoder(), NewHclEncoder(ConfiguredHclPreferences))))
|
||||
}
|
||||
|
||||
func TestHclEncoderPrintDocumentSeparator(t *testing.T) {
|
||||
encoder := NewHclEncoder(ConfiguredHclPreferences)
|
||||
var buf bytes.Buffer
|
||||
writer := bufio.NewWriter(&buf)
|
||||
|
||||
err := encoder.PrintDocumentSeparator(writer)
|
||||
writer.Flush()
|
||||
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "", buf.String())
|
||||
}
|
||||
|
||||
func TestHclEncoderPrintLeadingContent(t *testing.T) {
|
||||
encoder := NewHclEncoder(ConfiguredHclPreferences)
|
||||
var buf bytes.Buffer
|
||||
writer := bufio.NewWriter(&buf)
|
||||
|
||||
err := encoder.PrintLeadingContent(writer, "some content")
|
||||
writer.Flush()
|
||||
|
||||
test.AssertResult(t, nil, err)
|
||||
test.AssertResult(t, "", buf.String())
|
||||
}
|
||||
|
||||
func TestHclEncoderCanHandleAliases(t *testing.T) {
|
||||
encoder := NewHclEncoder(ConfiguredHclPreferences)
|
||||
test.AssertResult(t, false, encoder.CanHandleAliases())
|
||||
}
|
||||
|
||||
func TestHclFormatScenarios(t *testing.T) {
|
||||
for _, tt := range hclFormatScenarios {
|
||||
testHclScenario(t, tt)
|
||||
}
|
||||
genericScenarios := make([]interface{}, len(hclFormatScenarios))
|
||||
for i, s := range hclFormatScenarios {
|
||||
genericScenarios[i] = s
|
||||
}
|
||||
documentScenarios(t, "usage", "hcl", genericScenarios, documentHclScenario)
|
||||
}
|
||||
30
pkg/yqlib/kyaml.go
Normal file
30
pkg/yqlib/kyaml.go
Normal file
@ -0,0 +1,30 @@
|
||||
//go:build !yq_nokyaml
|
||||
|
||||
package yqlib
|
||||
|
||||
type KYamlPreferences struct {
|
||||
Indent int
|
||||
ColorsEnabled bool
|
||||
PrintDocSeparators bool
|
||||
UnwrapScalar bool
|
||||
}
|
||||
|
||||
func NewDefaultKYamlPreferences() KYamlPreferences {
|
||||
return KYamlPreferences{
|
||||
Indent: 2,
|
||||
ColorsEnabled: false,
|
||||
PrintDocSeparators: true,
|
||||
UnwrapScalar: true,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *KYamlPreferences) Copy() KYamlPreferences {
|
||||
return KYamlPreferences{
|
||||
Indent: p.Indent,
|
||||
ColorsEnabled: p.ColorsEnabled,
|
||||
PrintDocSeparators: p.PrintDocSeparators,
|
||||
UnwrapScalar: p.UnwrapScalar,
|
||||
}
|
||||
}
|
||||
|
||||
var ConfiguredKYamlPreferences = NewDefaultKYamlPreferences()
|
||||
542
pkg/yqlib/kyaml_test.go
Normal file
542
pkg/yqlib/kyaml_test.go
Normal file
@ -0,0 +1,542 @@
|
||||
//go:build !yq_nokyaml
|
||||
|
||||
package yqlib
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
)
|
||||
|
||||
var ansiRe = regexp.MustCompile(`\x1b\[[0-9;]*m`)
|
||||
|
||||
func stripANSI(s string) string {
|
||||
return ansiRe.ReplaceAllString(s, "")
|
||||
}
|
||||
|
||||
var kyamlFormatScenarios = []formatScenario{
|
||||
{
|
||||
description: "Encode kyaml: plain string scalar",
|
||||
subdescription: "Strings are always double-quoted in KYaml output.",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "cat\n",
|
||||
expected: "\"cat\"\n",
|
||||
},
|
||||
{
|
||||
description: "encode plain int scalar",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "12\n",
|
||||
expected: "12\n",
|
||||
skipDoc: true,
|
||||
},
|
||||
{
|
||||
description: "encode plain bool scalar",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "true\n",
|
||||
expected: "true\n",
|
||||
skipDoc: true,
|
||||
},
|
||||
{
|
||||
description: "encode plain null scalar",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "null\n",
|
||||
expected: "null\n",
|
||||
skipDoc: true,
|
||||
},
|
||||
{
|
||||
description: "encode flow mapping and sequence",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "a: b\nc:\n - d\n",
|
||||
expected: "{\n" +
|
||||
" a: \"b\",\n" +
|
||||
" c: [\n" +
|
||||
" \"d\",\n" +
|
||||
" ],\n" +
|
||||
"}\n",
|
||||
},
|
||||
{
|
||||
description: "encode non-string scalars",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "a: 12\n" +
|
||||
"b: true\n" +
|
||||
"c: null\n" +
|
||||
"d: \"true\"\n",
|
||||
expected: "{\n" +
|
||||
" a: 12,\n" +
|
||||
" b: true,\n" +
|
||||
" c: null,\n" +
|
||||
" d: \"true\",\n" +
|
||||
"}\n",
|
||||
},
|
||||
{
|
||||
description: "quote non-identifier keys",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "\"1a\": b\n\"has space\": c\n",
|
||||
expected: "{\n" +
|
||||
" \"1a\": \"b\",\n" +
|
||||
" \"has space\": \"c\",\n" +
|
||||
"}\n",
|
||||
},
|
||||
{
|
||||
description: "escape quoted strings",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "a: \"line1\\nline2\\t\\\"q\\\"\"\n",
|
||||
expected: "{\n" +
|
||||
" a: \"line1\\nline2\\t\\\"q\\\"\",\n" +
|
||||
"}\n",
|
||||
},
|
||||
{
|
||||
description: "preserve comments when encoding",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "# leading\n" +
|
||||
"a: 1 # a line\n" +
|
||||
"# head b\n" +
|
||||
"b: 2\n" +
|
||||
"c:\n" +
|
||||
" # head d\n" +
|
||||
" - d # d line\n" +
|
||||
" - e\n" +
|
||||
"# trailing\n",
|
||||
expected: "# leading\n" +
|
||||
"{\n" +
|
||||
" a: 1, # a line\n" +
|
||||
" # head b\n" +
|
||||
" b: 2,\n" +
|
||||
" c: [\n" +
|
||||
" # head d\n" +
|
||||
" \"d\", # d line\n" +
|
||||
" \"e\",\n" +
|
||||
" ],\n" +
|
||||
" # trailing\n" +
|
||||
"}\n",
|
||||
},
|
||||
{
|
||||
description: "Encode kyaml: anchors and aliases",
|
||||
subdescription: "KYaml output does not support anchors/aliases; they are expanded to concrete values.",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "base: &base\n" +
|
||||
" a: b\n" +
|
||||
"copy: *base\n",
|
||||
expected: "{\n" +
|
||||
" base: {\n" +
|
||||
" a: \"b\",\n" +
|
||||
" },\n" +
|
||||
" copy: {\n" +
|
||||
" a: \"b\",\n" +
|
||||
" },\n" +
|
||||
"}\n",
|
||||
},
|
||||
{
|
||||
description: "Encode kyaml: yaml to kyaml shows formatting differences",
|
||||
subdescription: "KYaml uses flow-style collections (braces/brackets) and explicit commas.",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "person:\n" +
|
||||
" name: John\n" +
|
||||
" pets:\n" +
|
||||
" - cat\n" +
|
||||
" - dog\n",
|
||||
expected: "{\n" +
|
||||
" person: {\n" +
|
||||
" name: \"John\",\n" +
|
||||
" pets: [\n" +
|
||||
" \"cat\",\n" +
|
||||
" \"dog\",\n" +
|
||||
" ],\n" +
|
||||
" },\n" +
|
||||
"}\n",
|
||||
},
|
||||
{
|
||||
description: "Encode kyaml: nested lists of objects",
|
||||
subdescription: "Lists and objects can be nested arbitrarily; KYaml always uses flow-style collections.",
|
||||
scenarioType: "encode",
|
||||
indent: 2,
|
||||
input: "- name: a\n" +
|
||||
" items:\n" +
|
||||
" - id: 1\n" +
|
||||
" tags:\n" +
|
||||
" - k: x\n" +
|
||||
" v: y\n" +
|
||||
" - k: x2\n" +
|
||||
" v: y2\n" +
|
||||
" - id: 2\n" +
|
||||
" tags:\n" +
|
||||
" - k: z\n" +
|
||||
" v: w\n",
|
||||
expected: "[\n" +
|
||||
" {\n" +
|
||||
" name: \"a\",\n" +
|
||||
" items: [\n" +
|
||||
" {\n" +
|
||||
" id: 1,\n" +
|
||||
" tags: [\n" +
|
||||
" {\n" +
|
||||
" k: \"x\",\n" +
|
||||
" v: \"y\",\n" +
|
||||
" },\n" +
|
||||
" {\n" +
|
||||
" k: \"x2\",\n" +
|
||||
" v: \"y2\",\n" +
|
||||
" },\n" +
|
||||
" ],\n" +
|
||||
" },\n" +
|
||||
" {\n" +
|
||||
" id: 2,\n" +
|
||||
" tags: [\n" +
|
||||
" {\n" +
|
||||
" k: \"z\",\n" +
|
||||
" v: \"w\",\n" +
|
||||
" },\n" +
|
||||
" ],\n" +
|
||||
" },\n" +
|
||||
" ],\n" +
|
||||
" },\n" +
|
||||
"]\n",
|
||||
},
|
||||
}
|
||||
|
||||
func testKYamlScenario(t *testing.T, s formatScenario) {
|
||||
prefs := ConfiguredKYamlPreferences.Copy()
|
||||
prefs.Indent = s.indent
|
||||
prefs.UnwrapScalar = false
|
||||
|
||||
switch s.scenarioType {
|
||||
case "encode":
|
||||
test.AssertResultWithContext(
|
||||
t,
|
||||
s.expected,
|
||||
mustProcessFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewKYamlEncoder(prefs)),
|
||||
s.description,
|
||||
)
|
||||
default:
|
||||
panic(fmt.Sprintf("unhandled scenario type %q", s.scenarioType))
|
||||
}
|
||||
}
|
||||
|
||||
func documentKYamlScenario(_ *testing.T, w *bufio.Writer, i interface{}) {
|
||||
s := i.(formatScenario)
|
||||
if s.skipDoc {
|
||||
return
|
||||
}
|
||||
|
||||
switch s.scenarioType {
|
||||
case "encode":
|
||||
documentKYamlEncodeScenario(w, s)
|
||||
default:
|
||||
panic(fmt.Sprintf("unhandled scenario type %q", s.scenarioType))
|
||||
}
|
||||
}
|
||||
|
||||
func documentKYamlEncodeScenario(w *bufio.Writer, s formatScenario) {
|
||||
writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))
|
||||
|
||||
if s.subdescription != "" {
|
||||
writeOrPanic(w, s.subdescription)
|
||||
writeOrPanic(w, "\n\n")
|
||||
}
|
||||
|
||||
writeOrPanic(w, "Given a sample.yml file of:\n")
|
||||
writeOrPanic(w, fmt.Sprintf("```yaml\n%v\n```\n", s.input))
|
||||
|
||||
writeOrPanic(w, "then\n")
|
||||
|
||||
expression := s.expression
|
||||
if expression == "" {
|
||||
expression = "."
|
||||
}
|
||||
|
||||
if s.indent == 2 {
|
||||
writeOrPanic(w, fmt.Sprintf("```bash\nyq -o=kyaml '%v' sample.yml\n```\n", expression))
|
||||
} else {
|
||||
writeOrPanic(w, fmt.Sprintf("```bash\nyq -o=kyaml -I=%v '%v' sample.yml\n```\n", s.indent, expression))
|
||||
}
|
||||
|
||||
writeOrPanic(w, "will output\n")
|
||||
|
||||
prefs := ConfiguredKYamlPreferences.Copy()
|
||||
prefs.Indent = s.indent
|
||||
prefs.UnwrapScalar = false
|
||||
|
||||
writeOrPanic(w, fmt.Sprintf("```yaml\n%v```\n\n", mustProcessFormatScenario(s, NewYamlDecoder(ConfiguredYamlPreferences), NewKYamlEncoder(prefs))))
|
||||
}
|
||||
|
||||
func TestKYamlFormatScenarios(t *testing.T) {
|
||||
for _, s := range kyamlFormatScenarios {
|
||||
testKYamlScenario(t, s)
|
||||
}
|
||||
|
||||
genericScenarios := make([]interface{}, len(kyamlFormatScenarios))
|
||||
for i, s := range kyamlFormatScenarios {
|
||||
genericScenarios[i] = s
|
||||
}
|
||||
documentScenarios(t, "usage", "kyaml", genericScenarios, documentKYamlScenario)
|
||||
}
|
||||
|
||||
func TestKYamlEncoderPrintDocumentSeparator(t *testing.T) {
|
||||
t.Run("enabled", func(t *testing.T) {
|
||||
prefs := NewDefaultKYamlPreferences()
|
||||
prefs.PrintDocSeparators = true
|
||||
|
||||
var buf bytes.Buffer
|
||||
err := NewKYamlEncoder(prefs).PrintDocumentSeparator(&buf)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "---\n" {
|
||||
t.Fatalf("expected doc separator, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("disabled", func(t *testing.T) {
|
||||
prefs := NewDefaultKYamlPreferences()
|
||||
prefs.PrintDocSeparators = false
|
||||
|
||||
var buf bytes.Buffer
|
||||
err := NewKYamlEncoder(prefs).PrintDocumentSeparator(&buf)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "" {
|
||||
t.Fatalf("expected no output, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestKYamlEncoderEncodeUnwrapScalar(t *testing.T) {
|
||||
prefs := NewDefaultKYamlPreferences()
|
||||
prefs.UnwrapScalar = true
|
||||
|
||||
var buf bytes.Buffer
|
||||
err := NewKYamlEncoder(prefs).Encode(&buf, &CandidateNode{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: "cat",
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "cat\n" {
|
||||
t.Fatalf("expected unwrapped scalar, got %q", buf.String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestKYamlEncoderEncodeColorsEnabled(t *testing.T) {
|
||||
prefs := NewDefaultKYamlPreferences()
|
||||
prefs.UnwrapScalar = false
|
||||
prefs.ColorsEnabled = true
|
||||
|
||||
var buf bytes.Buffer
|
||||
err := NewKYamlEncoder(prefs).Encode(&buf, &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Content: []*CandidateNode{
|
||||
{Kind: ScalarNode, Tag: "!!str", Value: "a"},
|
||||
{Kind: ScalarNode, Tag: "!!str", Value: "b"},
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
out := stripANSI(buf.String())
|
||||
if !strings.Contains(out, "a:") || !strings.Contains(out, "\"b\"") {
|
||||
t.Fatalf("expected colourised output to contain rendered tokens, got %q", out)
|
||||
}
|
||||
}
|
||||
|
||||
func TestKYamlEncoderWriteNodeAliasAndUnknown(t *testing.T) {
|
||||
ke := NewKYamlEncoder(NewDefaultKYamlPreferences()).(*kyamlEncoder)
|
||||
|
||||
t.Run("alias_nil", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeNode(&buf, &CandidateNode{Kind: AliasNode}, 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "null" {
|
||||
t.Fatalf("expected null for nil alias, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("alias_value", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeNode(&buf, &CandidateNode{
|
||||
Kind: AliasNode,
|
||||
Alias: &CandidateNode{Kind: ScalarNode, Tag: "!!int", Value: "12"},
|
||||
}, 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "12" {
|
||||
t.Fatalf("expected dereferenced alias value, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("unknown_kind", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeNode(&buf, &CandidateNode{Kind: Kind(12345)}, 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "null" {
|
||||
t.Fatalf("expected null for unknown kind, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestKYamlEncoderEmptyCollections(t *testing.T) {
|
||||
ke := NewKYamlEncoder(NewDefaultKYamlPreferences()).(*kyamlEncoder)
|
||||
|
||||
t.Run("empty_mapping", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeNode(&buf, &CandidateNode{Kind: MappingNode}, 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "{}" {
|
||||
t.Fatalf("expected empty mapping, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("empty_sequence", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeNode(&buf, &CandidateNode{Kind: SequenceNode}, 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "[]" {
|
||||
t.Fatalf("expected empty sequence, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestKYamlEncoderScalarFallbackAndEscaping(t *testing.T) {
|
||||
ke := NewKYamlEncoder(NewDefaultKYamlPreferences()).(*kyamlEncoder)
|
||||
|
||||
t.Run("unknown_tag_falls_back_to_string", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeNode(&buf, &CandidateNode{Kind: ScalarNode, Tag: "!!timestamp", Value: "2020-01-01T00:00:00Z"}, 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != "\"2020-01-01T00:00:00Z\"" {
|
||||
t.Fatalf("expected quoted fallback, got %q", buf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("escape_double_quoted", func(t *testing.T) {
|
||||
got := escapeDoubleQuotedString("a\\b\"c\n\r\t" + string(rune(0x01)))
|
||||
want := "a\\\\b\\\"c\\n\\r\\t\\u0001"
|
||||
if got != want {
|
||||
t.Fatalf("expected %q, got %q", want, got)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("valid_bare_key", func(t *testing.T) {
|
||||
if isValidKYamlBareKey("") {
|
||||
t.Fatalf("expected empty string to be invalid")
|
||||
}
|
||||
if isValidKYamlBareKey("1a") {
|
||||
t.Fatalf("expected leading digit to be invalid")
|
||||
}
|
||||
if !isValidKYamlBareKey("a_b-2") {
|
||||
t.Fatalf("expected identifier-like key to be valid")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestKYamlEncoderCommentsInMapping(t *testing.T) {
|
||||
prefs := NewDefaultKYamlPreferences()
|
||||
prefs.UnwrapScalar = false
|
||||
ke := NewKYamlEncoder(prefs).(*kyamlEncoder)
|
||||
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeNode(&buf, &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Content: []*CandidateNode{
|
||||
{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: "a",
|
||||
HeadComment: "key head",
|
||||
LineComment: "key line",
|
||||
FootComment: "key foot",
|
||||
},
|
||||
{
|
||||
Kind: ScalarNode,
|
||||
Tag: "!!str",
|
||||
Value: "b",
|
||||
HeadComment: "value head",
|
||||
},
|
||||
},
|
||||
}, 0)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
out := buf.String()
|
||||
if !strings.Contains(out, "# key head\n") {
|
||||
t.Fatalf("expected key head comment, got %q", out)
|
||||
}
|
||||
if !strings.Contains(out, "# value head\n") {
|
||||
t.Fatalf("expected value head comment, got %q", out)
|
||||
}
|
||||
if !strings.Contains(out, ", # key line\n") {
|
||||
t.Fatalf("expected inline key comment fallback, got %q", out)
|
||||
}
|
||||
if !strings.Contains(out, "# key foot\n") {
|
||||
t.Fatalf("expected foot comment fallback, got %q", out)
|
||||
}
|
||||
}
|
||||
|
||||
func TestKYamlEncoderCommentBlockAndInlineComment(t *testing.T) {
|
||||
ke := NewKYamlEncoder(NewDefaultKYamlPreferences()).(*kyamlEncoder)
|
||||
|
||||
t.Run("comment_block_prefixing_and_crlf", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeCommentBlock(&buf, "line1\r\n\r\n# already\r\nline2", 2)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
want := " # line1\n # already\n # line2\n"
|
||||
if buf.String() != want {
|
||||
t.Fatalf("expected %q, got %q", want, buf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("inline_comment_prefix_and_first_line_only", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeInlineComment(&buf, "hello\r\nsecond line")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != " # hello" {
|
||||
t.Fatalf("expected %q, got %q", " # hello", buf.String())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("inline_comment_already_prefixed", func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := ke.writeInlineComment(&buf, "# hello")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if buf.String() != " # hello" {
|
||||
t.Fatalf("expected %q, got %q", " # hello", buf.String())
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -61,7 +61,7 @@ func unwrap(value string) string {
|
||||
}
|
||||
|
||||
func extractNumberParameter(value string) (int, error) {
|
||||
parameterParser := regexp.MustCompile(`.*\(([0-9]+)\)`)
|
||||
parameterParser := regexp.MustCompile(`.*\((-?[0-9]+)\)`)
|
||||
matches := parameterParser.FindStringSubmatch(value)
|
||||
var indent, errParsingInt = parseInt(matches[1])
|
||||
if errParsingInt != nil {
|
||||
|
||||
@ -57,7 +57,7 @@ var participleYqRules = []*participleYqRule{
|
||||
simpleOp("sort_?keys", sortKeysOpType),
|
||||
|
||||
{"ArrayToMap", "array_?to_?map", expressionOpToken(`(.[] | select(. != null) ) as $i ireduce({}; .[$i | key] = $i)`), 0},
|
||||
|
||||
{"Root", "root", expressionOpToken(`parent(-1)`), 0},
|
||||
{"YamlEncodeWithIndent", `to_?yaml\([0-9]+\)`, encodeParseIndent(YamlFormat), 0},
|
||||
{"XMLEncodeWithIndent", `to_?xml\([0-9]+\)`, encodeParseIndent(XMLFormat), 0},
|
||||
{"JSONEncodeWithIndent", `to_?json\([0-9]+\)`, encodeParseIndent(JSONFormat), 0},
|
||||
@ -132,7 +132,7 @@ var participleYqRules = []*participleYqRule{
|
||||
simpleOp("split", splitStringOpType),
|
||||
|
||||
simpleOp("parents", getParentsOpType),
|
||||
{"ParentWithLevel", `parent\([0-9]+\)`, parentWithLevel(), 0},
|
||||
{"ParentWithLevel", `parent\(-?[0-9]+\)`, parentWithLevel(), 0},
|
||||
{"ParentWithDefaultLevel", `parent`, parentWithDefaultLevel(), 0},
|
||||
|
||||
simpleOp("keys", keysOpType),
|
||||
@ -379,9 +379,7 @@ func stringValue() yqAction {
|
||||
log.Debug("rawTokenvalue: %v", rawToken.Value)
|
||||
value := unwrap(rawToken.Value)
|
||||
log.Debug("unwrapped: %v", value)
|
||||
value = strings.ReplaceAll(value, "\\\"", "\"")
|
||||
value = strings.ReplaceAll(value, "\\n", "\n")
|
||||
log.Debug("replaced: %v", value)
|
||||
value = processEscapeCharacters(value)
|
||||
return &token{TokenType: operationToken, Operation: &Operation{
|
||||
OperationType: stringInterpolationOpType,
|
||||
StringValue: value,
|
||||
@ -453,6 +451,7 @@ func multiplyWithPrefs(op *operationType) yqAction {
|
||||
prefs.AssignPrefs.ClobberCustomTags = true
|
||||
}
|
||||
prefs.TraversePrefs.DontFollowAlias = true
|
||||
prefs.TraversePrefs.ExactKeyMatch = true
|
||||
op := &Operation{OperationType: op, Value: multiplyOpType.Type, StringValue: options, Preferences: prefs}
|
||||
return &token{TokenType: operationToken, Operation: op}, nil
|
||||
}
|
||||
|
||||
@ -704,6 +704,90 @@ var participleLexerScenarios = []participleLexerScenario{
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expression: `"string with a\r"`,
|
||||
tokens: []*token{
|
||||
{
|
||||
TokenType: operationToken,
|
||||
Operation: &Operation{
|
||||
OperationType: stringInterpolationOpType,
|
||||
Value: "string with a\r",
|
||||
StringValue: "string with a\r",
|
||||
Preferences: nil,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expression: `"string with a\t"`,
|
||||
tokens: []*token{
|
||||
{
|
||||
TokenType: operationToken,
|
||||
Operation: &Operation{
|
||||
OperationType: stringInterpolationOpType,
|
||||
Value: "string with a\t",
|
||||
StringValue: "string with a\t",
|
||||
Preferences: nil,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expression: `"string with a\f"`,
|
||||
tokens: []*token{
|
||||
{
|
||||
TokenType: operationToken,
|
||||
Operation: &Operation{
|
||||
OperationType: stringInterpolationOpType,
|
||||
Value: "string with a\f",
|
||||
StringValue: "string with a\f",
|
||||
Preferences: nil,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expression: `"string with a\v"`,
|
||||
tokens: []*token{
|
||||
{
|
||||
TokenType: operationToken,
|
||||
Operation: &Operation{
|
||||
OperationType: stringInterpolationOpType,
|
||||
Value: "string with a\v",
|
||||
StringValue: "string with a\v",
|
||||
Preferences: nil,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expression: `"string with a\b"`,
|
||||
tokens: []*token{
|
||||
{
|
||||
TokenType: operationToken,
|
||||
Operation: &Operation{
|
||||
OperationType: stringInterpolationOpType,
|
||||
Value: "string with a\b",
|
||||
StringValue: "string with a\b",
|
||||
Preferences: nil,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
expression: `"string with a\a"`,
|
||||
tokens: []*token{
|
||||
{
|
||||
TokenType: operationToken,
|
||||
Operation: &Operation{
|
||||
OperationType: stringInterpolationOpType,
|
||||
Value: "string with a\a",
|
||||
StringValue: "string with a\a",
|
||||
Preferences: nil,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func TestParticipleLexer(t *testing.T) {
|
||||
|
||||
@ -186,6 +186,76 @@ func parseInt(numberString string) (int, error) {
|
||||
return int(parsed), err
|
||||
}
|
||||
|
||||
func processEscapeCharacters(original string) string {
|
||||
if original == "" {
|
||||
return original
|
||||
}
|
||||
|
||||
var result strings.Builder
|
||||
runes := []rune(original)
|
||||
|
||||
for i := 0; i < len(runes); i++ {
|
||||
if runes[i] == '\\' && i < len(runes)-1 {
|
||||
next := runes[i+1]
|
||||
switch next {
|
||||
case '\\':
|
||||
// Check if followed by opening bracket - if so, preserve both backslashes
|
||||
// this is required for string interpolation to work correctly.
|
||||
if i+2 < len(runes) && runes[i+2] == '(' {
|
||||
// Preserve \\ when followed by (
|
||||
result.WriteRune('\\')
|
||||
result.WriteRune('\\')
|
||||
i++ // Skip the next backslash (we'll process the ( normally on next iteration)
|
||||
continue
|
||||
}
|
||||
// Escaped backslash: \\ -> \
|
||||
result.WriteRune('\\')
|
||||
i++ // Skip the next backslash
|
||||
continue
|
||||
case '"':
|
||||
result.WriteRune('"')
|
||||
i++ // Skip the quote
|
||||
continue
|
||||
case 'n':
|
||||
result.WriteRune('\n')
|
||||
i++ // Skip the 'n'
|
||||
continue
|
||||
case 't':
|
||||
result.WriteRune('\t')
|
||||
i++ // Skip the 't'
|
||||
continue
|
||||
case 'r':
|
||||
result.WriteRune('\r')
|
||||
i++ // Skip the 'r'
|
||||
continue
|
||||
case 'f':
|
||||
result.WriteRune('\f')
|
||||
i++ // Skip the 'f'
|
||||
continue
|
||||
case 'v':
|
||||
result.WriteRune('\v')
|
||||
i++ // Skip the 'v'
|
||||
continue
|
||||
case 'b':
|
||||
result.WriteRune('\b')
|
||||
i++ // Skip the 'b'
|
||||
continue
|
||||
case 'a':
|
||||
result.WriteRune('\a')
|
||||
i++ // Skip the 'a'
|
||||
continue
|
||||
}
|
||||
}
|
||||
result.WriteRune(runes[i])
|
||||
}
|
||||
|
||||
value := result.String()
|
||||
if value != original {
|
||||
log.Debug("processEscapeCharacters from [%v] to [%v]", original, value)
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
func headAndLineComment(node *CandidateNode) string {
|
||||
return headComment(node) + lineComment(node)
|
||||
}
|
||||
|
||||
@ -2,6 +2,7 @@ package yqlib
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/mikefarah/yq/v4/test"
|
||||
@ -160,3 +161,369 @@ func TestParseInt64(t *testing.T) {
|
||||
test.AssertResultComplexWithContext(t, tt.expectedFormatString, fmt.Sprintf(format, actualNumber), fmt.Sprintf("Formatting of: %v", tt.numberString))
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetContentValueByKey(t *testing.T) {
|
||||
// Create content with key-value pairs
|
||||
key1 := createStringScalarNode("key1")
|
||||
value1 := createStringScalarNode("value1")
|
||||
key2 := createStringScalarNode("key2")
|
||||
value2 := createStringScalarNode("value2")
|
||||
|
||||
content := []*CandidateNode{key1, value1, key2, value2}
|
||||
|
||||
// Test finding existing key
|
||||
result := getContentValueByKey(content, "key1")
|
||||
test.AssertResult(t, value1, result)
|
||||
|
||||
// Test finding another existing key
|
||||
result = getContentValueByKey(content, "key2")
|
||||
test.AssertResult(t, value2, result)
|
||||
|
||||
// Test finding non-existing key
|
||||
result = getContentValueByKey(content, "nonexistent")
|
||||
test.AssertResult(t, (*CandidateNode)(nil), result)
|
||||
|
||||
// Test with empty content
|
||||
result = getContentValueByKey([]*CandidateNode{}, "key1")
|
||||
test.AssertResult(t, (*CandidateNode)(nil), result)
|
||||
}
|
||||
|
||||
func TestRecurseNodeArrayEqual(t *testing.T) {
|
||||
// Create two arrays with same content
|
||||
array1 := &CandidateNode{
|
||||
Kind: SequenceNode,
|
||||
Content: []*CandidateNode{
|
||||
createStringScalarNode("item1"),
|
||||
createStringScalarNode("item2"),
|
||||
},
|
||||
}
|
||||
|
||||
array2 := &CandidateNode{
|
||||
Kind: SequenceNode,
|
||||
Content: []*CandidateNode{
|
||||
createStringScalarNode("item1"),
|
||||
createStringScalarNode("item2"),
|
||||
},
|
||||
}
|
||||
|
||||
array3 := &CandidateNode{
|
||||
Kind: SequenceNode,
|
||||
Content: []*CandidateNode{
|
||||
createStringScalarNode("item1"),
|
||||
createStringScalarNode("different"),
|
||||
},
|
||||
}
|
||||
|
||||
array4 := &CandidateNode{
|
||||
Kind: SequenceNode,
|
||||
Content: []*CandidateNode{
|
||||
createStringScalarNode("item1"),
|
||||
},
|
||||
}
|
||||
|
||||
test.AssertResult(t, true, recurseNodeArrayEqual(array1, array2))
|
||||
test.AssertResult(t, false, recurseNodeArrayEqual(array1, array3))
|
||||
test.AssertResult(t, false, recurseNodeArrayEqual(array1, array4))
|
||||
}
|
||||
|
||||
func TestFindInArray(t *testing.T) {
|
||||
item1 := createStringScalarNode("item1")
|
||||
item2 := createStringScalarNode("item2")
|
||||
item3 := createStringScalarNode("item3")
|
||||
|
||||
array := &CandidateNode{
|
||||
Kind: SequenceNode,
|
||||
Content: []*CandidateNode{item1, item2, item3},
|
||||
}
|
||||
|
||||
// Test finding existing items
|
||||
test.AssertResult(t, 0, findInArray(array, item1))
|
||||
test.AssertResult(t, 1, findInArray(array, item2))
|
||||
test.AssertResult(t, 2, findInArray(array, item3))
|
||||
|
||||
// Test finding non-existing item
|
||||
nonExistent := createStringScalarNode("nonexistent")
|
||||
test.AssertResult(t, -1, findInArray(array, nonExistent))
|
||||
}
|
||||
|
||||
func TestFindKeyInMap(t *testing.T) {
|
||||
key1 := createStringScalarNode("key1")
|
||||
value1 := createStringScalarNode("value1")
|
||||
key2 := createStringScalarNode("key2")
|
||||
value2 := createStringScalarNode("value2")
|
||||
|
||||
mapNode := &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Content: []*CandidateNode{key1, value1, key2, value2},
|
||||
}
|
||||
|
||||
// Test finding existing keys
|
||||
test.AssertResult(t, 0, findKeyInMap(mapNode, key1))
|
||||
test.AssertResult(t, 2, findKeyInMap(mapNode, key2))
|
||||
|
||||
// Test finding non-existing key
|
||||
nonExistent := createStringScalarNode("nonexistent")
|
||||
test.AssertResult(t, -1, findKeyInMap(mapNode, nonExistent))
|
||||
}
|
||||
|
||||
func TestRecurseNodeObjectEqual(t *testing.T) {
|
||||
// Create two objects with same content
|
||||
key1 := createStringScalarNode("key1")
|
||||
value1 := createStringScalarNode("value1")
|
||||
key2 := createStringScalarNode("key2")
|
||||
value2 := createStringScalarNode("value2")
|
||||
|
||||
obj1 := &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Content: []*CandidateNode{key1, value1, key2, value2},
|
||||
}
|
||||
|
||||
obj2 := &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Content: []*CandidateNode{key1, value1, key2, value2},
|
||||
}
|
||||
|
||||
// Create object with different values
|
||||
value3 := createStringScalarNode("value3")
|
||||
obj3 := &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Content: []*CandidateNode{key1, value3, key2, value2},
|
||||
}
|
||||
|
||||
// Create object with different keys
|
||||
key3 := createStringScalarNode("key3")
|
||||
obj4 := &CandidateNode{
|
||||
Kind: MappingNode,
|
||||
Content: []*CandidateNode{key1, value1, key3, value2},
|
||||
}
|
||||
|
||||
test.AssertResult(t, true, recurseNodeObjectEqual(obj1, obj2))
|
||||
test.AssertResult(t, false, recurseNodeObjectEqual(obj1, obj3))
|
||||
test.AssertResult(t, false, recurseNodeObjectEqual(obj1, obj4))
|
||||
}
|
||||
|
||||
func TestParseInt(t *testing.T) {
|
||||
type parseIntScenario struct {
|
||||
numberString string
|
||||
expectedParsedNumber int
|
||||
expectedError string
|
||||
}
|
||||
|
||||
scenarios := []parseIntScenario{
|
||||
{
|
||||
numberString: "34",
|
||||
expectedParsedNumber: 34,
|
||||
},
|
||||
{
|
||||
numberString: "10_000",
|
||||
expectedParsedNumber: 10000,
|
||||
},
|
||||
{
|
||||
numberString: "0x10",
|
||||
expectedParsedNumber: 16,
|
||||
},
|
||||
{
|
||||
numberString: "0o10",
|
||||
expectedParsedNumber: 8,
|
||||
},
|
||||
{
|
||||
numberString: "invalid",
|
||||
expectedError: "strconv.ParseInt",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range scenarios {
|
||||
actualNumber, err := parseInt(tt.numberString)
|
||||
if tt.expectedError != "" {
|
||||
if err == nil {
|
||||
t.Errorf("Expected error for '%s' but got none", tt.numberString)
|
||||
} else if !strings.Contains(err.Error(), tt.expectedError) {
|
||||
t.Errorf("Expected error containing '%s' for '%s', got '%s'", tt.expectedError, tt.numberString, err.Error())
|
||||
}
|
||||
continue
|
||||
}
|
||||
if err != nil {
|
||||
t.Errorf("Unexpected error for '%s': %v", tt.numberString, err)
|
||||
}
|
||||
test.AssertResultComplexWithContext(t, tt.expectedParsedNumber, actualNumber, tt.numberString)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHeadAndLineComment(t *testing.T) {
|
||||
node := &CandidateNode{
|
||||
HeadComment: "# head comment",
|
||||
LineComment: "# line comment",
|
||||
}
|
||||
|
||||
result := headAndLineComment(node)
|
||||
test.AssertResult(t, " head comment line comment", result)
|
||||
}
|
||||
|
||||
func TestHeadComment(t *testing.T) {
|
||||
node := &CandidateNode{
|
||||
HeadComment: "# head comment",
|
||||
}
|
||||
|
||||
result := headComment(node)
|
||||
test.AssertResult(t, " head comment", result)
|
||||
|
||||
// Test without #
|
||||
node.HeadComment = "no hash comment"
|
||||
result = headComment(node)
|
||||
test.AssertResult(t, "no hash comment", result)
|
||||
}
|
||||
|
||||
func TestLineComment(t *testing.T) {
|
||||
node := &CandidateNode{
|
||||
LineComment: "# line comment",
|
||||
}
|
||||
|
||||
result := lineComment(node)
|
||||
test.AssertResult(t, " line comment", result)
|
||||
|
||||
// Test without #
|
||||
node.LineComment = "no hash comment"
|
||||
result = lineComment(node)
|
||||
test.AssertResult(t, "no hash comment", result)
|
||||
}
|
||||
|
||||
func TestFootComment(t *testing.T) {
|
||||
node := &CandidateNode{
|
||||
FootComment: "# foot comment",
|
||||
}
|
||||
|
||||
result := footComment(node)
|
||||
test.AssertResult(t, " foot comment", result)
|
||||
|
||||
// Test without #
|
||||
node.FootComment = "no hash comment"
|
||||
result = footComment(node)
|
||||
test.AssertResult(t, "no hash comment", result)
|
||||
}
|
||||
|
||||
func TestKindString(t *testing.T) {
|
||||
test.AssertResult(t, "ScalarNode", KindString(ScalarNode))
|
||||
test.AssertResult(t, "SequenceNode", KindString(SequenceNode))
|
||||
test.AssertResult(t, "MappingNode", KindString(MappingNode))
|
||||
test.AssertResult(t, "AliasNode", KindString(AliasNode))
|
||||
test.AssertResult(t, "unknown!", KindString(Kind(999))) // Invalid kind
|
||||
}
|
||||
|
||||
// processEscapeCharactersScenario pairs a raw input string with the result
// expected after escape-sequence processing.
type processEscapeCharactersScenario struct {
	input    string
	expected string
}

var processEscapeCharactersScenarios = []processEscapeCharactersScenario{
	// No escapes present.
	{input: "", expected: ""},
	{input: "hello", expected: "hello"},

	// Escaped double quote.
	{input: "\\\"", expected: "\""},
	{input: "hello\\\"world", expected: "hello\"world"},

	// Newline.
	{input: "\\n", expected: "\n"},
	{input: "line1\\nline2", expected: "line1\nline2"},

	// Tab.
	{input: "\\t", expected: "\t"},
	{input: "hello\\tworld", expected: "hello\tworld"},

	// Carriage return.
	{input: "\\r", expected: "\r"},
	{input: "hello\\rworld", expected: "hello\rworld"},

	// Form feed.
	{input: "\\f", expected: "\f"},
	{input: "hello\\fworld", expected: "hello\fworld"},

	// Vertical tab.
	{input: "\\v", expected: "\v"},
	{input: "hello\\vworld", expected: "hello\vworld"},

	// Backspace.
	{input: "\\b", expected: "\b"},
	{input: "hello\\bworld", expected: "hello\bworld"},

	// Bell.
	{input: "\\a", expected: "\a"},
	{input: "hello\\aworld", expected: "hello\aworld"},

	// Combinations of several escapes.
	{input: "\\\"\\n\\t\\r\\f\\v\\b\\a", expected: "\"\n\t\r\f\v\b\a"},
	{input: "multiple\\nlines\\twith\\ttabs", expected: "multiple\nlines\twith\ttabs"},
	{input: "quote\\\"here", expected: "quote\"here"},

	// Backslash handling.
	{input: "\\\\", expected: "\\"}, // "\\\\" collapses to a single backslash
	{input: "\\\"test\\\"", expected: "\"test\""},
	{input: "a\\\\b", expected: "a\\b"}, // round-trip: escaped backslash survives

	// Double backslash before '(' is preserved for string interpolation.
	{input: "Hi \\\\(.value)", expected: "Hi \\\\(.value)"},
	{input: `a\\b`, expected: "a\\b"},
}
|
||||
|
||||
func TestProcessEscapeCharacters(t *testing.T) {
|
||||
for _, tt := range processEscapeCharactersScenarios {
|
||||
actual := processEscapeCharacters(tt.input)
|
||||
test.AssertResultComplexWithContext(t, tt.expected, actual, fmt.Sprintf("Input: %q", tt.input))
|
||||
}
|
||||
}
|
||||
|
||||
11
pkg/yqlib/no_hcl.go
Normal file
11
pkg/yqlib/no_hcl.go
Normal file
@ -0,0 +1,11 @@
|
||||
//go:build yq_nohcl
|
||||
|
||||
package yqlib
|
||||
|
||||
func NewHclDecoder() Decoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewHclEncoder(_ HclPreferences) Encoder {
|
||||
return nil
|
||||
}
|
||||
7
pkg/yqlib/no_kyaml.go
Normal file
7
pkg/yqlib/no_kyaml.go
Normal file
@ -0,0 +1,7 @@
|
||||
//go:build yq_nokyaml
|
||||
|
||||
package yqlib
|
||||
|
||||
func NewKYamlEncoder(_ KYamlPreferences) Encoder {
|
||||
return nil
|
||||
}
|
||||
@ -5,3 +5,11 @@ package yqlib
|
||||
func NewTomlDecoder() Decoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewTomlEncoder() Encoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewTomlEncoderWithPrefs(prefs TomlPreferences) Encoder {
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -4,7 +4,7 @@ import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
var specDocument = `- &CENTER { x: 1, y: 2 }
|
||||
var specDocument = `- &CENTRE { x: 1, y: 2 }
|
||||
- &LEFT { x: 0, y: 2 }
|
||||
- &BIG { r: 10 }
|
||||
- &SMALL { r: 1 }
|
||||
@ -139,7 +139,7 @@ var fixedAnchorOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
description: "FIXED: Merge multiple maps",
|
||||
subdescription: "Taken from https://yaml.org/type/merge.html. Same values as legacy, but with the correct key order.",
|
||||
document: specDocument + "- << : [ *CENTER, *BIG ]\n",
|
||||
document: specDocument + "- << : [ *CENTRE, *BIG ]\n",
|
||||
expression: ".[4] | explode(.)",
|
||||
expected: []string{"D0, P[4], (!!map)::x: 1\ny: 2\nr: 10\n"},
|
||||
},
|
||||
@ -171,7 +171,7 @@ var fixedAnchorOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "Exploding merge anchor should not explode neighbors",
|
||||
description: "Exploding merge anchor should not explode neighbours",
|
||||
subdescription: "b must not be exploded, as `r: *a` will become invalid",
|
||||
document: `{b: &b {a: &a 42}, r: *a, c: {<<: *b}}`,
|
||||
expression: `explode(.c)`,
|
||||
@ -181,7 +181,7 @@ var fixedAnchorOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
{
|
||||
skipDoc: true,
|
||||
description: "Exploding sequence merge anchor should not explode neighbors",
|
||||
description: "Exploding sequence merge anchor should not explode neighbours",
|
||||
subdescription: "b must not be exploded, as `r: *a` will become invalid",
|
||||
document: `{b: &b {a: &a 42}, r: *a, c: {<<: [*b]}}`,
|
||||
expression: `explode(.c)`,
|
||||
@ -265,7 +265,7 @@ var badAnchorOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
description: "LEGACY: Merge multiple maps",
|
||||
subdescription: "see https://yaml.org/type/merge.html. This has the correct data, but the wrong key order; set --yaml-fix-merge-anchor-to-spec=true to fix the key order.",
|
||||
document: specDocument + "- << : [ *CENTER, *BIG ]\n",
|
||||
document: specDocument + "- << : [ *CENTRE, *BIG ]\n",
|
||||
expression: ".[4] | explode(.)",
|
||||
expected: []string{"D0, P[4], (!!map)::r: 10\nx: 1\ny: 2\n"},
|
||||
},
|
||||
@ -297,7 +297,7 @@ var anchorOperatorScenarios = []expressionScenario{
|
||||
{
|
||||
description: "Merge one map",
|
||||
subdescription: "see https://yaml.org/type/merge.html",
|
||||
document: specDocument + "- << : *CENTER\n r: 10\n",
|
||||
document: specDocument + "- << : *CENTRE\n r: 10\n",
|
||||
expression: ".[4] | explode(.)",
|
||||
expected: []string{expectedSpecResult},
|
||||
},
|
||||
|
||||
@ -17,6 +17,9 @@ type envOpPreferences struct {
|
||||
}
|
||||
|
||||
func envOperator(_ *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
if ConfiguredSecurityPreferences.DisableEnvOps {
|
||||
return Context{}, fmt.Errorf("env operations have been disabled")
|
||||
}
|
||||
envName := expressionNode.Operation.CandidateNode.Value
|
||||
log.Debug("EnvOperator, env name:", envName)
|
||||
|
||||
@ -54,6 +57,9 @@ func envOperator(_ *dataTreeNavigator, context Context, expressionNode *Expressi
|
||||
}
|
||||
|
||||
func envsubstOperator(_ *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
if ConfiguredSecurityPreferences.DisableEnvOps {
|
||||
return Context{}, fmt.Errorf("env operations have been disabled")
|
||||
}
|
||||
var results = list.New()
|
||||
preferences := envOpPreferences{}
|
||||
if expressionNode.Operation.Preferences != nil {
|
||||
|
||||
@ -178,3 +178,40 @@ func TestEnvOperatorScenarios(t *testing.T) {
|
||||
}
|
||||
documentOperatorScenarios(t, "env-variable-operators", envOperatorScenarios)
|
||||
}
|
||||
|
||||
var envOperatorSecurityDisabledScenarios = []expressionScenario{
|
||||
{
|
||||
description: "env() operation fails when security is enabled",
|
||||
subdescription: "Use `--security-disable-env-ops` to disable env operations for security.",
|
||||
expression: `env("MYENV")`,
|
||||
expectedError: "env operations have been disabled",
|
||||
},
|
||||
{
|
||||
description: "strenv() operation fails when security is enabled",
|
||||
subdescription: "Use `--security-disable-env-ops` to disable env operations for security.",
|
||||
expression: `strenv("MYENV")`,
|
||||
expectedError: "env operations have been disabled",
|
||||
},
|
||||
{
|
||||
description: "envsubst() operation fails when security is enabled",
|
||||
subdescription: "Use `--security-disable-env-ops` to disable env operations for security.",
|
||||
expression: `"value: ${MYENV}" | envsubst`,
|
||||
expectedError: "env operations have been disabled",
|
||||
},
|
||||
}
|
||||
|
||||
func TestEnvOperatorSecurityDisabledScenarios(t *testing.T) {
|
||||
// Save original security preferences
|
||||
originalDisableEnvOps := ConfiguredSecurityPreferences.DisableEnvOps
|
||||
defer func() {
|
||||
ConfiguredSecurityPreferences.DisableEnvOps = originalDisableEnvOps
|
||||
}()
|
||||
|
||||
// Test that env() fails when DisableEnvOps is true
|
||||
ConfiguredSecurityPreferences.DisableEnvOps = true
|
||||
|
||||
for _, tt := range envOperatorSecurityDisabledScenarios {
|
||||
testScenario(t, &tt)
|
||||
}
|
||||
appendOperatorDocumentScenario(t, "env-variable-operators", envOperatorSecurityDisabledScenarios)
|
||||
}
|
||||
|
||||
@ -13,15 +13,15 @@ var firstOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
{
|
||||
description: "First matching element from array with multiple matches",
|
||||
document: "[{a: banana},{a: cat},{a: apple},{a: cat}]",
|
||||
document: "[{a: banana},{a: cat, b: firstCat},{a: apple},{a: cat, b: secondCat}]",
|
||||
expression: `first(.a == "cat")`,
|
||||
expected: []string{
|
||||
"D0, P[1], (!!map)::{a: cat}\n",
|
||||
"D0, P[1], (!!map)::{a: cat, b: firstCat}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "First matching element from array with numeric condition",
|
||||
document: "[{a: 10},{a: 100},{a: 1}]",
|
||||
document: "[{a: 10},{a: 100},{a: 1},{a: 101}]",
|
||||
expression: `first(.a > 50)`,
|
||||
expected: []string{
|
||||
"D0, P[1], (!!map)::{a: 100}\n",
|
||||
@ -29,10 +29,10 @@ var firstOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
{
|
||||
description: "First matching element from array with boolean condition",
|
||||
document: "[{a: false},{a: true},{a: false}]",
|
||||
document: "[{a: false},{a: true, b: firstTrue},{a: false}, {a: true, b: secondTrue}]",
|
||||
expression: `first(.a == true)`,
|
||||
expected: []string{
|
||||
"D0, P[1], (!!map)::{a: true}\n",
|
||||
"D0, P[1], (!!map)::{a: true, b: firstTrue}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -45,10 +45,10 @@ var firstOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
{
|
||||
description: "First matching element from array with complex condition",
|
||||
document: "[{a: dog, b: 5},{a: cat, b: 3},{a: apple, b: 7}]",
|
||||
expression: `first(.b > 4)`,
|
||||
document: "[{a: dog, b: 7},{a: cat, b: 3},{a: apple, b: 5}]",
|
||||
expression: `first(.b > 4 and .b < 6)`,
|
||||
expected: []string{
|
||||
"D0, P[0], (!!map)::{a: dog, b: 5}\n",
|
||||
"D0, P[2], (!!map)::{a: apple, b: 5}\n",
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -61,7 +61,7 @@ var firstOperatorScenarios = []expressionScenario{
|
||||
},
|
||||
{
|
||||
description: "First matching element from map with numeric condition",
|
||||
document: "x: {a: 10}\ny: {a: 100}\nz: {a: 1}",
|
||||
document: "x: {a: 10}\ny: {a: 100}\nz: {a: 101}",
|
||||
expression: `first(.a > 50)`,
|
||||
expected: []string{
|
||||
"D0, P[y], (!!map)::{a: 100}\n",
|
||||
|
||||
@ -63,6 +63,9 @@ func loadWithDecoder(filename string, decoder Decoder) (*CandidateNode, error) {
|
||||
|
||||
func loadStringOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
log.Debugf("loadString")
|
||||
if ConfiguredSecurityPreferences.DisableFileOps {
|
||||
return Context{}, fmt.Errorf("file operations have been disabled")
|
||||
}
|
||||
|
||||
var results = list.New()
|
||||
|
||||
@ -94,6 +97,9 @@ func loadStringOperator(d *dataTreeNavigator, context Context, expressionNode *E
|
||||
|
||||
func loadOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
|
||||
log.Debugf("loadOperator")
|
||||
if ConfiguredSecurityPreferences.DisableFileOps {
|
||||
return Context{}, fmt.Errorf("file operations have been disabled")
|
||||
}
|
||||
|
||||
loadPrefs := expressionNode.Operation.Preferences.(loadPrefs)
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user