Spelling with cspell
parent 980b0e6daa
commit cdb9b56066
Makefile (6 changes)

@@ -84,8 +84,12 @@ format: vendor
	${ENGINERUN} bash ./scripts/format.sh

+.PHONY: spelling
+spelling: format
+	${ENGINERUN} bash ./scripts/spelling.sh
+
.PHONY: secure
-secure: format
+secure: spelling
	${ENGINERUN} bash ./scripts/secure.sh

.PHONY: check
@@ -143,7 +143,7 @@ testBasicCatWithFilesNoDash() {
}

# when the nullinput flag is used
-# dont automatically read STDIN (this breaks github actions)
+# don't automatically read STDIN (this breaks github actions)
testBasicCreateFileGithubAction() {
cat /dev/null | ./yq -n ".a = 123" > test.yml
}
@@ -302,7 +302,7 @@ testBasicExitStatusNoEval() {
assertEquals 1 "$?"
}

-testBasicExtractFieldWithSeperator() {
+testBasicExtractFieldWithSeparator() {
cat >test.yml <<EOL
---
name: chart-name
@@ -312,7 +312,7 @@ EOL
assertEquals "chart-name" "$X"
}

-testBasicExtractMultipleFieldWithSeperator() {
+testBasicExtractMultipleFieldWithSeparator() {
cat >test.yml <<EOL
---
name: chart-name
@@ -11,7 +11,7 @@ a: test
EOL
}

-testLeadingSeperatorWithDoc() {
+testLeadingSeparatorWithDoc() {
cat >test.yml <<EOL
# hi peeps
# cool
@@ -35,7 +35,7 @@ EOM
}


-testLeadingSeperatorWithNewlinesNewDoc() {
+testLeadingSeparatorWithNewlinesNewDoc() {
cat >test.yml <<EOL
# hi peeps
# cool
@@ -62,7 +62,7 @@ EOM
assertEquals "$expected" "$X"
}

-testLeadingSeperatorWithNewlinesMoreComments() {
+testLeadingSeparatorWithNewlinesMoreComments() {
cat >test.yml <<EOL
# hi peeps
# cool
@@ -92,7 +92,7 @@ EOM
}


-testLeadingSeperatorWithDirective() {
+testLeadingSeparatorWithDirective() {
cat >test.yml <<EOL
%YAML 1.1
---
@@ -110,18 +110,18 @@ EOM
}


-testLeadingSeperatorPipeIntoEvalSeq() {
+testLeadingSeparatorPipeIntoEvalSeq() {
X=$(./yq e - < test.yml)
expected=$(cat test.yml)
assertEquals "$expected" "$X"
}

-testLeadingSeperatorExtractField() {
+testLeadingSeparatorExtractField() {
X=$(./yq e '.a' - < test.yml)
assertEquals "test" "$X"
}

-testLeadingSeperatorExtractFieldWithCommentsAfterSep() {
+testLeadingSeparatorExtractFieldWithCommentsAfterSep() {
cat >test.yml <<EOL
---
# hi peeps
@@ -132,7 +132,7 @@ EOL
assertEquals "test" "$X"
}

-testLeadingSeperatorExtractFieldWithCommentsBeforeSep() {
+testLeadingSeparatorExtractFieldWithCommentsBeforeSep() {
cat >test.yml <<EOL
# hi peeps
# cool
@@ -144,7 +144,7 @@ EOL
}


-testLeadingSeperatorExtractFieldMultiDoc() {
+testLeadingSeparatorExtractFieldMultiDoc() {
cat >test.yml <<EOL
---
a: test
@@ -161,7 +161,7 @@ EOM
assertEquals "$expected" "$X"
}

-testLeadingSeperatorExtractFieldMultiDocWithComments() {
+testLeadingSeparatorExtractFieldMultiDocWithComments() {
cat >test.yml <<EOL
# here
---
@@ -184,26 +184,26 @@ EOM
}


-testLeadingSeperatorEvalSeq() {
+testLeadingSeparatorEvalSeq() {
X=$(./yq e test.yml)
expected=$(cat test.yml)
assertEquals "$expected" "$X"
}

-testLeadingSeperatorPipeIntoEvalAll() {
+testLeadingSeparatorPipeIntoEvalAll() {
X=$(./yq ea - < test.yml)
expected=$(cat test.yml)
assertEquals "$expected" "$X"
}


-testLeadingSeperatorEvalAll() {
+testLeadingSeparatorEvalAll() {
X=$(./yq ea test.yml)
expected=$(cat test.yml)
assertEquals "$expected" "$X"
}

-testLeadingSeperatorMultiDocEvalSimple() {
+testLeadingSeparatorMultiDocEvalSimple() {
read -r -d '' expected << EOM
---
a: test
@@ -217,7 +217,7 @@ EOM
assertEquals "$expected" "$X"
}

-testLeadingSeperatorMultiDocInOneFile() {
+testLeadingSeparatorMultiDocInOneFile() {
cat >test.yml <<EOL
---
# hi peeps
@@ -231,7 +231,7 @@ EOL
assertEquals "$expected" "$X"
}

-testLeadingSeperatorMultiDocInOneFileEvalAll() {
+testLeadingSeparatorMultiDocInOneFileEvalAll() {
cat >test.yml <<EOL
---
# hi peeps
@@ -245,7 +245,7 @@ EOL
assertEquals "$expected" "$X"
}

-testLeadingSeperatorMultiDocEvalComments() {
+testLeadingSeparatorMultiDocEvalComments() {
cat >test.yml <<EOL
# hi peeps
# cool
@@ -273,7 +273,7 @@ EOM
assertEquals "$expected" "$X"
}

-testLeadingSeperatorMultiDocEvalCommentsTrailingSep() {
+testLeadingSeparatorMultiDocEvalCommentsTrailingSep() {
cat >test.yml <<EOL
# hi peeps
# cool
@@ -305,7 +305,7 @@ EOM
assertEquals "$expected" "$X"
}

-testLeadingSeperatorMultiMultiDocEvalCommentsTrailingSep() {
+testLeadingSeparatorMultiMultiDocEvalCommentsTrailingSep() {
cat >test.yml <<EOL
# hi peeps
# cool
@@ -345,7 +345,7 @@ EOM
assertEquals "$expected" "$X"
}

-testLeadingSeperatorMultiDocEvalCommentsLeadingSep() {
+testLeadingSeparatorMultiDocEvalCommentsLeadingSep() {
cat >test.yml <<EOL
---
# hi peeps
@@ -405,7 +405,7 @@ EOM
assertEquals "$expected" "$X"
}

-testLeadingSeperatorMultiDocEvalCommentsStripComments() {
+testLeadingSeparatorMultiDocEvalCommentsStripComments() {
cat >test.yml <<EOL
---
# hi peeps
@@ -428,7 +428,7 @@ EOM
assertEquals "$expected" "$X"
}

-testLeadingSeperatorMultiDocEvalCommentsLeadingSepNoDocFlag() {
+testLeadingSeparatorMultiDocEvalCommentsLeadingSepNoDocFlag() {
cat >test.yml <<EOL
---
# hi peeps
@@ -454,7 +454,7 @@ EOM
assertEquals "$expected" "$X"
}

-testLeadingSeperatorMultiDocEvalJsonFlag() {
+testLeadingSeparatorMultiDocEvalJsonFlag() {
cat >test.yml <<EOL
---
# hi peeps
@@ -483,7 +483,7 @@ EOM
assertEquals "$expected" "$X"
}

-testLeadingSeperatorMultiDocEvalAllJsonFlag() {
+testLeadingSeparatorMultiDocEvalAllJsonFlag() {
cat >test.yml <<EOL
---
# hi peeps
@@ -512,7 +512,7 @@ EOM
assertEquals "$expected" "$X"
}

-testLeadingSeperatorMultiDocEvalAll() {
+testLeadingSeparatorMultiDocEvalAll() {
read -r -d '' expected << EOM
---
a: test
@@ -8,12 +8,12 @@ import (

type boolFlag interface {
pflag.Value
-IsExplicitySet() bool
+IsExplicitlySet() bool
IsSet() bool
}

type unwrapScalarFlagStrc struct {
-explicitySet bool
+explicitlySet bool
value bool
}

@@ -21,8 +21,8 @@ func newUnwrapFlag() boolFlag {
return &unwrapScalarFlagStrc{value: true}
}

-func (f *unwrapScalarFlagStrc) IsExplicitySet() bool {
-return f.explicitySet
+func (f *unwrapScalarFlagStrc) IsExplicitlySet() bool {
+return f.explicitlySet
}

func (f *unwrapScalarFlagStrc) IsSet() bool {
@@ -37,7 +37,7 @@ func (f *unwrapScalarFlagStrc) Set(value string) error {

v, err := strconv.ParseBool(value)
f.value = v
-f.explicitySet = true
+f.explicitlySet = true
return err
}

@@ -104,7 +104,7 @@ func initCommand(cmd *cobra.Command, args []string) (string, []string, error) {
outputFormatType == yqlib.PropsOutputFormat {
unwrapScalar = true
}
-if unwrapScalarFlag.IsExplicitySet() {
+if unwrapScalarFlag.IsExplicitlySet() {
unwrapScalar = unwrapScalarFlag.IsSet()
}
cspell.config.yaml (new file, 14 lines)

@@ -0,0 +1,14 @@
---
$schema: https://raw.githubusercontent.com/streetsidesoftware/cspell/main/cspell.schema.json
version: '0.2'
language: en-GB
dictionaryDefinitions:
  - name: project-words
    path: './project-words.txt'
    addWords: true
dictionaries:
  - project-words
ignorePaths:
  - 'vendor'
  - 'bin'
  - '/project-words.txt'
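The config registers a custom dictionary named `project-words`, backed by `./project-words.txt` (added below), and skips `vendor`, `bin`, and the dictionary file itself. A hedged sketch of accepting a new project-specific word so cspell stops flagging it (the word shown is a placeholder; keeping the file case-insensitively sorted matches how the committed list appears to be ordered):

```bash
# 'someprojectword' is a placeholder for whatever cspell flagged.
echo "someprojectword" >> project-words.txt
# Keep the dictionary tidy: case-insensitive sort, written back in place.
sort -f -o project-words.txt project-words.txt
```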
@@ -49,7 +49,7 @@ func (dec *yamlDecoder) processReadStream(reader *bufio.Reader) (io.Reader, stri
}
} else if string(peekBytes) == "---" {
_, err := reader.ReadString('\n')
-sb.WriteString("$yqDocSeperator$\n")
+sb.WriteString("$yqDocSeparator$\n")
if errors.Is(err, io.EOF) {
return reader, sb.String(), nil
} else if err != nil {
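`$yqDocSeparator$` is an internal placeholder only: the decoder writes it into a document's leading content when it consumes a leading `---`, and the printer and encoders changed below translate it back into a real separator on output, so the token should never surface in yq's results. A small sanity check in the spirit of the acceptance tests in this commit (the sample file is illustrative):

```bash
# A document with a leading separator should round-trip intact.
printf -- '---\na: test\n' > test.yml
./yq e - < test.yml       # prints the document, leading --- included
./yq e '.a' - < test.yml  # prints: test
```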
@@ -26,7 +26,7 @@ will output
bar: 100
```

-## Group by field, with nuls
+## Group by field, with nulls
Given a sample.yml file of:
```yaml
- cat: dog
@@ -34,7 +34,7 @@ Given a sample.yml file of:
ascii_=_symbols: replaced with _
"ascii_ _controls": dropped (this example uses \t)
nonascii_א_characters: dropped
-effrot_expeñded_tò_preserve_accented_latin_letters: moderate (via unicode NFKD)
+effort_expeñded_tò_preserve_accented_latin_letters: moderate (via unicode NFKD)

```
then
@@ -46,7 +46,7 @@ will output
ascii___symbols='replaced with _'
ascii__controls='dropped (this example uses \t)'
nonascii__characters=dropped
-effrot_expended_to_preserve_accented_latin_letters='moderate (via unicode NFKD)'
+effort_expended_to_preserve_accented_latin_letters='moderate (via unicode NFKD)'
```

## Encode shell variables: empty values, arrays and maps
@@ -100,7 +100,7 @@ func (le *luaEncoder) encodeString(writer io.Writer, node *yaml.Node) error {
case yaml.SingleQuotedStyle:
quote = "'"

-// falltrough to regular ol' string
+// fallthrough to regular ol' string
}
return writeString(writer, quote+le.escape.Replace(node.Value)+quote)
}
@@ -37,7 +37,7 @@ func (pe *propertiesEncoder) PrintLeadingContent(writer io.Writer, content strin
if errReading != nil && !errors.Is(errReading, io.EOF) {
return errReading
}
-if strings.Contains(readline, "$yqDocSeperator$") {
+if strings.Contains(readline, "$yqDocSeparator$") {

if err := pe.PrintDocumentSeparator(writer); err != nil {
return err
@@ -47,7 +47,7 @@ func (ye *yamlEncoder) PrintLeadingContent(writer io.Writer, content string) err
if errReading != nil && !errors.Is(errReading, io.EOF) {
return errReading
}
-if strings.Contains(readline, "$yqDocSeperator$") {
+if strings.Contains(readline, "$yqDocSeparator$") {

if err := ye.PrintDocumentSeparator(writer); err != nil {
return err
@@ -26,8 +26,8 @@ var participleYqRules = []*participleYqRule{
{"RecursiveDecent", `\.\.`, recursiveDecentOpToken(false), 0},

{"GetVariable", `\$[a-zA-Z_\-0-9]+`, getVariableOpToken(), 0},
-{"AsignAsVariable", `as`, opTokenWithPrefs(assignVariableOpType, nil, assignVarPreferences{}), 0},
-{"AsignRefVariable", `ref`, opTokenWithPrefs(assignVariableOpType, nil, assignVarPreferences{IsReference: true}), 0},
+{"AssignAsVariable", `as`, opTokenWithPrefs(assignVariableOpType, nil, assignVarPreferences{}), 0},
+{"AssignRefVariable", `ref`, opTokenWithPrefs(assignVariableOpType, nil, assignVarPreferences{IsReference: true}), 0},

{"CreateMap", `:\s*`, opToken(createMapOpType), 0},
simpleOp("length", lengthOpType),
@@ -77,8 +77,8 @@ func assignCommentsOperator(d *dataTreeNavigator, context Context, expressionNod

func getCommentsOperator(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
preferences := expressionNode.Operation.Preferences.(commentOpPreferences)
-var startCommentCharaterRegExp = regexp.MustCompile(`^# `)
-var subsequentCommentCharaterRegExp = regexp.MustCompile(`\n# `)
+var startCommentCharacterRegExp = regexp.MustCompile(`^# `)
+var subsequentCommentCharacterRegExp = regexp.MustCompile(`\n# `)

log.Debugf("GetComments operator!")
var results = list.New()
@@ -112,8 +112,8 @@ func getCommentsOperator(d *dataTreeNavigator, context Context, expressionNode *
} else if preferences.FootComment {
comment = candidate.Node.FootComment
}
-comment = startCommentCharaterRegExp.ReplaceAllString(comment, "")
-comment = subsequentCommentCharaterRegExp.ReplaceAllString(comment, "\n")
+comment = startCommentCharacterRegExp.ReplaceAllString(comment, "")
+comment = subsequentCommentCharacterRegExp.ReplaceAllString(comment, "\n")

node := &yaml.Node{Kind: yaml.ScalarNode, Value: comment, Tag: "!!str"}
result := candidate.CreateReplacement(node)
@@ -10,7 +10,7 @@ import (
"gopkg.in/yaml.v3"
)

-func getStringParamter(parameterName string, d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (string, error) {
+func getStringParameter(parameterName string, d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (string, error) {
result, err := d.GetMatchingNodes(context.ReadOnlyClone(), expressionNode)

if err != nil {
@@ -24,7 +24,7 @@ func getStringParamter(parameterName string, d *dataTreeNavigator, context Conte

func withDateTimeFormat(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
if expressionNode.RHS.Operation.OperationType == blockOpType || expressionNode.RHS.Operation.OperationType == unionOpType {
-layout, err := getStringParamter("layout", d, context, expressionNode.RHS.LHS)
+layout, err := getStringParameter("layout", d, context, expressionNode.RHS.LHS)
if err != nil {
return Context{}, fmt.Errorf("could not get date time format: %w", err)
}
@@ -63,7 +63,7 @@ func parseDateTime(layout string, datestring string) (time.Time, error) {
}

func formatDateTime(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
-format, err := getStringParamter("format", d, context, expressionNode.RHS)
+format, err := getStringParameter("format", d, context, expressionNode.RHS)
layout := context.GetDateTimeLayout()

if err != nil {
@@ -97,7 +97,7 @@ func formatDateTime(d *dataTreeNavigator, context Context, expressionNode *Expre
}

func tzOp(d *dataTreeNavigator, context Context, expressionNode *ExpressionNode) (Context, error) {
-timezoneStr, err := getStringParamter("timezone", d, context, expressionNode.RHS)
+timezoneStr, err := getStringParameter("timezone", d, context, expressionNode.RHS)
layout := context.GetDateTimeLayout()

if err != nil {
@@ -14,7 +14,7 @@ var groupByOperatorScenarios = []expressionScenario{
},
},
{
-description: "Group by field, with nuls",
+description: "Group by field, with nulls",
document: `[{cat: dog}, {foo: 1, bar: 10}, {foo: 3, bar: 100}, {no: foo for you}, {foo: 1, bar: 1}]`,
expression: `group_by(.foo)`,
expected: []string{
@@ -50,7 +50,7 @@ func keysOperator(d *dataTreeNavigator, context Context, expressionNode *Express
if node.Kind == yaml.MappingNode {
targetNode = getMapKeys(node)
} else if node.Kind == yaml.SequenceNode {
-targetNode = getIndicies(node)
+targetNode = getIndices(node)
} else {
return Context{}, fmt.Errorf("Cannot get keys of %v, keys only works for maps and arrays", node.Tag)
}
@@ -70,7 +70,7 @@ func getMapKeys(node *yaml.Node) *yaml.Node {
return &yaml.Node{Kind: yaml.SequenceNode, Tag: "!!seq", Content: contents}
}

-func getIndicies(node *yaml.Node) *yaml.Node {
+func getIndices(node *yaml.Node) *yaml.Node {
var contents = make([]*yaml.Node, len(node.Content))

for index := range node.Content {
@@ -76,7 +76,7 @@ func pickOperator(d *dataTreeNavigator, context Context, expressionNode *Express
}

} else {
-return Context{}, fmt.Errorf("cannot pick indicies from type %v (%v)", node.Tag, candidate.GetNicePath())
+return Context{}, fmt.Errorf("cannot pick indices from type %v (%v)", node.Tag, candidate.GetNicePath())
}

results.PushBack(candidate.CreateReplacementWithDocWrappers(replacement))
@@ -202,7 +202,7 @@ func traverseArrayWithIndices(candidate *CandidateNode, indices []*yaml.Node, pr
contentLength := len(node.Content)
for contentLength <= index {
if contentLength == 0 {
-// default to nice yaml formating
+// default to nice yaml formatting
node.Style = 0
}

@@ -148,7 +148,7 @@ func (p *resultsPrinter) PrintResults(matchingNodes *list.List) error {
return errorWriting
}

-commentsStartWithSepExp := regexp.MustCompile(`^\$yqDocSeperator\$`)
+commentsStartWithSepExp := regexp.MustCompile(`^\$yqDocSeparator\$`)
commentStartsWithSeparator := commentsStartWithSepExp.MatchString(mappedDoc.LeadingContent)

if (p.previousDocIndex != mappedDoc.Document || p.previousFileIndex != mappedDoc.FileIndex) && !commentStartsWithSeparator {
@@ -82,15 +82,15 @@ func TestPrinterMultipleDocsInSequenceWithLeadingContent(t *testing.T) {
}

el := inputs.Front()
-el.Value.(*CandidateNode).LeadingContent = "# go cats\n$yqDocSeperator$\n"
+el.Value.(*CandidateNode).LeadingContent = "# go cats\n$yqDocSeparator$\n"
sample1 := nodeToList(el.Value.(*CandidateNode))

el = el.Next()
-el.Value.(*CandidateNode).LeadingContent = "$yqDocSeperator$\n"
+el.Value.(*CandidateNode).LeadingContent = "$yqDocSeparator$\n"
sample2 := nodeToList(el.Value.(*CandidateNode))

el = el.Next()
-el.Value.(*CandidateNode).LeadingContent = "$yqDocSeperator$\n# cool\n"
+el.Value.(*CandidateNode).LeadingContent = "$yqDocSeparator$\n# cool\n"
sample3 := nodeToList(el.Value.(*CandidateNode))

err = printer.PrintResults(sample1)
@@ -174,21 +174,21 @@ func TestPrinterMultipleFilesInSequenceWithLeadingContent(t *testing.T) {
elNode := el.Value.(*CandidateNode)
elNode.Document = 0
elNode.FileIndex = 0
-elNode.LeadingContent = "# go cats\n$yqDocSeperator$\n"
+elNode.LeadingContent = "# go cats\n$yqDocSeparator$\n"
sample1 := nodeToList(elNode)

el = el.Next()
elNode = el.Value.(*CandidateNode)
elNode.Document = 0
elNode.FileIndex = 1
-elNode.LeadingContent = "$yqDocSeperator$\n"
+elNode.LeadingContent = "$yqDocSeparator$\n"
sample2 := nodeToList(elNode)

el = el.Next()
elNode = el.Value.(*CandidateNode)
elNode.Document = 0
elNode.FileIndex = 2
-elNode.LeadingContent = "$yqDocSeperator$\n# cool\n"
+elNode.LeadingContent = "$yqDocSeparator$\n# cool\n"
sample3 := nodeToList(elNode)

err = printer.PrintResults(sample1)
@@ -239,7 +239,7 @@ func TestPrinterMultipleDocsInSinglePrintWithLeadingDoc(t *testing.T) {
panic(err)
}

-inputs.Front().Value.(*CandidateNode).LeadingContent = "# go cats\n$yqDocSeperator$\n"
+inputs.Front().Value.(*CandidateNode).LeadingContent = "# go cats\n$yqDocSeparator$\n"

err = printer.PrintResults(inputs)
if err != nil {
@@ -267,7 +267,7 @@ func TestPrinterMultipleDocsInSinglePrintWithLeadingDocTrailing(t *testing.T) {
if err != nil {
panic(err)
}
-inputs.Front().Value.(*CandidateNode).LeadingContent = "$yqDocSeperator$\n"
+inputs.Front().Value.(*CandidateNode).LeadingContent = "$yqDocSeparator$\n"
err = printer.PrintResults(inputs)
if err != nil {
panic(err)
@@ -313,7 +313,7 @@ func TestPrinterMultipleDocsJson(t *testing.T) {
var output bytes.Buffer
var writer = bufio.NewWriter(&output)
// note printDocSeparators is true, it should still not print document separators
-// when outputing JSON.
+// when outputting JSON.
encoder := NewJSONEncoder(0, false, false)
if encoder == nil {
t.Skipf("no support for %s output format", "json")
@@ -365,7 +365,7 @@ func TestPrinterNulSeparatorWithJson(t *testing.T) {
var output bytes.Buffer
var writer = bufio.NewWriter(&output)
// note printDocSeparators is true, it should still not print document separators
-// when outputing JSON.
+// when outputting JSON.
encoder := NewJSONEncoder(0, false, false)
if encoder == nil {
t.Skipf("no support for %s output format", "json")
|
@ -35,12 +35,12 @@ var shellVariablesScenarios = []formatScenario{
|
||||
"ascii_=_symbols: replaced with _" + "\n" +
|
||||
"\"ascii_\t_controls\": dropped (this example uses \\t)" + "\n" +
|
||||
"nonascii_\u05d0_characters: dropped" + "\n" +
|
||||
"effrot_expe\u00f1ded_t\u00f2_preserve_accented_latin_letters: moderate (via unicode NFKD)" + "\n",
|
||||
"effort_expe\u00f1ded_t\u00f2_preserve_accented_latin_letters: moderate (via unicode NFKD)" + "\n",
|
||||
expected: "" +
|
||||
"ascii___symbols='replaced with _'" + "\n" +
|
||||
"ascii__controls='dropped (this example uses \\t)'" + "\n" +
|
||||
"nonascii__characters=dropped" + "\n" +
|
||||
"effrot_expended_to_preserve_accented_latin_letters='moderate (via unicode NFKD)'" + "\n",
|
||||
"effort_expended_to_preserve_accented_latin_letters='moderate (via unicode NFKD)'" + "\n",
|
||||
},
|
||||
{
|
||||
description: "Encode shell variables: empty values, arrays and maps",
|
||||
@ -65,10 +65,10 @@ func TestShellVariableScenarios(t *testing.T) {
|
||||
for i, s := range shellVariablesScenarios {
|
||||
genericScenarios[i] = s
|
||||
}
|
||||
documentScenarios(t, "usage", "shellvariables", genericScenarios, documentShellVaraibleScenario)
|
||||
documentScenarios(t, "usage", "shellvariables", genericScenarios, documentShellVariableScenario)
|
||||
}
|
||||
|
||||
func documentShellVaraibleScenario(_ *testing.T, w *bufio.Writer, i interface{}) {
|
||||
func documentShellVariableScenario(_ *testing.T, w *bufio.Writer, i interface{}) {
|
||||
s := i.(formatScenario)
|
||||
if s.skipDoc {
|
||||
return
|
||||
|
@@ -44,7 +44,7 @@ func (w *writeInPlaceHandlerImpl) CreateTempFile() (*os.File, error) {
}

func (w *writeInPlaceHandlerImpl) FinishWriteInPlace(evaluatedSuccessfully bool) error {
-log.Debug("Going to write-inplace, evaluatedSuccessfully=%v, target=%v", evaluatedSuccessfully, w.inputFilename)
+log.Debug("Going to write in place, evaluatedSuccessfully=%v, target=%v", evaluatedSuccessfully, w.inputFilename)
safelyCloseFile(w.tempFile)
if evaluatedSuccessfully {
log.Debug("Moving temp file to target")
@@ -671,7 +671,7 @@ func documentXMLScenario(t *testing.T, w *bufio.Writer, i interface{}) {
case "decode-raw-token-off":
documentXMLDecodeKeepNsRawTokenScenario(w, s)
case "roundtrip-skip-directives":
-documentXMLSkipDirectrivesScenario(w, s)
+documentXMLSkipDirectivesScenario(w, s)

default:
panic(fmt.Sprintf("unhandled scenario type %q", s.scenarioType))
@@ -787,7 +787,7 @@ func documentXMLRoundTripScenario(w *bufio.Writer, s formatScenario) {
writeOrPanic(w, fmt.Sprintf("```xml\n%v```\n\n", mustProcessFormatScenario(s, NewXMLDecoder(ConfiguredXMLPreferences), NewXMLEncoder(2, ConfiguredXMLPreferences))))
}

-func documentXMLSkipDirectrivesScenario(w *bufio.Writer, s formatScenario) {
+func documentXMLSkipDirectivesScenario(w *bufio.Writer, s formatScenario) {
writeOrPanic(w, fmt.Sprintf("## %v\n", s.description))

if s.subdescription != "" {
project-words.txt (new file, 253 lines)

@@ -0,0 +1,253 @@
abxbbxdbxebxczzx
abxbbxdbxebxczzy
accum
Accum
adithyasunil
AEDT
água
ÁGUA
alecthomas
appleapple
Astuff
autocreating
autoparse
AWST
axbxcxdxe
axbxcxdxexxx
bananabanana
barp
bitnami
blarp
blddir
Bobo
BODMAS
bonapite
Brien
Bstuff
BUILDKIT
buildpackage
catmeow
CATYPE
CBVVE
chardata
chillum
choco
chomper
cleanup
cmlu
colorise
colors
compinit
coolioo
coverprofile
createmap
csvd
CSVUTF
currentlabel
cygpath
czvf
datestring
datetime
Datetime
datetimes
DEBEMAIL
debhelper
Debugf
debuild
delish
delpaths
DELPATHS
devorbitus
devscripts
dimchansky
Dont
dput
elliotchance
endhint
endofname
Entriesfrom
envsubst
errorlevel
Escandón
Evalall
fakefilename
fakeroot
Farah
fatih
Fifi
filebytes
Fileish
foobar
foobaz
foof
frood
fullpath
gitbook
githubactions
gnupg
goccy
gofmt
gogo
golangci
GOMODCACHE
GOPATH
gosec
gota
goversion
GOVERSION
haha
headcommentwas
hellno
herbygillot
hexdump
Hoang
hostpath
hotdog
howdy
incase
inlinetables
inplace
ints
ireduce
iwatch
jinzhu
jq's
jsond
keygrip
Keygrip
KEYGRIP
KEYID
keyvalue
kwak
lalilu
ldflags
LDFLAGS
lexer
Lexer
libdistro
lindex
linecomment
magiconair
mapvalues
Mier
mikefarah
minideb
minishift
mipsle
mitchellh
mktemp
multidoc
multimaint
multine
myenv
myenvnonexisting
myfile
myformat
ndjson
NDJSON
NFKD
nixpkgs
nojson
nonascii
nonempty
noninteractive
Nonquoting
nosec
notoml
noxml
nullinput
onea
Oneshot
opencollect
opstack
orderedmap
orignal
osarch
overridign
pacman
Padder
pandoc
parsechangelog
pcsv
pelletier
pflag
prechecking
Prerelease
proc
propsd
qylib
readline
realnames
realpath
repr
rhash
rindex
risentveber
rmescandon
Rosey
roundtrip
Roundtrip
roundtripping
runningvms
sadface
selfupdate
setpath
sharedfolder
Sharedfolder
shellvariables
shellvars
shortfunc
shortpipe
shunit
Sidenote
snapcraft
somevalue
splt
squeek
srcdir
stackoverflow
stiched
Strc
strenv
strload
stylig
subarray
subchild
subdescription
submatch
submatches
SUBSTR
tempfile
tfstate
Tfstate
thar
timezone
Timezone
timezones
Timezones
tojson
Tokenvalue
traver
tsvd
Tuan
tzdata
Uhoh
updateassign
urid
utfbom
Warningf
Wazowski
webi
Webi
whereever
winget
withdots
wizz
woop
workdir
Writable
xmld
xyzzy
yamld
yqlib
zabbix
@@ -26,7 +26,7 @@ if [ "$1" == "" ]; then
fi

if [ "$2" != "" ]; then
-# so we dont match x.tar.gz when 'x' is given
+# so we don't match x.tar.gz when 'x' is given
file="$2\s"
else
file=""
@@ -47,7 +47,7 @@ fi

grepMatch=$(grep -m 1 -n "$1" checksums_hashes_order)
if [ "$grepMatch" == "" ]; then
-echo "Could not find hash algorith '$1' in checksums_hashes_order"
+echo "Could not find hash algorithm '$1' in checksums_hashes_order"
exit 1
fi

@@ -1,7 +1,7 @@
#! /bin/bash
set -e

-# note that this reqires pandoc to be installed.
+# note that this requires pandoc to be installed.

cat ./pkg/yqlib/doc/operators/headers/Main.md > man.md
printf "\n# HOW IT WORKS\n" >> man.md
@@ -1,7 +1,7 @@
#! /bin/bash
set -e

-# note that this reqires pandoc to be installed.
+# note that this requires pandoc to be installed.

pandoc \
--variable=title:"YQ" \
@@ -30,7 +30,7 @@ show_help() {
echo " distribution is considered"
echo " --goversion VERSION The version of Golang to use. Default to $GOVERSION"
echo " -k, --sign-key KEYID Sign the package sources with the provided gpg key id (long format). When not provided this"
-echo " paramater, the generated sources are not signed"
+echo " parameter, the generated sources are not signed"
echo " -s, --sign Sign the package sources with a gpg key of the maintainer"
echo " -m, --maintainer WHO The maintainer used as author of the changelog. git.name and git.email (see git config) is"
echo " the considered format"
scripts/spelling.sh (new executable file, 3 lines)

@@ -0,0 +1,3 @@
#!/bin/bash

npx cspell --no-progress "**/*.{sh,go}"
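This script is the single entry point the new `spelling` Makefile target calls, and it only checks shell and Go sources. While fixing reported words it can be handy to narrow the glob (the path below is illustrative; the CLI flag is the same one the script uses):

```bash
# Full check, exactly as the make target runs it:
bash ./scripts/spelling.sh

# Narrower run while iterating on one package:
npx cspell --no-progress "pkg/yqlib/**/*.go"
```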