enable errorlint linter

This commit is contained in:
Matthieu MOREL 2021-11-22 07:43:38 +01:00 committed by Mike Farah
parent 3f4bbf748d
commit 8711042c98
8 changed files with 22 additions and 17 deletions

View File

@ -2,6 +2,7 @@ run:
timeout: 5m
linters:
enable:
- errorlint
- gofmt
- goimports
- gosec

View File

@ -3,6 +3,7 @@ package yqlib
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io"
@ -126,7 +127,7 @@ func (o *orderedMap) UnmarshalJSON(data []byte) error {
// cycle through k/v
var tok json.Token
for tok, err = dec.Token(); err != io.EOF; tok, err = dec.Token() {
for tok, err = dec.Token(); !errors.Is(err, io.EOF); tok, err = dec.Token() {
// we can expect two types: string or Delim. Delim automatically means
// that it is the closing bracket of the object, whereas string means
// that there is another key.
@ -142,7 +143,7 @@ func (o *orderedMap) UnmarshalJSON(data []byte) error {
o.kv = append(o.kv, kv)
}
// unexpected error
if err != nil && err != io.EOF {
if err != nil && !errors.Is(err, io.EOF) {
return err
}
return nil

View File

@ -467,7 +467,7 @@ func (p *expressionTokeniserImpl) Tokenise(expression string) ([]*token, error)
scanner, err := p.lexer.Scanner([]byte(expression))
if err != nil {
return nil, fmt.Errorf("Parsing expression: %v", err)
return nil, fmt.Errorf("Parsing expression: %w", err)
}
var tokens []*token
for tok, err, eof := scanner.Next(); !eof; tok, err, eof = scanner.Next() {
@ -478,7 +478,7 @@ func (p *expressionTokeniserImpl) Tokenise(expression string) ([]*token, error)
tokens = append(tokens, currentToken)
}
if err != nil {
return nil, fmt.Errorf("Parsing expression: %v", err)
return nil, fmt.Errorf("Parsing expression: %w", err)
}
}
var postProcessedTokens = make([]*token, 0)

View File

@ -2,6 +2,7 @@ package yqlib
import (
"bufio"
"errors"
"io"
"os"
)
@ -63,7 +64,7 @@ func (f *frontMatterHandlerImpl) Split() error {
for {
peekBytes, err := reader.Peek(3)
if err == io.EOF {
if errors.Is(err, io.EOF) {
// we've finished reading the yaml content..I guess
break
} else if err != nil {
@ -75,7 +76,7 @@ func (f *frontMatterHandlerImpl) Split() error {
}
line, errReading := reader.ReadString('\n')
lineCount = lineCount + 1
if errReading != nil && errReading != io.EOF {
if errReading != nil && !errors.Is(errReading, io.EOF) {
return errReading
}

View File

@ -194,7 +194,7 @@ func traverseArrayWithIndices(candidate *CandidateNode, indices []*yaml.Node, pr
continue
}
if err != nil {
return nil, fmt.Errorf("Cannot index array with '%v' (%v)", indexNode.Value, err)
return nil, fmt.Errorf("Cannot index array with '%v' (%w)", indexNode.Value, err)
}
indexToUse := index
contentLength := int64(len(node.Content))

View File

@ -45,7 +45,7 @@ func testScenario(t *testing.T, s *expressionScenario) {
node, err := NewExpressionParser().ParseExpression(s.expression)
if err != nil {
t.Error(fmt.Errorf("Error parsing expression %v of %v: %v", s.expression, s.description, err))
t.Error(fmt.Errorf("Error parsing expression %v of %v: %w", s.expression, s.description, err))
return
}
inputs := list.New()
@ -84,7 +84,7 @@ func testScenario(t *testing.T, s *expressionScenario) {
context, err := NewDataTreeNavigator().GetMatchingNodes(Context{MatchingNodes: inputs}, node)
if err != nil {
t.Error(fmt.Errorf("%v: %v", err, s.expression))
t.Error(fmt.Errorf("%w: %v", err, s.expression))
return
}
test.AssertResultComplexWithContext(t, s.expected, resultsToString(t, context.MatchingNodes), fmt.Sprintf("desc: %v\nexp: %v\ndoc: %v", s.description, s.expression, s.document))
@ -253,7 +253,7 @@ func documentOutput(t *testing.T, w *bufio.Writer, s expressionScenario, formatt
node, err := NewExpressionParser().ParseExpression(s.expression)
if err != nil {
t.Error(fmt.Errorf("Error parsing expression %v of %v: %v", s.expression, s.description, err))
t.Error(fmt.Errorf("Error parsing expression %v of %v: %w", s.expression, s.description, err))
return
}

View File

@ -2,6 +2,7 @@ package yqlib
import (
"container/list"
"errors"
"io"
"os"
@ -95,7 +96,7 @@ func (s *streamEvaluator) Evaluate(filename string, reader io.Reader, node *Expr
var dataBucket yaml.Node
errorReading := decoder.Decode(&dataBucket)
if errorReading == io.EOF {
if errors.Is(errorReading, io.EOF) {
s.fileIndex = s.fileIndex + 1
return currentIndex, nil
} else if errorReading != nil {

View File

@ -3,6 +3,7 @@ package yqlib
import (
"bufio"
"container/list"
"errors"
"io"
"os"
"regexp"
@ -44,7 +45,7 @@ func processLeadingContent(mappedDoc *CandidateNode, writer io.Writer, printDocS
for {
readline, errReading := reader.ReadString('\n')
if errReading != nil && errReading != io.EOF {
if errReading != nil && !errors.Is(errReading, io.EOF) {
return errReading
}
if strings.Contains(readline, "$yqDocSeperator$") {
@ -59,7 +60,7 @@ func processLeadingContent(mappedDoc *CandidateNode, writer io.Writer, printDocS
}
}
if errReading == io.EOF {
if errors.Is(errReading, io.EOF) {
if readline != "" {
// the last comment we read didn't have a new line, put one in
if err := writeString(writer, "\n"); err != nil {
@ -78,7 +79,7 @@ func processReadStream(reader *bufio.Reader) (io.Reader, string, error) {
var sb strings.Builder
for {
peekBytes, err := reader.Peek(3)
if err == io.EOF {
if errors.Is(err, io.EOF) {
// EOF is handled elsewhere.
return reader, sb.String(), nil
} else if err != nil {
@ -86,7 +87,7 @@ func processReadStream(reader *bufio.Reader) (io.Reader, string, error) {
} else if string(peekBytes) == "---" {
_, err := reader.ReadString('\n')
sb.WriteString("$yqDocSeperator$\n")
if err == io.EOF {
if errors.Is(err, io.EOF) {
return reader, sb.String(), nil
} else if err != nil {
return reader, sb.String(), err
@ -94,7 +95,7 @@ func processReadStream(reader *bufio.Reader) (io.Reader, string, error) {
} else if commentLineRegEx.MatchString(string(peekBytes)) {
line, err := reader.ReadString('\n')
sb.WriteString(line)
if err == io.EOF {
if errors.Is(err, io.EOF) {
return reader, sb.String(), nil
} else if err != nil {
return reader, sb.String(), err
@ -114,7 +115,7 @@ func readDocuments(reader io.Reader, filename string, fileIndex int) (*list.List
var dataBucket yaml.Node
errorReading := decoder.Decode(&dataBucket)
if errorReading == io.EOF {
if errors.Is(errorReading, io.EOF) {
switch reader := reader.(type) {
case *os.File:
safelyCloseFile(reader)