Sebastiaan van Stijn
2024-06-04 11:33:43 +02:00
parent dbdd3601eb
commit 9358f84668
146 changed files with 2661 additions and 1102 deletions

View File

@ -1,10 +1,10 @@
package attestations
import (
"encoding/csv"
"strings"
"github.com/pkg/errors"
"github.com/tonistiigi/go-csvvalue"
)
const (
@ -63,8 +63,7 @@ func Parse(values map[string]string) (map[string]map[string]string, error) {
if v == "" {
continue
}
csvReader := csv.NewReader(strings.NewReader(v))
fields, err := csvReader.Read()
fields, err := csvvalue.Fields(v, nil)
if err != nil {
return nil, errors.Wrapf(err, "failed to parse %s", k)
}
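
For reference, a minimal sketch of the replacement call: csvvalue.Fields parses a single CSV record without building an encoding/csv Reader per value (the attestation string below is made up):

package main

import (
	"fmt"

	"github.com/tonistiigi/go-csvvalue"
)

func main() {
	// One CSV record in, one []string out; the nil second argument lets the
	// package allocate the destination slice, as in the diff above.
	fields, err := csvvalue.Fields("type=sbom,generator=example/scanner", nil)
	if err != nil {
		panic(err)
	}
	fmt.Println(fields) // [type=sbom generator=example/scanner]
}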

View File

@ -0,0 +1,146 @@
package linter
import (
"fmt"
"strconv"
"strings"
"github.com/moby/buildkit/frontend/dockerfile/parser"
"github.com/pkg/errors"
)
type Config struct {
Warn LintWarnFunc
SkipRules []string
SkipAll bool
ReturnAsError bool
}
type Linter struct {
SkippedRules map[string]struct{}
CalledRules []string
SkipAll bool
ReturnAsError bool
Warn LintWarnFunc
}
func New(config *Config) *Linter {
toret := &Linter{
SkippedRules: map[string]struct{}{},
CalledRules: []string{},
Warn: config.Warn,
}
toret.SkipAll = config.SkipAll
toret.ReturnAsError = config.ReturnAsError
for _, rule := range config.SkipRules {
toret.SkippedRules[rule] = struct{}{}
}
return toret
}
func (lc *Linter) Run(rule LinterRuleI, location []parser.Range, txt ...string) {
if lc == nil || lc.Warn == nil || lc.SkipAll {
return
}
rulename := rule.RuleName()
if _, ok := lc.SkippedRules[rulename]; ok {
return
}
lc.CalledRules = append(lc.CalledRules, rulename)
rule.Run(lc.Warn, location, txt...)
}
func (lc *Linter) Error() error {
if lc == nil || !lc.ReturnAsError {
return nil
}
if len(lc.CalledRules) == 0 {
return nil
}
var rules []string
uniqueRules := map[string]struct{}{}
for _, r := range lc.CalledRules {
uniqueRules[r] = struct{}{}
}
for r := range uniqueRules {
rules = append(rules, r)
}
return errors.Errorf("lint violation found for rules: %s", strings.Join(rules, ", "))
}
type LinterRuleI interface {
RuleName() string
Run(warn LintWarnFunc, location []parser.Range, txt ...string)
}
type LinterRule[F any] struct {
Name string
Description string
URL string
Format F
}
func (rule *LinterRule[F]) RuleName() string {
return rule.Name
}
func (rule *LinterRule[F]) Run(warn LintWarnFunc, location []parser.Range, txt ...string) {
if len(txt) == 0 {
txt = []string{rule.Description}
}
short := strings.Join(txt, " ")
warn(rule.Name, rule.Description, rule.URL, short, location)
}
func LintFormatShort(rulename, msg string, line int) string {
msg = fmt.Sprintf("%s: %s", rulename, msg)
if line > 0 {
msg = fmt.Sprintf("%s (line %d)", msg, line)
}
return msg
}
type LintWarnFunc func(rulename, description, url, fmtmsg string, location []parser.Range)
func ParseLintOptions(checkStr string) (*Config, error) {
checkStr = strings.TrimSpace(checkStr)
if checkStr == "" {
return &Config{}, nil
}
parts := strings.SplitN(checkStr, ";", 2)
var skipSet []string
var errorOnWarn, skipAll bool
for _, p := range parts {
k, v, ok := strings.Cut(p, "=")
if !ok {
return nil, errors.Errorf("invalid check option %q", p)
}
k = strings.TrimSpace(k)
switch k {
case "skip":
v = strings.TrimSpace(v)
if v == "all" {
skipAll = true
} else {
skipSet = strings.Split(v, ",")
for i, rule := range skipSet {
skipSet[i] = strings.TrimSpace(rule)
}
}
case "error":
v, err := strconv.ParseBool(strings.TrimSpace(v))
if err != nil {
return nil, errors.Wrapf(err, "failed to parse check option %q", p)
}
errorOnWarn = v
default:
return nil, errors.Errorf("invalid check option %q", k)
}
}
return &Config{
SkipRules: skipSet,
SkipAll: skipAll,
ReturnAsError: errorOnWarn,
}, nil
}
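
A hedged sketch of how these pieces compose; the rule choice, warning callback, and option string are illustrative rather than taken from the diff, and the fragment is assumed to run inside a function returning error with fmt, linter, and parser imported:

cfg, err := linter.ParseLintOptions("skip=JSONArgsRecommended;error=true")
if err != nil {
	return err
}
cfg.Warn = func(rulename, description, url, fmtmsg string, location []parser.Range) {
	// no location is passed below, so report without a line number
	fmt.Println(linter.LintFormatShort(rulename, fmtmsg, 0))
}
lint := linter.New(cfg)
// JSONArgsRecommended is in SkippedRules and is ignored;
// StageNameCasing still fires and is recorded in CalledRules.
lint.Run(&linter.RuleStageNameCasing, nil, linter.RuleStageNameCasing.Format("BUILD"))
// error=true became ReturnAsError, so Error() reports the rules that fired.
return lint.Error()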

View File

@ -0,0 +1,127 @@
package linter
import (
"fmt"
)
var (
RuleStageNameCasing = LinterRule[func(string) string]{
Name: "StageNameCasing",
Description: "Stage names should be lowercase",
URL: "https://docs.docker.com/go/dockerfile/rule/stage-name-casing/",
Format: func(stageName string) string {
return fmt.Sprintf("Stage name '%s' should be lowercase", stageName)
},
}
RuleFromAsCasing = LinterRule[func(string, string) string]{
Name: "FromAsCasing",
Description: "The 'as' keyword should match the case of the 'from' keyword",
URL: "https://docs.docker.com/go/dockerfile/rule/from-as-casing/",
Format: func(from, as string) string {
return fmt.Sprintf("'%s' and '%s' keywords' casing do not match", as, from)
},
}
RuleNoEmptyContinuation = LinterRule[func() string]{
Name: "NoEmptyContinuation",
Description: "Empty continuation lines will become errors in a future release",
URL: "https://docs.docker.com/go/dockerfile/rule/no-empty-continuation/",
Format: func() string {
return "Empty continuation line"
},
}
RuleConsistentInstructionCasing = LinterRule[func(string, string) string]{
Name: "ConsistentInstructionCasing",
Description: "All commands within the Dockerfile should use the same casing (either upper or lower)",
URL: "https://docs.docker.com/go/dockerfile/rule/consistent-instruction-casing/",
Format: func(violatingCommand, correctCasing string) string {
return fmt.Sprintf("Command '%s' should match the case of the command majority (%s)", violatingCommand, correctCasing)
},
}
RuleDuplicateStageName = LinterRule[func(string) string]{
Name: "DuplicateStageName",
Description: "Stage names should be unique",
URL: "https://docs.docker.com/go/dockerfile/rule/duplicate-stage-name/",
Format: func(stageName string) string {
return fmt.Sprintf("Duplicate stage name %q, stage names should be unique", stageName)
},
}
RuleReservedStageName = LinterRule[func(string) string]{
Name: "ReservedStageName",
Description: "Reserved words should not be used as stage names",
URL: "https://docs.docker.com/go/dockerfile/rule/reserved-stage-name/",
Format: func(reservedStageName string) string {
return fmt.Sprintf("Stage name should not use the same name as reserved stage %q", reservedStageName)
},
}
RuleJSONArgsRecommended = LinterRule[func(instructionName string) string]{
Name: "JSONArgsRecommended",
Description: "JSON arguments recommended for ENTRYPOINT/CMD to prevent unintended behavior related to OS signals",
URL: "https://docs.docker.com/go/dockerfile/rule/json-args-recommended/",
Format: func(instructionName string) string {
return fmt.Sprintf("JSON arguments recommended for %s to prevent unintended behavior related to OS signals", instructionName)
},
}
RuleMaintainerDeprecated = LinterRule[func() string]{
Name: "MaintainerDeprecated",
Description: "The MAINTAINER instruction is deprecated, use a label instead to define an image author",
URL: "https://docs.docker.com/go/dockerfile/rule/maintainer-deprecated/",
Format: func() string {
return "Maintainer instruction is deprecated in favor of using label"
},
}
RuleUndefinedArgInFrom = LinterRule[func(string, string) string]{
Name: "UndefinedArgInFrom",
Description: "FROM command must use declared ARGs",
URL: "https://docs.docker.com/go/dockerfile/rule/undefined-arg-in-from/",
Format: func(baseArg, suggest string) string {
out := fmt.Sprintf("FROM argument '%s' is not declared", baseArg)
if suggest != "" {
out += fmt.Sprintf(" (did you mean %s?)", suggest)
}
return out
},
}
RuleWorkdirRelativePath = LinterRule[func(workdir string) string]{
Name: "WorkdirRelativePath",
Description: "Relative workdir without an absolute workdir declared within the build can have unexpected results if the base image changes",
URL: "https://docs.docker.com/go/dockerfile/rule/workdir-relative-path/",
Format: func(workdir string) string {
return fmt.Sprintf("Relative workdir %q can have unexpected results if the base image changes", workdir)
},
}
RuleUndefinedVar = LinterRule[func(string, string) string]{
Name: "UndefinedVar",
Description: "Variables should be defined before their use",
URL: "https://docs.docker.com/go/dockerfile/rule/undefined-var/",
Format: func(arg, suggest string) string {
out := fmt.Sprintf("Usage of undefined variable '$%s'", arg)
if suggest != "" {
out += fmt.Sprintf(" (did you mean $%s?)", suggest)
}
return out
},
}
RuleMultipleInstructionsDisallowed = LinterRule[func(instructionName string) string]{
Name: "MultipleInstructionsDisallowed",
Description: "Multiple instructions of the same type should not be used in the same stage",
URL: "https://docs.docker.com/go/dockerfile/rule/multiple-instructions-disallowed/",
Format: func(instructionName string) string {
return fmt.Sprintf("Multiple %s instructions should not be used in the same stage because only the last one will be used", instructionName)
},
}
RuleLegacyKeyValueFormat = LinterRule[func(cmdName string) string]{
Name: "LegacyKeyValueFormat",
Description: "Legacy key/value format with whitespace separator should not be used",
URL: "https://docs.docker.com/go/dockerfile/rule/legacy-key-value-format/",
Format: func(cmdName string) string {
return fmt.Sprintf("\"%s key=value\" should be used instead of legacy \"%s key value\" format", cmdName, cmdName)
},
}
RuleInvalidBaseImagePlatform = LinterRule[func(string, string, string) string]{
Name: "InvalidBaseImagePlatform",
Description: "Base image platform does not match expected target platform",
Format: func(image, expected, actual string) string {
return fmt.Sprintf("Base image %s was pulled with platform %q, expected %q for current build", image, actual, expected)
},
}
)
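
Each rule pairs a stable name and documentation URL with a typed Format callback; a short hedged example of rendering two of the messages (stage name and line number are made up):

msg := linter.RuleStageNameCasing.Format("Builder")
fmt.Println(linter.LintFormatShort(linter.RuleStageNameCasing.Name, msg, 3))
// StageNameCasing: Stage name 'Builder' should be lowercase (line 3)

fmt.Println(linter.RuleFromAsCasing.Format("FROM", "as"))
// 'as' and 'FROM' keywords' casing do not match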

View File

@ -17,6 +17,7 @@ import (
var (
errDockerfileNotStringArray = errors.New("when using JSON array syntax, arrays must be comprised of strings only")
errDockerfileNotJSONArray = errors.New("not a JSON array")
)
const (
@ -58,11 +59,11 @@ func parseWords(rest string, d *directives) []string {
words := []string{}
phase := inSpaces
word := ""
quote := '\000'
blankOK := false
var ch rune
var chWidth int
var sbuilder strings.Builder
for pos := 0; pos <= len(rest); pos += chWidth {
if pos != len(rest) {
@ -79,18 +80,18 @@ func parseWords(rest string, d *directives) []string {
phase = inWord // found it, fall through
}
if (phase == inWord || phase == inQuote) && (pos == len(rest)) {
if blankOK || len(word) > 0 {
words = append(words, word)
if blankOK || sbuilder.Len() > 0 {
words = append(words, sbuilder.String())
}
break
}
if phase == inWord {
if unicode.IsSpace(ch) {
phase = inSpaces
if blankOK || len(word) > 0 {
words = append(words, word)
if blankOK || sbuilder.Len() > 0 {
words = append(words, sbuilder.String())
}
word = ""
sbuilder.Reset()
blankOK = false
continue
}
@ -106,11 +107,11 @@ func parseWords(rest string, d *directives) []string {
// If we're not quoted and we see an escape token, then always just
// add the escape token plus the char to the word, even if the char
// is a quote.
word += string(ch)
sbuilder.WriteRune(ch)
pos += chWidth
ch, chWidth = utf8.DecodeRuneInString(rest[pos:])
}
word += string(ch)
sbuilder.WriteRune(ch)
continue
}
if phase == inQuote {
@ -124,10 +125,10 @@ func parseWords(rest string, d *directives) []string {
continue // just skip the escape token at end
}
pos += chWidth
word += string(ch)
sbuilder.WriteRune(ch)
ch, chWidth = utf8.DecodeRuneInString(rest[pos:])
}
word += string(ch)
sbuilder.WriteRune(ch)
}
}
@ -277,7 +278,7 @@ func parseString(rest string, d *directives) (*Node, map[string]bool, error) {
func parseJSON(rest string) (*Node, map[string]bool, error) {
rest = strings.TrimLeftFunc(rest, unicode.IsSpace)
if !strings.HasPrefix(rest, "[") {
return nil, nil, errors.Errorf("Error parsing %q as a JSON array", rest)
return nil, nil, errDockerfileNotJSONArray
}
var myJSON []interface{}
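
The parseWords hunks above replace repeated string concatenation with a reusable strings.Builder; a minimal standalone illustration of that pattern (not taken from the diff):

var sb strings.Builder
var words []string
for _, r := range "two words" {
	if unicode.IsSpace(r) {
		if sb.Len() > 0 {
			words = append(words, sb.String())
			sb.Reset() // reuse the same builder for the next word
		}
		continue
	}
	sb.WriteRune(r)
}
if sb.Len() > 0 {
	words = append(words, sb.String())
}
fmt.Println(words) // [two words]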

View File

@ -114,7 +114,6 @@ type Heredoc struct {
var (
dispatch map[string]func(string, *directives) (*Node, map[string]bool, error)
reWhitespace = regexp.MustCompile(`[\t\v\f\r ]+`)
reComment = regexp.MustCompile(`^#.*$`)
reHeredoc = regexp.MustCompile(`^(\d*)<<(-?)([^<]*)$`)
reLeadingTabs = regexp.MustCompile(`(?m)^\t+`)
)
@ -169,8 +168,8 @@ func (d *directives) setEscapeToken(s string) error {
// possibleParserDirective looks for parser directives, eg '# escapeToken=<char>'.
// Parser directives must precede any builder instruction or other comments,
// and cannot be repeated.
func (d *directives) possibleParserDirective(line string) error {
directive, err := d.parser.ParseLine([]byte(line))
func (d *directives) possibleParserDirective(line []byte) error {
directive, err := d.parser.ParseLine(line)
if err != nil {
return err
}
@ -284,6 +283,7 @@ func Parse(rwc io.Reader) (*Result, error) {
scanner.Split(scanLines)
warnings := []Warning{}
var comments []string
buf := &bytes.Buffer{}
var err error
for scanner.Scan() {
@ -307,10 +307,12 @@ func Parse(rwc io.Reader) (*Result, error) {
currentLine++
startLine := currentLine
line, isEndOfLine := trimContinuationCharacter(string(bytesRead), d)
if isEndOfLine && line == "" {
bytesRead, isEndOfLine := trimContinuationCharacter(bytesRead, d)
if isEndOfLine && len(bytesRead) == 0 {
continue
}
buf.Reset()
buf.Write(bytesRead)
var hasEmptyContinuationLine bool
for !isEndOfLine && scanner.Scan() {
@ -329,11 +331,12 @@ func Parse(rwc io.Reader) (*Result, error) {
continue
}
continuationLine := string(bytesRead)
continuationLine, isEndOfLine = trimContinuationCharacter(continuationLine, d)
line += continuationLine
bytesRead, isEndOfLine = trimContinuationCharacter(bytesRead, d)
buf.Write(bytesRead)
}
line := buf.String()
if hasEmptyContinuationLine {
warnings = append(warnings, Warning{
Short: "Empty continuation line found in: " + line,
@ -348,7 +351,7 @@ func Parse(rwc io.Reader) (*Result, error) {
return nil, withLocation(err, startLine, currentLine)
}
if child.canContainHeredoc() {
if child.canContainHeredoc() && strings.Contains(line, "<<") {
heredocs, err := heredocsFromLine(line)
if err != nil {
return nil, withLocation(err, startLine, currentLine)
@ -415,7 +418,7 @@ func heredocFromMatch(match []string) (*Heredoc, error) {
// If there are quotes in one but not the other, then we know that some
// part of the heredoc word is quoted, so we shouldn't expand the content.
shlex.RawQuotes = false
words, err := shlex.ProcessWords(rest, []string{})
words, err := shlex.ProcessWords(rest, emptyEnvs{})
if err != nil {
return nil, err
}
@ -425,7 +428,7 @@ func heredocFromMatch(match []string) (*Heredoc, error) {
}
shlex.RawQuotes = true
wordsRaw, err := shlex.ProcessWords(rest, []string{})
wordsRaw, err := shlex.ProcessWords(rest, emptyEnvs{})
if err != nil {
return nil, err
}
@ -466,7 +469,7 @@ func heredocsFromLine(line string) ([]Heredoc, error) {
shlex.RawQuotes = true
shlex.RawEscapes = true
shlex.SkipUnsetEnv = true
words, _ := shlex.ProcessWords(line, []string{})
words, _ := shlex.ProcessWords(line, emptyEnvs{})
var docs []Heredoc
for _, word := range words {
@ -487,7 +490,10 @@ func ChompHeredocContent(src string) string {
}
func trimComments(src []byte) []byte {
return reComment.ReplaceAll(src, []byte{})
if !isComment(src) {
return src
}
return nil
}
func trimLeadingWhitespace(src []byte) []byte {
@ -501,7 +507,8 @@ func trimNewline(src []byte) []byte {
}
func isComment(line []byte) bool {
return reComment.Match(trimLeadingWhitespace(trimNewline(line)))
line = trimLeadingWhitespace(line)
return len(line) > 0 && line[0] == '#'
}
func isEmptyContinuationLine(line []byte) bool {
@ -510,9 +517,9 @@ func isEmptyContinuationLine(line []byte) bool {
var utf8bom = []byte{0xEF, 0xBB, 0xBF}
func trimContinuationCharacter(line string, d *directives) (string, bool) {
if d.lineContinuationRegex.MatchString(line) {
line = d.lineContinuationRegex.ReplaceAllString(line, "$1")
func trimContinuationCharacter(line []byte, d *directives) ([]byte, bool) {
if d.lineContinuationRegex.Match(line) {
line = d.lineContinuationRegex.ReplaceAll(line, []byte("$1"))
return line, false
}
return line, true
@ -525,7 +532,7 @@ func processLine(d *directives, token []byte, stripLeftWhitespace bool) ([]byte,
if stripLeftWhitespace {
token = trimLeadingWhitespace(token)
}
return trimComments(token), d.possibleParserDirective(string(token))
return trimComments(token), d.possibleParserDirective(token)
}
// Variation of bufio.ScanLines that preserves the line endings
@ -550,3 +557,13 @@ func handleScannerError(err error) error {
return err
}
}
type emptyEnvs struct{}
func (emptyEnvs) Get(string) (string, bool) {
return "", false
}
func (emptyEnvs) Keys() []string {
return nil
}
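
The exported entry point is unchanged by this refactor; a hedged sketch of driving it and reading back warnings such as the empty-continuation one (Dockerfile content is made up; imports and surrounding error handling assumed):

res, err := parser.Parse(strings.NewReader("FROM alpine AS base\nRUN echo hello\n"))
if err != nil {
	return err
}
for _, child := range res.AST.Children {
	fmt.Println(child.Value, "=>", child.Original)
}
for _, w := range res.Warnings {
	fmt.Println(w.Short)
}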

View File

@ -36,7 +36,7 @@ func extractBuilderFlags(line string) (string, []string, error) {
words := []string{}
phase := inSpaces
word := ""
sbuilder := &strings.Builder{}
quote := '\000'
blankOK := false
var ch rune
@ -62,13 +62,14 @@ func extractBuilderFlags(line string) (string, []string, error) {
phase = inWord // found something with "--", fall through
}
if (phase == inWord || phase == inQuote) && (pos == len(line)) {
if word != "--" && (blankOK || len(word) > 0) {
if word := sbuilder.String(); word != "--" && (blankOK || len(word) > 0) {
words = append(words, word)
}
break
}
if phase == inWord {
if unicode.IsSpace(ch) {
word := sbuilder.String()
phase = inSpaces
if word == "--" {
return line[pos:], words, nil
@ -76,7 +77,7 @@ func extractBuilderFlags(line string) (string, []string, error) {
if blankOK || len(word) > 0 {
words = append(words, word)
}
word = ""
sbuilder.Reset()
blankOK = false
continue
}
@ -93,7 +94,9 @@ func extractBuilderFlags(line string) (string, []string, error) {
pos++
ch = rune(line[pos])
}
word += string(ch)
if _, err := sbuilder.WriteRune(ch); err != nil {
return "", nil, err
}
continue
}
if phase == inQuote {
@ -109,7 +112,9 @@ func extractBuilderFlags(line string) (string, []string, error) {
pos++
ch = rune(line[pos])
}
word += string(ch)
if _, err := sbuilder.WriteRune(ch); err != nil {
return "", nil, err
}
}
}

View File

@ -9,3 +9,10 @@ package shell
func EqualEnvKeys(from, to string) bool {
return from == to
}
// NormalizeEnvKey returns the key in a normalized form that can be used
// for comparison. On Unix this is a no-op. On Windows this converts the
// key to uppercase.
func NormalizeEnvKey(key string) string {
return key
}

View File

@ -8,3 +8,10 @@ import "strings"
func EqualEnvKeys(from, to string) bool {
return strings.EqualFold(from, to)
}
// NormalizeEnvKey returns the key in a normalized form that can be used
// for comparison. On Unix this is a no-op. On Windows this converts the
// key to uppercase.
func NormalizeEnvKey(key string) string {
return strings.ToUpper(key)
}
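
A hedged illustration of what the platform split buys (build tags select one of the two bodies above; the comments note both behaviors):

fmt.Println(shell.EqualEnvKeys("Path", "PATH")) // true on Windows, false on Unix
fmt.Println(shell.NormalizeEnvKey("Path"))      // "PATH" on Windows, "Path" on Unix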

View File

@ -4,6 +4,7 @@ import (
"bytes"
"fmt"
"regexp"
"slices"
"strings"
"text/scanner"
"unicode"
@ -11,6 +12,11 @@ import (
"github.com/pkg/errors"
)
type EnvGetter interface {
Get(string) (string, bool)
Keys() []string
}
// Lex performs shell word splitting and variable expansion.
//
// Lex takes a string and an array of env variables and
@ -18,12 +24,15 @@ import (
// tokens. Tries to mimic bash shell process.
// It doesn't support all flavors of ${xx:...} formats but new ones can
// be added by adding code to the "special ${} format processing" section
//
// It is not safe to call methods on a Lex instance concurrently.
type Lex struct {
escapeToken rune
RawQuotes bool
RawEscapes bool
SkipProcessQuotes bool
SkipUnsetEnv bool
shellWord shellWord
}
// NewLex creates a new Lex which uses escapeToken to escape quotes.
@ -35,8 +44,9 @@ func NewLex(escapeToken rune) *Lex {
// and replace any env var references in 'word'. It will also
// return variables in word which were not found in the 'env' list,
// which is useful in later linting.
func (s *Lex) ProcessWord(word string, env []string) (string, map[string]struct{}, error) {
result, err := s.process(word, BuildEnvs(env))
// TODO: rename
func (s *Lex) ProcessWord(word string, env EnvGetter) (string, map[string]struct{}, error) {
result, err := s.process(word, env, true)
return result.Result, result.Unmatched, err
}
@ -47,18 +57,11 @@ func (s *Lex) ProcessWord(word string, env []string) (string, map[string]struct{
// this splitting is done **after** the env var substitutions are done.
// Note, each one is trimmed to remove leading and trailing spaces (unless
// they are quoted), but ProcessWord retains spaces between words.
func (s *Lex) ProcessWords(word string, env []string) ([]string, error) {
result, err := s.process(word, BuildEnvs(env))
func (s *Lex) ProcessWords(word string, env EnvGetter) ([]string, error) {
result, err := s.process(word, env, false)
return result.Words, err
}
// ProcessWordWithMap will use the 'env' list of environment variables,
// and replace any env var references in 'word'.
func (s *Lex) ProcessWordWithMap(word string, env map[string]string) (string, error) {
result, err := s.process(word, env)
return result.Result, err
}
type ProcessWordResult struct {
Result string
Words []string
@ -68,8 +71,26 @@ type ProcessWordResult struct {
// ProcessWordWithMatches will use the 'env' list of environment variables,
// replace any env var references in 'word' and return the env that were used.
func (s *Lex) ProcessWordWithMatches(word string, env map[string]string) (ProcessWordResult, error) {
sw := s.init(word, env)
func (s *Lex) ProcessWordWithMatches(word string, env EnvGetter) (ProcessWordResult, error) {
return s.process(word, env, true)
}
func (s *Lex) initWord(word string, env EnvGetter, capture bool) *shellWord {
sw := &s.shellWord
sw.Lex = s
sw.envs = env
sw.capture = capture
sw.rawEscapes = s.RawEscapes
if capture {
sw.matches = nil
sw.nonmatches = nil
}
sw.scanner.Init(strings.NewReader(word))
return sw
}
func (s *Lex) process(word string, env EnvGetter, capture bool) (ProcessWordResult, error) {
sw := s.initWord(word, env, capture)
word, words, err := sw.process(word)
return ProcessWordResult{
Result: word,
@ -79,47 +100,15 @@ func (s *Lex) ProcessWordWithMatches(word string, env map[string]string) (Proces
}, err
}
func (s *Lex) ProcessWordsWithMap(word string, env map[string]string) ([]string, error) {
result, err := s.process(word, env)
return result.Words, err
}
func (s *Lex) init(word string, env map[string]string) *shellWord {
sw := &shellWord{
envs: env,
escapeToken: s.escapeToken,
skipUnsetEnv: s.SkipUnsetEnv,
skipProcessQuotes: s.SkipProcessQuotes,
rawQuotes: s.RawQuotes,
rawEscapes: s.RawEscapes,
matches: make(map[string]struct{}),
nonmatches: make(map[string]struct{}),
}
sw.scanner.Init(strings.NewReader(word))
return sw
}
func (s *Lex) process(word string, env map[string]string) (*ProcessWordResult, error) {
sw := s.init(word, env)
word, words, err := sw.process(word)
return &ProcessWordResult{
Result: word,
Words: words,
Matched: sw.matches,
Unmatched: sw.nonmatches,
}, err
}
type shellWord struct {
scanner scanner.Scanner
envs map[string]string
escapeToken rune
rawQuotes bool
rawEscapes bool
skipUnsetEnv bool
skipProcessQuotes bool
matches map[string]struct{}
nonmatches map[string]struct{}
*Lex
wordsBuffer strings.Builder
scanner scanner.Scanner
envs EnvGetter
rawEscapes bool
capture bool // capture matches and nonmatches
matches map[string]struct{}
nonmatches map[string]struct{}
}
func (sw *shellWord) process(source string) (string, []string, error) {
@ -131,16 +120,16 @@ func (sw *shellWord) process(source string) (string, []string, error) {
}
type wordsStruct struct {
word string
buf *strings.Builder
words []string
inWord bool
}
func (w *wordsStruct) addChar(ch rune) {
if unicode.IsSpace(ch) && w.inWord {
if len(w.word) != 0 {
w.words = append(w.words, w.word)
w.word = ""
if w.buf.Len() != 0 {
w.words = append(w.words, w.buf.String())
w.buf.Reset()
w.inWord = false
}
} else if !unicode.IsSpace(ch) {
@ -149,7 +138,7 @@ func (w *wordsStruct) addChar(ch rune) {
}
func (w *wordsStruct) addRawChar(ch rune) {
w.word += string(ch)
w.buf.WriteRune(ch)
w.inWord = true
}
@ -160,16 +149,16 @@ func (w *wordsStruct) addString(str string) {
}
func (w *wordsStruct) addRawString(str string) {
w.word += str
w.buf.WriteString(str)
w.inWord = true
}
func (w *wordsStruct) getWords() []string {
if len(w.word) > 0 {
w.words = append(w.words, w.word)
if w.buf.Len() > 0 {
w.words = append(w.words, w.buf.String())
// Just in case we're called again by mistake
w.word = ""
w.buf.Reset()
w.inWord = false
}
return w.words
@ -178,13 +167,18 @@ func (w *wordsStruct) getWords() []string {
// Process the word, starting at 'pos', and stop when we get to the
// end of the word or the 'stopChar' character
func (sw *shellWord) processStopOn(stopChar rune, rawEscapes bool) (string, []string, error) {
var result bytes.Buffer
// result buffer can't be currently shared for shellWord as it is called internally
// by processDollar
var result strings.Builder
sw.wordsBuffer.Reset()
var words wordsStruct
words.buf = &sw.wordsBuffer
// no need to initialize all the time
var charFuncMapping = map[rune]func() (string, error){
'$': sw.processDollar,
}
if !sw.skipProcessQuotes {
if !sw.SkipProcessQuotes {
charFuncMapping['\''] = sw.processSingleQuote
charFuncMapping['"'] = sw.processDoubleQuote
}
@ -261,7 +255,7 @@ func (sw *shellWord) processSingleQuote() (string, error) {
var result bytes.Buffer
ch := sw.scanner.Next()
if sw.rawQuotes {
if sw.RawQuotes {
result.WriteRune(ch)
}
@ -271,7 +265,7 @@ func (sw *shellWord) processSingleQuote() (string, error) {
case scanner.EOF:
return "", errors.New("unexpected end of statement while looking for matching single-quote")
case '\'':
if sw.rawQuotes {
if sw.RawQuotes {
result.WriteRune(ch)
}
return result.String(), nil
@ -296,7 +290,7 @@ func (sw *shellWord) processDoubleQuote() (string, error) {
var result bytes.Buffer
ch := sw.scanner.Next()
if sw.rawQuotes {
if sw.RawQuotes {
result.WriteRune(ch)
}
@ -306,7 +300,7 @@ func (sw *shellWord) processDoubleQuote() (string, error) {
return "", errors.New("unexpected end of statement while looking for matching double-quote")
case '"':
ch := sw.scanner.Next()
if sw.rawQuotes {
if sw.RawQuotes {
result.WriteRune(ch)
}
return result.String(), nil
@ -350,7 +344,7 @@ func (sw *shellWord) processDollar() (string, error) {
return "$", nil
}
value, found := sw.getEnv(name)
if !found && sw.skipUnsetEnv {
if !found && sw.SkipUnsetEnv {
return "$" + name, nil
}
return value, nil
@ -373,7 +367,7 @@ func (sw *shellWord) processDollar() (string, error) {
case '}':
// Normal ${xx} case
value, set := sw.getEnv(name)
if !set && sw.skipUnsetEnv {
if !set && sw.SkipUnsetEnv {
return fmt.Sprintf("${%s}", name), nil
}
return value, nil
@ -395,7 +389,7 @@ func (sw *shellWord) processDollar() (string, error) {
// Grab the current value of the variable in question so we
// can use it to determine what to do based on the modifier
value, set := sw.getEnv(name)
if sw.skipUnsetEnv && !set {
if sw.SkipUnsetEnv && !set {
return fmt.Sprintf("${%s%s%s}", name, chs, word), nil
}
@ -465,7 +459,7 @@ func (sw *shellWord) processDollar() (string, error) {
}
value, set := sw.getEnv(name)
if sw.skipUnsetEnv && !set {
if sw.SkipUnsetEnv && !set {
return fmt.Sprintf("${%s/%s/%s}", name, pattern, replacement), nil
}
@ -528,34 +522,51 @@ func isSpecialParam(char rune) bool {
}
func (sw *shellWord) getEnv(name string) (string, bool) {
for key, value := range sw.envs {
if EqualEnvKeys(name, key) {
v, ok := sw.envs.Get(name)
if ok {
if sw.capture {
if sw.matches == nil {
sw.matches = make(map[string]struct{})
}
sw.matches[name] = struct{}{}
return value, true
}
return v, true
}
if sw.capture {
if sw.nonmatches == nil {
sw.nonmatches = make(map[string]struct{})
}
sw.nonmatches[name] = struct{}{}
}
sw.nonmatches[name] = struct{}{}
return "", false
}
func BuildEnvs(env []string) map[string]string {
func EnvsFromSlice(env []string) EnvGetter {
envs := map[string]string{}
keys := make([]string, 0, len(env))
for _, e := range env {
i := strings.Index(e, "=")
if i < 0 {
envs[e] = ""
} else {
k := e[:i]
v := e[i+1:]
// overwrite value if key already exists
envs[k] = v
}
k, v, _ := strings.Cut(e, "=")
keys = append(keys, k)
envs[NormalizeEnvKey(k)] = v
}
return &envGetter{env: envs, keys: keys}
}
return envs
type envGetter struct {
env map[string]string
keys []string
}
var _ EnvGetter = &envGetter{}
func (e *envGetter) Get(key string) (string, bool) {
key = NormalizeEnvKey(key)
v, ok := e.env[key]
return v, ok
}
func (e *envGetter) Keys() []string {
return e.keys
}
// convertShellPatternToRegex converts a shell-like wildcard pattern
@ -647,11 +658,7 @@ func reversePattern(pattern string) string {
func reverseString(str string) string {
out := []rune(str)
outIdx := len(out) - 1
for i := 0; i < outIdx; i++ {
out[i], out[outIdx] = out[outIdx], out[i]
outIdx--
}
slices.Reverse(out)
return string(out)
}
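
A hedged sketch of the reworked API, with EnvsFromSlice standing in for the removed BuildEnvs/[]string arguments (values are illustrative; imports and surrounding error handling assumed):

lex := shell.NewLex('\\')
envs := shell.EnvsFromSlice([]string{"FOO=bar"})

word, unmatched, err := lex.ProcessWord("pre-${FOO}-$MISSING", envs)
if err != nil {
	return err
}
fmt.Println(word)      // pre-bar-  (MISSING expands to empty; SkipUnsetEnv is false)
fmt.Println(unmatched) // map[MISSING:{}]

words, err := lex.ProcessWords("a $FOO  c", envs)
if err != nil {
	return err
}
fmt.Println(words) // [a bar c]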

View File

@ -1,7 +1,6 @@
package dockerui
import (
"encoding/csv"
"net"
"strconv"
"strings"
@ -13,6 +12,7 @@ import (
"github.com/moby/buildkit/solver/pb"
ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/pkg/errors"
"github.com/tonistiigi/go-csvvalue"
)
func parsePlatforms(v string) ([]ocispecs.Platform, error) {
@ -45,8 +45,7 @@ func parseExtraHosts(v string) ([]llb.HostIP, error) {
return nil, nil
}
out := make([]llb.HostIP, 0)
csvReader := csv.NewReader(strings.NewReader(v))
fields, err := csvReader.Read()
fields, err := csvvalue.Fields(v, nil)
if err != nil {
return nil, err
}
@ -80,8 +79,7 @@ func parseUlimits(v string) ([]pb.Ulimit, error) {
return nil, nil
}
out := make([]pb.Ulimit, 0)
csvReader := csv.NewReader(strings.NewReader(v))
fields, err := csvReader.Read()
fields, err := csvvalue.Fields(v, nil)
if err != nil {
return nil, err
}
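
Same substitution as in the attestations package above; a short hedged sketch with an extra-hosts-style value (host names and addresses are made up):

// The second argument is an optional destination slice to reuse;
// nil, as in the diff above, allocates a fresh one.
fields, err := csvvalue.Fields("somehost=192.0.2.1,otherhost=203.0.113.7", nil)
if err != nil {
	return err
}
fmt.Println(fields) // [somehost=192.0.2.1 otherhost=203.0.113.7]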

View File

@ -14,6 +14,7 @@ import (
controlapi "github.com/moby/buildkit/api/services/control"
"github.com/moby/buildkit/client/llb"
"github.com/moby/buildkit/frontend/attestations"
"github.com/moby/buildkit/frontend/dockerfile/linter"
"github.com/moby/buildkit/frontend/gateway/client"
"github.com/moby/buildkit/solver/pb"
"github.com/moby/buildkit/util/flightcontrol"
@ -65,7 +66,7 @@ type Config struct {
ShmSize int64
Target string
Ulimits []pb.Ulimit
LinterConfig *string
LinterConfig *linter.Config
CacheImports []client.CacheOptionsEntry
TargetPlatforms []ocispecs.Platform // nil means default
@ -78,8 +79,7 @@ type Client struct {
Config
client client.Client
ignoreCache []string
bctx *buildContext
g flightcontrol.Group[*buildContext]
g flightcontrol.CachedGroup[*buildContext]
bopts client.BuildOpts
dockerignore []byte
@ -281,21 +281,17 @@ func (bc *Client) init() error {
bc.Hostname = opts[keyHostname]
if v, ok := opts[keyDockerfileLintArg]; ok {
bc.LinterConfig = &v
bc.LinterConfig, err = linter.ParseLintOptions(v)
if err != nil {
return errors.Wrapf(err, "failed to parse %s", keyDockerfileLintArg)
}
}
return nil
}
func (bc *Client) buildContext(ctx context.Context) (*buildContext, error) {
return bc.g.Do(ctx, "initcontext", func(ctx context.Context) (*buildContext, error) {
if bc.bctx != nil {
return bc.bctx, nil
}
bctx, err := bc.initContext(ctx)
if err == nil {
bc.bctx = bctx
}
return bctx, err
return bc.initContext(ctx)
})
}
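
flightcontrol.CachedGroup takes over the hand-rolled bc.bctx caching removed above; a hedged fragment of the pattern, assuming CachedGroup.Do mirrors Group.Do while memoizing the successful result (key and types as in the diff):

var g flightcontrol.CachedGroup[*buildContext]
bctx, err := g.Do(ctx, "initcontext", func(ctx context.Context) (*buildContext, error) {
	// concurrent callers share one execution; a successful result is
	// memoized for later calls, replacing the removed bc.bctx bookkeeping
	return bc.initContext(ctx)
})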