Sebastiaan van Stijn
2024-06-04 11:33:43 +02:00
parent dbdd3601eb
commit 9358f84668
146 changed files with 2661 additions and 1102 deletions

View File

@ -2,6 +2,7 @@ package client
import (
"context"
"maps"
"github.com/moby/buildkit/client/buildid"
gateway "github.com/moby/buildkit/frontend/gateway/client"
@ -42,12 +43,10 @@ func (c *Client) Build(ctx context.Context, opt SolveOpt, product string, buildF
}
cb := func(ref string, s *session.Session, opts map[string]string) error {
for k, v := range opts {
if feOpts == nil {
feOpts = map[string]string{}
}
feOpts[k] = v
if feOpts == nil {
feOpts = map[string]string{}
}
maps.Copy(feOpts, opts)
gwClient := c.gatewayClientForBuild(ref)
g, err := grpcclient.New(ctx, feOpts, s.ID(), product, gwClient, gworkers)
if err != nil {

View File

@ -6,16 +6,12 @@ import (
"github.com/moby/buildkit/solver/pb"
"github.com/moby/buildkit/util/flightcontrol"
digest "github.com/opencontainers/go-digest"
"github.com/pkg/errors"
)
type asyncState struct {
f func(context.Context, State, *Constraints) (State, error)
prev State
target State
set bool
err error
g flightcontrol.Group[State]
f func(context.Context, State, *Constraints) (State, error)
prev State
g flightcontrol.CachedGroup[State]
}
func (as *asyncState) Output() Output {
@ -23,59 +19,33 @@ func (as *asyncState) Output() Output {
}
func (as *asyncState) Vertex(ctx context.Context, c *Constraints) Vertex {
err := as.Do(ctx, c)
target, err := as.Do(ctx, c)
if err != nil {
return &errVertex{err}
}
if as.set {
out := as.target.Output()
if out == nil {
return nil
}
return out.Vertex(ctx, c)
out := target.Output()
if out == nil {
return nil
}
return nil
return out.Vertex(ctx, c)
}
func (as *asyncState) ToInput(ctx context.Context, c *Constraints) (*pb.Input, error) {
err := as.Do(ctx, c)
target, err := as.Do(ctx, c)
if err != nil {
return nil, err
}
if as.set {
out := as.target.Output()
if out == nil {
return nil, nil
}
return out.ToInput(ctx, c)
out := target.Output()
if out == nil {
return nil, nil
}
return nil, nil
return out.ToInput(ctx, c)
}
func (as *asyncState) Do(ctx context.Context, c *Constraints) error {
_, err := as.g.Do(ctx, "", func(ctx context.Context) (State, error) {
if as.set {
return as.target, as.err
}
res, err := as.f(ctx, as.prev, c)
if err != nil {
select {
case <-ctx.Done():
if errors.Is(err, context.Cause(ctx)) {
return res, err
}
default:
}
}
as.target = res
as.err = err
as.set = true
return res, err
func (as *asyncState) Do(ctx context.Context, c *Constraints) (State, error) {
return as.g.Do(ctx, "", func(ctx context.Context) (State, error) {
return as.f(ctx, as.prev, c)
})
if err != nil {
return err
}
return as.err
}
type errVertex struct {

View File

@ -2,6 +2,7 @@ package llb
import (
"io"
"maps"
"github.com/containerd/containerd/platforms"
"github.com/moby/buildkit/solver/pb"
@ -18,24 +19,17 @@ type Definition struct {
}
func (def *Definition) ToPB() *pb.Definition {
md := make(map[digest.Digest]pb.OpMetadata, len(def.Metadata))
for k, v := range def.Metadata {
md[k] = v
}
return &pb.Definition{
Def: def.Def,
Source: def.Source,
Metadata: md,
Metadata: maps.Clone(def.Metadata),
}
}
func (def *Definition) FromPB(x *pb.Definition) {
def.Def = x.Def
def.Source = x.Source
def.Metadata = make(map[digest.Digest]pb.OpMetadata)
for k, v := range x.Metadata {
def.Metadata[k] = v
}
def.Metadata = maps.Clone(x.Metadata)
}
func (def *Definition) Head() (digest.Digest, error) {

View File

@ -5,6 +5,8 @@ import (
"fmt"
"net"
"path"
"slices"
"sync"
"github.com/containerd/containerd/platforms"
"github.com/google/shlex"
@ -111,16 +113,16 @@ func Reset(other State) StateOption {
}
}
func getEnv(s State) func(context.Context, *Constraints) (EnvList, error) {
return func(ctx context.Context, c *Constraints) (EnvList, error) {
func getEnv(s State) func(context.Context, *Constraints) (*EnvList, error) {
return func(ctx context.Context, c *Constraints) (*EnvList, error) {
v, err := s.getValue(keyEnv)(ctx, c)
if err != nil {
return nil, err
}
if v != nil {
return v.(EnvList), nil
return v.(*EnvList), nil
}
return EnvList{}, nil
return &EnvList{}, nil
}
}
@ -346,54 +348,83 @@ func getSecurity(s State) func(context.Context, *Constraints) (pb.SecurityMode,
}
}
type EnvList []KeyValue
type KeyValue struct {
key string
value string
type EnvList struct {
parent *EnvList
key string
value string
del bool
once sync.Once
l int
values map[string]string
keys []string
}
func (e EnvList) AddOrReplace(k, v string) EnvList {
e = e.Delete(k)
e = append(e, KeyValue{key: k, value: v})
return e
func (e *EnvList) AddOrReplace(k, v string) *EnvList {
return &EnvList{
parent: e,
key: k,
value: v,
l: e.l + 1,
}
}
func (e EnvList) SetDefault(k, v string) EnvList {
func (e *EnvList) SetDefault(k, v string) *EnvList {
if _, ok := e.Get(k); !ok {
e = append(e, KeyValue{key: k, value: v})
return e.AddOrReplace(k, v)
}
return e
}
func (e EnvList) Delete(k string) EnvList {
e = append([]KeyValue(nil), e...)
if i, ok := e.Index(k); ok {
return append(e[:i], e[i+1:]...)
func (e *EnvList) Delete(k string) EnvList {
return EnvList{
parent: e,
key: k,
del: true,
l: e.l + 1,
}
return e
}
func (e EnvList) Get(k string) (string, bool) {
if index, ok := e.Index(k); ok {
return e[index].value, true
}
return "", false
func (e *EnvList) makeValues() {
m := make(map[string]string, e.l)
seen := make(map[string]struct{}, e.l)
keys := make([]string, 0, e.l)
e.keys = e.addValue(keys, m, seen)
e.values = m
slices.Reverse(e.keys)
}
func (e EnvList) Index(k string) (int, bool) {
for i, kv := range e {
if kv.key == k {
return i, true
}
func (e *EnvList) addValue(keys []string, vals map[string]string, seen map[string]struct{}) []string {
if e.parent == nil {
return keys
}
return -1, false
if _, ok := seen[e.key]; !e.del && !ok {
vals[e.key] = e.value
keys = append(keys, e.key)
}
seen[e.key] = struct{}{}
if e.parent != nil {
keys = e.parent.addValue(keys, vals, seen)
}
return keys
}
func (e EnvList) ToArray() []string {
out := make([]string, 0, len(e))
for _, kv := range e {
out = append(out, kv.key+"="+kv.value)
func (e *EnvList) Get(k string) (string, bool) {
e.once.Do(e.makeValues)
v, ok := e.values[k]
return v, ok
}
func (e *EnvList) Keys() []string {
e.once.Do(e.makeValues)
return e.keys
}
func (e *EnvList) ToArray() []string {
keys := e.Keys()
out := make([]string, 0, len(keys))
for _, k := range keys {
v, _ := e.Get(k)
out = append(out, k+"="+v)
}
return out
}

View File

@ -4,6 +4,7 @@ import (
"context"
"encoding/json"
"fmt"
"maps"
"net"
"strings"
@ -104,11 +105,11 @@ func (s State) getValue(k interface{}) func(context.Context, *Constraints) (inte
}
if s.async != nil {
return func(ctx context.Context, c *Constraints) (interface{}, error) {
err := s.async.Do(ctx, c)
target, err := s.async.Do(ctx, c)
if err != nil {
return nil, err
}
return s.async.target.getValue(k)(ctx, c)
return target.getValue(k)(ctx, c)
}
}
if s.prev == nil {
@ -118,8 +119,13 @@ func (s State) getValue(k interface{}) func(context.Context, *Constraints) (inte
}
func (s State) Async(f func(context.Context, State, *Constraints) (State, error)) State {
as := &asyncState{
f: f,
prev: s,
}
as.g.CacheError = true
s2 := State{
async: &asyncState{f: f, prev: s},
async: as,
}
return s2
}
@ -345,16 +351,12 @@ func (s State) GetEnv(ctx context.Context, key string, co ...ConstraintsOpt) (st
// Env returns a new [State] with the provided environment variable set.
// See [Env]
func (s State) Env(ctx context.Context, co ...ConstraintsOpt) ([]string, error) {
func (s State) Env(ctx context.Context, co ...ConstraintsOpt) (*EnvList, error) {
c := &Constraints{}
for _, f := range co {
f.SetConstraintsOption(c)
}
env, err := getEnv(s)(ctx, c)
if err != nil {
return nil, err
}
return env.ToArray(), nil
return getEnv(s)(ctx, c)
}
// GetDir returns the current working directory for the state.
@ -566,9 +568,7 @@ func mergeMetadata(m1, m2 pb.OpMetadata) pb.OpMetadata {
if m1.Description == nil {
m1.Description = make(map[string]string)
}
for k, v := range m2.Description {
m1.Description[k] = v
}
maps.Copy(m1.Description, m2.Description)
}
if m2.ExportCache != nil {
m1.ExportCache = m2.ExportCache
@ -597,9 +597,7 @@ func WithDescription(m map[string]string) ConstraintsOpt {
if c.Metadata.Description == nil {
c.Metadata.Description = map[string]string{}
}
for k, v := range m {
c.Metadata.Description[k] = v
}
maps.Copy(c.Metadata.Description, m)
})
}

View File

@ -5,6 +5,7 @@ import (
"encoding/base64"
"encoding/json"
"io"
"maps"
"os"
"path/filepath"
"strings"
@ -219,13 +220,8 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG
})
}
frontendAttrs := map[string]string{}
for k, v := range opt.FrontendAttrs {
frontendAttrs[k] = v
}
for k, v := range cacheOpt.frontendAttrs {
frontendAttrs[k] = v
}
frontendAttrs := maps.Clone(opt.FrontendAttrs)
maps.Copy(frontendAttrs, cacheOpt.frontendAttrs)
solveCtx, cancelSolve := context.WithCancelCause(ctx)
var res *SolveResponse

View File

@ -142,6 +142,8 @@ type ContainerdConfig struct {
MaxParallelism int `toml:"max-parallelism"`
DefaultCgroupParent string `toml:"defaultCgroupParent"`
Rootless bool `toml:"rootless"`
}

View File

@ -1,10 +1,10 @@
package attestations
import (
"encoding/csv"
"strings"
"github.com/pkg/errors"
"github.com/tonistiigi/go-csvvalue"
)
const (
@ -63,8 +63,7 @@ func Parse(values map[string]string) (map[string]map[string]string, error) {
if v == "" {
continue
}
csvReader := csv.NewReader(strings.NewReader(v))
fields, err := csvReader.Read()
fields, err := csvvalue.Fields(v, nil)
if err != nil {
return nil, errors.Wrapf(err, "failed to parse %s", k)
}

View File

@ -0,0 +1,146 @@
package linter
import (
"fmt"
"strconv"
"strings"
"github.com/moby/buildkit/frontend/dockerfile/parser"
"github.com/pkg/errors"
)
// Config carries the caller-supplied lint configuration used to build a
// Linter via New.
type Config struct {
	Warn          LintWarnFunc // callback invoked for every triggered rule
	SkipRules     []string     // names of rules to ignore
	SkipAll       bool         // disable all rules
	ReturnAsError bool         // make Error() report triggered rules as an error
}

// Linter dispatches rule checks, honors the skip configuration, and keeps
// track of which rules have been triggered.
type Linter struct {
	SkippedRules  map[string]struct{} // rule names to ignore (set form of Config.SkipRules)
	CalledRules   []string            // rules triggered so far, in call order (may repeat)
	SkipAll       bool
	ReturnAsError bool
	Warn          LintWarnFunc
}
// New builds a Linter from config: it copies the warning callback and the
// skip-all/error flags, and converts the skip-rule list into a set for
// O(1) lookups in Run.
func New(config *Config) *Linter {
	lint := &Linter{
		SkippedRules:  make(map[string]struct{}, len(config.SkipRules)),
		CalledRules:   []string{},
		Warn:          config.Warn,
		SkipAll:       config.SkipAll,
		ReturnAsError: config.ReturnAsError,
	}
	for _, name := range config.SkipRules {
		lint.SkippedRules[name] = struct{}{}
	}
	return lint
}
// Run executes a single rule check at the given source location(s),
// recording the rule as called and forwarding the warning, unless the
// linter is nil/unconfigured, all rules are skipped, or this particular
// rule is in the skip set.
func (lc *Linter) Run(rule LinterRuleI, location []parser.Range, txt ...string) {
	if lc == nil || lc.Warn == nil || lc.SkipAll {
		return
	}
	name := rule.RuleName()
	if _, skipped := lc.SkippedRules[name]; skipped {
		return
	}
	lc.CalledRules = append(lc.CalledRules, name)
	rule.Run(lc.Warn, location, txt...)
}
// Error returns a single error naming every rule that fired, or nil when
// the linter is nil, not configured to escalate warnings, or no rule was
// triggered.
func (lc *Linter) Error() error {
	if lc == nil || !lc.ReturnAsError {
		return nil
	}
	if len(lc.CalledRules) == 0 {
		return nil
	}
	// Deduplicate while preserving first-seen order. Ranging over a map
	// (as before) made the rule order in the message nondeterministic,
	// since Go map iteration order is unspecified.
	seen := make(map[string]struct{}, len(lc.CalledRules))
	var rules []string
	for _, r := range lc.CalledRules {
		if _, ok := seen[r]; ok {
			continue
		}
		seen[r] = struct{}{}
		rules = append(rules, r)
	}
	return errors.Errorf("lint violation found for rules: %s", strings.Join(rules, ", "))
}
// LinterRuleI is the interface the Linter uses to run a rule without
// knowing its concrete message-format function type.
type LinterRuleI interface {
	RuleName() string
	Run(warn LintWarnFunc, location []parser.Range, txt ...string)
}

// LinterRule is a single lint rule. The type parameter F is the
// rule-specific message-formatting function type (signatures vary per
// rule, see the rule definitions).
type LinterRule[F any] struct {
	Name        string // stable rule identifier
	Description string // human-readable summary, also the default message
	URL         string // documentation link for the rule
	Format      F      // renders the concrete warning message
}
// RuleName returns the rule's stable identifier.
func (rule *LinterRule[F]) RuleName() string {
	return rule.Name
}
// Run reports this rule through warn. When no message text is supplied,
// the rule's Description doubles as the short message.
func (rule *LinterRule[F]) Run(warn LintWarnFunc, location []parser.Range, txt ...string) {
	short := rule.Description
	if len(txt) > 0 {
		short = strings.Join(txt, " ")
	}
	warn(rule.Name, rule.Description, rule.URL, short, location)
}
// LintFormatShort renders a one-line lint message of the form
// "<rule>: <msg>", with an " (line N)" suffix appended when line > 0.
func LintFormatShort(rulename, msg string, line int) string {
	out := rulename + ": " + msg
	if line > 0 {
		out += " (line " + strconv.Itoa(line) + ")"
	}
	return out
}
// LintWarnFunc receives a triggered rule: its name, description, doc URL,
// the formatted short message, and the source location(s) it applies to.
type LintWarnFunc func(rulename, description, url, fmtmsg string, location []parser.Range)
// ParseLintOptions parses a check-option string of the form
// "skip=<rule1>,<rule2>;error=<bool>" (skip may also be "all") into a
// Config. An empty string yields a zero Config; unknown keys and
// key-only entries are rejected.
func ParseLintOptions(checkStr string) (*Config, error) {
	checkStr = strings.TrimSpace(checkStr)
	if checkStr == "" {
		return &Config{}, nil
	}
	// Split on every ";": the previous SplitN(..., 2) glued any third
	// option onto the second option's value, so e.g.
	// "skip=a;error=true;error=false" failed to parse.
	parts := strings.Split(checkStr, ";")
	var skipSet []string
	var errorOnWarn, skipAll bool
	for _, p := range parts {
		k, v, ok := strings.Cut(p, "=")
		if !ok {
			return nil, errors.Errorf("invalid check option %q", p)
		}
		k = strings.TrimSpace(k)
		switch k {
		case "skip":
			v = strings.TrimSpace(v)
			if v == "all" {
				skipAll = true
			} else {
				skipSet = strings.Split(v, ",")
				for i, rule := range skipSet {
					skipSet[i] = strings.TrimSpace(rule)
				}
			}
		case "error":
			v, err := strconv.ParseBool(strings.TrimSpace(v))
			if err != nil {
				return nil, errors.Wrapf(err, "failed to parse check option %q", p)
			}
			errorOnWarn = v
		default:
			return nil, errors.Errorf("invalid check option %q", k)
		}
	}
	return &Config{
		SkipRules:     skipSet,
		SkipAll:       skipAll,
		ReturnAsError: errorOnWarn,
	}, nil
}

View File

@ -0,0 +1,127 @@
package linter
import (
"fmt"
)
// Rule definitions for the Dockerfile linter. Every rule carries a stable
// Name, a human-readable Description, a documentation URL, and a Format
// function that renders the concrete warning message for a violation.
var (
	RuleStageNameCasing = LinterRule[func(string) string]{
		Name:        "StageNameCasing",
		Description: "Stage names should be lowercase",
		URL:         "https://docs.docker.com/go/dockerfile/rule/stage-name-casing/",
		Format: func(stageName string) string {
			return fmt.Sprintf("Stage name '%s' should be lowercase", stageName)
		},
	}
	RuleFromAsCasing = LinterRule[func(string, string) string]{
		Name:        "FromAsCasing",
		Description: "The 'as' keyword should match the case of the 'from' keyword",
		URL:         "https://docs.docker.com/go/dockerfile/rule/from-as-casing/",
		Format: func(from, as string) string {
			return fmt.Sprintf("'%s' and '%s' keywords' casing do not match", as, from)
		},
	}
	RuleNoEmptyContinuation = LinterRule[func() string]{
		Name:        "NoEmptyContinuation",
		Description: "Empty continuation lines will become errors in a future release",
		URL:         "https://docs.docker.com/go/dockerfile/rule/no-empty-continuation/",
		Format: func() string {
			return "Empty continuation line"
		},
	}
	RuleConsistentInstructionCasing = LinterRule[func(string, string) string]{
		Name:        "ConsistentInstructionCasing",
		Description: "All commands within the Dockerfile should use the same casing (either upper or lower)",
		URL:         "https://docs.docker.com/go/dockerfile/rule/consistent-instruction-casing/",
		Format: func(violatingCommand, correctCasing string) string {
			return fmt.Sprintf("Command '%s' should match the case of the command majority (%s)", violatingCommand, correctCasing)
		},
	}
	RuleDuplicateStageName = LinterRule[func(string) string]{
		Name:        "DuplicateStageName",
		Description: "Stage names should be unique",
		URL:         "https://docs.docker.com/go/dockerfile/rule/duplicate-stage-name/",
		Format: func(stageName string) string {
			return fmt.Sprintf("Duplicate stage name %q, stage names should be unique", stageName)
		},
	}
	RuleReservedStageName = LinterRule[func(string) string]{
		Name:        "ReservedStageName",
		Description: "Reserved words should not be used as stage names",
		URL:         "https://docs.docker.com/go/dockerfile/rule/reserved-stage-name/",
		Format: func(reservedStageName string) string {
			return fmt.Sprintf("Stage name should not use the same name as reserved stage %q", reservedStageName)
		},
	}
	RuleJSONArgsRecommended = LinterRule[func(instructionName string) string]{
		Name:        "JSONArgsRecommended",
		Description: "JSON arguments recommended for ENTRYPOINT/CMD to prevent unintended behavior related to OS signals",
		URL:         "https://docs.docker.com/go/dockerfile/rule/json-args-recommended/",
		Format: func(instructionName string) string {
			return fmt.Sprintf("JSON arguments recommended for %s to prevent unintended behavior related to OS signals", instructionName)
		},
	}
	RuleMaintainerDeprecated = LinterRule[func() string]{
		Name:        "MaintainerDeprecated",
		Description: "The MAINTAINER instruction is deprecated, use a label instead to define an image author",
		URL:         "https://docs.docker.com/go/dockerfile/rule/maintainer-deprecated/",
		Format: func() string {
			return "Maintainer instruction is deprecated in favor of using label"
		},
	}
	RuleUndefinedArgInFrom = LinterRule[func(string, string) string]{
		Name:        "UndefinedArgInFrom",
		Description: "FROM command must use declared ARGs",
		URL:         "https://docs.docker.com/go/dockerfile/rule/undefined-arg-in-from/",
		Format: func(baseArg, suggest string) string {
			out := fmt.Sprintf("FROM argument '%s' is not declared", baseArg)
			if suggest != "" {
				out += fmt.Sprintf(" (did you mean %s?)", suggest)
			}
			return out
		},
	}
	RuleWorkdirRelativePath = LinterRule[func(workdir string) string]{
		Name:        "WorkdirRelativePath",
		Description: "Relative workdir without an absolute workdir declared within the build can have unexpected results if the base image changes",
		URL:         "https://docs.docker.com/go/dockerfile/rule/workdir-relative-path/",
		Format: func(workdir string) string {
			return fmt.Sprintf("Relative workdir %q can have unexpected results if the base image changes", workdir)
		},
	}
	RuleUndefinedVar = LinterRule[func(string, string) string]{
		Name:        "UndefinedVar",
		Description: "Variables should be defined before their use",
		URL:         "https://docs.docker.com/go/dockerfile/rule/undefined-var/",
		Format: func(arg, suggest string) string {
			out := fmt.Sprintf("Usage of undefined variable '$%s'", arg)
			if suggest != "" {
				out += fmt.Sprintf(" (did you mean $%s?)", suggest)
			}
			return out
		},
	}
	RuleMultipleInstructionsDisallowed = LinterRule[func(instructionName string) string]{
		Name:        "MultipleInstructionsDisallowed",
		Description: "Multiple instructions of the same type should not be used in the same stage",
		URL:         "https://docs.docker.com/go/dockerfile/rule/multiple-instructions-disallowed/",
		Format: func(instructionName string) string {
			return fmt.Sprintf("Multiple %s instructions should not be used in the same stage because only the last one will be used", instructionName)
		},
	}
	RuleLegacyKeyValueFormat = LinterRule[func(cmdName string) string]{
		Name:        "LegacyKeyValueFormat",
		Description: "Legacy key/value format with whitespace separator should not be used",
		URL:         "https://docs.docker.com/go/dockerfile/rule/legacy-key-value-format/",
		Format: func(cmdName string) string {
			return fmt.Sprintf("\"%s key=value\" should be used instead of legacy \"%s key value\" format", cmdName, cmdName)
		},
	}
	// NOTE(review): unlike every other rule above, no URL is set here —
	// confirm whether a documentation page exists for this rule.
	RuleInvalidBaseImagePlatform = LinterRule[func(string, string, string) string]{
		Name:        "InvalidBaseImagePlatform",
		Description: "Base image platform does not match expected target platform",
		Format: func(image, expected, actual string) string {
			return fmt.Sprintf("Base image %s was pulled with platform %q, expected %q for current build", image, actual, expected)
		},
	}
)

View File

@ -17,6 +17,7 @@ import (
var (
errDockerfileNotStringArray = errors.New("when using JSON array syntax, arrays must be comprised of strings only")
errDockerfileNotJSONArray = errors.New("not a JSON array")
)
const (
@ -58,11 +59,11 @@ func parseWords(rest string, d *directives) []string {
words := []string{}
phase := inSpaces
word := ""
quote := '\000'
blankOK := false
var ch rune
var chWidth int
var sbuilder strings.Builder
for pos := 0; pos <= len(rest); pos += chWidth {
if pos != len(rest) {
@ -79,18 +80,18 @@ func parseWords(rest string, d *directives) []string {
phase = inWord // found it, fall through
}
if (phase == inWord || phase == inQuote) && (pos == len(rest)) {
if blankOK || len(word) > 0 {
words = append(words, word)
if blankOK || sbuilder.Len() > 0 {
words = append(words, sbuilder.String())
}
break
}
if phase == inWord {
if unicode.IsSpace(ch) {
phase = inSpaces
if blankOK || len(word) > 0 {
words = append(words, word)
if blankOK || sbuilder.Len() > 0 {
words = append(words, sbuilder.String())
}
word = ""
sbuilder.Reset()
blankOK = false
continue
}
@ -106,11 +107,11 @@ func parseWords(rest string, d *directives) []string {
// If we're not quoted and we see an escape token, then always just
// add the escape token plus the char to the word, even if the char
// is a quote.
word += string(ch)
sbuilder.WriteRune(ch)
pos += chWidth
ch, chWidth = utf8.DecodeRuneInString(rest[pos:])
}
word += string(ch)
sbuilder.WriteRune(ch)
continue
}
if phase == inQuote {
@ -124,10 +125,10 @@ func parseWords(rest string, d *directives) []string {
continue // just skip the escape token at end
}
pos += chWidth
word += string(ch)
sbuilder.WriteRune(ch)
ch, chWidth = utf8.DecodeRuneInString(rest[pos:])
}
word += string(ch)
sbuilder.WriteRune(ch)
}
}
@ -277,7 +278,7 @@ func parseString(rest string, d *directives) (*Node, map[string]bool, error) {
func parseJSON(rest string) (*Node, map[string]bool, error) {
rest = strings.TrimLeftFunc(rest, unicode.IsSpace)
if !strings.HasPrefix(rest, "[") {
return nil, nil, errors.Errorf("Error parsing %q as a JSON array", rest)
return nil, nil, errDockerfileNotJSONArray
}
var myJSON []interface{}

View File

@ -114,7 +114,6 @@ type Heredoc struct {
var (
dispatch map[string]func(string, *directives) (*Node, map[string]bool, error)
reWhitespace = regexp.MustCompile(`[\t\v\f\r ]+`)
reComment = regexp.MustCompile(`^#.*$`)
reHeredoc = regexp.MustCompile(`^(\d*)<<(-?)([^<]*)$`)
reLeadingTabs = regexp.MustCompile(`(?m)^\t+`)
)
@ -169,8 +168,8 @@ func (d *directives) setEscapeToken(s string) error {
// possibleParserDirective looks for parser directives, eg '# escapeToken=<char>'.
// Parser directives must precede any builder instruction or other comments,
// and cannot be repeated.
func (d *directives) possibleParserDirective(line string) error {
directive, err := d.parser.ParseLine([]byte(line))
func (d *directives) possibleParserDirective(line []byte) error {
directive, err := d.parser.ParseLine(line)
if err != nil {
return err
}
@ -284,6 +283,7 @@ func Parse(rwc io.Reader) (*Result, error) {
scanner.Split(scanLines)
warnings := []Warning{}
var comments []string
buf := &bytes.Buffer{}
var err error
for scanner.Scan() {
@ -307,10 +307,12 @@ func Parse(rwc io.Reader) (*Result, error) {
currentLine++
startLine := currentLine
line, isEndOfLine := trimContinuationCharacter(string(bytesRead), d)
if isEndOfLine && line == "" {
bytesRead, isEndOfLine := trimContinuationCharacter(bytesRead, d)
if isEndOfLine && len(bytesRead) == 0 {
continue
}
buf.Reset()
buf.Write(bytesRead)
var hasEmptyContinuationLine bool
for !isEndOfLine && scanner.Scan() {
@ -329,11 +331,12 @@ func Parse(rwc io.Reader) (*Result, error) {
continue
}
continuationLine := string(bytesRead)
continuationLine, isEndOfLine = trimContinuationCharacter(continuationLine, d)
line += continuationLine
bytesRead, isEndOfLine = trimContinuationCharacter(bytesRead, d)
buf.Write(bytesRead)
}
line := buf.String()
if hasEmptyContinuationLine {
warnings = append(warnings, Warning{
Short: "Empty continuation line found in: " + line,
@ -348,7 +351,7 @@ func Parse(rwc io.Reader) (*Result, error) {
return nil, withLocation(err, startLine, currentLine)
}
if child.canContainHeredoc() {
if child.canContainHeredoc() && strings.Contains(line, "<<") {
heredocs, err := heredocsFromLine(line)
if err != nil {
return nil, withLocation(err, startLine, currentLine)
@ -415,7 +418,7 @@ func heredocFromMatch(match []string) (*Heredoc, error) {
// If there are quotes in one but not the other, then we know that some
// part of the heredoc word is quoted, so we shouldn't expand the content.
shlex.RawQuotes = false
words, err := shlex.ProcessWords(rest, []string{})
words, err := shlex.ProcessWords(rest, emptyEnvs{})
if err != nil {
return nil, err
}
@ -425,7 +428,7 @@ func heredocFromMatch(match []string) (*Heredoc, error) {
}
shlex.RawQuotes = true
wordsRaw, err := shlex.ProcessWords(rest, []string{})
wordsRaw, err := shlex.ProcessWords(rest, emptyEnvs{})
if err != nil {
return nil, err
}
@ -466,7 +469,7 @@ func heredocsFromLine(line string) ([]Heredoc, error) {
shlex.RawQuotes = true
shlex.RawEscapes = true
shlex.SkipUnsetEnv = true
words, _ := shlex.ProcessWords(line, []string{})
words, _ := shlex.ProcessWords(line, emptyEnvs{})
var docs []Heredoc
for _, word := range words {
@ -487,7 +490,10 @@ func ChompHeredocContent(src string) string {
}
func trimComments(src []byte) []byte {
return reComment.ReplaceAll(src, []byte{})
if !isComment(src) {
return src
}
return nil
}
func trimLeadingWhitespace(src []byte) []byte {
@ -501,7 +507,8 @@ func trimNewline(src []byte) []byte {
}
func isComment(line []byte) bool {
return reComment.Match(trimLeadingWhitespace(trimNewline(line)))
line = trimLeadingWhitespace(line)
return len(line) > 0 && line[0] == '#'
}
func isEmptyContinuationLine(line []byte) bool {
@ -510,9 +517,9 @@ func isEmptyContinuationLine(line []byte) bool {
var utf8bom = []byte{0xEF, 0xBB, 0xBF}
func trimContinuationCharacter(line string, d *directives) (string, bool) {
if d.lineContinuationRegex.MatchString(line) {
line = d.lineContinuationRegex.ReplaceAllString(line, "$1")
func trimContinuationCharacter(line []byte, d *directives) ([]byte, bool) {
if d.lineContinuationRegex.Match(line) {
line = d.lineContinuationRegex.ReplaceAll(line, []byte("$1"))
return line, false
}
return line, true
@ -525,7 +532,7 @@ func processLine(d *directives, token []byte, stripLeftWhitespace bool) ([]byte,
if stripLeftWhitespace {
token = trimLeadingWhitespace(token)
}
return trimComments(token), d.possibleParserDirective(string(token))
return trimComments(token), d.possibleParserDirective(token)
}
// Variation of bufio.ScanLines that preserves the line endings
@ -550,3 +557,13 @@ func handleScannerError(err error) error {
return err
}
}
// emptyEnvs is an environment getter that resolves nothing: Get always
// misses and Keys is empty. Passed to shlex.ProcessWords where variable
// expansion must not pick up any real environment values.
type emptyEnvs struct{}

// Get always reports the variable as unset.
func (emptyEnvs) Get(string) (string, bool) {
	return "", false
}

// Keys returns no variable names.
func (emptyEnvs) Keys() []string {
	return nil
}

View File

@ -36,7 +36,7 @@ func extractBuilderFlags(line string) (string, []string, error) {
words := []string{}
phase := inSpaces
word := ""
sbuilder := &strings.Builder{}
quote := '\000'
blankOK := false
var ch rune
@ -62,13 +62,14 @@ func extractBuilderFlags(line string) (string, []string, error) {
phase = inWord // found something with "--", fall through
}
if (phase == inWord || phase == inQuote) && (pos == len(line)) {
if word != "--" && (blankOK || len(word) > 0) {
if word := sbuilder.String(); word != "--" && (blankOK || len(word) > 0) {
words = append(words, word)
}
break
}
if phase == inWord {
if unicode.IsSpace(ch) {
word := sbuilder.String()
phase = inSpaces
if word == "--" {
return line[pos:], words, nil
@ -76,7 +77,7 @@ func extractBuilderFlags(line string) (string, []string, error) {
if blankOK || len(word) > 0 {
words = append(words, word)
}
word = ""
sbuilder.Reset()
blankOK = false
continue
}
@ -93,7 +94,9 @@ func extractBuilderFlags(line string) (string, []string, error) {
pos++
ch = rune(line[pos])
}
word += string(ch)
if _, err := sbuilder.WriteRune(ch); err != nil {
return "", nil, err
}
continue
}
if phase == inQuote {
@ -109,7 +112,9 @@ func extractBuilderFlags(line string) (string, []string, error) {
pos++
ch = rune(line[pos])
}
word += string(ch)
if _, err := sbuilder.WriteRune(ch); err != nil {
return "", nil, err
}
}
}

View File

@ -9,3 +9,10 @@ package shell
// EqualEnvKeys compares two environment variable names. In this variant
// the comparison is exact (case-sensitive).
func EqualEnvKeys(from, to string) bool {
	return from == to
}

// NormalizeEnvKey returns the key in a normalized form that can be used
// for comparison. On Unix this is a no-op. On Windows this converts the
// key to uppercase.
func NormalizeEnvKey(key string) string {
	return key
}

View File

@ -8,3 +8,10 @@ import "strings"
// EqualEnvKeys compares two environment variable names. In this variant
// the comparison is case-insensitive (strings.EqualFold).
func EqualEnvKeys(from, to string) bool {
	return strings.EqualFold(from, to)
}

// NormalizeEnvKey returns the key in a normalized form that can be used
// for comparison. On Unix this is a no-op. On Windows this converts the
// key to uppercase.
func NormalizeEnvKey(key string) string {
	return strings.ToUpper(key)
}

View File

@ -4,6 +4,7 @@ import (
"bytes"
"fmt"
"regexp"
"slices"
"strings"
"text/scanner"
"unicode"
@ -11,6 +12,11 @@ import (
"github.com/pkg/errors"
)
type EnvGetter interface {
Get(string) (string, bool)
Keys() []string
}
// Lex performs shell word splitting and variable expansion.
//
// Lex takes a string and an array of env variables and
@ -18,12 +24,15 @@ import (
// tokens. Tries to mimic bash shell process.
// It doesn't support all flavors of ${xx:...} formats but new ones can
// be added by adding code to the "special ${} format processing" section
//
// It is not safe to call methods on a Lex instance concurrently.
type Lex struct {
escapeToken rune
RawQuotes bool
RawEscapes bool
SkipProcessQuotes bool
SkipUnsetEnv bool
shellWord shellWord
}
// NewLex creates a new Lex which uses escapeToken to escape quotes.
@ -35,8 +44,9 @@ func NewLex(escapeToken rune) *Lex {
// and replace any env var references in 'word'. It will also
// return variables in word which were not found in the 'env' list,
// which is useful in later linting.
func (s *Lex) ProcessWord(word string, env []string) (string, map[string]struct{}, error) {
result, err := s.process(word, BuildEnvs(env))
// TODO: rename
func (s *Lex) ProcessWord(word string, env EnvGetter) (string, map[string]struct{}, error) {
result, err := s.process(word, env, true)
return result.Result, result.Unmatched, err
}
@ -47,18 +57,11 @@ func (s *Lex) ProcessWord(word string, env []string) (string, map[string]struct{
// this splitting is done **after** the env var substitutions are done.
// Note, each one is trimmed to remove leading and trailing spaces (unless
// they are quoted", but ProcessWord retains spaces between words.
func (s *Lex) ProcessWords(word string, env []string) ([]string, error) {
result, err := s.process(word, BuildEnvs(env))
func (s *Lex) ProcessWords(word string, env EnvGetter) ([]string, error) {
result, err := s.process(word, env, false)
return result.Words, err
}
// ProcessWordWithMap will use the 'env' list of environment variables,
// and replace any env var references in 'word'.
func (s *Lex) ProcessWordWithMap(word string, env map[string]string) (string, error) {
result, err := s.process(word, env)
return result.Result, err
}
type ProcessWordResult struct {
Result string
Words []string
@ -68,8 +71,26 @@ type ProcessWordResult struct {
// ProcessWordWithMatches will use the 'env' list of environment variables,
// replace any env var references in 'word' and return the env that were used.
func (s *Lex) ProcessWordWithMatches(word string, env map[string]string) (ProcessWordResult, error) {
sw := s.init(word, env)
func (s *Lex) ProcessWordWithMatches(word string, env EnvGetter) (ProcessWordResult, error) {
return s.process(word, env, true)
}
func (s *Lex) initWord(word string, env EnvGetter, capture bool) *shellWord {
sw := &s.shellWord
sw.Lex = s
sw.envs = env
sw.capture = capture
sw.rawEscapes = s.RawEscapes
if capture {
sw.matches = nil
sw.nonmatches = nil
}
sw.scanner.Init(strings.NewReader(word))
return sw
}
func (s *Lex) process(word string, env EnvGetter, capture bool) (ProcessWordResult, error) {
sw := s.initWord(word, env, capture)
word, words, err := sw.process(word)
return ProcessWordResult{
Result: word,
@ -79,47 +100,15 @@ func (s *Lex) ProcessWordWithMatches(word string, env map[string]string) (Proces
}, err
}
func (s *Lex) ProcessWordsWithMap(word string, env map[string]string) ([]string, error) {
result, err := s.process(word, env)
return result.Words, err
}
func (s *Lex) init(word string, env map[string]string) *shellWord {
sw := &shellWord{
envs: env,
escapeToken: s.escapeToken,
skipUnsetEnv: s.SkipUnsetEnv,
skipProcessQuotes: s.SkipProcessQuotes,
rawQuotes: s.RawQuotes,
rawEscapes: s.RawEscapes,
matches: make(map[string]struct{}),
nonmatches: make(map[string]struct{}),
}
sw.scanner.Init(strings.NewReader(word))
return sw
}
func (s *Lex) process(word string, env map[string]string) (*ProcessWordResult, error) {
sw := s.init(word, env)
word, words, err := sw.process(word)
return &ProcessWordResult{
Result: word,
Words: words,
Matched: sw.matches,
Unmatched: sw.nonmatches,
}, err
}
type shellWord struct {
scanner scanner.Scanner
envs map[string]string
escapeToken rune
rawQuotes bool
rawEscapes bool
skipUnsetEnv bool
skipProcessQuotes bool
matches map[string]struct{}
nonmatches map[string]struct{}
*Lex
wordsBuffer strings.Builder
scanner scanner.Scanner
envs EnvGetter
rawEscapes bool
capture bool // capture matches and nonmatches
matches map[string]struct{}
nonmatches map[string]struct{}
}
func (sw *shellWord) process(source string) (string, []string, error) {
@ -131,16 +120,16 @@ func (sw *shellWord) process(source string) (string, []string, error) {
}
type wordsStruct struct {
word string
buf *strings.Builder
words []string
inWord bool
}
func (w *wordsStruct) addChar(ch rune) {
if unicode.IsSpace(ch) && w.inWord {
if len(w.word) != 0 {
w.words = append(w.words, w.word)
w.word = ""
if w.buf.Len() != 0 {
w.words = append(w.words, w.buf.String())
w.buf.Reset()
w.inWord = false
}
} else if !unicode.IsSpace(ch) {
@ -149,7 +138,7 @@ func (w *wordsStruct) addChar(ch rune) {
}
func (w *wordsStruct) addRawChar(ch rune) {
w.word += string(ch)
w.buf.WriteRune(ch)
w.inWord = true
}
@ -160,16 +149,16 @@ func (w *wordsStruct) addString(str string) {
}
func (w *wordsStruct) addRawString(str string) {
w.word += str
w.buf.WriteString(str)
w.inWord = true
}
func (w *wordsStruct) getWords() []string {
if len(w.word) > 0 {
w.words = append(w.words, w.word)
if w.buf.Len() > 0 {
w.words = append(w.words, w.buf.String())
// Just in case we're called again by mistake
w.word = ""
w.buf.Reset()
w.inWord = false
}
return w.words
@ -178,13 +167,18 @@ func (w *wordsStruct) getWords() []string {
// Process the word, starting at 'pos', and stop when we get to the
// end of the word or the 'stopChar' character
func (sw *shellWord) processStopOn(stopChar rune, rawEscapes bool) (string, []string, error) {
var result bytes.Buffer
// result buffer can't be currently shared for shellWord as it is called internally
// by processDollar
var result strings.Builder
sw.wordsBuffer.Reset()
var words wordsStruct
words.buf = &sw.wordsBuffer
// no need to initialize all the time
var charFuncMapping = map[rune]func() (string, error){
'$': sw.processDollar,
}
if !sw.skipProcessQuotes {
if !sw.SkipProcessQuotes {
charFuncMapping['\''] = sw.processSingleQuote
charFuncMapping['"'] = sw.processDoubleQuote
}
@ -261,7 +255,7 @@ func (sw *shellWord) processSingleQuote() (string, error) {
var result bytes.Buffer
ch := sw.scanner.Next()
if sw.rawQuotes {
if sw.RawQuotes {
result.WriteRune(ch)
}
@ -271,7 +265,7 @@ func (sw *shellWord) processSingleQuote() (string, error) {
case scanner.EOF:
return "", errors.New("unexpected end of statement while looking for matching single-quote")
case '\'':
if sw.rawQuotes {
if sw.RawQuotes {
result.WriteRune(ch)
}
return result.String(), nil
@ -296,7 +290,7 @@ func (sw *shellWord) processDoubleQuote() (string, error) {
var result bytes.Buffer
ch := sw.scanner.Next()
if sw.rawQuotes {
if sw.RawQuotes {
result.WriteRune(ch)
}
@ -306,7 +300,7 @@ func (sw *shellWord) processDoubleQuote() (string, error) {
return "", errors.New("unexpected end of statement while looking for matching double-quote")
case '"':
ch := sw.scanner.Next()
if sw.rawQuotes {
if sw.RawQuotes {
result.WriteRune(ch)
}
return result.String(), nil
@ -350,7 +344,7 @@ func (sw *shellWord) processDollar() (string, error) {
return "$", nil
}
value, found := sw.getEnv(name)
if !found && sw.skipUnsetEnv {
if !found && sw.SkipUnsetEnv {
return "$" + name, nil
}
return value, nil
@ -373,7 +367,7 @@ func (sw *shellWord) processDollar() (string, error) {
case '}':
// Normal ${xx} case
value, set := sw.getEnv(name)
if !set && sw.skipUnsetEnv {
if !set && sw.SkipUnsetEnv {
return fmt.Sprintf("${%s}", name), nil
}
return value, nil
@ -395,7 +389,7 @@ func (sw *shellWord) processDollar() (string, error) {
// Grab the current value of the variable in question so we
// can use it to determine what to do based on the modifier
value, set := sw.getEnv(name)
if sw.skipUnsetEnv && !set {
if sw.SkipUnsetEnv && !set {
return fmt.Sprintf("${%s%s%s}", name, chs, word), nil
}
@ -465,7 +459,7 @@ func (sw *shellWord) processDollar() (string, error) {
}
value, set := sw.getEnv(name)
if sw.skipUnsetEnv && !set {
if sw.SkipUnsetEnv && !set {
return fmt.Sprintf("${%s/%s/%s}", name, pattern, replacement), nil
}
@ -528,34 +522,51 @@ func isSpecialParam(char rune) bool {
}
func (sw *shellWord) getEnv(name string) (string, bool) {
for key, value := range sw.envs {
if EqualEnvKeys(name, key) {
v, ok := sw.envs.Get(name)
if ok {
if sw.capture {
if sw.matches == nil {
sw.matches = make(map[string]struct{})
}
sw.matches[name] = struct{}{}
return value, true
}
return v, true
}
if sw.capture {
if sw.nonmatches == nil {
sw.nonmatches = make(map[string]struct{})
}
sw.nonmatches[name] = struct{}{}
}
sw.nonmatches[name] = struct{}{}
return "", false
}
func BuildEnvs(env []string) map[string]string {
func EnvsFromSlice(env []string) EnvGetter {
envs := map[string]string{}
keys := make([]string, 0, len(env))
for _, e := range env {
i := strings.Index(e, "=")
if i < 0 {
envs[e] = ""
} else {
k := e[:i]
v := e[i+1:]
// overwrite value if key already exists
envs[k] = v
}
k, v, _ := strings.Cut(e, "=")
keys = append(keys, k)
envs[NormalizeEnvKey(k)] = v
}
return &envGetter{env: envs, keys: keys}
}
return envs
// envGetter is the default EnvGetter implementation backed by a map.
// It keeps the keys in a separate slice so Keys can report them in their
// original form and insertion order, while lookups go through the
// normalized map.
type envGetter struct {
	env  map[string]string // normalized key -> value
	keys []string          // keys as originally supplied, in input order
}

// Compile-time check that envGetter satisfies EnvGetter.
var _ EnvGetter = &envGetter{}
// Get returns the value stored for key and whether it is present. The key
// is first normalized with NormalizeEnvKey so lookups use the same
// canonical form that was applied when the map was built.
func (e *envGetter) Get(key string) (string, bool) {
	v, ok := e.env[NormalizeEnvKey(key)]
	return v, ok
}
// Keys returns the environment keys as they were originally supplied
// (non-normalized), in the order the getter collected them.
func (e *envGetter) Keys() []string {
	return e.keys
}
// convertShellPatternToRegex converts a shell-like wildcard pattern
@ -647,11 +658,7 @@ func reversePattern(pattern string) string {
func reverseString(str string) string {
out := []rune(str)
outIdx := len(out) - 1
for i := 0; i < outIdx; i++ {
out[i], out[outIdx] = out[outIdx], out[i]
outIdx--
}
slices.Reverse(out)
return string(out)
}

View File

@ -1,7 +1,6 @@
package dockerui
import (
"encoding/csv"
"net"
"strconv"
"strings"
@ -13,6 +12,7 @@ import (
"github.com/moby/buildkit/solver/pb"
ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/pkg/errors"
"github.com/tonistiigi/go-csvvalue"
)
func parsePlatforms(v string) ([]ocispecs.Platform, error) {
@ -45,8 +45,7 @@ func parseExtraHosts(v string) ([]llb.HostIP, error) {
return nil, nil
}
out := make([]llb.HostIP, 0)
csvReader := csv.NewReader(strings.NewReader(v))
fields, err := csvReader.Read()
fields, err := csvvalue.Fields(v, nil)
if err != nil {
return nil, err
}
@ -80,8 +79,7 @@ func parseUlimits(v string) ([]pb.Ulimit, error) {
return nil, nil
}
out := make([]pb.Ulimit, 0)
csvReader := csv.NewReader(strings.NewReader(v))
fields, err := csvReader.Read()
fields, err := csvvalue.Fields(v, nil)
if err != nil {
return nil, err
}

View File

@ -14,6 +14,7 @@ import (
controlapi "github.com/moby/buildkit/api/services/control"
"github.com/moby/buildkit/client/llb"
"github.com/moby/buildkit/frontend/attestations"
"github.com/moby/buildkit/frontend/dockerfile/linter"
"github.com/moby/buildkit/frontend/gateway/client"
"github.com/moby/buildkit/solver/pb"
"github.com/moby/buildkit/util/flightcontrol"
@ -65,7 +66,7 @@ type Config struct {
ShmSize int64
Target string
Ulimits []pb.Ulimit
LinterConfig *string
LinterConfig *linter.Config
CacheImports []client.CacheOptionsEntry
TargetPlatforms []ocispecs.Platform // nil means default
@ -78,8 +79,7 @@ type Client struct {
Config
client client.Client
ignoreCache []string
bctx *buildContext
g flightcontrol.Group[*buildContext]
g flightcontrol.CachedGroup[*buildContext]
bopts client.BuildOpts
dockerignore []byte
@ -281,21 +281,17 @@ func (bc *Client) init() error {
bc.Hostname = opts[keyHostname]
if v, ok := opts[keyDockerfileLintArg]; ok {
bc.LinterConfig = &v
bc.LinterConfig, err = linter.ParseLintOptions(v)
if err != nil {
return errors.Wrapf(err, "failed to parse %s", keyDockerfileLintArg)
}
}
return nil
}
func (bc *Client) buildContext(ctx context.Context) (*buildContext, error) {
return bc.g.Do(ctx, "initcontext", func(ctx context.Context) (*buildContext, error) {
if bc.bctx != nil {
return bc.bctx, nil
}
bctx, err := bc.initContext(ctx)
if err == nil {
bc.bctx = bctx
}
return bctx, err
return bc.initContext(ctx)
})
}

View File

@ -1,17 +1,15 @@
package identity
import (
cryptorand "crypto/rand"
"crypto/rand"
"io"
"math/big"
"github.com/pkg/errors"
)
var (
// idReader is used for random id generation. This declaration allows us to
// replace it for testing.
idReader = cryptorand.Reader
idReader = rand.Reader
)
// parameters for random identifier generation. We can tweak this when there is
@ -46,7 +44,7 @@ func NewID() string {
var p [randomIDEntropyBytes]byte
if _, err := io.ReadFull(idReader, p[:]); err != nil {
panic(errors.Wrap(err, "failed to read random bytes: %v"))
panic("failed to read random bytes: " + err.Error())
}
p[0] |= 0x80 // set high bit to avoid the need for padding

View File

@ -0,0 +1,63 @@
package flightcontrol
import (
"context"
"sync"
"github.com/pkg/errors"
)
// CachedGroup is a flightcontrol synchronization group that memoizes the
// results of a function and returns the cached result if the function is
// called with the same key.
// Don't use with long-running groups as the results are cached indefinitely.
type CachedGroup[T any] struct {
	// CacheError defines if error results should also be cached.
	// It is not safe to change this value after the first call to Do.
	// Context cancellation errors are never cached.
	CacheError bool

	// g deduplicates concurrent in-flight calls for the same key.
	g Group[T]
	// mu guards cache.
	mu sync.Mutex
	// cache holds the memoized per-key results; allocated lazily in Do.
	cache map[string]result[T]
}
// result is a memoized outcome of one keyed call: the returned value and
// the error (errors are only stored when CachedGroup.CacheError is set).
type result[T any] struct {
	v   T
	err error
}
// Do executes a context function synchronized by the key or returns the cached result for the key.
func (g *CachedGroup[T]) Do(ctx context.Context, key string, fn func(ctx context.Context) (T, error)) (T, error) {
	// The outer Group.Do deduplicates concurrent callers for the same key,
	// so the closure below runs for at most one caller at a time per key;
	// the long-lived cache is consulted and filled inside it.
	return g.g.Do(ctx, key, func(ctx context.Context) (T, error) {
		g.mu.Lock()
		if v, ok := g.cache[key]; ok {
			g.mu.Unlock()
			if v.err != nil {
				// A cached error is only honored when error caching is on;
				// otherwise fall through and re-run fn.
				if g.CacheError {
					return v.v, v.err
				}
			} else {
				return v.v, nil
			}
		}
		g.mu.Unlock()
		v, err := fn(ctx)
		if err != nil {
			// If the error was caused by this context being canceled,
			// return it without caching: a later caller with a live
			// context should get a fresh attempt.
			select {
			case <-ctx.Done():
				if errors.Is(err, context.Cause(ctx)) {
					return v, err
				}
			default:
			}
		}
		if err == nil || g.CacheError {
			g.mu.Lock()
			if g.cache == nil {
				g.cache = make(map[string]result[T])
			}
			g.cache[key] = result[T]{v: v, err: err}
			g.mu.Unlock()
		}
		return v, err
	})
}

View File

@ -1,6 +1,7 @@
package progress
import (
"maps"
"sort"
"sync"
"time"
@ -83,9 +84,7 @@ func (ps *MultiWriter) WriteRawProgress(p *Progress) error {
meta := p.meta
if len(ps.meta) > 0 {
meta = map[string]interface{}{}
for k, v := range p.meta {
meta[k] = v
}
maps.Copy(meta, p.meta)
for k, v := range ps.meta {
if _, ok := meta[k]; !ok {
meta[k] = v

View File

@ -3,6 +3,7 @@ package progress
import (
"context"
"io"
"maps"
"sort"
"sync"
"time"
@ -207,9 +208,7 @@ func pipe() (*progressReader, *progressWriter, func(error)) {
func newWriter(pw *progressWriter) *progressWriter {
meta := make(map[string]interface{})
for k, v := range pw.meta {
meta[k] = v
}
maps.Copy(meta, pw.meta)
pw = &progressWriter{
reader: pw.reader,
meta: meta,
@ -240,9 +239,7 @@ func (pw *progressWriter) WriteRawProgress(p *Progress) error {
meta := p.meta
if len(pw.meta) > 0 {
meta = map[string]interface{}{}
for k, v := range p.meta {
meta[k] = v
}
maps.Copy(meta, p.meta)
for k, v := range pw.meta {
if _, ok := meta[k]; !ok {
meta[k] = v

View File

@ -1,13 +1,13 @@
package progressui
import (
"encoding/csv"
"errors"
"strconv"
"strings"
"github.com/moby/buildkit/util/bklog"
"github.com/morikuni/aec"
"github.com/tonistiigi/go-csvvalue"
)
var termColorMap = map[string]aec.ANSI{
@ -59,9 +59,9 @@ func setUserDefinedTermColors(colorsEnv string) {
}
func readBuildkitColorsEnv(colorsEnv string) []string {
csvReader := csv.NewReader(strings.NewReader(colorsEnv))
csvReader := csvvalue.NewParser()
csvReader.Comma = ':'
fields, err := csvReader.Read()
fields, err := csvReader.Fields(colorsEnv, nil)
if err != nil {
bklog.L.WithError(err).Warnf("Could not parse BUILDKIT_COLORS. Falling back to defaults.")
return nil
@ -70,8 +70,7 @@ func readBuildkitColorsEnv(colorsEnv string) []string {
}
func readRGB(v string) aec.ANSI {
csvReader := csv.NewReader(strings.NewReader(v))
fields, err := csvReader.Read()
fields, err := csvvalue.Fields(v, nil)
if err != nil {
bklog.L.WithError(err).Warnf("Could not parse value %s as valid comma-separated RGB color. Ignoring.", v)
return nil

View File

@ -4,6 +4,7 @@ import (
"bytes"
"context"
"fmt"
"maps"
"math/rand"
"os"
"os/exec"
@ -137,9 +138,7 @@ func WithMirroredImages(m map[string]string) TestOpt {
if tc.mirroredImages == nil {
tc.mirroredImages = map[string]string{}
}
for k, v := range m {
tc.mirroredImages[k] = v
}
maps.Copy(tc.mirroredImages, m)
}
}
@ -418,9 +417,7 @@ func prepareValueMatrix(tc testConf) []matrixValue {
for _, c := range current {
vv := newMatrixValue(featureName, featureValue, v)
vv.fn = append(vv.fn, c.fn...)
for k, v := range c.values {
vv.values[k] = v
}
maps.Copy(vv.values, c.values)
m = append(m, vv)
}
}