Compare commits


3 Commits

Author SHA1 Message Date
Tõnis Tiigi
59582a88fc Merge pull request #2471 from tonistiigi/v0.14.1-picks
[v0.14] cherry picks for v0.14.1
2024-05-22 07:43:43 -07:00
Tonis Tiigi
a3b1fae96d driver: handle nil logger for bootstrap
resolveNode methods can be called with a nil logger. Although
the results should already be cached in the resolver,
this makes the protection more explicit.

Signed-off-by: Tonis Tiigi <tonistiigi@gmail.com>
(cherry picked from commit 035236a5ed)
2024-05-22 07:17:53 -07:00
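The fix above is a defensive guard. A minimal Go sketch of the idea (all names are hypothetical, not the actual buildx code): fall back to a no-op logger when a caller passes nil.

```go
// Minimal sketch, not the buildx implementation; names are hypothetical.
package main

import "fmt"

// Logger stands in for the progress logger passed into bootstrap.
type Logger func(msg string)

// noopLogger discards every message.
func noopLogger(string) {}

// Boot guards against callers (such as resolveNode paths) passing a nil
// logger by substituting a no-op implementation before using it.
func Boot(l Logger) error {
	if l == nil {
		l = noopLogger
	}
	l("booting driver")
	// ... establish the driver connection here ...
	return nil
}

func main() {
	// Safe even when the caller has no logger to pass.
	fmt.Println(Boot(nil))
}
```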
Tonis Tiigi
6a84f43fba build: add cache to resolvedNode
Currently it is possible for boot() to be called
multiple times, resulting in multiple slow requests to
establish a connection (e.g. multiple container inspects
for the container driver).

Signed-off-by: Tonis Tiigi <tonistiigi@gmail.com>
(cherry picked from commit 99777eaf34)
2024-05-22 07:17:46 -07:00
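The commit above describes memoizing the boot step so the expensive setup runs only once. A minimal Go sketch of that pattern using sync.Once (hypothetical types and endpoint, not the buildx implementation):

```go
// Minimal sketch, not the buildx implementation: cache boot() so the slow
// setup runs once no matter how many times it is requested.
package main

import (
	"fmt"
	"sync"
)

type resolvedNode struct {
	once sync.Once
	conn string // whatever the slow boot establishes
	err  error
}

// boot performs the expensive work (e.g. a container inspect) exactly once;
// subsequent calls return the cached result.
func (n *resolvedNode) boot() (string, error) {
	n.once.Do(func() {
		fmt.Println("establishing connection (slow)...")
		n.conn = "tcp://builder:1234" // hypothetical endpoint
	})
	return n.conn, n.err
}

func main() {
	n := &resolvedNode{}
	for i := 0; i < 3; i++ {
		c, _ := n.boot()
		fmt.Println("got", c)
	}
}
```

Note that sync.Once caches the first outcome, including a first error; a cache that should retry after a failed boot would need a different scheme.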
767 changed files with 13362 additions and 62126 deletions

.github/labeler.yml
View File

@@ -1,104 +0,0 @@
# Add 'area/project' label to changes in basic project documentation and .github folder, excluding .github/workflows
area/project:
- all:
- changed-files:
- any-glob-to-any-file:
- .github/**
- LICENSE
- AUTHORS
- MAINTAINERS
- PROJECT.md
- README.md
- .gitignore
- codecov.yml
- all-globs-to-all-files: '!.github/workflows/*'
# Add 'area/github-actions' label to changes in the .github/workflows folder
area/ci:
- changed-files:
- any-glob-to-any-file: '.github/workflows/**'
# Add 'area/bake' label to changes in the bake
area/bake:
- changed-files:
- any-glob-to-any-file: 'bake/**'
# Add 'area/bake/compose' label to changes in the bake+compose
area/bake/compose:
- changed-files:
- any-glob-to-any-file:
- bake/compose.go
- bake/compose_test.go
# Add 'area/build' label to changes in build files
area/build:
- changed-files:
- any-glob-to-any-file: 'build/**'
# Add 'area/builder' label to changes in builder files
area/builder:
- changed-files:
- any-glob-to-any-file: 'builder/**'
# Add 'area/cli' label to changes in the CLI
area/cli:
- changed-files:
- any-glob-to-any-file:
- cmd/**
- commands/**
# Add 'area/controller' label to changes in the controller
area/controller:
- changed-files:
- any-glob-to-any-file: 'controller/**'
# Add 'area/docs' label to markdown files in the docs folder
area/docs:
- changed-files:
- any-glob-to-any-file: 'docs/**/*.md'
# Add 'area/dependencies' label to changes in go dependency files
area/dependencies:
- changed-files:
- any-glob-to-any-file:
- go.mod
- go.sum
- vendor/**
# Add 'area/driver' label to changes in the driver folder
area/driver:
- changed-files:
- any-glob-to-any-file: 'driver/**'
# Add 'area/driver/docker' label to changes in the docker driver
area/driver/docker:
- changed-files:
- any-glob-to-any-file: 'driver/docker/**'
# Add 'area/driver/docker-container' label to changes in the docker-container driver
area/driver/docker-container:
- changed-files:
- any-glob-to-any-file: 'driver/docker-container/**'
# Add 'area/driver/kubernetes' label to changes in the kubernetes driver
area/driver/kubernetes:
- changed-files:
- any-glob-to-any-file: 'driver/kubernetes/**'
# Add 'area/driver/remote' label to changes in the remote driver
area/driver/remote:
- changed-files:
- any-glob-to-any-file: 'driver/remote/**'
# Add 'area/hack' label to changes in the hack folder
area/hack:
- changed-files:
- any-glob-to-any-file: 'hack/**'
# Add 'area/tests' label to changes in test files
area/tests:
- changed-files:
- any-glob-to-any-file:
- tests/**
- '**/*_test.go'

View File

@@ -26,17 +26,16 @@ env:
TEST_CACHE_SCOPE: "test" TEST_CACHE_SCOPE: "test"
TESTFLAGS: "-v --parallel=6 --timeout=30m" TESTFLAGS: "-v --parallel=6 --timeout=30m"
GOTESTSUM_FORMAT: "standard-verbose" GOTESTSUM_FORMAT: "standard-verbose"
GO_VERSION: "1.22" GO_VERSION: "1.21"
GOTESTSUM_VERSION: "v1.9.0" # same as one in Dockerfile GOTESTSUM_VERSION: "v1.9.0" # same as one in Dockerfile
jobs: jobs:
test-integration: test-integration:
runs-on: ubuntu-24.04 runs-on: ubuntu-22.04
env: env:
TESTFLAGS_DOCKER: "-v --parallel=1 --timeout=30m" TESTFLAGS_DOCKER: "-v --parallel=1 --timeout=30m"
TEST_IMAGE_BUILD: "0" TEST_IMAGE_BUILD: "0"
TEST_IMAGE_ID: "buildx-tests" TEST_IMAGE_ID: "buildx-tests"
TEST_COVERAGE: "1"
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@@ -44,9 +43,9 @@ jobs:
- master - master
- latest - latest
- buildx-stable-1 - buildx-stable-1
- v0.14.1 - v0.13.1
- v0.13.2
- v0.12.5 - v0.12.5
- v0.11.6
worker: worker:
- docker-container - docker-container
- remote - remote
@@ -106,7 +105,7 @@ jobs:
buildkitd-flags: --debug buildkitd-flags: --debug
- -
name: Build test image name: Build test image
uses: docker/bake-action@v5 uses: docker/bake-action@v4
with: with:
targets: integration-test targets: integration-test
set: | set: |
@@ -126,7 +125,6 @@ jobs:
directory: ./bin/testreports directory: ./bin/testreports
flags: integration flags: integration
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}
disable_file_fixes: true
- -
name: Generate annotations name: Generate annotations
if: always() if: always()
@@ -147,7 +145,7 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
os: os:
- ubuntu-24.04 - ubuntu-22.04
- macos-12 - macos-12
- windows-2022 - windows-2022
env: env:
@@ -199,7 +197,6 @@ jobs:
env_vars: RUNNER_OS env_vars: RUNNER_OS
flags: unit flags: unit
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}
disable_file_fixes: true
- -
name: Generate annotations name: Generate annotations
if: always() if: always()
@@ -215,7 +212,7 @@ jobs:
path: ${{ env.TESTREPORTS_BASEDIR }} path: ${{ env.TESTREPORTS_BASEDIR }}
prepare-binaries: prepare-binaries:
runs-on: ubuntu-24.04 runs-on: ubuntu-22.04
outputs: outputs:
matrix: ${{ steps.platforms.outputs.matrix }} matrix: ${{ steps.platforms.outputs.matrix }}
steps: steps:
@@ -233,7 +230,7 @@ jobs:
echo ${{ steps.platforms.outputs.matrix }} echo ${{ steps.platforms.outputs.matrix }}
binaries: binaries:
runs-on: ubuntu-24.04 runs-on: ubuntu-22.04
needs: needs:
- prepare-binaries - prepare-binaries
strategy: strategy:
@@ -276,7 +273,7 @@ jobs:
if-no-files-found: error if-no-files-found: error
bin-image: bin-image:
runs-on: ubuntu-24.04 runs-on: ubuntu-22.04
needs: needs:
- test-integration - test-integration
- test-unit - test-unit
@@ -316,7 +313,7 @@ jobs:
password: ${{ secrets.DOCKERPUBLICBOT_WRITE_PAT }} password: ${{ secrets.DOCKERPUBLICBOT_WRITE_PAT }}
- -
name: Build and push image name: Build and push image
uses: docker/bake-action@v5 uses: docker/bake-action@v4
with: with:
files: | files: |
./docker-bake.hcl ./docker-bake.hcl
@@ -329,7 +326,7 @@ jobs:
*.cache-to=type=gha,scope=bin-image,mode=max *.cache-to=type=gha,scope=bin-image,mode=max
release: release:
runs-on: ubuntu-24.04 runs-on: ubuntu-22.04
needs: needs:
- test-integration - test-integration
- test-unit - test-unit
@@ -359,7 +356,7 @@ jobs:
- -
name: GitHub Release name: GitHub Release
if: startsWith(github.ref, 'refs/tags/v') if: startsWith(github.ref, 'refs/tags/v')
uses: softprops/action-gh-release@a74c6b72af54cfa997e81df42d94703d6313a2d0 # v2.0.6 uses: softprops/action-gh-release@9d7c94cfd0a1f3ed45544c887983e9fa900f0564 # v2.0.4
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with: with:

View File

@@ -13,11 +13,11 @@ permissions:
security-events: write security-events: write
env: env:
GO_VERSION: "1.22" GO_VERSION: "1.21"
jobs: jobs:
codeql: codeql:
runs-on: ubuntu-24.04 runs-on: ubuntu-latest
steps: steps:
- -
name: Checkout name: Checkout

View File

@@ -12,7 +12,7 @@ on:
jobs: jobs:
open-pr: open-pr:
runs-on: ubuntu-24.04 runs-on: ubuntu-22.04
if: ${{ (github.event.release.prerelease != true || github.event.inputs.tag != '') && github.repository == 'docker/buildx' }} if: ${{ (github.event.release.prerelease != true || github.event.inputs.tag != '') && github.repository == 'docker/buildx' }}
steps: steps:
- -
@@ -36,7 +36,7 @@ jobs:
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v3
- -
name: Generate yaml name: Generate yaml
uses: docker/bake-action@v5 uses: docker/bake-action@v4
with: with:
source: ${{ github.server_url }}/${{ github.repository }}.git#${{ env.RELEASE_NAME }} source: ${{ github.server_url }}/${{ github.repository }}.git#${{ env.RELEASE_NAME }}
targets: update-docs targets: update-docs
@@ -57,7 +57,7 @@ jobs:
VENDOR_MODULE: github.com/docker/buildx@${{ env.RELEASE_NAME }} VENDOR_MODULE: github.com/docker/buildx@${{ env.RELEASE_NAME }}
- -
name: Create PR on docs repo name: Create PR on docs repo
uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6.1.0 uses: peter-evans/create-pull-request@c55203cfde3e5c11a452d352b4393e68b85b4533 # v6.0.3
with: with:
token: ${{ secrets.GHPAT_DOCS_DISPATCH }} token: ${{ secrets.GHPAT_DOCS_DISPATCH }}
push-to-fork: docker-tools-robot/docker.github.io push-to-fork: docker-tools-robot/docker.github.io

View File

@@ -22,7 +22,7 @@ on:
jobs: jobs:
docs-yaml: docs-yaml:
runs-on: ubuntu-24.04 runs-on: ubuntu-22.04
steps: steps:
- -
name: Checkout name: Checkout
@@ -34,7 +34,7 @@ jobs:
version: latest version: latest
- -
name: Build reference YAML docs name: Build reference YAML docs
uses: docker/bake-action@v5 uses: docker/bake-action@v4
with: with:
targets: update-docs targets: update-docs
provenance: false provenance: false

View File

@@ -22,7 +22,7 @@ env:
jobs: jobs:
build: build:
runs-on: ubuntu-24.04 runs-on: ubuntu-22.04
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v4
@@ -33,7 +33,7 @@ jobs:
version: latest version: latest
- -
name: Build name: Build
uses: docker/bake-action@v5 uses: docker/bake-action@v4
with: with:
targets: binaries targets: binaries
set: | set: |
@@ -84,8 +84,6 @@ jobs:
endpoint: tcp://localhost:1234 endpoint: tcp://localhost:1234
- driver: docker-container - driver: docker-container
metadata-provenance: max metadata-provenance: max
- driver: docker-container
metadata-warnings: true
exclude: exclude:
- driver: docker - driver: docker
multi-node: mnode-true multi-node: mnode-true
@@ -136,9 +134,6 @@ jobs:
if [ -n "${{ matrix.metadata-provenance }}" ]; then if [ -n "${{ matrix.metadata-provenance }}" ]; then
echo "BUILDX_METADATA_PROVENANCE=${{ matrix.metadata-provenance }}" >> $GITHUB_ENV echo "BUILDX_METADATA_PROVENANCE=${{ matrix.metadata-provenance }}" >> $GITHUB_ENV
fi fi
if [ -n "${{ matrix.metadata-warnings }}" ]; then
echo "BUILDX_METADATA_WARNINGS=${{ matrix.metadata-warnings }}" >> $GITHUB_ENV
fi
- -
name: Install k3s name: Install k3s
if: matrix.driver == 'kubernetes' if: matrix.driver == 'kubernetes'

View File

@@ -1,19 +0,0 @@
name: labeler
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
on:
pull_request_target:
jobs:
labeler:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-latest
steps:
-
name: Run
uses: actions/labeler@v5

View File

@@ -17,63 +17,16 @@ on:
- '.github/releases.json' - '.github/releases.json'
jobs: jobs:
prepare:
runs-on: ubuntu-24.04
outputs:
includes: ${{ steps.matrix.outputs.includes }}
steps:
-
name: Checkout
uses: actions/checkout@v4
-
name: Matrix
id: matrix
uses: actions/github-script@v7
with:
script: |
let def = {};
await core.group(`Parsing definition`, async () => {
const printEnv = Object.assign({}, process.env, {
GOLANGCI_LINT_MULTIPLATFORM: process.env.GITHUB_REPOSITORY === 'docker/buildx' ? '1' : ''
});
const resPrint = await exec.getExecOutput('docker', ['buildx', 'bake', 'validate', '--print'], {
ignoreReturnCode: true,
env: printEnv
});
if (resPrint.stderr.length > 0 && resPrint.exitCode != 0) {
throw new Error(res.stderr);
}
def = JSON.parse(resPrint.stdout.trim());
});
await core.group(`Generating matrix`, async () => {
const includes = [];
for (const targetName of Object.keys(def.target)) {
const target = def.target[targetName];
if (target.platforms && target.platforms.length > 0) {
target.platforms.forEach(platform => {
includes.push({
target: targetName,
platform: platform
});
});
} else {
includes.push({
target: targetName
});
}
}
core.info(JSON.stringify(includes, null, 2));
core.setOutput('includes', JSON.stringify(includes));
});
validate: validate:
runs-on: ubuntu-24.04 runs-on: ubuntu-22.04
needs:
- prepare
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
include: ${{ fromJson(needs.prepare.outputs.includes) }} target:
- lint
- validate-vendor
- validate-docs
- validate-generated-files
steps: steps:
- -
name: Prepare name: Prepare
@@ -90,9 +43,6 @@ jobs:
with: with:
version: latest version: latest
- -
name: Validate name: Run
uses: docker/bake-action@v5 run: |
with: make ${{ matrix.target }}
targets: ${{ matrix.target }}
set: |
*.platform=${{ matrix.platform }}

View File

@@ -25,30 +25,17 @@ linters:
disable-all: true disable-all: true
linters-settings: linters-settings:
govet:
enable:
- nilness
- unusedwrite
# enable-all: true
# disable:
# - fieldalignment
# - shadow
depguard: depguard:
rules: rules:
main: main:
deny: deny:
- pkg: "github.com/containerd/containerd/errdefs" # The io/ioutil package has been deprecated.
desc: The containerd errdefs package was migrated to a separate module. Use github.com/containerd/errdefs instead. # https://go.dev/doc/go1.16#ioutil
- pkg: "github.com/containerd/containerd/log"
desc: The containerd log package was migrated to a separate module. Use github.com/containerd/log instead.
- pkg: "github.com/containerd/containerd/platforms"
desc: The containerd platforms package was migrated to a separate module. Use github.com/containerd/platforms instead.
- pkg: "io/ioutil" - pkg: "io/ioutil"
desc: The io/ioutil package has been deprecated. desc: The io/ioutil package has been deprecated.
forbidigo: forbidigo:
forbid: forbid:
- '^fmt\.Errorf(# use errors\.Errorf instead)?$' - '^fmt\.Errorf(# use errors\.Errorf instead)?$'
- '^platforms\.DefaultString(# use platforms\.Format(platforms\.DefaultSpec()) instead\.)?$'
gosec: gosec:
excludes: excludes:
- G204 # Audit use of command execution - G204 # Audit use of command execution

View File

@@ -1,13 +1,13 @@
# syntax=docker/dockerfile:1 # syntax=docker/dockerfile:1
ARG GO_VERSION=1.22 ARG GO_VERSION=1.21
ARG XX_VERSION=1.4.0 ARG XX_VERSION=1.4.0
# for testing # for testing
ARG DOCKER_VERSION=27.0.3 ARG DOCKER_VERSION=26.0.0
ARG GOTESTSUM_VERSION=v1.9.0 ARG GOTESTSUM_VERSION=v1.9.0
ARG REGISTRY_VERSION=2.8.0 ARG REGISTRY_VERSION=2.8.0
ARG BUILDKIT_VERSION=v0.14.1 ARG BUILDKIT_VERSION=v0.13.1
ARG UNDOCK_VERSION=0.7.0 ARG UNDOCK_VERSION=0.7.0
FROM --platform=$BUILDPLATFORM tonistiigi/xx:${XX_VERSION} AS xx FROM --platform=$BUILDPLATFORM tonistiigi/xx:${XX_VERSION} AS xx
@@ -27,36 +27,10 @@ WORKDIR /src
FROM gobase AS gotestsum FROM gobase AS gotestsum
ARG GOTESTSUM_VERSION ARG GOTESTSUM_VERSION
ENV GOFLAGS="" ENV GOFLAGS=
RUN --mount=target=/root/.cache,type=cache <<EOT RUN --mount=target=/root/.cache,type=cache \
set -ex GOBIN=/out/ go install "gotest.tools/gotestsum@${GOTESTSUM_VERSION}" && \
go install "gotest.tools/gotestsum@${GOTESTSUM_VERSION}" /out/gotestsum --version
go install "github.com/wadey/gocovmerge@latest"
mkdir /out
/go/bin/gotestsum --version
mv /go/bin/gotestsum /out
mv /go/bin/gocovmerge /out
EOT
COPY --chmod=755 <<"EOF" /out/gotestsumandcover
#!/bin/sh
set -x
if [ -z "$GO_TEST_COVERPROFILE" ]; then
exec gotestsum "$@"
fi
coverdir="$(dirname "$GO_TEST_COVERPROFILE")"
mkdir -p "$coverdir/helpers"
gotestsum "$@" "-coverprofile=$GO_TEST_COVERPROFILE"
ecode=$?
go tool covdata textfmt -i=$coverdir/helpers -o=$coverdir/helpers-report.txt
gocovmerge "$coverdir/helpers-report.txt" "$GO_TEST_COVERPROFILE" > "$coverdir/merged-report.txt"
mv "$coverdir/merged-report.txt" "$GO_TEST_COVERPROFILE"
rm "$coverdir/helpers-report.txt"
for f in "$coverdir/helpers"/*; do
rm "$f"
done
rmdir "$coverdir/helpers"
exit $ecode
EOF
FROM gobase AS buildx-version FROM gobase AS buildx-version
RUN --mount=type=bind,target=. <<EOT RUN --mount=type=bind,target=. <<EOT
@@ -68,7 +42,6 @@ EOT
FROM gobase AS buildx-build FROM gobase AS buildx-build
ARG TARGETPLATFORM ARG TARGETPLATFORM
ARG GO_EXTRA_FLAGS
RUN --mount=type=bind,target=. \ RUN --mount=type=bind,target=. \
--mount=type=cache,target=/root/.cache \ --mount=type=cache,target=/root/.cache \
--mount=type=cache,target=/go/pkg/mod \ --mount=type=cache,target=/go/pkg/mod \
@@ -115,7 +88,7 @@ RUN apk add --no-cache \
shadow-uidmap \ shadow-uidmap \
xfsprogs \ xfsprogs \
xz xz
COPY --link --from=gotestsum /out /usr/bin/ COPY --link --from=gotestsum /out/gotestsum /usr/bin/
COPY --link --from=registry /bin/registry /usr/bin/ COPY --link --from=registry /bin/registry /usr/bin/
COPY --link --from=docker-engine / /usr/bin/ COPY --link --from=docker-engine / /usr/bin/
COPY --link --from=docker-cli / /usr/bin/ COPY --link --from=docker-cli / /usr/bin/

View File

@@ -153,7 +153,6 @@ made through a pull request.
"akihirosuda", "akihirosuda",
"crazy-max", "crazy-max",
"jedevc", "jedevc",
"jsternberg",
"tiborvass", "tiborvass",
"tonistiigi", "tonistiigi",
] ]
@@ -195,11 +194,6 @@ made through a pull request.
Email = "me@jedevc.com" Email = "me@jedevc.com"
GitHub = "jedevc" GitHub = "jedevc"
[people.jsternberg]
Name = "Jonathan Sternberg"
Email = "jonathan.sternberg@docker.com"
GitHub = "jsternberg"
[people.thajeztah] [people.thajeztah]
Name = "Sebastiaan van Stijn" Name = "Sebastiaan van Stijn"
Email = "github@gone.nl" Email = "github@gone.nl"

View File

@@ -8,8 +8,6 @@ endif
export BUILDX_CMD ?= docker buildx export BUILDX_CMD ?= docker buildx
BAKE_TARGETS := binaries binaries-cross lint lint-gopls validate-vendor validate-docs validate-authors validate-generated-files
.PHONY: all .PHONY: all
all: binaries all: binaries
@@ -21,9 +19,13 @@ build:
shell: shell:
./hack/shell ./hack/shell
.PHONY: $(BAKE_TARGETS) .PHONY: binaries
$(BAKE_TARGETS): binaries:
$(BUILDX_CMD) bake $@ $(BUILDX_CMD) bake binaries
.PHONY: binaries-cross
binaries-cross:
$(BUILDX_CMD) bake binaries-cross
.PHONY: install .PHONY: install
install: binaries install: binaries
@@ -37,6 +39,10 @@ release:
.PHONY: validate-all .PHONY: validate-all
validate-all: lint test validate-vendor validate-docs validate-generated-files validate-all: lint test validate-vendor validate-docs validate-generated-files
.PHONY: lint
lint:
$(BUILDX_CMD) bake lint
.PHONY: test .PHONY: test
test: test:
./hack/test ./hack/test
@@ -49,6 +55,22 @@ test-unit:
test-integration: test-integration:
TESTPKGS=./tests ./hack/test TESTPKGS=./tests ./hack/test
.PHONY: validate-vendor
validate-vendor:
$(BUILDX_CMD) bake validate-vendor
.PHONY: validate-docs
validate-docs:
$(BUILDX_CMD) bake validate-docs
.PHONY: validate-authors
validate-authors:
$(BUILDX_CMD) bake validate-authors
.PHONY: validate-generated-files
validate-generated-files:
$(BUILDX_CMD) bake validate-generated-files
.PHONY: test-driver .PHONY: test-driver
test-driver: test-driver:
./hack/test-driver ./hack/test-driver

View File

@@ -187,12 +187,12 @@ through various "drivers". Each driver defines how and where a build should
run, and have different feature sets. run, and have different feature sets.
We currently support the following drivers: We currently support the following drivers:
- The `docker` driver ([guide](https://docs.docker.com/build/drivers/docker/), [reference](https://docs.docker.com/engine/reference/commandline/buildx_create/#driver)) - The `docker` driver ([guide](docs/manuals/drivers/docker.md), [reference](https://docs.docker.com/engine/reference/commandline/buildx_create/#driver))
- The `docker-container` driver ([guide](https://docs.docker.com/build/drivers/docker-container/), [reference](https://docs.docker.com/engine/reference/commandline/buildx_create/#driver)) - The `docker-container` driver ([guide](docs/manuals/drivers/docker-container.md), [reference](https://docs.docker.com/engine/reference/commandline/buildx_create/#driver))
- The `kubernetes` driver ([guide](https://docs.docker.com/build/drivers/kubernetes/), [reference](https://docs.docker.com/engine/reference/commandline/buildx_create/#driver)) - The `kubernetes` driver ([guide](docs/manuals/drivers/kubernetes.md), [reference](https://docs.docker.com/engine/reference/commandline/buildx_create/#driver))
- The `remote` driver ([guide](https://docs.docker.com/build/drivers/remote/)) - The `remote` driver ([guide](docs/manuals/drivers/remote.md))
For more information on drivers, see the [drivers guide](https://docs.docker.com/build/drivers/). For more information on drivers, see the [drivers guide](docs/manuals/drivers/index.md).
## Working with builder instances ## Working with builder instances

View File

@@ -2,6 +2,7 @@ package bake
import ( import (
"context" "context"
"encoding/csv"
"io" "io"
"os" "os"
"path" "path"
@@ -26,7 +27,6 @@ import (
"github.com/moby/buildkit/client/llb" "github.com/moby/buildkit/client/llb"
"github.com/moby/buildkit/session/auth/authprovider" "github.com/moby/buildkit/session/auth/authprovider"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/tonistiigi/go-csvvalue"
"github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert" "github.com/zclconf/go-cty/cty/convert"
) )
@@ -177,7 +177,7 @@ func readWithProgress(r io.Reader, setStatus func(st *client.VertexStatus)) (dt
} }
func ListTargets(files []File) ([]string, error) { func ListTargets(files []File) ([]string, error) {
c, _, err := ParseFiles(files, nil) c, err := ParseFiles(files, nil)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -192,7 +192,7 @@ func ListTargets(files []File) ([]string, error) {
} }
func ReadTargets(ctx context.Context, files []File, targets, overrides []string, defaults map[string]string) (map[string]*Target, map[string]*Group, error) { func ReadTargets(ctx context.Context, files []File, targets, overrides []string, defaults map[string]string) (map[string]*Target, map[string]*Group, error) {
c, _, err := ParseFiles(files, defaults) c, err := ParseFiles(files, defaults)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
@@ -298,7 +298,7 @@ func sliceToMap(env []string) (res map[string]string) {
return return
} }
func ParseFiles(files []File, defaults map[string]string) (_ *Config, _ *hclparser.ParseMeta, err error) { func ParseFiles(files []File, defaults map[string]string) (_ *Config, err error) {
defer func() { defer func() {
err = formatHCLError(err, files) err = formatHCLError(err, files)
}() }()
@@ -310,7 +310,7 @@ func ParseFiles(files []File, defaults map[string]string) (_ *Config, _ *hclpars
isCompose, composeErr := validateComposeFile(f.Data, f.Name) isCompose, composeErr := validateComposeFile(f.Data, f.Name)
if isCompose { if isCompose {
if composeErr != nil { if composeErr != nil {
return nil, nil, composeErr return nil, composeErr
} }
composeFiles = append(composeFiles, f) composeFiles = append(composeFiles, f)
} }
@@ -318,13 +318,13 @@ func ParseFiles(files []File, defaults map[string]string) (_ *Config, _ *hclpars
hf, isHCL, err := ParseHCLFile(f.Data, f.Name) hf, isHCL, err := ParseHCLFile(f.Data, f.Name)
if isHCL { if isHCL {
if err != nil { if err != nil {
return nil, nil, err return nil, err
} }
hclFiles = append(hclFiles, hf) hclFiles = append(hclFiles, hf)
} else if composeErr != nil { } else if composeErr != nil {
return nil, nil, errors.Wrapf(err, "failed to parse %s: parsing yaml: %v, parsing hcl", f.Name, composeErr) return nil, errors.Wrapf(err, "failed to parse %s: parsing yaml: %v, parsing hcl", f.Name, composeErr)
} else { } else {
return nil, nil, err return nil, err
} }
} }
} }
@@ -332,24 +332,23 @@ func ParseFiles(files []File, defaults map[string]string) (_ *Config, _ *hclpars
if len(composeFiles) > 0 { if len(composeFiles) > 0 {
cfg, cmperr := ParseComposeFiles(composeFiles) cfg, cmperr := ParseComposeFiles(composeFiles)
if cmperr != nil { if cmperr != nil {
return nil, nil, errors.Wrap(cmperr, "failed to parse compose file") return nil, errors.Wrap(cmperr, "failed to parse compose file")
} }
c = mergeConfig(c, *cfg) c = mergeConfig(c, *cfg)
c = dedupeConfig(c) c = dedupeConfig(c)
} }
var pm hclparser.ParseMeta
if len(hclFiles) > 0 { if len(hclFiles) > 0 {
res, err := hclparser.Parse(hclparser.MergeFiles(hclFiles), hclparser.Opt{ renamed, err := hclparser.Parse(hclparser.MergeFiles(hclFiles), hclparser.Opt{
LookupVar: os.LookupEnv, LookupVar: os.LookupEnv,
Vars: defaults, Vars: defaults,
ValidateLabel: validateTargetName, ValidateLabel: validateTargetName,
}, &c) }, &c)
if err.HasErrors() { if err.HasErrors() {
return nil, nil, err return nil, err
} }
for _, renamed := range res.Renamed { for _, renamed := range renamed {
for oldName, newNames := range renamed { for oldName, newNames := range renamed {
newNames = dedupSlice(newNames) newNames = dedupSlice(newNames)
if len(newNames) == 1 && oldName == newNames[0] { if len(newNames) == 1 && oldName == newNames[0] {
@@ -362,10 +361,9 @@ func ParseFiles(files []File, defaults map[string]string) (_ *Config, _ *hclpars
} }
} }
c = dedupeConfig(c) c = dedupeConfig(c)
pm = *res
} }
return &c, &pm, nil return &c, nil
} }
func dedupeConfig(c Config) Config { func dedupeConfig(c Config) Config {
@@ -390,8 +388,7 @@ func dedupeConfig(c Config) Config {
} }
func ParseFile(dt []byte, fn string) (*Config, error) { func ParseFile(dt []byte, fn string) (*Config, error) {
c, _, err := ParseFiles([]File{{Data: dt, Name: fn}}, nil) return ParseFiles([]File{{Data: dt, Name: fn}}, nil)
return c, err
} }
type Config struct { type Config struct {
@@ -494,7 +491,7 @@ func (c Config) loadLinks(name string, t *Target, m map[string]*Target, o map[st
if err != nil { if err != nil {
return err return err
} }
t2.Outputs = []string{"type=cacheonly"} t2.Outputs = nil
t2.linked = true t2.linked = true
m[target] = t2 m[target] = t2
} }
@@ -672,15 +669,13 @@ func (c Config) target(name string, visited map[string]*Target, overrides map[st
} }
type Group struct { type Group struct {
Name string `json:"-" hcl:"name,label" cty:"name"` Name string `json:"-" hcl:"name,label" cty:"name"`
Description string `json:"description,omitempty" hcl:"description,optional" cty:"description"` Targets []string `json:"targets" hcl:"targets" cty:"targets"`
Targets []string `json:"targets" hcl:"targets" cty:"targets"`
// Target // TODO? // Target // TODO?
} }
type Target struct { type Target struct {
Name string `json:"-" hcl:"name,label" cty:"name"` Name string `json:"-" hcl:"name,label" cty:"name"`
Description string `json:"description,omitempty" hcl:"description,optional" cty:"description"`
// Inherits is the only field that cannot be overridden with --set // Inherits is the only field that cannot be overridden with --set
Inherits []string `json:"inherits,omitempty" hcl:"inherits,optional" cty:"inherits"` Inherits []string `json:"inherits,omitempty" hcl:"inherits,optional" cty:"inherits"`
@@ -707,8 +702,7 @@ type Target struct {
NoCacheFilter []string `json:"no-cache-filter,omitempty" hcl:"no-cache-filter,optional" cty:"no-cache-filter"` NoCacheFilter []string `json:"no-cache-filter,omitempty" hcl:"no-cache-filter,optional" cty:"no-cache-filter"`
ShmSize *string `json:"shm-size,omitempty" hcl:"shm-size,optional"` ShmSize *string `json:"shm-size,omitempty" hcl:"shm-size,optional"`
Ulimits []string `json:"ulimits,omitempty" hcl:"ulimits,optional"` Ulimits []string `json:"ulimits,omitempty" hcl:"ulimits,optional"`
Call *string `json:"call,omitempty" hcl:"call,optional" cty:"call"` // IMPORTANT: if you add more fields here, do not forget to update newOverrides and docs/bake-reference.md.
// IMPORTANT: if you add more fields here, do not forget to update newOverrides/AddOverrides and docs/bake-reference.md.
// linked is a private field to mark a target used as a linked one // linked is a private field to mark a target used as a linked one
linked bool linked bool
@@ -782,9 +776,6 @@ func (t *Target) Merge(t2 *Target) {
if t2.Target != nil { if t2.Target != nil {
t.Target = t2.Target t.Target = t2.Target
} }
if t2.Call != nil {
t.Call = t2.Call
}
if t2.Annotations != nil { // merge if t2.Annotations != nil { // merge
t.Annotations = append(t.Annotations, t2.Annotations...) t.Annotations = append(t.Annotations, t2.Annotations...)
} }
@@ -828,9 +819,6 @@ func (t *Target) Merge(t2 *Target) {
if t2.Ulimits != nil { // merge if t2.Ulimits != nil { // merge
t.Ulimits = append(t.Ulimits, t2.Ulimits...) t.Ulimits = append(t.Ulimits, t2.Ulimits...)
} }
if t2.Description != "" {
t.Description = t2.Description
}
t.Inherits = append(t.Inherits, t2.Inherits...) t.Inherits = append(t.Inherits, t2.Inherits...)
} }
@@ -875,8 +863,6 @@ func (t *Target) AddOverrides(overrides map[string]Override) error {
t.CacheTo = o.ArrValue t.CacheTo = o.ArrValue
case "target": case "target":
t.Target = &value t.Target = &value
case "call":
t.Call = &value
case "secrets": case "secrets":
t.Secrets = o.ArrValue t.Secrets = o.ArrValue
case "ssh": case "ssh":
@@ -1312,12 +1298,6 @@ func toBuildOpt(t *Target, inp *Input) (*build.Options, error) {
bo.Target = *t.Target bo.Target = *t.Target
} }
if t.Call != nil {
bo.PrintFunc = &build.PrintFunc{
Name: *t.Call,
}
}
cacheImports, err := buildflags.ParseCacheEntry(t.CacheFrom) cacheImports, err := buildflags.ParseCacheEntry(t.CacheFrom)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -1413,7 +1393,8 @@ func removeAttestDupes(s []string) []string {
} }
func parseOutput(str string) map[string]string { func parseOutput(str string) map[string]string {
fields, err := csvvalue.Fields(str, nil) csvReader := csv.NewReader(strings.NewReader(str))
fields, err := csvReader.Read()
if err != nil { if err != nil {
return nil return nil
} }

View File

@@ -838,8 +838,7 @@ func TestReadContextFromTargetChain(t *testing.T) {
mid, ok := m["mid"] mid, ok := m["mid"]
require.True(t, ok) require.True(t, ok)
require.Equal(t, 1, len(mid.Outputs)) require.Equal(t, 0, len(mid.Outputs))
require.Equal(t, "type=cacheonly", mid.Outputs[0])
require.Equal(t, 1, len(mid.Contexts)) require.Equal(t, 1, len(mid.Contexts))
base, ok := m["base"] base, ok := m["base"]
@@ -1529,7 +1528,7 @@ services:
v2: "bar" v2: "bar"
`) `)
c, _, err := ParseFiles([]File{ c, err := ParseFiles([]File{
{Data: dt, Name: "c1.foo"}, {Data: dt, Name: "c1.foo"},
{Data: dt2, Name: "c2.bar"}, {Data: dt2, Name: "c2.bar"},
}, nil) }, nil)

View File

@@ -5,10 +5,8 @@ import (
"fmt" "fmt"
"os" "os"
"path/filepath" "path/filepath"
"sort"
"strings" "strings"
"github.com/compose-spec/compose-go/v2/consts"
"github.com/compose-spec/compose-go/v2/dotenv" "github.com/compose-spec/compose-go/v2/dotenv"
"github.com/compose-spec/compose-go/v2/loader" "github.com/compose-spec/compose-go/v2/loader"
composetypes "github.com/compose-spec/compose-go/v2/types" composetypes "github.com/compose-spec/compose-go/v2/types"
@@ -41,11 +39,7 @@ func ParseCompose(cfgs []composetypes.ConfigFile, envs map[string]string) (*Conf
ConfigFiles: cfgs, ConfigFiles: cfgs,
Environment: envs, Environment: envs,
}, func(options *loader.Options) { }, func(options *loader.Options) {
projectName := "bake" options.SetProjectName("bake", false)
if v, ok := envs[consts.ComposeProjectName]; ok && v != "" {
projectName = v
}
options.SetProjectName(projectName, false)
options.SkipNormalization = true options.SkipNormalization = true
options.Profiles = []string{"*"} options.Profiles = []string{"*"}
}) })
@@ -113,13 +107,6 @@ func ParseCompose(cfgs []composetypes.ConfigFile, envs map[string]string) (*Conf
} }
} }
var ssh []string
for _, bkey := range s.Build.SSH {
sshkey := composeToBuildkitSSH(bkey)
ssh = append(ssh, sshkey)
}
sort.Strings(ssh)
var secrets []string var secrets []string
for _, bs := range s.Build.Secrets { for _, bs := range s.Build.Secrets {
secret, err := composeToBuildkitSecret(bs, cfg.Secrets[bs.Source]) secret, err := composeToBuildkitSecret(bs, cfg.Secrets[bs.Source])
@@ -155,7 +142,6 @@ func ParseCompose(cfgs []composetypes.ConfigFile, envs map[string]string) (*Conf
CacheFrom: s.Build.CacheFrom, CacheFrom: s.Build.CacheFrom,
CacheTo: s.Build.CacheTo, CacheTo: s.Build.CacheTo,
NetworkMode: &s.Build.Network, NetworkMode: &s.Build.Network,
SSH: ssh,
Secrets: secrets, Secrets: secrets,
ShmSize: shmSize, ShmSize: shmSize,
Ulimits: ulimits, Ulimits: ulimits,
@@ -289,7 +275,7 @@ type xbake struct {
NoCacheFilter stringArray `yaml:"no-cache-filter,omitempty"` NoCacheFilter stringArray `yaml:"no-cache-filter,omitempty"`
Contexts stringMap `yaml:"contexts,omitempty"` Contexts stringMap `yaml:"contexts,omitempty"`
// don't forget to update documentation if you add a new field: // don't forget to update documentation if you add a new field:
// https://github.com/docker/docs/blob/main/content/build/bake/compose-file.md#extension-field-with-x-bake // docs/manuals/bake/compose-file.md#extension-field-with-x-bake
} }
type stringMap map[string]string type stringMap map[string]string
@@ -339,7 +325,6 @@ func (t *Target) composeExtTarget(exts map[string]interface{}) error {
} }
if len(xb.SSH) > 0 { if len(xb.SSH) > 0 {
t.SSH = dedupSlice(append(t.SSH, xb.SSH...)) t.SSH = dedupSlice(append(t.SSH, xb.SSH...))
sort.Strings(t.SSH)
} }
if len(xb.Platforms) > 0 { if len(xb.Platforms) > 0 {
t.Platforms = dedupSlice(append(t.Platforms, xb.Platforms...)) t.Platforms = dedupSlice(append(t.Platforms, xb.Platforms...))
@@ -383,17 +368,3 @@ func composeToBuildkitSecret(inp composetypes.ServiceSecretConfig, psecret compo
return strings.Join(bkattrs, ","), nil return strings.Join(bkattrs, ","), nil
} }
// composeToBuildkitSSH converts secret from compose format to buildkit's
// csv format.
func composeToBuildkitSSH(sshKey composetypes.SSHKey) string {
var bkattrs []string
bkattrs = append(bkattrs, sshKey.ID)
if sshKey.Path != "" {
bkattrs = append(bkattrs, sshKey.Path)
}
return strings.Join(bkattrs, "=")
}

View File

@@ -32,9 +32,6 @@ services:
- type=local,src=path/to/cache - type=local,src=path/to/cache
cache_to: cache_to:
- type=local,dest=path/to/cache - type=local,dest=path/to/cache
ssh:
- key=path/to/key
- default
secrets: secrets:
- token - token
- aws - aws
@@ -77,7 +74,6 @@ secrets:
require.Equal(t, []string{"type=local,src=path/to/cache"}, c.Targets[1].CacheFrom) require.Equal(t, []string{"type=local,src=path/to/cache"}, c.Targets[1].CacheFrom)
require.Equal(t, []string{"type=local,dest=path/to/cache"}, c.Targets[1].CacheTo) require.Equal(t, []string{"type=local,dest=path/to/cache"}, c.Targets[1].CacheTo)
require.Equal(t, "none", *c.Targets[1].NetworkMode) require.Equal(t, "none", *c.Targets[1].NetworkMode)
require.Equal(t, []string{"default", "key=path/to/key"}, c.Targets[1].SSH)
require.Equal(t, []string{ require.Equal(t, []string{
"id=token,env=ENV_TOKEN", "id=token,env=ENV_TOKEN",
"id=aws,src=/root/.aws/credentials", "id=aws,src=/root/.aws/credentials",
@@ -282,8 +278,6 @@ services:
- user/app:cache - user/app:cache
tags: tags:
- ct-addon:baz - ct-addon:baz
ssh:
key: path/to/key
args: args:
CT_ECR: foo CT_ECR: foo
CT_TAG: bar CT_TAG: bar
@@ -293,9 +287,6 @@ services:
tags: tags:
- ct-addon:foo - ct-addon:foo
- ct-addon:alp - ct-addon:alp
ssh:
- default
- other=path/to/otherkey
platforms: platforms:
- linux/amd64 - linux/amd64
- linux/arm64 - linux/arm64
@@ -338,7 +329,6 @@ services:
require.Equal(t, []string{"linux/amd64", "linux/arm64"}, c.Targets[0].Platforms) require.Equal(t, []string{"linux/amd64", "linux/arm64"}, c.Targets[0].Platforms)
require.Equal(t, []string{"user/app:cache", "type=local,src=path/to/cache"}, c.Targets[0].CacheFrom) require.Equal(t, []string{"user/app:cache", "type=local,src=path/to/cache"}, c.Targets[0].CacheFrom)
require.Equal(t, []string{"user/app:cache", "type=local,dest=path/to/cache"}, c.Targets[0].CacheTo) require.Equal(t, []string{"user/app:cache", "type=local,dest=path/to/cache"}, c.Targets[0].CacheTo)
require.Equal(t, []string{"default", "key=path/to/key", "other=path/to/otherkey"}, c.Targets[0].SSH)
require.Equal(t, newBool(true), c.Targets[0].Pull) require.Equal(t, newBool(true), c.Targets[0].Pull)
require.Equal(t, map[string]string{"alpine": "docker-image://alpine:3.13"}, c.Targets[0].Contexts) require.Equal(t, map[string]string{"alpine": "docker-image://alpine:3.13"}, c.Targets[0].Contexts)
require.Equal(t, []string{"ct-fake-aws:bar"}, c.Targets[1].Tags) require.Equal(t, []string{"ct-fake-aws:bar"}, c.Targets[1].Tags)
@@ -363,8 +353,6 @@ services:
- user/app:cache - user/app:cache
tags: tags:
- ct-addon:foo - ct-addon:foo
ssh:
- default
x-bake: x-bake:
tags: tags:
- ct-addon:foo - ct-addon:foo
@@ -374,9 +362,6 @@ services:
- type=local,src=path/to/cache - type=local,src=path/to/cache
cache-to: cache-to:
- type=local,dest=path/to/cache - type=local,dest=path/to/cache
ssh:
- default
- key=path/to/key
`) `)
c, err := ParseCompose([]composetypes.ConfigFile{{Content: dt}}, nil) c, err := ParseCompose([]composetypes.ConfigFile{{Content: dt}}, nil)
@@ -385,7 +370,6 @@ services:
require.Equal(t, []string{"ct-addon:foo", "ct-addon:baz"}, c.Targets[0].Tags) require.Equal(t, []string{"ct-addon:foo", "ct-addon:baz"}, c.Targets[0].Tags)
require.Equal(t, []string{"user/app:cache", "type=local,src=path/to/cache"}, c.Targets[0].CacheFrom) require.Equal(t, []string{"user/app:cache", "type=local,src=path/to/cache"}, c.Targets[0].CacheFrom)
require.Equal(t, []string{"user/app:cache", "type=local,dest=path/to/cache"}, c.Targets[0].CacheTo) require.Equal(t, []string{"user/app:cache", "type=local,dest=path/to/cache"}, c.Targets[0].CacheTo)
require.Equal(t, []string{"default", "key=path/to/key"}, c.Targets[0].SSH)
} }
func TestEnv(t *testing.T) { func TestEnv(t *testing.T) {
@@ -758,46 +742,6 @@ services:
require.NoError(t, err) require.NoError(t, err)
} }
func TestCgroup(t *testing.T) {
var dt = []byte(`
services:
scratch:
build:
context: ./webapp
cgroup: private
`)
_, err := ParseCompose([]composetypes.ConfigFile{{Content: dt}}, nil)
require.NoError(t, err)
}
func TestProjectName(t *testing.T) {
var dt = []byte(`
services:
scratch:
build:
context: ./webapp
args:
PROJECT_NAME: ${COMPOSE_PROJECT_NAME}
`)
t.Run("default", func(t *testing.T) {
c, err := ParseCompose([]composetypes.ConfigFile{{Content: dt}}, nil)
require.NoError(t, err)
require.Len(t, c.Targets, 1)
require.Len(t, c.Targets[0].Args, 1)
require.Equal(t, map[string]*string{"PROJECT_NAME": ptrstr("bake")}, c.Targets[0].Args)
})
t.Run("env", func(t *testing.T) {
c, err := ParseCompose([]composetypes.ConfigFile{{Content: dt}}, map[string]string{"COMPOSE_PROJECT_NAME": "foo"})
require.NoError(t, err)
require.Len(t, c.Targets, 1)
require.Len(t, c.Targets[0].Args, 1)
require.Equal(t, map[string]*string{"PROJECT_NAME": ptrstr("foo")}, c.Targets[0].Args)
})
}
// chdir changes the current working directory to the named directory, // chdir changes the current working directory to the named directory,
// and then restore the original working directory at the end of the test. // and then restore the original working directory at the end of the test.
func chdir(t *testing.T, dir string) { func chdir(t *testing.T, dir string) {

View File

@@ -273,7 +273,7 @@ func TestHCLMultiFileSharedVariables(t *testing.T) {
} }
`) `)
c, _, err := ParseFiles([]File{ c, err := ParseFiles([]File{
{Data: dt, Name: "c1.hcl"}, {Data: dt, Name: "c1.hcl"},
{Data: dt2, Name: "c2.hcl"}, {Data: dt2, Name: "c2.hcl"},
}, nil) }, nil)
@@ -285,7 +285,7 @@ func TestHCLMultiFileSharedVariables(t *testing.T) {
t.Setenv("FOO", "def") t.Setenv("FOO", "def")
c, _, err = ParseFiles([]File{ c, err = ParseFiles([]File{
{Data: dt, Name: "c1.hcl"}, {Data: dt, Name: "c1.hcl"},
{Data: dt2, Name: "c2.hcl"}, {Data: dt2, Name: "c2.hcl"},
}, nil) }, nil)
@@ -322,7 +322,7 @@ func TestHCLVarsWithVars(t *testing.T) {
} }
`) `)
c, _, err := ParseFiles([]File{ c, err := ParseFiles([]File{
{Data: dt, Name: "c1.hcl"}, {Data: dt, Name: "c1.hcl"},
{Data: dt2, Name: "c2.hcl"}, {Data: dt2, Name: "c2.hcl"},
}, nil) }, nil)
@@ -334,7 +334,7 @@ func TestHCLVarsWithVars(t *testing.T) {
t.Setenv("BASE", "new") t.Setenv("BASE", "new")
c, _, err = ParseFiles([]File{ c, err = ParseFiles([]File{
{Data: dt, Name: "c1.hcl"}, {Data: dt, Name: "c1.hcl"},
{Data: dt2, Name: "c2.hcl"}, {Data: dt2, Name: "c2.hcl"},
}, nil) }, nil)
@@ -612,7 +612,7 @@ func TestHCLMultiFileAttrs(t *testing.T) {
FOO="def" FOO="def"
`) `)
c, _, err := ParseFiles([]File{ c, err := ParseFiles([]File{
{Data: dt, Name: "c1.hcl"}, {Data: dt, Name: "c1.hcl"},
{Data: dt2, Name: "c2.hcl"}, {Data: dt2, Name: "c2.hcl"},
}, nil) }, nil)
@@ -623,7 +623,7 @@ func TestHCLMultiFileAttrs(t *testing.T) {
t.Setenv("FOO", "ghi") t.Setenv("FOO", "ghi")
c, _, err = ParseFiles([]File{ c, err = ParseFiles([]File{
{Data: dt, Name: "c1.hcl"}, {Data: dt, Name: "c1.hcl"},
{Data: dt2, Name: "c2.hcl"}, {Data: dt2, Name: "c2.hcl"},
}, nil) }, nil)
@@ -647,7 +647,7 @@ func TestHCLMultiFileGlobalAttrs(t *testing.T) {
FOO = "def" FOO = "def"
`) `)
c, _, err := ParseFiles([]File{ c, err := ParseFiles([]File{
{Data: dt, Name: "c1.hcl"}, {Data: dt, Name: "c1.hcl"},
{Data: dt2, Name: "c2.hcl"}, {Data: dt2, Name: "c2.hcl"},
}, nil) }, nil)
@@ -830,7 +830,7 @@ func TestHCLRenameMultiFile(t *testing.T) {
} }
`) `)
c, _, err := ParseFiles([]File{ c, err := ParseFiles([]File{
{Data: dt, Name: "c1.hcl"}, {Data: dt, Name: "c1.hcl"},
{Data: dt2, Name: "c2.hcl"}, {Data: dt2, Name: "c2.hcl"},
{Data: dt3, Name: "c3.hcl"}, {Data: dt3, Name: "c3.hcl"},
@@ -1050,7 +1050,7 @@ func TestHCLMatrixArgsOverride(t *testing.T) {
} }
`) `)
c, _, err := ParseFiles([]File{ c, err := ParseFiles([]File{
{Data: dt, Name: "docker-bake.hcl"}, {Data: dt, Name: "docker-bake.hcl"},
}, map[string]string{"ABC": "11,22,33"}) }, map[string]string{"ABC": "11,22,33"})
require.NoError(t, err) require.NoError(t, err)
@@ -1236,7 +1236,7 @@ services:
v2: "bar" v2: "bar"
`) `)
c, _, err := ParseFiles([]File{ c, err := ParseFiles([]File{
{Data: dt, Name: "c1.hcl"}, {Data: dt, Name: "c1.hcl"},
{Data: dt2, Name: "c2.yml"}, {Data: dt2, Name: "c2.yml"},
}, nil) }, nil)
@@ -1258,7 +1258,7 @@ func TestHCLBuiltinVars(t *testing.T) {
} }
`) `)
c, _, err := ParseFiles([]File{ c, err := ParseFiles([]File{
{Data: dt, Name: "c1.hcl"}, {Data: dt, Name: "c1.hcl"},
}, map[string]string{ }, map[string]string{
"BAKE_CMD_CONTEXT": "foo", "BAKE_CMD_CONTEXT": "foo",
@@ -1272,7 +1272,7 @@ func TestHCLBuiltinVars(t *testing.T) {
} }
func TestCombineHCLAndJSONTargets(t *testing.T) { func TestCombineHCLAndJSONTargets(t *testing.T) {
c, _, err := ParseFiles([]File{ c, err := ParseFiles([]File{
{ {
Name: "docker-bake.hcl", Name: "docker-bake.hcl",
Data: []byte(` Data: []byte(`
@@ -1348,7 +1348,7 @@ target "b" {
} }
func TestCombineHCLAndJSONVars(t *testing.T) { func TestCombineHCLAndJSONVars(t *testing.T) {
c, _, err := ParseFiles([]File{ c, err := ParseFiles([]File{
{ {
Name: "docker-bake.hcl", Name: "docker-bake.hcl",
Data: []byte(` Data: []byte(`

View File

@@ -25,11 +25,9 @@ type Opt struct {
} }
type variable struct { type variable struct {
Name string `json:"-" hcl:"name,label"` Name string `json:"-" hcl:"name,label"`
Default *hcl.Attribute `json:"default,omitempty" hcl:"default,optional"` Default *hcl.Attribute `json:"default,omitempty" hcl:"default,optional"`
Description string `json:"description,omitempty" hcl:"description,optional"` Body hcl.Body `json:"-" hcl:",body"`
Body hcl.Body `json:"-" hcl:",body"`
Remain hcl.Body `json:"-" hcl:",remain"`
} }
type functionDef struct { type functionDef struct {
@@ -536,18 +534,7 @@ func (p *parser) resolveBlockNames(block *hcl.Block) ([]string, error) {
return names, nil return names, nil
} }
type Variable struct { func Parse(b hcl.Body, opt Opt, val interface{}) (map[string]map[string][]string, hcl.Diagnostics) {
Name string
Description string
Value *string
}
type ParseMeta struct {
Renamed map[string]map[string][]string
AllVariables []*Variable
}
func Parse(b hcl.Body, opt Opt, val interface{}) (*ParseMeta, hcl.Diagnostics) {
reserved := map[string]struct{}{} reserved := map[string]struct{}{}
schema, _ := gohcl.ImpliedBodySchema(val) schema, _ := gohcl.ImpliedBodySchema(val)
@@ -656,7 +643,6 @@ func Parse(b hcl.Body, opt Opt, val interface{}) (*ParseMeta, hcl.Diagnostics) {
} }
} }
vars := make([]*Variable, 0, len(p.vars))
for k := range p.vars { for k := range p.vars {
if err := p.resolveValue(p.ectx, k); err != nil { if err := p.resolveValue(p.ectx, k); err != nil {
if diags, ok := err.(hcl.Diagnostics); ok { if diags, ok := err.(hcl.Diagnostics); ok {
@@ -665,21 +651,6 @@ func Parse(b hcl.Body, opt Opt, val interface{}) (*ParseMeta, hcl.Diagnostics) {
r := p.vars[k].Body.MissingItemRange() r := p.vars[k].Body.MissingItemRange()
return nil, wrapErrorDiagnostic("Invalid value", err, &r, &r) return nil, wrapErrorDiagnostic("Invalid value", err, &r, &r)
} }
v := &Variable{
Name: p.vars[k].Name,
Description: p.vars[k].Description,
}
if vv := p.ectx.Variables[k]; !vv.IsNull() {
var s string
switch vv.Type() {
case cty.String:
s = vv.AsString()
case cty.Bool:
s = strconv.FormatBool(vv.True())
}
v.Value = &s
}
vars = append(vars, v)
} }
for k := range p.funcs { for k := range p.funcs {
@@ -824,10 +795,7 @@ func Parse(b hcl.Body, opt Opt, val interface{}) (*ParseMeta, hcl.Diagnostics) {
} }
} }
return &ParseMeta{ return renamed, nil
Renamed: renamed,
AllVariables: vars,
}, nil
} }
// wrapErrorDiagnostic wraps an error into a hcl.Diagnostics object. // wrapErrorDiagnostic wraps an error into a hcl.Diagnostics object.

View File

@@ -111,19 +111,21 @@ func (mb mergedBodies) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
diags = append(diags, thisDiags...) diags = append(diags, thisDiags...)
} }
for name, attr := range thisAttrs { if thisAttrs != nil {
if existing := attrs[name]; existing != nil { for name, attr := range thisAttrs {
diags = diags.Append(&hcl.Diagnostic{ if existing := attrs[name]; existing != nil {
Severity: hcl.DiagError, diags = diags.Append(&hcl.Diagnostic{
Summary: "Duplicate argument", Severity: hcl.DiagError,
Detail: fmt.Sprintf( Summary: "Duplicate argument",
"Argument %q was already set at %s", Detail: fmt.Sprintf(
name, existing.NameRange.String(), "Argument %q was already set at %s",
), name, existing.NameRange.String(),
Subject: thisAttrs[name].NameRange.Ptr(), ),
}) Subject: thisAttrs[name].NameRange.Ptr(),
})
}
attrs[name] = attr
} }
attrs[name] = attr
} }
} }

View File

@@ -18,7 +18,6 @@ import (
"github.com/distribution/reference" "github.com/distribution/reference"
"github.com/docker/buildx/builder" "github.com/docker/buildx/builder"
"github.com/docker/buildx/driver" "github.com/docker/buildx/driver"
"github.com/docker/buildx/util/confutil"
"github.com/docker/buildx/util/desktop" "github.com/docker/buildx/util/desktop"
"github.com/docker/buildx/util/dockerutil" "github.com/docker/buildx/util/dockerutil"
"github.com/docker/buildx/util/imagetools" "github.com/docker/buildx/util/imagetools"
@@ -26,13 +25,12 @@ import (
"github.com/docker/buildx/util/resolver" "github.com/docker/buildx/util/resolver"
"github.com/docker/buildx/util/waitmap" "github.com/docker/buildx/util/waitmap"
"github.com/docker/cli/opts" "github.com/docker/cli/opts"
"github.com/docker/docker/api/types/image" imagetypes "github.com/docker/docker/api/types/image"
"github.com/docker/docker/pkg/jsonmessage" "github.com/docker/docker/pkg/jsonmessage"
"github.com/moby/buildkit/client" "github.com/moby/buildkit/client"
"github.com/moby/buildkit/client/llb" "github.com/moby/buildkit/client/llb"
"github.com/moby/buildkit/exporter/containerimage/exptypes" "github.com/moby/buildkit/exporter/containerimage/exptypes"
gateway "github.com/moby/buildkit/frontend/gateway/client" gateway "github.com/moby/buildkit/frontend/gateway/client"
"github.com/moby/buildkit/identity"
"github.com/moby/buildkit/session" "github.com/moby/buildkit/session"
"github.com/moby/buildkit/solver/errdefs" "github.com/moby/buildkit/solver/errdefs"
"github.com/moby/buildkit/solver/pb" "github.com/moby/buildkit/solver/pb"
@@ -54,8 +52,11 @@ var (
) )
const ( const (
printFallbackImage = "docker/dockerfile:1.5@sha256:dbbd5e059e8a07ff7ea6233b213b36aa516b4c53c645f1817a4dd18b83cbea56" //nolint:gosec // G101: false-positive
printLintFallbackImage = "docker.io/docker/dockerfile-upstream:1.8.1@sha256:e87caa74dcb7d46cd820352bfea12591f3dba3ddc4285e19c7dcd13359f7cefd" printFallbackImage = "docker/dockerfile:1.5@sha256:dbbd5e059e8a07ff7ea6233b213b36aa516b4c53c645f1817a4dd18b83cbea56"
// https://github.com/moby/buildkit/commit/71f99c52a669dc0322b5ea57bc28a09c20427227
//nolint:gosec // G101: false-positive
printLintFallbackImage = "docker.io/docker/dockerfile-upstream@sha256:47663570b6cc49ed90dc6e3215090a366989ab934d12dc93856a8ae0d27a95e7"
) )
type Options struct { type Options struct {
@@ -84,7 +85,7 @@ type Options struct {
Session []session.Attachable Session []session.Attachable
Linked bool // Linked marks this target as exclusively linked (not requested by the user). Linked bool // Linked marks this target as exclusively linked (not requested by the user).
PrintFunc *PrintFunc PrintFunc *PrintFunc
ProvenanceResponseMode confutil.MetadataProvenanceMode WithProvenanceResponse bool
SourcePolicy *spb.Policy SourcePolicy *spb.Policy
GroupRef string GroupRef string
} }
@@ -217,9 +218,6 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opt map[s
if err != nil { if err != nil {
logrus.WithError(err).Warn("current commit information was not captured by the build") logrus.WithError(err).Warn("current commit information was not captured by the build")
} }
if opt.Ref == "" {
opt.Ref = identity.NewID()
}
var reqn []*reqForNode var reqn []*reqForNode
for _, np := range drivers[k] { for _, np := range drivers[k] {
if np.Node().Driver.IsMobyDriver() { if np.Node().Driver.IsMobyDriver() {
@@ -478,8 +476,8 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opt map[s
rr.ExporterResponse[k] = string(v) rr.ExporterResponse[k] = string(v)
} }
rr.ExporterResponse["buildx.build.ref"] = buildRef rr.ExporterResponse["buildx.build.ref"] = buildRef
if node.Driver.HistoryAPISupported(ctx) { if opt.WithProvenanceResponse && node.Driver.HistoryAPISupported(ctx) {
if err := setRecordProvenance(ctx, c, rr, so.Ref, opt.ProvenanceResponseMode, pw); err != nil { if err := setRecordProvenance(ctx, c, rr, so.Ref, pw); err != nil {
return err return err
} }
} }
@@ -612,12 +610,7 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opt map[s
} }
} }
indexAnnotations, err := extractIndexAnnotations(opt.Exports) dt, desc, err := itpull.Combine(ctx, srcs, nil)
if err != nil {
return err
}
dt, desc, err := itpull.Combine(ctx, srcs, indexAnnotations, false)
if err != nil { if err != nil {
return err return err
} }
@@ -665,27 +658,6 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opt map[s
return resp, nil return resp, nil
} }
func extractIndexAnnotations(exports []client.ExportEntry) (map[exptypes.AnnotationKey]string, error) {
annotations := map[exptypes.AnnotationKey]string{}
for _, exp := range exports {
for k, v := range exp.Attrs {
ak, ok, err := exptypes.ParseAnnotationKey(k)
if !ok {
continue
}
if err != nil {
return nil, err
}
switch ak.Type {
case exptypes.AnnotationIndex, exptypes.AnnotationManifestDescriptor:
annotations[ak] = v
}
}
}
return annotations, nil
}
func pushWithMoby(ctx context.Context, d *driver.DriverHandle, name string, l progress.SubLogger) error { func pushWithMoby(ctx context.Context, d *driver.DriverHandle, name string, l progress.SubLogger) error {
api := d.Config().DockerAPI api := d.Config().DockerAPI
if api == nil { if api == nil {
@@ -696,7 +668,7 @@ func pushWithMoby(ctx context.Context, d *driver.DriverHandle, name string, l pr
return err return err
} }
rc, err := api.ImagePush(ctx, name, image.PushOptions{ rc, err := api.ImagePush(ctx, name, imagetypes.PushOptions{
RegistryAuth: creds, RegistryAuth: creds,
}) })
if err != nil { if err != nil {
@@ -775,11 +747,11 @@ func remoteDigestWithMoby(ctx context.Context, d *driver.DriverHandle, name stri
if err != nil { if err != nil {
return "", err return "", err
} }
img, _, err := api.ImageInspectWithRaw(ctx, name) image, _, err := api.ImageInspectWithRaw(ctx, name)
if err != nil { if err != nil {
return "", err return "", err
} }
if len(img.RepoDigests) == 0 { if len(image.RepoDigests) == 0 {
return "", nil return "", nil
} }
remoteImage, err := api.DistributionInspect(ctx, name, creds) remoteImage, err := api.DistributionInspect(ctx, name, creds)

View File

@@ -5,7 +5,7 @@ import (
stderrors "errors" stderrors "errors"
"net" "net"
"github.com/containerd/platforms" "github.com/containerd/containerd/platforms"
"github.com/docker/buildx/builder" "github.com/docker/buildx/builder"
"github.com/docker/buildx/util/progress" "github.com/docker/buildx/util/progress"
v1 "github.com/opencontainers/image-spec/specs-go/v1" v1 "github.com/opencontainers/image-spec/specs-go/v1"

View File

@@ -5,7 +5,7 @@ import (
"fmt" "fmt"
"sync" "sync"
"github.com/containerd/platforms" "github.com/containerd/containerd/platforms"
"github.com/docker/buildx/builder" "github.com/docker/buildx/builder"
"github.com/docker/buildx/driver" "github.com/docker/buildx/driver"
"github.com/docker/buildx/util/progress" "github.com/docker/buildx/util/progress"

View File

@@ -5,7 +5,7 @@ import (
"sort" "sort"
"testing" "testing"
"github.com/containerd/platforms" "github.com/containerd/containerd/platforms"
"github.com/docker/buildx/builder" "github.com/docker/buildx/builder"
specs "github.com/opencontainers/image-spec/specs-go/v1" specs "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"

View File

@@ -37,7 +37,7 @@ func NewContainer(ctx context.Context, resultCtx *ResultHandle, cfg *controllera
cancel() cancel()
}() }()
containerCfg, err := resultCtx.getContainerConfig(cfg) containerCfg, err := resultCtx.getContainerConfig(ctx, c, cfg)
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@@ -15,29 +15,29 @@ func saveLocalState(so *client.SolveOpt, target string, opts Options, node build
} }
lp := opts.Inputs.ContextPath lp := opts.Inputs.ContextPath
dp := opts.Inputs.DockerfilePath dp := opts.Inputs.DockerfilePath
if dp != "" && !IsRemoteURL(lp) && lp != "-" && dp != "-" { if lp != "" || dp != "" {
dp, err = filepath.Abs(dp) if lp != "" {
lp, err = filepath.Abs(lp)
if err != nil {
return err
}
}
if dp != "" {
dp, err = filepath.Abs(dp)
if err != nil {
return err
}
}
l, err := localstate.New(configDir)
if err != nil { if err != nil {
return err return err
} }
return l.SaveRef(node.Builder, node.Name, so.Ref, localstate.State{
Target: target,
LocalPath: lp,
DockerfilePath: dp,
GroupRef: opts.GroupRef,
})
} }
if lp != "" && !IsRemoteURL(lp) && lp != "-" { return nil
lp, err = filepath.Abs(lp)
if err != nil {
return err
}
}
if lp == "" && dp == "" {
return nil
}
l, err := localstate.New(configDir)
if err != nil {
return err
}
return l.SaveRef(node.Builder, node.Name, so.Ref, localstate.State{
Target: target,
LocalPath: lp,
DockerfilePath: dp,
GroupRef: opts.GroupRef,
})
} }

View File

@@ -12,7 +12,7 @@ import (
"github.com/containerd/containerd/content" "github.com/containerd/containerd/content"
"github.com/containerd/containerd/content/local" "github.com/containerd/containerd/content/local"
"github.com/containerd/platforms" "github.com/containerd/containerd/platforms"
"github.com/distribution/reference" "github.com/distribution/reference"
"github.com/docker/buildx/builder" "github.com/docker/buildx/builder"
"github.com/docker/buildx/driver" "github.com/docker/buildx/driver"
@@ -20,6 +20,7 @@ import (
"github.com/docker/buildx/util/dockerutil" "github.com/docker/buildx/util/dockerutil"
"github.com/docker/buildx/util/osutil" "github.com/docker/buildx/util/osutil"
"github.com/docker/buildx/util/progress" "github.com/docker/buildx/util/progress"
"github.com/docker/docker/builder/remotecontext/urlutil"
"github.com/moby/buildkit/client" "github.com/moby/buildkit/client"
"github.com/moby/buildkit/client/llb" "github.com/moby/buildkit/client/llb"
"github.com/moby/buildkit/client/ociindex" "github.com/moby/buildkit/client/ociindex"
@@ -104,6 +105,10 @@ func toSolveOpt(ctx context.Context, node builder.Node, multiDriver bool, opt Op
SourcePolicy: opt.SourcePolicy, SourcePolicy: opt.SourcePolicy,
} }
if so.Ref == "" {
so.Ref = identity.NewID()
}
if opt.CgroupParent != "" { if opt.CgroupParent != "" {
so.FrontendAttrs["cgroup-parent"] = opt.CgroupParent so.FrontendAttrs["cgroup-parent"] = opt.CgroupParent
} }
@@ -254,7 +259,7 @@ func toSolveOpt(ctx context.Context, node builder.Node, multiDriver bool, opt Op
if e.Type == "docker" || e.Type == "image" || e.Type == "oci" { if e.Type == "docker" || e.Type == "image" || e.Type == "oci" {
// inline buildinfo attrs from build arg // inline buildinfo attrs from build arg
if v, ok := opt.BuildArgs["BUILDKIT_INLINE_BUILDINFO_ATTRS"]; ok { if v, ok := opt.BuildArgs["BUILDKIT_INLINE_BUILDINFO_ATTRS"]; ok {
opt.Exports[i].Attrs["buildinfo-attrs"] = v e.Attrs["buildinfo-attrs"] = v
} }
} }
} }
@@ -268,9 +273,11 @@ func toSolveOpt(ctx context.Context, node builder.Node, multiDriver bool, opt Op
} }
defers = append(defers, releaseLoad) defers = append(defers, releaseLoad)
// add node identifier to shared key if one was specified if sharedKey := so.LocalDirs["context"]; sharedKey != "" {
if so.SharedKey != "" { if p, err := filepath.Abs(sharedKey); err == nil {
so.SharedKey += ":" + confutil.TryNodeIdentifier(configDir) sharedKey = filepath.Base(p)
}
so.SharedKey = sharedKey + ":" + confutil.TryNodeIdentifier(configDir)
} }
if opt.Pull { if opt.Pull {
@@ -410,11 +417,6 @@ func loadInputs(ctx context.Context, d *driver.DriverHandle, inp Inputs, addVCSL
if err := setLocalMount("context", inp.ContextPath, target, addVCSLocalDir); err != nil { if err := setLocalMount("context", inp.ContextPath, target, addVCSLocalDir); err != nil {
return nil, err return nil, err
} }
sharedKey := inp.ContextPath
if p, err := filepath.Abs(sharedKey); err == nil {
sharedKey = filepath.Base(p)
}
target.SharedKey = sharedKey
switch inp.DockerfilePath { switch inp.DockerfilePath {
case "-": case "-":
dockerfileReader = inp.InStream dockerfileReader = inp.InStream
@@ -450,7 +452,7 @@ func loadInputs(ctx context.Context, d *driver.DriverHandle, inp Inputs, addVCSL
dockerfileName = "Dockerfile" dockerfileName = "Dockerfile"
target.FrontendAttrs["dockerfilekey"] = "dockerfile" target.FrontendAttrs["dockerfilekey"] = "dockerfile"
} }
if isHTTPURL(inp.DockerfilePath) { if urlutil.IsURL(inp.DockerfilePath) {
dockerfileDir, err = createTempDockerfileFromURL(ctx, d, inp.DockerfilePath, pw) dockerfileDir, err = createTempDockerfileFromURL(ctx, d, inp.DockerfilePath, pw)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -492,18 +494,45 @@ func loadInputs(ctx context.Context, d *driver.DriverHandle, inp Inputs, addVCSL
// handle OCI layout // handle OCI layout
if strings.HasPrefix(v.Path, "oci-layout://") { if strings.HasPrefix(v.Path, "oci-layout://") {
localPath := strings.TrimPrefix(v.Path, "oci-layout://") pathAlone := strings.TrimPrefix(v.Path, "oci-layout://")
localPath := pathAlone
localPath, dig, hasDigest := strings.Cut(localPath, "@") localPath, dig, hasDigest := strings.Cut(localPath, "@")
localPath, tag, hasTag := strings.Cut(localPath, ":") localPath, tag, hasTag := strings.Cut(localPath, ":")
if !hasTag { if !hasTag {
tag = "latest" tag = "latest"
hasTag = true
} }
idx := ociindex.NewStoreIndex(localPath)
if !hasDigest { if !hasDigest {
dig, err = resolveDigest(localPath, tag) // lookup by name
desc, err := idx.Get(tag)
if err != nil { if err != nil {
return nil, errors.Wrapf(err, "oci-layout reference %q could not be resolved", v.Path) return nil, err
}
if desc != nil {
dig = string(desc.Digest)
hasDigest = true
} }
} }
if !hasDigest {
// lookup single
desc, err := idx.GetSingle()
if err != nil {
return nil, err
}
if desc != nil {
dig = string(desc.Digest)
hasDigest = true
}
}
if !hasDigest {
return nil, errors.Errorf("oci-layout reference %q could not be resolved", v.Path)
}
_, err := digest.Parse(dig)
if err != nil {
return nil, errors.Wrapf(err, "invalid oci-layout digest %s", dig)
}
store, err := local.NewStore(localPath) store, err := local.NewStore(localPath)
if err != nil { if err != nil {
return nil, errors.Wrapf(err, "invalid store at %s", localPath) return nil, errors.Wrapf(err, "invalid store at %s", localPath)
@@ -514,7 +543,15 @@ func loadInputs(ctx context.Context, d *driver.DriverHandle, inp Inputs, addVCSL
} }
target.OCIStores[storeName] = store target.OCIStores[storeName] = store
target.FrontendAttrs["context:"+k] = "oci-layout://" + storeName + ":" + tag + "@" + dig layout := "oci-layout://" + storeName
if hasTag {
layout += ":" + tag
}
if hasDigest {
layout += "@" + dig
}
target.FrontendAttrs["context:"+k] = layout
continue continue
} }
st, err := os.Stat(v.Path) st, err := os.Stat(v.Path)
@@ -536,40 +573,12 @@ func loadInputs(ctx context.Context, d *driver.DriverHandle, inp Inputs, addVCSL
release := func() { release := func() {
for _, dir := range toRemove { for _, dir := range toRemove {
_ = os.RemoveAll(dir) os.RemoveAll(dir)
} }
} }
return release, nil return release, nil
} }
func resolveDigest(localPath, tag string) (dig string, _ error) {
idx := ociindex.NewStoreIndex(localPath)
// lookup by name
desc, err := idx.Get(tag)
if err != nil {
return "", err
}
if desc == nil {
// lookup single
desc, err = idx.GetSingle()
if err != nil {
return "", err
}
}
if desc == nil {
return "", errors.New("failed to resolve digest")
}
dig = string(desc.Digest)
_, err = digest.Parse(dig)
if err != nil {
return "", errors.Wrapf(err, "invalid digest %s", dig)
}
return dig, nil
}
func setLocalMount(name, root string, so *client.SolveOpt, addVCSLocalDir func(key, dir string, so *client.SolveOpt)) error { func setLocalMount(name, root string, so *client.SolveOpt, addVCSLocalDir func(key, dir string, so *client.SolveOpt)) error {
lm, err := fsutil.NewFS(root) lm, err := fsutil.NewFS(root)
if err != nil { if err != nil {
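
Several hunks above deal with oci-layout://<path>[:<tag>][@<digest>] build-context references. A rough, stdlib-only sketch of splitting such a reference into its parts, with the tag defaulting to "latest"; parseOCILayoutRef is an illustrative name, not the buildx implementation, and real code would also validate the digest and look it up in the layout index:

package main

import (
	"fmt"
	"strings"
)

// parseOCILayoutRef splits "oci-layout://<path>[:<tag>][@<digest>]".
// The digest is cut first so a "sha256:..." value is not mistaken for a tag.
func parseOCILayoutRef(ref string) (path, tag, dgst string, err error) {
	rest, ok := strings.CutPrefix(ref, "oci-layout://")
	if !ok {
		return "", "", "", fmt.Errorf("not an oci-layout reference: %q", ref)
	}
	rest, dgst, _ = strings.Cut(rest, "@")
	path, tag, hasTag := strings.Cut(rest, ":")
	if !hasTag {
		tag = "latest" // same default as in the hunk above
	}
	return path, tag, dgst, nil
}

func main() {
	p, t, d, err := parseOCILayoutRef("oci-layout://./build/layout:v1@sha256:1234abcd")
	if err != nil {
		panic(err)
	}
	fmt.Printf("path=%s tag=%s digest=%s\n", p, t, d)
}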


@@ -29,7 +29,8 @@ type provenanceBuilder struct {
ID string `json:"id,omitempty"` ID string `json:"id,omitempty"`
} }
func setRecordProvenance(ctx context.Context, c *client.Client, sr *client.SolveResponse, ref string, mode confutil.MetadataProvenanceMode, pw progress.Writer) error { func setRecordProvenance(ctx context.Context, c *client.Client, sr *client.SolveResponse, ref string, pw progress.Writer) error {
mode := confutil.MetadataProvenance()
if mode == confutil.MetadataProvenanceModeDisabled { if mode == confutil.MetadataProvenanceModeDisabled {
return nil return nil
} }


@@ -292,10 +292,10 @@ func (r *ResultHandle) build(buildFunc gateway.BuildFunc) (err error) {
return err return err
} }
func (r *ResultHandle) getContainerConfig(cfg *controllerapi.InvokeConfig) (containerCfg gateway.NewContainerRequest, _ error) { func (r *ResultHandle) getContainerConfig(ctx context.Context, c gateway.Client, cfg *controllerapi.InvokeConfig) (containerCfg gateway.NewContainerRequest, _ error) {
if r.res != nil && r.solveErr == nil { if r.res != nil && r.solveErr == nil {
logrus.Debugf("creating container from successful build") logrus.Debugf("creating container from successful build")
ccfg, err := containerConfigFromResult(r.res, *cfg) ccfg, err := containerConfigFromResult(ctx, r.res, c, *cfg)
if err != nil { if err != nil {
return containerCfg, err return containerCfg, err
} }
@@ -327,7 +327,7 @@ func (r *ResultHandle) getProcessConfig(cfg *controllerapi.InvokeConfig, stdin i
return processCfg, nil return processCfg, nil
} }
func containerConfigFromResult(res *gateway.Result, cfg controllerapi.InvokeConfig) (*gateway.NewContainerRequest, error) { func containerConfigFromResult(ctx context.Context, res *gateway.Result, c gateway.Client, cfg controllerapi.InvokeConfig) (*gateway.NewContainerRequest, error) {
if cfg.Initial { if cfg.Initial {
return nil, errors.Errorf("starting from the container from the initial state of the step is supported only on the failed steps") return nil, errors.Errorf("starting from the container from the initial state of the step is supported only on the failed steps")
} }


@@ -11,6 +11,7 @@ import (
"github.com/docker/buildx/driver" "github.com/docker/buildx/driver"
"github.com/docker/cli/opts" "github.com/docker/cli/opts"
"github.com/docker/docker/builder/remotecontext/urlutil"
"github.com/moby/buildkit/util/gitutil" "github.com/moby/buildkit/util/gitutil"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/sirupsen/logrus" "github.com/sirupsen/logrus"
@@ -25,15 +26,8 @@ const (
mobyHostGatewayName = "host-gateway" mobyHostGatewayName = "host-gateway"
) )
// isHTTPURL returns true if the provided str is an HTTP(S) URL by checking if it
// has a http:// or https:// scheme. No validation is performed to verify if the
// URL is well-formed.
func isHTTPURL(str string) bool {
return strings.HasPrefix(str, "https://") || strings.HasPrefix(str, "http://")
}
func IsRemoteURL(c string) bool { func IsRemoteURL(c string) bool {
if isHTTPURL(c) { if urlutil.IsURL(c) {
return true return true
} }
if _, err := gitutil.ParseGitRef(c); err == nil { if _, err := gitutil.ParseGitRef(c); err == nil {
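
The hunk above swaps a general URL check for a plain http(s) scheme-prefix check. A small sketch contrasting the two, showing that a generic URL parse also accepts other schemes such as ssh:// while the prefix check only treats http(s) as remote; the sample inputs are made up:

package main

import (
	"fmt"
	"net/url"
	"strings"
)

func isHTTPURL(s string) bool {
	return strings.HasPrefix(s, "https://") || strings.HasPrefix(s, "http://")
}

func main() {
	for _, s := range []string{"https://example.com/ctx.git", "ssh://git@example.com/repo.git", "./local-dir"} {
		u, err := url.Parse(s)
		hasScheme := err == nil && u.Scheme != ""
		fmt.Printf("%-35s any scheme: %-5v http(s) only: %v\n", s, hasScheme, isHTTPURL(s))
	}
}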


@@ -2,6 +2,7 @@ package builder
import ( import (
"context" "context"
"encoding/csv"
"encoding/json" "encoding/json"
"net/url" "net/url"
"os" "os"
@@ -26,7 +27,6 @@ import (
"github.com/moby/buildkit/util/progress/progressui" "github.com/moby/buildkit/util/progress/progressui"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/spf13/pflag" "github.com/spf13/pflag"
"github.com/tonistiigi/go-csvvalue"
"golang.org/x/sync/errgroup" "golang.org/x/sync/errgroup"
) )
@@ -601,7 +601,8 @@ func csvToMap(in []string) (map[string]string, error) {
} }
m := make(map[string]string, len(in)) m := make(map[string]string, len(in))
for _, s := range in { for _, s := range in {
fields, err := csvvalue.Fields(s, nil) csvReader := csv.NewReader(strings.NewReader(s))
fields, err := csvReader.Read()
if err != nil { if err != nil {
return nil, err return nil, err
} }
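
csvToMap, touched above, flattens comma-separated key=value entries into a map. A minimal stdlib sketch of the same idea; quoting a whole field lets a value itself contain commas:

package main

import (
	"encoding/csv"
	"fmt"
	"strings"
)

func csvToMap(in []string) (map[string]string, error) {
	m := make(map[string]string, len(in))
	for _, s := range in {
		fields, err := csv.NewReader(strings.NewReader(s)).Read()
		if err != nil {
			return nil, err
		}
		for _, f := range fields {
			k, v, _ := strings.Cut(f, "=")
			m[k] = v
		}
	}
	return m, nil
}

func main() {
	m, err := csvToMap([]string{`foo=1,"bar=a,b"`})
	if err != nil {
		panic(err)
	}
	fmt.Println(m) // map[bar:a,b foo:1]
}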


@@ -6,7 +6,7 @@ import (
"sort" "sort"
"strings" "strings"
"github.com/containerd/platforms" "github.com/containerd/containerd/platforms"
"github.com/docker/buildx/driver" "github.com/docker/buildx/driver"
ctxkube "github.com/docker/buildx/driver/kubernetes/context" ctxkube "github.com/docker/buildx/driver/kubernetes/context"
"github.com/docker/buildx/store" "github.com/docker/buildx/store"
@@ -48,9 +48,8 @@ func (b *Builder) Nodes() []Node {
type LoadNodesOption func(*loadNodesOptions) type LoadNodesOption func(*loadNodesOptions)
type loadNodesOptions struct { type loadNodesOptions struct {
data bool data bool
dialMeta map[string][]string dialMeta map[string][]string
clientOpt []client.ClientOpt
} }
func WithData() LoadNodesOption { func WithData() LoadNodesOption {
@@ -65,12 +64,6 @@ func WithDialMeta(dialMeta map[string][]string) LoadNodesOption {
} }
} }
func WithClientOpt(clientOpt ...client.ClientOpt) LoadNodesOption {
return func(o *loadNodesOptions) {
o.clientOpt = clientOpt
}
}
// LoadNodes loads and returns nodes for this builder. // LoadNodes loads and returns nodes for this builder.
// TODO: this should be a method on a Node object and lazy load data for each driver. // TODO: this should be a method on a Node object and lazy load data for each driver.
func (b *Builder) LoadNodes(ctx context.Context, opts ...LoadNodesOption) (_ []Node, err error) { func (b *Builder) LoadNodes(ctx context.Context, opts ...LoadNodesOption) (_ []Node, err error) {
@@ -158,7 +151,7 @@ func (b *Builder) LoadNodes(ctx context.Context, opts ...LoadNodesOption) (_ []N
node.ImageOpt = imageopt node.ImageOpt = imageopt
if lno.data { if lno.data {
if err := node.loadData(ctx, lno.clientOpt...); err != nil { if err := node.loadData(ctx); err != nil {
node.Err = err node.Err = err
} }
} }
@@ -193,7 +186,7 @@ func (b *Builder) LoadNodes(ctx context.Context, opts ...LoadNodesOption) (_ []N
if pl := di.DriverInfo.DynamicNodes[i].Platforms; len(pl) > 0 { if pl := di.DriverInfo.DynamicNodes[i].Platforms; len(pl) > 0 {
diClone.Platforms = pl diClone.Platforms = pl
} }
nodes = append(nodes, diClone) nodes = append(nodes, di)
} }
dynamicNodes = append(dynamicNodes, di.DriverInfo.DynamicNodes...) dynamicNodes = append(dynamicNodes, di.DriverInfo.DynamicNodes...)
} }
@@ -254,7 +247,7 @@ func (n *Node) MarshalJSON() ([]byte, error) {
}) })
} }
func (n *Node) loadData(ctx context.Context, clientOpt ...client.ClientOpt) error { func (n *Node) loadData(ctx context.Context) error {
if n.Driver == nil { if n.Driver == nil {
return nil return nil
} }
@@ -264,7 +257,7 @@ func (n *Node) loadData(ctx context.Context, clientOpt ...client.ClientOpt) erro
} }
n.DriverInfo = info n.DriverInfo = info
if n.DriverInfo.Status == driver.Running { if n.DriverInfo.Status == driver.Running {
driverClient, err := n.Driver.Client(ctx, clientOpt...) driverClient, err := n.Driver.Client(ctx)
if err != nil { if err != nil {
return err return err
} }
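
The LoadNodes changes above add and remove functional options. A generic sketch of that functional-options pattern; loadOptions, WithData and WithDialMeta here are local stand-ins, not the buildx types:

package main

import "fmt"

type loadOptions struct {
	data     bool
	dialMeta map[string][]string
}

// LoadOption mutates the option struct; callers compose as many as they need.
type LoadOption func(*loadOptions)

func WithData() LoadOption {
	return func(o *loadOptions) { o.data = true }
}

func WithDialMeta(m map[string][]string) LoadOption {
	return func(o *loadOptions) { o.dialMeta = m }
}

func load(opts ...LoadOption) loadOptions {
	var o loadOptions
	for _, opt := range opts {
		opt(&o)
	}
	return o
}

func main() {
	fmt.Printf("%+v\n", load(WithData(), WithDialMeta(map[string][]string{"x-meta": {"1"}})))
}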


@@ -4,6 +4,7 @@ import (
"github.com/moby/buildkit/util/tracing/detect" "github.com/moby/buildkit/util/tracing/detect"
"go.opentelemetry.io/otel" "go.opentelemetry.io/otel"
_ "github.com/moby/buildkit/util/tracing/detect/delegated"
_ "github.com/moby/buildkit/util/tracing/env" _ "github.com/moby/buildkit/util/tracing/env"
) )
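
The blank import above takes effect purely through package init side effects. A single-file sketch of that registration pattern, with detectors and Register as made-up names; in the real layout the init function lives in the blank-imported package:

package main

import "fmt"

var detectors = map[string]func() string{}

func Register(name string, f func() string) { detectors[name] = f }

// In the real layout this init() would live in the package pulled in by the
// blank import, so importing it is enough to register the exporter.
func init() {
	Register("delegated", func() string { return "delegated exporter" })
}

func main() {
	for name, f := range detectors {
		fmt.Println(name, "->", f())
	}
}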


@@ -1,4 +1 @@
comment: false comment: false
ignore:
- "**/*.pb.go"


@@ -1,27 +1,20 @@
package commands package commands
import ( import (
"bytes"
"cmp"
"context" "context"
"encoding/json" "encoding/json"
"fmt" "fmt"
"io" "io"
"os" "os"
"slices"
"strings" "strings"
"text/tabwriter"
"github.com/containerd/console" "github.com/containerd/console"
"github.com/containerd/platforms" "github.com/containerd/containerd/platforms"
"github.com/docker/buildx/bake" "github.com/docker/buildx/bake"
"github.com/docker/buildx/bake/hclparser"
"github.com/docker/buildx/build" "github.com/docker/buildx/build"
"github.com/docker/buildx/builder" "github.com/docker/buildx/builder"
"github.com/docker/buildx/controller/pb"
"github.com/docker/buildx/localstate" "github.com/docker/buildx/localstate"
"github.com/docker/buildx/util/buildflags" "github.com/docker/buildx/util/buildflags"
"github.com/docker/buildx/util/cobrautil"
"github.com/docker/buildx/util/cobrautil/completion" "github.com/docker/buildx/util/cobrautil/completion"
"github.com/docker/buildx/util/confutil" "github.com/docker/buildx/util/confutil"
"github.com/docker/buildx/util/desktop" "github.com/docker/buildx/util/desktop"
@@ -29,7 +22,6 @@ import (
"github.com/docker/buildx/util/progress" "github.com/docker/buildx/util/progress"
"github.com/docker/buildx/util/tracing" "github.com/docker/buildx/util/tracing"
"github.com/docker/cli/cli/command" "github.com/docker/cli/cli/command"
"github.com/moby/buildkit/client"
"github.com/moby/buildkit/identity" "github.com/moby/buildkit/identity"
"github.com/moby/buildkit/util/progress/progressui" "github.com/moby/buildkit/util/progress/progressui"
"github.com/pkg/errors" "github.com/pkg/errors"
@@ -37,19 +29,16 @@ import (
) )
type bakeOptions struct { type bakeOptions struct {
files []string files []string
overrides []string overrides []string
printOnly bool printOnly bool
listTargets bool sbom string
listVars bool provenance string
sbom string
provenance string
builder string builder string
metadataFile string metadataFile string
exportPush bool exportPush bool
exportLoad bool exportLoad bool
callFunc string
} }
func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in bakeOptions, cFlags commonFlags) (err error) { func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in bakeOptions, cFlags commonFlags) (err error) {
@@ -81,11 +70,6 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
targets = []string{"default"} targets = []string{"default"}
} }
callFunc, err := buildflags.ParsePrintFunc(in.callFunc)
if err != nil {
return err
}
overrides := in.overrides overrides := in.overrides
if in.exportPush { if in.exportPush {
overrides = append(overrides, "*.push=true") overrides = append(overrides, "*.push=true")
@@ -93,9 +77,6 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
if in.exportLoad { if in.exportLoad {
overrides = append(overrides, "*.load=true") overrides = append(overrides, "*.load=true")
} }
if callFunc != nil {
overrides = append(overrides, fmt.Sprintf("*.call=%s", callFunc.Name))
}
if cFlags.noCache != nil { if cFlags.noCache != nil {
overrides = append(overrides, fmt.Sprintf("*.no-cache=%t", *cFlags.noCache)) overrides = append(overrides, fmt.Sprintf("*.no-cache=%t", *cFlags.noCache))
} }
@@ -142,41 +123,22 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
} }
progressMode := progressui.DisplayMode(cFlags.progress) progressMode := progressui.DisplayMode(cFlags.progress)
var printer *progress.Printer printer, err := progress.NewPrinter(ctx2, os.Stderr, progressMode,
printer, err = progress.NewPrinter(ctx2, os.Stderr, progressMode,
progress.WithDesc(progressTextDesc, progressConsoleDesc), progress.WithDesc(progressTextDesc, progressConsoleDesc),
progress.WithOnClose(func() {
printWarnings(os.Stderr, printer.Warnings(), progressMode)
}),
) )
if err != nil { if err != nil {
return err return err
} }
var resp map[string]*client.SolveResponse
defer func() { defer func() {
if printer != nil { if printer != nil {
err1 := printer.Wait() err1 := printer.Wait()
if err == nil { if err == nil {
err = err1 err = err1
} }
if err != nil { if err == nil && progressMode != progressui.QuietMode && progressMode != progressui.RawJSONMode {
return
}
if progressMode != progressui.QuietMode && progressMode != progressui.RawJSONMode {
desktop.PrintBuildDetails(os.Stderr, printer.BuildRefs(), term) desktop.PrintBuildDetails(os.Stderr, printer.BuildRefs(), term)
} }
if resp != nil && len(in.metadataFile) > 0 {
dt := make(map[string]interface{})
for t, r := range resp {
dt[t] = decodeExporterResponse(r.ExporterResponse)
}
if warnings := printer.Warnings(); len(warnings) > 0 && confutil.MetadataWarningsEnabled() {
dt["buildx.build.warnings"] = warnings
}
err = writeMetadataFile(in.metadataFile, dt)
}
} }
}() }()
@@ -189,32 +151,12 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
return errors.New("couldn't find a bake definition") return errors.New("couldn't find a bake definition")
} }
defaults := map[string]string{ tgts, grps, err := bake.ReadTargets(ctx, files, targets, overrides, map[string]string{
// don't forget to update documentation if you add a new // don't forget to update documentation if you add a new
// built-in variable: docs/bake-reference.md#built-in-variables // built-in variable: docs/bake-reference.md#built-in-variables
"BAKE_CMD_CONTEXT": cmdContext, "BAKE_CMD_CONTEXT": cmdContext,
"BAKE_LOCAL_PLATFORM": platforms.Format(platforms.DefaultSpec()), "BAKE_LOCAL_PLATFORM": platforms.DefaultString(),
} })
if in.listTargets || in.listVars {
cfg, pm, err := bake.ParseFiles(files, defaults)
if err != nil {
return err
}
err = printer.Wait()
printer = nil
if err != nil {
return err
}
if in.listTargets {
return printTargetList(dockerCli.Out(), cfg)
} else if in.listVars {
return printVars(dockerCli.Out(), pm.AllVariables)
}
}
tgts, grps, err := bake.ReadTargets(ctx, files, targets, overrides, defaults)
if err != nil { if err != nil {
return err return err
} }
@@ -260,27 +202,12 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
return nil return nil
} }
for _, opt := range bo {
if opt.PrintFunc != nil {
cf, err := buildflags.ParsePrintFunc(opt.PrintFunc.Name)
if err != nil {
return err
}
opt.PrintFunc.Name = cf.Name
}
}
prm := confutil.MetadataProvenance()
if len(in.metadataFile) == 0 {
prm = confutil.MetadataProvenanceModeDisabled
}
groupRef := identity.NewID() groupRef := identity.NewID()
var refs []string var refs []string
for k, b := range bo { for k, b := range bo {
b.Ref = identity.NewID() b.Ref = identity.NewID()
b.GroupRef = groupRef b.GroupRef = groupRef
b.ProvenanceResponseMode = prm b.WithProvenanceResponse = len(in.metadataFile) > 0
refs = append(refs, b.Ref) refs = append(refs, b.Ref)
bo[k] = b bo[k] = b
} }
@@ -297,122 +224,22 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
return err return err
} }
resp, err = build.Build(ctx, nodes, bo, dockerutil.NewClient(dockerCli), confutil.ConfigDir(dockerCli), printer) resp, err := build.Build(ctx, nodes, bo, dockerutil.NewClient(dockerCli), confutil.ConfigDir(dockerCli), printer)
if err != nil { if err != nil {
return wrapBuildError(err, true) return wrapBuildError(err, true)
} }
err = printer.Wait() if len(in.metadataFile) > 0 {
if err != nil { dt := make(map[string]interface{})
return err for t, r := range resp {
} dt[t] = decodeExporterResponse(r.ExporterResponse)
var callFormatJSON bool
var jsonResults = map[string]map[string]any{}
if callFunc != nil {
callFormatJSON = callFunc.Format == "json"
}
var sep bool
var exitCode int
names := make([]string, 0, len(bo))
for name := range bo {
names = append(names, name)
}
slices.Sort(names)
for _, name := range names {
req := bo[name]
if req.PrintFunc == nil {
continue
} }
if err := writeMetadataFile(in.metadataFile, dt); err != nil {
pf := &pb.PrintFunc{
Name: req.PrintFunc.Name,
Format: req.PrintFunc.Format,
IgnoreStatus: req.PrintFunc.IgnoreStatus,
}
if callFunc != nil {
pf.Format = callFunc.Format
pf.IgnoreStatus = callFunc.IgnoreStatus
}
var res map[string]string
if sp, ok := resp[name]; ok {
res = sp.ExporterResponse
}
if callFormatJSON {
jsonResults[name] = map[string]any{}
buf := &bytes.Buffer{}
if code, err := printResult(buf, pf, res); err != nil {
jsonResults[name]["error"] = err.Error()
exitCode = 1
} else if code != 0 && exitCode == 0 {
exitCode = code
}
m := map[string]*json.RawMessage{}
if err := json.Unmarshal(buf.Bytes(), &m); err == nil {
for k, v := range m {
jsonResults[name][k] = v
}
} else {
jsonResults[name][pf.Name] = json.RawMessage(buf.Bytes())
}
} else {
if sep {
fmt.Fprintln(dockerCli.Out())
} else {
sep = true
}
fmt.Fprintf(dockerCli.Out(), "%s\n", name)
if descr := tgts[name].Description; descr != "" {
fmt.Fprintf(dockerCli.Out(), "%s\n", descr)
}
fmt.Fprintln(dockerCli.Out())
if code, err := printResult(dockerCli.Out(), pf, res); err != nil {
fmt.Fprintf(dockerCli.Out(), "error: %v\n", err)
exitCode = 1
} else if code != 0 && exitCode == 0 {
exitCode = code
}
}
}
if callFormatJSON {
out := struct {
Group map[string]*bake.Group `json:"group,omitempty"`
Target map[string]map[string]any `json:"target"`
}{
Group: grps,
Target: map[string]map[string]any{},
}
for name, def := range tgts {
out.Target[name] = map[string]any{
"build": def,
}
if res, ok := jsonResults[name]; ok {
printName := bo[name].PrintFunc.Name
if printName == "lint" {
printName = "check"
}
out.Target[name][printName] = res
}
}
dt, err := json.MarshalIndent(out, "", " ")
if err != nil {
return err return err
} }
fmt.Fprintln(dockerCli.Out(), string(dt))
} }
if exitCode != 0 { return err
os.Exit(exitCode)
}
return nil
} }
func bakeCmd(dockerCli command.Cli, rootOpts *rootOptions) *cobra.Command { func bakeCmd(dockerCli command.Cli, rootOpts *rootOptions) *cobra.Command {
@@ -448,18 +275,6 @@ func bakeCmd(dockerCli command.Cli, rootOpts *rootOptions) *cobra.Command {
flags.StringVar(&options.sbom, "sbom", "", `Shorthand for "--set=*.attest=type=sbom"`) flags.StringVar(&options.sbom, "sbom", "", `Shorthand for "--set=*.attest=type=sbom"`)
flags.StringVar(&options.provenance, "provenance", "", `Shorthand for "--set=*.attest=type=provenance"`) flags.StringVar(&options.provenance, "provenance", "", `Shorthand for "--set=*.attest=type=provenance"`)
flags.StringArrayVar(&options.overrides, "set", nil, `Override target value (e.g., "targetpattern.key=value")`) flags.StringArrayVar(&options.overrides, "set", nil, `Override target value (e.g., "targetpattern.key=value")`)
flags.StringVar(&options.callFunc, "call", "build", `Set method for evaluating build ("check", "outline", "targets")`)
flags.VarPF(callAlias(&options.callFunc, "check"), "check", "", `Shorthand for "--call=check"`)
flags.Lookup("check").NoOptDefVal = "true"
flags.BoolVar(&options.listTargets, "list-targets", false, "List available targets")
cobrautil.MarkFlagsExperimental(flags, "list-targets")
flags.MarkHidden("list-targets")
flags.BoolVar(&options.listVars, "list-variables", false, "List defined variables")
cobrautil.MarkFlagsExperimental(flags, "list-variables")
flags.MarkHidden("list-variables")
commonBuildFlags(&cFlags, flags) commonBuildFlags(&cFlags, flags)
@@ -516,75 +331,3 @@ func readBakeFiles(ctx context.Context, nodes []builder.Node, url string, names
return return
} }
func printVars(w io.Writer, vars []*hclparser.Variable) error {
slices.SortFunc(vars, func(a, b *hclparser.Variable) int {
return cmp.Compare(a.Name, b.Name)
})
tw := tabwriter.NewWriter(w, 1, 8, 1, '\t', 0)
defer tw.Flush()
tw.Write([]byte("VARIABLE\tVALUE\tDESCRIPTION\n"))
for _, v := range vars {
var value string
if v.Value != nil {
value = *v.Value
} else {
value = "<null>"
}
fmt.Fprintf(tw, "%s\t%s\t%s\n", v.Name, value, v.Description)
}
return nil
}
func printTargetList(w io.Writer, cfg *bake.Config) error {
tw := tabwriter.NewWriter(w, 1, 8, 1, '\t', 0)
defer tw.Flush()
tw.Write([]byte("TARGET\tDESCRIPTION\n"))
type targetOrGroup struct {
name string
target *bake.Target
group *bake.Group
}
list := make([]targetOrGroup, 0, len(cfg.Targets)+len(cfg.Groups))
for _, tgt := range cfg.Targets {
list = append(list, targetOrGroup{name: tgt.Name, target: tgt})
}
for _, grp := range cfg.Groups {
list = append(list, targetOrGroup{name: grp.Name, group: grp})
}
slices.SortFunc(list, func(a, b targetOrGroup) int {
return cmp.Compare(a.name, b.name)
})
for _, tgt := range list {
if strings.HasPrefix(tgt.name, "_") {
// convention for a private target
continue
}
var descr string
if tgt.target != nil {
descr = tgt.target.Description
} else if tgt.group != nil {
descr = tgt.group.Description
if len(tgt.group.Targets) > 0 {
slices.Sort(tgt.group.Targets)
names := strings.Join(tgt.group.Targets, ", ")
if descr != "" {
descr += " (" + names + ")"
} else {
descr = names
}
}
}
fmt.Fprintf(tw, "%s\t%s\n", tgt.name, descr)
}
return nil
}
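
printVars and printTargetList in the hunks above render aligned tables with text/tabwriter. A minimal sketch of that output style, using made-up target names:

package main

import (
	"fmt"
	"os"
	"sort"
	"text/tabwriter"
)

func main() {
	targets := map[string]string{
		"app":     "Build the application image",
		"lint":    "Run source linters",
		"default": "Group (app, lint)",
	}
	names := make([]string, 0, len(targets))
	for n := range targets {
		names = append(names, n)
	}
	sort.Strings(names)

	// Same writer settings as in the hunk above: tab-separated, 1-space padding.
	tw := tabwriter.NewWriter(os.Stdout, 1, 8, 1, '\t', 0)
	defer tw.Flush()
	fmt.Fprintln(tw, "TARGET\tDESCRIPTION")
	for _, n := range names {
		fmt.Fprintf(tw, "%s\t%s\n", n, targets[n])
	}
}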


@@ -5,10 +5,12 @@ import (
"context" "context"
"crypto/sha256" "crypto/sha256"
"encoding/base64" "encoding/base64"
"encoding/csv"
"encoding/hex" "encoding/hex"
"encoding/json" "encoding/json"
"fmt" "fmt"
"io" "io"
"log"
"os" "os"
"path/filepath" "path/filepath"
"strconv" "strconv"
@@ -37,6 +39,7 @@ import (
"github.com/docker/buildx/util/osutil" "github.com/docker/buildx/util/osutil"
"github.com/docker/buildx/util/progress" "github.com/docker/buildx/util/progress"
"github.com/docker/buildx/util/tracing" "github.com/docker/buildx/util/tracing"
"github.com/docker/cli-docs-tool/annotation"
"github.com/docker/cli/cli" "github.com/docker/cli/cli"
"github.com/docker/cli/cli/command" "github.com/docker/cli/cli/command"
dockeropts "github.com/docker/cli/opts" dockeropts "github.com/docker/cli/opts"
@@ -56,7 +59,6 @@ import (
"github.com/sirupsen/logrus" "github.com/sirupsen/logrus"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag" "github.com/spf13/pflag"
"github.com/tonistiigi/go-csvvalue"
"go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/metric" "go.opentelemetry.io/otel/metric"
"google.golang.org/grpc/codes" "google.golang.org/grpc/codes"
@@ -204,11 +206,7 @@ func (o *buildOptions) toControllerOptions() (*controllerapi.BuildOptions, error
return nil, err return nil, err
} }
prm := confutil.MetadataProvenance() opts.WithProvenanceResponse = opts.PrintFunc == nil && len(o.metadataFile) > 0
if opts.PrintFunc != nil || len(o.metadataFile) == 0 {
prm = confutil.MetadataProvenanceModeDisabled
}
opts.ProvenanceResponseMode = string(prm)
return &opts, nil return &opts, nil
} }
@@ -342,7 +340,7 @@ func runBuild(ctx context.Context, dockerCli command.Cli, options buildOptions)
if confutil.IsExperimental() { if confutil.IsExperimental() {
resp, retErr = runControllerBuild(ctx, dockerCli, opts, options, printer) resp, retErr = runControllerBuild(ctx, dockerCli, opts, options, printer)
} else { } else {
resp, retErr = runBasicBuild(ctx, dockerCli, opts, printer) resp, retErr = runBasicBuild(ctx, dockerCli, opts, options, printer)
} }
if err := printer.Wait(); retErr == nil { if err := printer.Wait(); retErr == nil {
@@ -368,17 +366,11 @@ func runBuild(ctx context.Context, dockerCli command.Cli, options buildOptions)
} }
} }
if opts.PrintFunc != nil { if opts.PrintFunc != nil {
if exitcode, err := printResult(dockerCli.Out(), opts.PrintFunc, resp.ExporterResponse); err != nil { if err := printResult(opts.PrintFunc, resp.ExporterResponse); err != nil {
return err return err
} else if exitcode != 0 {
os.Exit(exitcode)
} }
} else if options.metadataFile != "" { } else if options.metadataFile != "" {
dt := decodeExporterResponse(resp.ExporterResponse) if err := writeMetadataFile(options.metadataFile, decodeExporterResponse(resp.ExporterResponse)); err != nil {
if warnings := printer.Warnings(); len(warnings) > 0 && confutil.MetadataWarningsEnabled() {
dt["buildx.build.warnings"] = warnings
}
if err := writeMetadataFile(options.metadataFile, dt); err != nil {
return err return err
} }
} }
@@ -394,7 +386,7 @@ func getImageID(resp map[string]string) string {
return dgst return dgst
} }
func runBasicBuild(ctx context.Context, dockerCli command.Cli, opts *controllerapi.BuildOptions, printer *progress.Printer) (*client.SolveResponse, error) { func runBasicBuild(ctx context.Context, dockerCli command.Cli, opts *controllerapi.BuildOptions, options buildOptions, printer *progress.Printer) (*client.SolveResponse, error) {
resp, res, err := cbuild.RunBuild(ctx, dockerCli, *opts, dockerCli.In(), printer, false) resp, res, err := cbuild.RunBuild(ctx, dockerCli, *opts, dockerCli.In(), printer, false)
if res != nil { if res != nil {
res.Done() res.Done()
@@ -530,12 +522,9 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions, debugConfig *debug.D
cmd := &cobra.Command{ cmd := &cobra.Command{
Use: "build [OPTIONS] PATH | URL | -", Use: "build [OPTIONS] PATH | URL | -",
Aliases: []string{"b"},
Short: "Start a build", Short: "Start a build",
Args: cli.ExactArgs(1), Args: cli.ExactArgs(1),
Aliases: []string{"b"},
Annotations: map[string]string{
"aliases": "docker build, docker builder build, docker image build, docker buildx b",
},
RunE: func(cmd *cobra.Command, args []string) error { RunE: func(cmd *cobra.Command, args []string) error {
options.contextPath = args[0] options.contextPath = args[0]
options.builder = rootOpts.builder options.builder = rootOpts.builder
@@ -574,6 +563,7 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions, debugConfig *debug.D
flags := cmd.Flags() flags := cmd.Flags()
flags.StringSliceVar(&options.extraHosts, "add-host", []string{}, `Add a custom host-to-IP mapping (format: "host:ip")`) flags.StringSliceVar(&options.extraHosts, "add-host", []string{}, `Add a custom host-to-IP mapping (format: "host:ip")`)
flags.SetAnnotation("add-host", annotation.ExternalURL, []string{"https://docs.docker.com/reference/cli/docker/image/build/#add-host"})
flags.StringSliceVar(&options.allow, "allow", []string{}, `Allow extra privileged entitlement (e.g., "network.host", "security.insecure")`) flags.StringSliceVar(&options.allow, "allow", []string{}, `Allow extra privileged entitlement (e.g., "network.host", "security.insecure")`)
@@ -586,10 +576,12 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions, debugConfig *debug.D
flags.StringArrayVar(&options.cacheTo, "cache-to", []string{}, `Cache export destinations (e.g., "user/app:cache", "type=local,dest=path/to/dir")`) flags.StringArrayVar(&options.cacheTo, "cache-to", []string{}, `Cache export destinations (e.g., "user/app:cache", "type=local,dest=path/to/dir")`)
flags.StringVar(&options.cgroupParent, "cgroup-parent", "", `Set the parent cgroup for the "RUN" instructions during build`) flags.StringVar(&options.cgroupParent, "cgroup-parent", "", `Set the parent cgroup for the "RUN" instructions during build`)
flags.SetAnnotation("cgroup-parent", annotation.ExternalURL, []string{"https://docs.docker.com/reference/cli/docker/image/build/#cgroup-parent"})
flags.StringArrayVar(&options.contexts, "build-context", []string{}, "Additional build contexts (e.g., name=path)") flags.StringArrayVar(&options.contexts, "build-context", []string{}, "Additional build contexts (e.g., name=path)")
flags.StringVarP(&options.dockerfileName, "file", "f", "", `Name of the Dockerfile (default: "PATH/Dockerfile")`) flags.StringVarP(&options.dockerfileName, "file", "f", "", `Name of the Dockerfile (default: "PATH/Dockerfile")`)
flags.SetAnnotation("file", annotation.ExternalURL, []string{"https://docs.docker.com/reference/cli/docker/image/build/#file"})
flags.StringVar(&options.imageIDFile, "iidfile", "", "Write the image ID to a file") flags.StringVar(&options.imageIDFile, "iidfile", "", "Write the image ID to a file")
@@ -605,6 +597,11 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions, debugConfig *debug.D
flags.StringArrayVar(&options.platforms, "platform", platformsDefault, "Set target platform for build") flags.StringArrayVar(&options.platforms, "platform", platformsDefault, "Set target platform for build")
if confutil.IsExperimental() {
flags.StringVar(&options.printFunc, "print", "", "Print result of information request (e.g., outline, targets)")
cobrautil.MarkFlagsExperimental(flags, "print")
}
flags.BoolVar(&options.exportPush, "push", false, `Shorthand for "--output=type=registry"`) flags.BoolVar(&options.exportPush, "push", false, `Shorthand for "--output=type=registry"`)
flags.BoolVarP(&options.quiet, "quiet", "q", false, "Suppress the build output and print image ID on success") flags.BoolVarP(&options.quiet, "quiet", "q", false, "Suppress the build output and print image ID on success")
@@ -616,8 +613,10 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions, debugConfig *debug.D
flags.StringArrayVar(&options.ssh, "ssh", []string{}, `SSH agent socket or keys to expose to the build (format: "default|<id>[=<socket>|<key>[,<key>]]")`) flags.StringArrayVar(&options.ssh, "ssh", []string{}, `SSH agent socket or keys to expose to the build (format: "default|<id>[=<socket>|<key>[,<key>]]")`)
flags.StringArrayVarP(&options.tags, "tag", "t", []string{}, `Name and optionally a tag (format: "name:tag")`) flags.StringArrayVarP(&options.tags, "tag", "t", []string{}, `Name and optionally a tag (format: "name:tag")`)
flags.SetAnnotation("tag", annotation.ExternalURL, []string{"https://docs.docker.com/reference/cli/docker/image/build/#tag"})
flags.StringVar(&options.target, "target", "", "Set the target build stage to build") flags.StringVar(&options.target, "target", "", "Set the target build stage to build")
flags.SetAnnotation("target", annotation.ExternalURL, []string{"https://docs.docker.com/reference/cli/docker/image/build/#target"})
options.ulimits = dockeropts.NewUlimitOpt(nil) options.ulimits = dockeropts.NewUlimitOpt(nil)
flags.Var(options.ulimits, "ulimit", "Ulimit options") flags.Var(options.ulimits, "ulimit", "Ulimit options")
@@ -634,20 +633,12 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions, debugConfig *debug.D
cobrautil.MarkFlagsExperimental(flags, "root", "detach", "server-config") cobrautil.MarkFlagsExperimental(flags, "root", "detach", "server-config")
} }
flags.StringVar(&options.printFunc, "call", "build", `Set method for evaluating build ("check", "outline", "targets")`)
flags.VarPF(callAlias(&options.printFunc, "check"), "check", "", `Shorthand for "--call=check"`)
flags.Lookup("check").NoOptDefVal = "true"
// hidden flags // hidden flags
var ignore string var ignore string
var ignoreSlice []string var ignoreSlice []string
var ignoreBool bool var ignoreBool bool
var ignoreInt int64 var ignoreInt int64
flags.StringVar(&options.printFunc, "print", "", "Print result of information request (e.g., outline, targets)")
cobrautil.MarkFlagsExperimental(flags, "print")
flags.MarkHidden("print")
flags.BoolVar(&ignoreBool, "compress", false, "Compress the build context using gzip") flags.BoolVar(&ignoreBool, "compress", false, "Compress the build context using gzip")
flags.MarkHidden("compress") flags.MarkHidden("compress")
@@ -705,7 +696,7 @@ type commonFlags struct {
func commonBuildFlags(options *commonFlags, flags *pflag.FlagSet) { func commonBuildFlags(options *commonFlags, flags *pflag.FlagSet) {
options.noCache = flags.Bool("no-cache", false, "Do not use cache when building the image") options.noCache = flags.Bool("no-cache", false, "Do not use cache when building the image")
flags.StringVar(&options.progress, "progress", "auto", `Set type of progress output ("auto", "plain", "tty", "rawjson"). Use plain to show container output`) flags.StringVar(&options.progress, "progress", "auto", `Set type of progress output ("auto", "plain", "tty"). Use plain to show container output`)
options.pull = flags.Bool("pull", false, "Always attempt to pull all referenced images") options.pull = flags.Bool("pull", false, "Always attempt to pull all referenced images")
flags.StringVar(&options.metadataFile, "metadata-file", "", "Write build result metadata to a file") flags.StringVar(&options.metadataFile, "metadata-file", "", "Write build result metadata to a file")
} }
@@ -863,78 +854,47 @@ func printWarnings(w io.Writer, warnings []client.VertexWarning, mode progressui
} }
} }
func printResult(w io.Writer, f *controllerapi.PrintFunc, res map[string]string) (int, error) { func printResult(f *controllerapi.PrintFunc, res map[string]string) error {
switch f.Name { switch f.Name {
case "outline": case "outline":
return 0, printValue(w, outline.PrintOutline, outline.SubrequestsOutlineDefinition.Version, f.Format, res) return printValue(outline.PrintOutline, outline.SubrequestsOutlineDefinition.Version, f.Format, res)
case "targets": case "targets":
return 0, printValue(w, targets.PrintTargets, targets.SubrequestsTargetsDefinition.Version, f.Format, res) return printValue(targets.PrintTargets, targets.SubrequestsTargetsDefinition.Version, f.Format, res)
case "subrequests.describe": case "subrequests.describe":
return 0, printValue(w, subrequests.PrintDescribe, subrequests.SubrequestsDescribeDefinition.Version, f.Format, res) return printValue(subrequests.PrintDescribe, subrequests.SubrequestsDescribeDefinition.Version, f.Format, res)
case "lint": case "lint":
err := printValue(w, lint.PrintLintViolations, lint.SubrequestLintDefinition.Version, f.Format, res) return printValue(lint.PrintLintViolations, lint.SubrequestLintDefinition.Version, f.Format, res)
if err != nil {
return 0, err
}
lintResults := lint.LintResults{}
if result, ok := res["result.json"]; ok {
if err := json.Unmarshal([]byte(result), &lintResults); err != nil {
return 0, err
}
}
if lintResults.Error != nil {
// Print the error message and the source
// Normally, we would use `errdefs.WithSource` to attach the source to the
// error and let the error be printed by the handling that's already in place,
// but here we want to print the error in a way that's consistent with how
// the lint warnings are printed via the `lint.PrintLintViolations` function,
// which differs from the default error printing.
if f.Format != "json" && len(lintResults.Warnings) > 0 {
fmt.Fprintln(w)
}
lintBuf := bytes.NewBuffer([]byte(lintResults.Error.Message + "\n"))
sourceInfo := lintResults.Sources[lintResults.Error.Location.SourceIndex]
source := errdefs.Source{
Info: sourceInfo,
Ranges: lintResults.Error.Location.Ranges,
}
source.Print(lintBuf)
return 0, errors.New(lintBuf.String())
} else if len(lintResults.Warnings) == 0 && f.Format != "json" {
fmt.Fprintln(w, "Check complete, no warnings found.")
}
default: default:
if dt, ok := res["result.json"]; ok && f.Format == "json" { if dt, ok := res["result.json"]; ok && f.Format == "json" {
fmt.Fprintln(w, dt) fmt.Println(dt)
} else if dt, ok := res["result.txt"]; ok { } else if dt, ok := res["result.txt"]; ok {
fmt.Fprint(w, dt) fmt.Print(dt)
} else { } else {
fmt.Fprintf(w, "%s %+v\n", f, res) log.Printf("%s %+v", f, res)
} }
} }
if v, ok := res["result.statuscode"]; !f.IgnoreStatus && ok { if v, ok := res["result.statuscode"]; !f.IgnoreStatus && ok {
if n, err := strconv.Atoi(v); err == nil && n != 0 { if n, err := strconv.Atoi(v); err == nil && n != 0 {
return n, nil os.Exit(n)
} }
} }
return 0, nil return nil
} }
type printFunc func([]byte, io.Writer) error type printFunc func([]byte, io.Writer) error
func printValue(w io.Writer, printer printFunc, version string, format string, res map[string]string) error { func printValue(printer printFunc, version string, format string, res map[string]string) error {
if format == "json" { if format == "json" {
fmt.Fprintln(w, res["result.json"]) fmt.Fprintln(os.Stdout, res["result.json"])
return nil return nil
} }
if res["version"] != "" && versions.LessThan(version, res["version"]) && res["result.txt"] != "" { if res["version"] != "" && versions.LessThan(version, res["version"]) && res["result.txt"] != "" {
// structure is too new and we don't know how to print it // structure is too new and we don't know how to print it
fmt.Fprint(w, res["result.txt"]) fmt.Fprint(os.Stdout, res["result.txt"])
return nil return nil
} }
return printer([]byte(res["result.json"]), w) return printer([]byte(res["result.json"]), os.Stdout)
} }
type invokeConfig struct { type invokeConfig struct {
@@ -984,9 +944,9 @@ func (cfg *invokeConfig) parseInvokeConfig(invoke, on string) error {
return nil return nil
} }
csvParser := csvvalue.NewParser() csvReader := csv.NewReader(strings.NewReader(invoke))
csvParser.LazyQuotes = true csvReader.LazyQuotes = true
fields, err := csvParser.Fields(invoke, nil) fields, err := csvReader.Read()
if err != nil { if err != nil {
return err return err
} }
@@ -1042,20 +1002,6 @@ func maybeJSONArray(v string) []string {
return []string{v} return []string{v}
} }
func callAlias(target *string, value string) cobrautil.BoolFuncValue {
return func(s string) error {
v, err := strconv.ParseBool(s)
if err != nil {
return err
}
if v {
*target = value
}
return nil
}
}
// timeBuildCommand will start a timer for timing the build command. It records the time when the returned // timeBuildCommand will start a timer for timing the build command. It records the time when the returned
// function is invoked into a metric. // function is invoked into a metric.
func timeBuildCommand(mp metric.MeterProvider, attrs attribute.Set) func(err error) { func timeBuildCommand(mp metric.MeterProvider, attrs attribute.Set) func(err error) {
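
A recurring change in the build command hunks above is that printResult no longer calls os.Exit itself; it returns the status code taken from result.statuscode and lets the caller decide. A sketch of that pattern with simplified signatures, so intermediate cleanup has already run by the time the process exits:

package main

import (
	"fmt"
	"os"
	"strconv"
)

// printResult reports the result and returns a non-zero code instead of
// exiting, so deferred cleanup in callers still runs.
func printResult(res map[string]string) (int, error) {
	if v, ok := res["result.statuscode"]; ok {
		if n, err := strconv.Atoi(v); err == nil && n != 0 {
			return n, nil
		}
	}
	fmt.Println("check complete")
	return 0, nil
}

func main() {
	code, err := printResult(map[string]string{"result.statuscode": "3"})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if code != 0 {
		os.Exit(code) // exit only at the top level
	}
}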


@@ -80,7 +80,7 @@ func RootCmd(dockerCli command.Cli, children ...DebuggableCmd) *cobra.Command {
flags.StringVar(&controlOptions.Root, "root", "", "Specify root directory of server to connect for the monitor") flags.StringVar(&controlOptions.Root, "root", "", "Specify root directory of server to connect for the monitor")
flags.BoolVar(&controlOptions.Detach, "detach", runtime.GOOS == "linux", "Detach buildx server for the monitor (supported only on linux)") flags.BoolVar(&controlOptions.Detach, "detach", runtime.GOOS == "linux", "Detach buildx server for the monitor (supported only on linux)")
flags.StringVar(&controlOptions.ServerConfig, "server-config", "", "Specify buildx server config file for the monitor (used only when launching new server)") flags.StringVar(&controlOptions.ServerConfig, "server-config", "", "Specify buildx server config file for the monitor (used only when launching new server)")
flags.StringVar(&progressMode, "progress", "auto", `Set type of progress output ("auto", "plain", "tty", "rawjson") for the monitor. Use plain to show container output`) flags.StringVar(&progressMode, "progress", "auto", `Set type of progress output ("auto", "plain", "tty") for the monitor. Use plain to show container output`)
cobrautil.MarkFlagsExperimental(flags, "invoke", "on", "root", "detach", "server-config") cobrautil.MarkFlagsExperimental(flags, "invoke", "on", "root", "detach", "server-config")


@@ -5,7 +5,7 @@ import (
"net" "net"
"os" "os"
"github.com/containerd/platforms" "github.com/containerd/containerd/platforms"
"github.com/docker/buildx/build" "github.com/docker/buildx/build"
"github.com/docker/buildx/builder" "github.com/docker/buildx/builder"
"github.com/docker/buildx/util/progress" "github.com/docker/buildx/util/progress"
@@ -125,7 +125,8 @@ func dialStdioCmd(dockerCli command.Cli, rootOpts *rootOptions) *cobra.Command {
} }
flags := cmd.Flags() flags := cmd.Flags()
cmd.Flags()
flags.StringVar(&opts.platform, "platform", os.Getenv("DOCKER_DEFAULT_PLATFORM"), "Target platform: this is used for node selection") flags.StringVar(&opts.platform, "platform", os.Getenv("DOCKER_DEFAULT_PLATFORM"), "Target platform: this is used for node selection")
flags.StringVar(&opts.progress, "progress", "quiet", `Set type of progress output ("auto", "plain", "tty", "rawjson"). Use plain to show container output`) flags.StringVar(&opts.progress, "progress", "quiet", "Set type of progress output (auto, plain, tty).")
return cmd return cmd
} }


@@ -9,7 +9,6 @@ import (
"github.com/distribution/reference" "github.com/distribution/reference"
"github.com/docker/buildx/builder" "github.com/docker/buildx/builder"
"github.com/docker/buildx/util/buildflags"
"github.com/docker/buildx/util/cobrautil/completion" "github.com/docker/buildx/util/cobrautil/completion"
"github.com/docker/buildx/util/imagetools" "github.com/docker/buildx/util/imagetools"
"github.com/docker/buildx/util/progress" "github.com/docker/buildx/util/progress"
@@ -30,7 +29,6 @@ type createOptions struct {
dryrun bool dryrun bool
actionAppend bool actionAppend bool
progress string progress string
preferIndex bool
} }
func runCreate(ctx context.Context, dockerCli command.Cli, in createOptions, args []string) error { func runCreate(ctx context.Context, dockerCli command.Cli, in createOptions, args []string) error {
@@ -155,12 +153,7 @@ func runCreate(ctx context.Context, dockerCli command.Cli, in createOptions, arg
} }
} }
annotations, err := buildflags.ParseAnnotations(in.annotations) dt, desc, err := r.Combine(ctx, srcs, in.annotations)
if err != nil {
return errors.Wrapf(err, "failed to parse annotations")
}
dt, desc, err := r.Combine(ctx, srcs, annotations, in.preferIndex)
if err != nil { if err != nil {
return err return err
} }
@@ -288,9 +281,8 @@ func createCmd(dockerCli command.Cli, opts RootOptions) *cobra.Command {
flags.StringArrayVarP(&options.tags, "tag", "t", []string{}, "Set reference for new image") flags.StringArrayVarP(&options.tags, "tag", "t", []string{}, "Set reference for new image")
flags.BoolVar(&options.dryrun, "dry-run", false, "Show final image instead of pushing") flags.BoolVar(&options.dryrun, "dry-run", false, "Show final image instead of pushing")
flags.BoolVar(&options.actionAppend, "append", false, "Append to existing manifest") flags.BoolVar(&options.actionAppend, "append", false, "Append to existing manifest")
flags.StringVar(&options.progress, "progress", "auto", `Set type of progress output ("auto", "plain", "tty", "rawjson"). Use plain to show container output`) flags.StringVar(&options.progress, "progress", "auto", `Set type of progress output ("auto", "plain", "tty"). Use plain to show container output`)
flags.StringArrayVarP(&options.annotations, "annotation", "", []string{}, "Add annotation to the image") flags.StringArrayVarP(&options.annotations, "annotation", "", []string{}, "Add annotation to the image")
flags.BoolVar(&options.preferIndex, "prefer-index", true, "When only a single source is specified, prefer outputting an image index or manifest list instead of performing a carbon copy")
return cmd return cmd
} }
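
The create command above now routes --annotation values through buildflags.ParseAnnotations. A simplified stand-in that only splits plain key=value pairs, shown purely for illustration; the real parser also understands type and platform qualifiers:

package main

import (
	"fmt"
	"strings"
)

func parseAnnotations(in []string) (map[string]string, error) {
	out := make(map[string]string, len(in))
	for _, s := range in {
		k, v, ok := strings.Cut(s, "=")
		if !ok {
			return nil, fmt.Errorf("invalid annotation %q, expected key=value", s)
		}
		out[k] = v
	}
	return out, nil
}

func main() {
	m, err := parseAnnotations([]string{"org.opencontainers.image.source=https://github.com/docker/buildx"})
	if err != nil {
		panic(err)
	}
	fmt.Println(m)
}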


@@ -15,7 +15,7 @@ import (
type installOptions struct { type installOptions struct {
} }
func runInstall(_ command.Cli, _ installOptions) error { func runInstall(dockerCli command.Cli, in installOptions) error {
dir := config.Dir() dir := config.Dir()
if err := os.MkdirAll(dir, 0755); err != nil { if err := os.MkdirAll(dir, 0755); err != nil {
return errors.Wrap(err, "could not create docker config") return errors.Wrap(err, "could not create docker config")


@@ -195,8 +195,6 @@ func toBuildkitPruneInfo(f filters.Args) (*client.PruneInfo, error) {
case 1: case 1:
if filterKey == "id" { if filterKey == "id" {
filters = append(filters, filterKey+"~="+values[0]) filters = append(filters, filterKey+"~="+values[0])
} else if strings.HasSuffix(filterKey, "!") || strings.HasSuffix(filterKey, "~") {
filters = append(filters, filterKey+"="+values[0])
} else { } else {
filters = append(filters, filterKey+"=="+values[0]) filters = append(filters, filterKey+"=="+values[0])
} }
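
The prune hunk above maps docker filter keys onto buildkit filter expressions, special-casing keys that end in "!" or "~". A sketch of that mapping for the single-value case, with made-up filter keys in the example:

package main

import (
	"fmt"
	"strings"
)

// toFilter mirrors the branching in the hunk above: "id" becomes a fuzzy
// match, keys already carrying a "!" or "~" suffix get a bare "=", and
// everything else becomes an exact "==" comparison.
func toFilter(key, value string) string {
	switch {
	case key == "id":
		return key + "~=" + value
	case strings.HasSuffix(key, "!") || strings.HasSuffix(key, "~"):
		return key + "=" + value // "foo!" -> "foo!=<v>", "foo~" -> "foo~=<v>"
	default:
		return key + "==" + value
	}
}

func main() {
	fmt.Println(toFilter("id", "abc123"))
	fmt.Println(toFilter("type!", "frontend"))
	fmt.Println(toFilter("description~", "local"))
}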


@@ -15,7 +15,7 @@ import (
type uninstallOptions struct { type uninstallOptions struct {
} }
func runUninstall(_ command.Cli, _ uninstallOptions) error { func runUninstall(dockerCli command.Cli, in uninstallOptions) error {
dir := config.Dir() dir := config.Dir()
cfg, err := config.Load(dir) cfg, err := config.Load(dir)
if err != nil { if err != nil {


@@ -11,7 +11,7 @@ import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )
func runVersion(_ command.Cli) error { func runVersion(dockerCli command.Cli) error {
fmt.Println(version.Package, version.Version, version.Revision) fmt.Println(version.Package, version.Version, version.Revision)
return nil return nil
} }


@@ -21,7 +21,7 @@ import (
"github.com/docker/cli/cli/command" "github.com/docker/cli/cli/command"
"github.com/docker/cli/cli/config" "github.com/docker/cli/cli/config"
dockeropts "github.com/docker/cli/opts" dockeropts "github.com/docker/cli/opts"
"github.com/docker/docker/api/types/container" "github.com/docker/go-units"
"github.com/moby/buildkit/client" "github.com/moby/buildkit/client"
"github.com/moby/buildkit/session/auth/authprovider" "github.com/moby/buildkit/session/auth/authprovider"
"github.com/moby/buildkit/util/grpcerrors" "github.com/moby/buildkit/util/grpcerrors"
@@ -67,7 +67,7 @@ func RunBuild(ctx context.Context, dockerCli command.Cli, in controllerapi.Build
Target: in.Target, Target: in.Target,
Ulimits: controllerUlimitOpt2DockerUlimit(in.Ulimits), Ulimits: controllerUlimitOpt2DockerUlimit(in.Ulimits),
GroupRef: in.GroupRef, GroupRef: in.GroupRef,
ProvenanceResponseMode: confutil.ParseMetadataProvenance(in.ProvenanceResponseMode), WithProvenanceResponse: in.WithProvenanceResponse,
} }
platforms, err := platformutil.Parse(in.Platforms) platforms, err := platformutil.Parse(in.Platforms)
@@ -136,9 +136,8 @@ func RunBuild(ctx context.Context, dockerCli command.Cli, in controllerapi.Build
annotations, err := buildflags.ParseAnnotations(in.Annotations) annotations, err := buildflags.ParseAnnotations(in.Annotations)
if err != nil { if err != nil {
return nil, nil, errors.Wrap(err, "parse annotations") return nil, nil, err
} }
for _, o := range outputs { for _, o := range outputs {
for k, v := range annotations { for k, v := range annotations {
o.Attrs[k.String()] = v o.Attrs[k.String()] = v
@@ -190,7 +189,7 @@ func RunBuild(ctx context.Context, dockerCli command.Cli, in controllerapi.Build
return nil, nil, err return nil, nil, err
} }
resp, res, err := buildTargets(ctx, dockerCli, nodes, map[string]build.Options{defaultTargetName: opts}, progress, generateResult) resp, res, err := buildTargets(ctx, dockerCli, b.NodeGroup, nodes, map[string]build.Options{defaultTargetName: opts}, progress, generateResult)
err = wrapBuildError(err, false) err = wrapBuildError(err, false)
if err != nil { if err != nil {
// NOTE: buildTargets can return *build.ResultHandle even on error. // NOTE: buildTargets can return *build.ResultHandle even on error.
@@ -204,7 +203,7 @@ func RunBuild(ctx context.Context, dockerCli command.Cli, in controllerapi.Build
// NOTE: When an error happens during the build and this function acquires the debuggable *build.ResultHandle, // NOTE: When an error happens during the build and this function acquires the debuggable *build.ResultHandle,
// this function returns it in addition to the error (i.e. it does "return nil, res, err"). The caller can // this function returns it in addition to the error (i.e. it does "return nil, res, err"). The caller can
// inspect the result and debug the cause of that error. // inspect the result and debug the cause of that error.
func buildTargets(ctx context.Context, dockerCli command.Cli, nodes []builder.Node, opts map[string]build.Options, progress progress.Writer, generateResult bool) (*client.SolveResponse, *build.ResultHandle, error) { func buildTargets(ctx context.Context, dockerCli command.Cli, ng *store.NodeGroup, nodes []builder.Node, opts map[string]build.Options, progress progress.Writer, generateResult bool) (*client.SolveResponse, *build.ResultHandle, error) {
var res *build.ResultHandle var res *build.ResultHandle
var resp map[string]*client.SolveResponse var resp map[string]*client.SolveResponse
var err error var err error
@@ -271,9 +270,9 @@ func controllerUlimitOpt2DockerUlimit(u *controllerapi.UlimitOpt) *dockeropts.Ul
if u == nil { if u == nil {
return nil return nil
} }
values := make(map[string]*container.Ulimit) values := make(map[string]*units.Ulimit)
for k, v := range u.Values { for k, v := range u.Values {
values[k] = &container.Ulimit{ values[k] = &units.Ulimit{
Name: v.Name, Name: v.Name,
Hard: v.Hard, Hard: v.Hard,
Soft: v.Soft, Soft: v.Soft,


@@ -302,7 +302,7 @@ type BuildOptions struct {
Ref string `protobuf:"bytes,29,opt,name=Ref,proto3" json:"Ref,omitempty"` Ref string `protobuf:"bytes,29,opt,name=Ref,proto3" json:"Ref,omitempty"`
GroupRef string `protobuf:"bytes,30,opt,name=GroupRef,proto3" json:"GroupRef,omitempty"` GroupRef string `protobuf:"bytes,30,opt,name=GroupRef,proto3" json:"GroupRef,omitempty"`
Annotations []string `protobuf:"bytes,31,rep,name=Annotations,proto3" json:"Annotations,omitempty"` Annotations []string `protobuf:"bytes,31,rep,name=Annotations,proto3" json:"Annotations,omitempty"`
ProvenanceResponseMode string `protobuf:"bytes,32,opt,name=ProvenanceResponseMode,proto3" json:"ProvenanceResponseMode,omitempty"` WithProvenanceResponse bool `protobuf:"varint,32,opt,name=WithProvenanceResponse,proto3" json:"WithProvenanceResponse,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"` XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"` XXX_sizecache int32 `json:"-"`
@@ -549,11 +549,11 @@ func (m *BuildOptions) GetAnnotations() []string {
return nil return nil
} }
func (m *BuildOptions) GetProvenanceResponseMode() string { func (m *BuildOptions) GetWithProvenanceResponse() bool {
if m != nil { if m != nil {
return m.ProvenanceResponseMode return m.WithProvenanceResponse
} }
return "" return false
} }
type ExportEntry struct { type ExportEntry struct {
@@ -2094,130 +2094,130 @@ func init() {
func init() { proto.RegisterFile("controller.proto", fileDescriptor_ed7f10298fa1d90f) } func init() { proto.RegisterFile("controller.proto", fileDescriptor_ed7f10298fa1d90f) }
var fileDescriptor_ed7f10298fa1d90f = []byte{ var fileDescriptor_ed7f10298fa1d90f = []byte{
// 1957 bytes of a gzipped FileDescriptorProto // 1960 bytes of a gzipped FileDescriptorProto
	// gzipped descriptor bytes for controller.proto (full hex dump omitted)
} }
// Reference imports to suppress errors if they are not otherwise used. // Reference imports to suppress errors if they are not otherwise used.
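For context on what the descriptor blob above contains: generated registrations like `proto.RegisterFile` embed a gzip-compressed `FileDescriptorProto`. A self-contained sketch of decoding such a blob, assuming the `google.golang.org/protobuf` module; the variable named in the comment matches the generated one above, but no real bytes are embedded here:

```go
package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/descriptorpb"
)

// decodeDescriptor gunzips a generated descriptor blob (for example
// fileDescriptor_ed7f10298fa1d90f) and unmarshals it into a
// FileDescriptorProto so the message and service definitions can be inspected.
func decodeDescriptor(gz []byte) (*descriptorpb.FileDescriptorProto, error) {
	zr, err := gzip.NewReader(bytes.NewReader(gz))
	if err != nil {
		return nil, err
	}
	defer zr.Close()
	raw, err := io.ReadAll(zr)
	if err != nil {
		return nil, err
	}
	fd := &descriptorpb.FileDescriptorProto{}
	if err := proto.Unmarshal(raw, fd); err != nil {
		return nil, err
	}
	return fd, nil
}

func main() {
	// Inside the generated package this would be:
	//   fd, err := decodeDescriptor(fileDescriptor_ed7f10298fa1d90f)
	fmt.Println("decodeDescriptor expects the gzipped bytes from the generated file")
}
```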

View File

@@ -80,7 +80,7 @@ message BuildOptions {
string Ref = 29; string Ref = 29;
string GroupRef = 30; string GroupRef = 30;
repeated string Annotations = 31; repeated string Annotations = 31;
string ProvenanceResponseMode = 32; bool WithProvenanceResponse = 32;
} }
message ExportEntry { message ExportEntry {

View File

@@ -4,6 +4,7 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/docker/docker/builder/remotecontext/urlutil"
"github.com/moby/buildkit/util/gitutil" "github.com/moby/buildkit/util/gitutil"
) )
@@ -21,7 +22,7 @@ func ResolveOptionPaths(options *BuildOptions) (_ *BuildOptions, err error) {
} }
} }
if options.DockerfileName != "" && options.DockerfileName != "-" { if options.DockerfileName != "" && options.DockerfileName != "-" {
if localContext && !isHTTPURL(options.DockerfileName) { if localContext && !urlutil.IsURL(options.DockerfileName) {
options.DockerfileName, err = filepath.Abs(options.DockerfileName) options.DockerfileName, err = filepath.Abs(options.DockerfileName)
if err != nil { if err != nil {
return nil, err return nil, err
@@ -163,15 +164,8 @@ func ResolveOptionPaths(options *BuildOptions) (_ *BuildOptions, err error) {
return options, nil return options, nil
} }
// isHTTPURL returns true if the provided str is an HTTP(S) URL by checking if it
// has a http:// or https:// scheme. No validation is performed to verify if the
// URL is well-formed.
func isHTTPURL(str string) bool {
return strings.HasPrefix(str, "https://") || strings.HasPrefix(str, "http://")
}
func isRemoteURL(c string) bool { func isRemoteURL(c string) bool {
if isHTTPURL(c) { if urlutil.IsURL(c) {
return true return true
} }
if _, err := gitutil.ParseGitRef(c); err == nil { if _, err := gitutil.ParseGitRef(c); err == nil {
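One side of this file keeps a local `isHTTPURL` prefix check while the other delegates to `urlutil.IsURL`; either way, the result gates whether a Dockerfile reference is treated as remote or resolved to an absolute local path (see the earlier hunk). A standalone sketch of that decision follows; only `isHTTPURL` is copied from the hunk above, the rest (including the sample inputs) is illustrative:

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// isHTTPURL mirrors the helper shown above: a cheap prefix check with no
// validation that the URL is actually well-formed.
func isHTTPURL(str string) bool {
	return strings.HasPrefix(str, "https://") || strings.HasPrefix(str, "http://")
}

// resolveDockerfile sketches the decision from the earlier hunk: HTTP(S)
// references and stdin ("-") are left untouched, anything else is made
// absolute.
func resolveDockerfile(name string) (string, error) {
	if name == "" || name == "-" || isHTTPURL(name) {
		return name, nil
	}
	return filepath.Abs(name)
}

func main() {
	for _, name := range []string{
		"Dockerfile",
		"-",
		"https://example.com/Dockerfile", // hypothetical remote Dockerfile
	} {
		resolved, err := resolveDockerfile(name)
		fmt.Println(name, "->", resolved, err)
	}
}
```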

View File

@@ -210,7 +210,7 @@ func (c *Client) build(ctx context.Context, ref string, options pb.BuildOptions,
} }
return err return err
} else if n > 0 { } else if n > 0 {
if err := stream.Send(&pb.InputMessage{ if stream.Send(&pb.InputMessage{
Input: &pb.InputMessage_Data{ Input: &pb.InputMessage_Data{
Data: &pb.DataMessage{ Data: &pb.DataMessage{
Data: buf[:n], Data: buf[:n],

View File

@@ -358,7 +358,7 @@ func copyToStream(fd uint32, snd msgStream, r io.Reader) error {
} }
return err return err
} else if n > 0 { } else if n > 0 {
if err := snd.Send(&pb.Message{ if snd.Send(&pb.Message{
Input: &pb.Message_File{ Input: &pb.Message_File{
File: &pb.FdMessage{ File: &pb.FdMessage{
Fd: fd, Fd: fd,
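This hunk and the similar `stream.Send` hunk in the previous file differ only in whether the return value of `Send` is captured into `err`. A simplified, self-contained sketch of the checked pattern; the stream and message types here are stand-ins, not the generated buildx ones:

```go
package main

import (
	"errors"
	"fmt"
	"io"
	"strings"
)

// sender stands in for the generated gRPC stream; only the Send signature
// matters for the pattern shown here.
type sender interface {
	Send(data []byte) error
}

type printSender struct{}

func (printSender) Send(data []byte) error {
	fmt.Printf("sent %d bytes\n", len(data))
	return nil
}

// copyToStream reads chunks and forwards them, propagating both read and send
// errors instead of silently dropping the Send result.
func copyToStream(snd sender, r io.Reader) error {
	buf := make([]byte, 32*1024)
	for {
		n, err := r.Read(buf)
		if n > 0 {
			if err := snd.Send(buf[:n]); err != nil {
				return err
			}
		}
		if errors.Is(err, io.EOF) {
			return nil
		}
		if err != nil {
			return err
		}
	}
}

func main() {
	_ = copyToStream(printSender{}, strings.NewReader("hello world"))
}
```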

View File

@@ -7,11 +7,8 @@ variable "DOCS_FORMATS" {
variable "DESTDIR" { variable "DESTDIR" {
default = "./bin" default = "./bin"
} }
variable "TEST_COVERAGE" {
default = null
}
variable "GOLANGCI_LINT_MULTIPLATFORM" { variable "GOLANGCI_LINT_MULTIPLATFORM" {
default = "" default = null
} }
# Special target: https://github.com/docker/metadata-action#bake-definition # Special target: https://github.com/docker/metadata-action#bake-definition
@@ -31,14 +28,14 @@ group "default" {
} }
group "validate" { group "validate" {
targets = ["lint", "lint-gopls", "validate-vendor", "validate-docs"] targets = ["lint", "validate-vendor", "validate-docs"]
} }
target "lint" { target "lint" {
inherits = ["_common"] inherits = ["_common"]
dockerfile = "./hack/dockerfiles/lint.Dockerfile" dockerfile = "./hack/dockerfiles/lint.Dockerfile"
output = ["type=cacheonly"] output = ["type=cacheonly"]
platforms = GOLANGCI_LINT_MULTIPLATFORM != "" ? [ platforms = GOLANGCI_LINT_MULTIPLATFORM != null ? [
"darwin/amd64", "darwin/amd64",
"darwin/arm64", "darwin/arm64",
"linux/amd64", "linux/amd64",
@@ -51,11 +48,6 @@ target "lint" {
] : [] ] : []
} }
target "lint-gopls" {
inherits = ["lint"]
target = "gopls-analyze"
}
target "validate-vendor" { target "validate-vendor" {
inherits = ["_common"] inherits = ["_common"]
dockerfile = "./hack/dockerfiles/vendor.Dockerfile" dockerfile = "./hack/dockerfiles/vendor.Dockerfile"
@@ -195,7 +187,6 @@ variable "TEST_BUILDKIT_TAG" {
target "integration-test-base" { target "integration-test-base" {
inherits = ["_common"] inherits = ["_common"]
args = { args = {
GO_EXTRA_FLAGS = TEST_COVERAGE == "1" ? "-cover" : null
HTTP_PROXY = HTTP_PROXY HTTP_PROXY = HTTP_PROXY
HTTPS_PROXY = HTTPS_PROXY HTTPS_PROXY = HTTPS_PROXY
NO_PROXY = NO_PROXY NO_PROXY = NO_PROXY

View File

@@ -1,6 +1,4 @@
--- # Bake file reference
title: Bake file reference
---
The Bake file is a file for defining workflows that you run using `docker buildx bake`. The Bake file is a file for defining workflows that you run using `docker buildx bake`.

3
docs/guides/cicd.md Normal file
View File

@@ -0,0 +1,3 @@
# CI/CD
This page has moved to [Docker Docs website](https://docs.docker.com/build/ci/)

View File

@@ -0,0 +1,3 @@
# CNI networking
This page has moved to [Docker Docs website](https://docs.docker.com/build/buildkit/configure/#cni-networking)

View File

@@ -0,0 +1,3 @@
# Color output controls
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/env-vars/#buildkit_colors)

View File

@@ -0,0 +1,3 @@
# Using a custom network
This page has moved to [Docker Docs website](https://docs.docker.com/build/drivers/docker-container/#custom-network)

View File

@@ -0,0 +1,3 @@
# Using a custom registry configuration
This page has moved to [Docker Docs website](https://docs.docker.com/build/buildkit/configure/#setting-registry-certificates)

View File

@@ -0,0 +1,3 @@
# OpenTelemetry support
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/opentelemetry/)

View File

@@ -0,0 +1,3 @@
# Registry mirror
This page has moved to [Docker Docs website](https://docs.docker.com/build/buildkit/configure/#registry-mirror)

View File

@@ -0,0 +1,3 @@
# Resource limiting
This page has moved to [Docker Docs website](https://docs.docker.com/build/buildkit/configure/#resource-limiting)

View File

@@ -0,0 +1,3 @@
# Defining additional build contexts and linking targets
This page has moved to [Docker Docs website](https://docs.docker.com/build/bake/build-contexts)

View File

@@ -0,0 +1,3 @@
# Building from Compose file
This page has moved to [Docker Docs website](https://docs.docker.com/build/bake/compose-file)

View File

@@ -0,0 +1,3 @@
# Configuring builds
This page has moved to [Docker Docs website](https://docs.docker.com/build/bake/configuring-build)

View File

@@ -0,0 +1,3 @@
# Bake file definition
This page has moved to [docs/bake-reference.md](../../bake-reference.md)

View File

@@ -0,0 +1,3 @@
# User defined HCL functions
This page has moved to [Docker Docs website](https://docs.docker.com/build/bake/hcl-funcs)

View File

@@ -0,0 +1,3 @@
# High-level build options with Bake
This page has moved to [Docker Docs website](https://docs.docker.com/build/bake)

3
docs/manuals/cache/backends/azblob.md vendored Normal file
View File

@@ -0,0 +1,3 @@
# Azure Blob Storage cache storage
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/cache/backends/azblob)

3
docs/manuals/cache/backends/gha.md vendored Normal file
View File

@@ -0,0 +1,3 @@
# GitHub Actions cache storage
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/cache/backends/gha)

3
docs/manuals/cache/backends/index.md vendored Normal file
View File

@@ -0,0 +1,3 @@
# Cache storage backends
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/cache/backends)

3
docs/manuals/cache/backends/inline.md vendored Normal file
View File

@@ -0,0 +1,3 @@
# Inline cache storage
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/cache/backends/inline)

3
docs/manuals/cache/backends/local.md vendored Normal file
View File

@@ -0,0 +1,3 @@
# Local cache storage
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/cache/backends/local)

View File

@@ -0,0 +1,3 @@
# Registry cache storage
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/cache/backends/registry)

3
docs/manuals/cache/backends/s3.md vendored Normal file
View File

@@ -0,0 +1,3 @@
# Amazon S3 cache storage
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/cache/backends/s3)

View File

@@ -0,0 +1,3 @@
# Docker container driver
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/drivers/docker-container)

View File

@@ -0,0 +1,3 @@
# Docker driver
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/drivers/docker)

View File

@@ -0,0 +1,3 @@
# Buildx drivers overview
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/drivers)

View File

@@ -0,0 +1,3 @@
# Kubernetes driver
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/drivers/kubernetes)

View File

@@ -0,0 +1,3 @@
# Remote driver
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/drivers/remote)

View File

@@ -0,0 +1,3 @@
# Image and registry exporters
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/exporters/image-registry)

View File

@@ -0,0 +1,3 @@
# Exporters overview
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/exporters)

View File

@@ -0,0 +1,3 @@
# Local and tar exporters
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/exporters/local-tar)

View File

@@ -0,0 +1,3 @@
# OCI and Docker exporters
This page has moved to [Docker Docs website](https://docs.docker.com/build/building/exporters/oci-docker)

View File

@@ -13,22 +13,20 @@ Build from a file
### Options ### Options
| Name | Type | Default | Description | | Name | Type | Default | Description |
|:------------------------------------|:--------------|:--------|:----------------------------------------------------------------------------------------------------| |:------------------------------------|:--------------|:--------|:-----------------------------------------------------------------------------------------|
| [`--builder`](#builder) | `string` | | Override the configured builder instance | | [`--builder`](#builder) | `string` | | Override the configured builder instance |
| [`--call`](#call) | `string` | `build` | Set method for evaluating build (`check`, `outline`, `targets`) | | [`-f`](#file), [`--file`](#file) | `stringArray` | | Build definition file |
| [`--check`](#check) | `bool` | | Shorthand for `--call=check` | | `--load` | | | Shorthand for `--set=*.output=type=docker` |
| [`-f`](#file), [`--file`](#file) | `stringArray` | | Build definition file | | [`--metadata-file`](#metadata-file) | `string` | | Write build result metadata to a file |
| `--load` | `bool` | | Shorthand for `--set=*.output=type=docker` | | [`--no-cache`](#no-cache) | | | Do not use cache when building the image |
| [`--metadata-file`](#metadata-file) | `string` | | Write build result metadata to a file | | [`--print`](#print) | | | Print the options without building |
| [`--no-cache`](#no-cache) | `bool` | | Do not use cache when building the image | | [`--progress`](#progress) | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`). Use plain to show container output |
| [`--print`](#print) | `bool` | | Print the options without building | | [`--provenance`](#provenance) | `string` | | Shorthand for `--set=*.attest=type=provenance` |
| [`--progress`](#progress) | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`, `rawjson`). Use plain to show container output | | [`--pull`](#pull) | | | Always attempt to pull all referenced images |
| [`--provenance`](#provenance) | `string` | | Shorthand for `--set=*.attest=type=provenance` | | `--push` | | | Shorthand for `--set=*.output=type=registry` |
| [`--pull`](#pull) | `bool` | | Always attempt to pull all referenced images | | [`--sbom`](#sbom) | `string` | | Shorthand for `--set=*.attest=type=sbom` |
| `--push` | `bool` | | Shorthand for `--set=*.output=type=registry` | | [`--set`](#set) | `stringArray` | | Override target value (e.g., `targetpattern.key=value`) |
| [`--sbom`](#sbom) | `string` | | Shorthand for `--set=*.attest=type=sbom` |
| [`--set`](#set) | `stringArray` | | Override target value (e.g., `targetpattern.key=value`) |
<!---MARKER_GEN_END--> <!---MARKER_GEN_END-->
@@ -53,14 +51,6 @@ guide for introduction to writing bake files.
Same as [`buildx --builder`](buildx.md#builder). Same as [`buildx --builder`](buildx.md#builder).
### <a name="call"></a> Invoke a frontend method (--call)
Same as [`build --call`](buildx_build.md#call).
#### <a name="check"></a> Call: check (--check)
Same as [`build --check`](buildx_build.md#check).
### <a name="file"></a> Specify a build definition file (-f, --file) ### <a name="file"></a> Specify a build definition file (-f, --file)
Use the `-f` / `--file` option to specify the build definition file to use. Use the `-f` / `--file` option to specify the build definition file to use.
@@ -129,7 +119,6 @@ $ cat metadata.json
```json ```json
{ {
"buildx.build.warnings": {},
"db": { "db": {
"buildx.build.provenance": {}, "buildx.build.provenance": {},
"buildx.build.ref": "mybuilder/mybuilder0/0fjb6ubs52xx3vygf6fgdl611", "buildx.build.ref": "mybuilder/mybuilder0/0fjb6ubs52xx3vygf6fgdl611",
@@ -172,12 +161,6 @@ $ cat metadata.json
> * `max` sets full provenance. > * `max` sets full provenance.
> * `disabled`, `false` or `0` does not set any provenance. > * `disabled`, `false` or `0` does not set any provenance.
> **Note**
>
> Build warnings (`buildx.build.warnings`) are not included by default. Set the
> `BUILDX_METADATA_WARNINGS` environment variable to `1` or `true` to
> include them.
### <a name="no-cache"></a> Don't use cache when building the image (--no-cache) ### <a name="no-cache"></a> Don't use cache when building the image (--no-cache)
Same as `build --no-cache`. Don't use cache when building the image. Same as `build --no-cache`. Don't use cache when building the image.

View File

@@ -9,49 +9,48 @@ Start a build
### Aliases ### Aliases
`docker build`, `docker builder build`, `docker image build`, `docker buildx b` `docker buildx build`, `docker buildx b`
### Options ### Options
| Name | Type | Default | Description | | Name | Type | Default | Description |
|:----------------------------------------|:--------------|:----------|:----------------------------------------------------------------------------------------------------| |:---------------------------------------------------------------------------------------------------------------------------------------------------|:--------------|:----------|:----------------------------------------------------------------------------------------------------|
| [`--add-host`](#add-host) | `stringSlice` | | Add a custom host-to-IP mapping (format: `host:ip`) | | [`--add-host`](https://docs.docker.com/reference/cli/docker/image/build/#add-host) | `stringSlice` | | Add a custom host-to-IP mapping (format: `host:ip`) |
| [`--allow`](#allow) | `stringSlice` | | Allow extra privileged entitlement (e.g., `network.host`, `security.insecure`) | | [`--allow`](#allow) | `stringSlice` | | Allow extra privileged entitlement (e.g., `network.host`, `security.insecure`) |
| [`--annotation`](#annotation) | `stringArray` | | Add annotation to the image | | [`--annotation`](#annotation) | `stringArray` | | Add annotation to the image |
| [`--attest`](#attest) | `stringArray` | | Attestation parameters (format: `type=sbom,generator=image`) | | [`--attest`](#attest) | `stringArray` | | Attestation parameters (format: `type=sbom,generator=image`) |
| [`--build-arg`](#build-arg) | `stringArray` | | Set build-time variables | | [`--build-arg`](#build-arg) | `stringArray` | | Set build-time variables |
| [`--build-context`](#build-context) | `stringArray` | | Additional build contexts (e.g., name=path) | | [`--build-context`](#build-context) | `stringArray` | | Additional build contexts (e.g., name=path) |
| [`--builder`](#builder) | `string` | | Override the configured builder instance | | [`--builder`](#builder) | `string` | | Override the configured builder instance |
| [`--cache-from`](#cache-from) | `stringArray` | | External cache sources (e.g., `user/app:cache`, `type=local,src=path/to/dir`) | | [`--cache-from`](#cache-from) | `stringArray` | | External cache sources (e.g., `user/app:cache`, `type=local,src=path/to/dir`) |
| [`--cache-to`](#cache-to) | `stringArray` | | Cache export destinations (e.g., `user/app:cache`, `type=local,dest=path/to/dir`) | | [`--cache-to`](#cache-to) | `stringArray` | | Cache export destinations (e.g., `user/app:cache`, `type=local,dest=path/to/dir`) |
| [`--call`](#call) | `string` | `build` | Set method for evaluating build (`check`, `outline`, `targets`) | | [`--cgroup-parent`](https://docs.docker.com/reference/cli/docker/image/build/#cgroup-parent) | `string` | | Set the parent cgroup for the `RUN` instructions during build |
| [`--cgroup-parent`](#cgroup-parent) | `string` | | Set the parent cgroup for the `RUN` instructions during build | | `--detach` | | | Detach buildx server (supported only on linux) (EXPERIMENTAL) |
| [`--check`](#check) | `bool` | | Shorthand for `--call=check` | | [`-f`](https://docs.docker.com/reference/cli/docker/image/build/#file), [`--file`](https://docs.docker.com/reference/cli/docker/image/build/#file) | `string` | | Name of the Dockerfile (default: `PATH/Dockerfile`) |
| `--detach` | `bool` | | Detach buildx server (supported only on linux) (EXPERIMENTAL) | | `--iidfile` | `string` | | Write the image ID to a file |
| [`-f`](#file), [`--file`](#file) | `string` | | Name of the Dockerfile (default: `PATH/Dockerfile`) | | `--label` | `stringArray` | | Set metadata for an image |
| `--iidfile` | `string` | | Write the image ID to a file | | [`--load`](#load) | | | Shorthand for `--output=type=docker` |
| `--label` | `stringArray` | | Set metadata for an image | | [`--metadata-file`](#metadata-file) | `string` | | Write build result metadata to a file |
| [`--load`](#load) | `bool` | | Shorthand for `--output=type=docker` | | `--network` | `string` | `default` | Set the networking mode for the `RUN` instructions during build |
| [`--metadata-file`](#metadata-file) | `string` | | Write build result metadata to a file | | `--no-cache` | | | Do not use cache when building the image |
| [`--network`](#network) | `string` | `default` | Set the networking mode for the `RUN` instructions during build | | [`--no-cache-filter`](#no-cache-filter) | `stringArray` | | Do not cache specified stages |
| `--no-cache` | `bool` | | Do not use cache when building the image | | [`-o`](#output), [`--output`](#output) | `stringArray` | | Output destination (format: `type=local,dest=path`) |
| [`--no-cache-filter`](#no-cache-filter) | `stringArray` | | Do not cache specified stages | | [`--platform`](#platform) | `stringArray` | | Set target platform for build |
| [`-o`](#output), [`--output`](#output) | `stringArray` | | Output destination (format: `type=local,dest=path`) | | `--print` | `string` | | Print result of information request (e.g., outline, targets) (EXPERIMENTAL) |
| [`--platform`](#platform) | `stringArray` | | Set target platform for build | | [`--progress`](#progress) | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`). Use plain to show container output |
| [`--progress`](#progress) | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`, `rawjson`). Use plain to show container output | | [`--provenance`](#provenance) | `string` | | Shorthand for `--attest=type=provenance` |
| [`--provenance`](#provenance) | `string` | | Shorthand for `--attest=type=provenance` | | `--pull` | | | Always attempt to pull all referenced images |
| `--pull` | `bool` | | Always attempt to pull all referenced images | | [`--push`](#push) | | | Shorthand for `--output=type=registry` |
| [`--push`](#push) | `bool` | | Shorthand for `--output=type=registry` | | `-q`, `--quiet` | | | Suppress the build output and print image ID on success |
| `-q`, `--quiet` | `bool` | | Suppress the build output and print image ID on success | | `--root` | `string` | | Specify root directory of server to connect (EXPERIMENTAL) |
| `--root` | `string` | | Specify root directory of server to connect (EXPERIMENTAL) | | [`--sbom`](#sbom) | `string` | | Shorthand for `--attest=type=sbom` |
| [`--sbom`](#sbom) | `string` | | Shorthand for `--attest=type=sbom` | | [`--secret`](#secret) | `stringArray` | | Secret to expose to the build (format: `id=mysecret[,src=/local/secret]`) |
| [`--secret`](#secret) | `stringArray` | | Secret to expose to the build (format: `id=mysecret[,src=/local/secret]`) | | `--server-config` | `string` | | Specify buildx server config file (used only when launching new server) (EXPERIMENTAL) |
| `--server-config` | `string` | | Specify buildx server config file (used only when launching new server) (EXPERIMENTAL) | | [`--shm-size`](#shm-size) | `bytes` | `0` | Shared memory size for build containers |
| [`--shm-size`](#shm-size) | `bytes` | `0` | Shared memory size for build containers | | [`--ssh`](#ssh) | `stringArray` | | SSH agent socket or keys to expose to the build (format: `default\|<id>[=<socket>\|<key>[,<key>]]`) |
| [`--ssh`](#ssh) | `stringArray` | | SSH agent socket or keys to expose to the build (format: `default\|<id>[=<socket>\|<key>[,<key>]]`) | | [`-t`](https://docs.docker.com/reference/cli/docker/image/build/#tag), [`--tag`](https://docs.docker.com/reference/cli/docker/image/build/#tag) | `stringArray` | | Name and optionally a tag (format: `name:tag`) |
| [`-t`](#tag), [`--tag`](#tag) | `stringArray` | | Name and optionally a tag (format: `name:tag`) | | [`--target`](https://docs.docker.com/reference/cli/docker/image/build/#target) | `string` | | Set the target build stage to build |
| [`--target`](#target) | `string` | | Set the target build stage to build | | [`--ulimit`](#ulimit) | `ulimit` | | Ulimit options |
| [`--ulimit`](#ulimit) | `ulimit` | | Ulimit options |
<!---MARKER_GEN_END--> <!---MARKER_GEN_END-->
@@ -61,36 +60,15 @@ Flags marked with `[experimental]` need to be explicitly enabled by setting the
## Description ## Description
The `docker buildx build` command starts a build using BuildKit. The `buildx build` command starts a build using BuildKit. This command is similar
to the UI of `docker build` command and takes the same flags and arguments.
For documentation on most of these flags, refer to the [`docker build`
documentation](https://docs.docker.com/reference/cli/docker/image/build/).
This page describes a subset of the new flags.
## Examples ## Examples
### <a name="add-host"></a> Add entries to container hosts file (--add-host)
You can add other hosts into a build container's `/etc/hosts` file by using one
or more `--add-host` flags. This example adds static addresses for hosts named
`my-hostname` and `my_hostname_v6`:
```console
$ docker buildx build --add-host my_hostname=8.8.8.8 --add-host my_hostname_v6=2001:4860:4860::8888 .
```
If you need your build to connect to services running on the host, you can use
the special `host-gateway` value for `--add-host`. In the following example,
build containers resolve `host.docker.internal` to the host's gateway IP.
```console
$ docker buildx build --add-host host.docker.internal=host-gateway .
```
You can wrap an IPv6 address in square brackets.
`=` and `:` are both valid separators.
Both formats in the following example are valid:
```console
$ docker buildx build --add-host my-hostname:10.180.0.1 --add-host my-hostname_v6=[2001:4860:4860::8888] .
```
### <a name="annotation"></a> Create annotations (--annotation) ### <a name="annotation"></a> Create annotations (--annotation)
```text ```text
@@ -186,40 +164,7 @@ $ docker buildx build --allow security.insecure .
### <a name="build-arg"></a> Set build-time variables (--build-arg) ### <a name="build-arg"></a> Set build-time variables (--build-arg)
You can use `ENV` instructions in a Dockerfile to define variable values. These Same as [`docker build` command](https://docs.docker.com/reference/cli/docker/image/build/#build-arg).
values persist in the built image. Often persistence isn't what you want. Users
want to specify variables differently depending on which host they build an
image on.
A good example is `http_proxy` or source versions for pulling intermediate
files. The `ARG` instruction lets Dockerfile authors define values that users
can set at build-time using the `--build-arg` flag:
```console
$ docker buildx build --build-arg HTTP_PROXY=http://10.20.30.2:1234 --build-arg FTP_PROXY=http://40.50.60.5:4567 .
```
This flag allows you to pass the build-time variables that are
accessed like regular environment variables in the `RUN` instruction of the
Dockerfile. These values don't persist in the intermediate or final images
like `ENV` values do. You must add `--build-arg` for each build argument.
Using this flag doesn't alter the output you see when the build process echoes the `ARG` lines from the
Dockerfile.
For detailed information on using `ARG` and `ENV` instructions, see the
[Dockerfile reference](https://docs.docker.com/reference/dockerfile/).
You can also use the `--build-arg` flag without a value, in which case the daemon
propagates the value from the local environment into the Docker container it's building:
```console
$ export HTTP_PROXY=http://10.20.30.2:1234
$ docker buildx build --build-arg HTTP_PROXY .
```
This example is similar to how `docker run -e` works. Refer to the [`docker run` documentation](container_run.md#env)
for more information.
There are also useful built-in build arguments, such as: There are also useful built-in build arguments, such as:
@@ -325,167 +270,6 @@ $ docker buildx build --cache-from=type=s3,region=eu-west-1,bucket=mybucket .
More info about cache exporters and available attributes: https://github.com/moby/buildkit#export-cache More info about cache exporters and available attributes: https://github.com/moby/buildkit#export-cache
### <a name="call"></a> Invoke a frontend method (--call)
```text
--call=[build|check|outline|targets]
```
BuildKit frontends can support alternative modes of execution for builds,
using frontend methods. Frontend methods are a way to change or extend the
behavior of a build invocation, which lets you, for example, inspect, validate,
or generate alternative outputs from a build.
The `--call` flag for `docker buildx build` lets you specify the frontend
method that you want to execute. If this flag is unspecified, it defaults to
executing the build and evaluating [build checks](https://docs.docker.com/reference/build-checks/).
For Dockerfiles, the available methods are:
| Command | Description |
| ------------------------------ | ------------------------------------------------------------------------------------------------------------------- |
| `build` (default) | Execute the build and evaluate build checks for the current build target. |
| `check` | Evaluate build checks for either the entire Dockerfile or the selected target, without executing a build. |
| `outline` | Show the build arguments that you can set for a target, and their default values. |
| `targets` | List all the build targets in the Dockerfile. |
| `subrequests.describe` | List all the frontend methods that the current frontend supports. |
Note that other frontends may implement these or other methods.
To see the list of available methods for the frontend you're using,
use `--call=subrequests.describe`.
```console
$ docker buildx build -q --call=subrequests.describe .
NAME VERSION DESCRIPTION
outline 1.0.0 List all parameters current build target supports
targets 1.0.0 List all targets current build supports
subrequests.describe 1.0.0 List available subrequest types
```
#### Descriptions
The [`--call=targets`](#call-targets) and [`--call=outline`](#call-outline)
methods include descriptions for build targets and arguments, if available.
Descriptions are generated from comments in the Dockerfile. A comment on the
line before a `FROM` instruction becomes the description of a build target, and
a comment before an `ARG` instruction the description of a build argument. The
comment must lead with the name of the stage or argument, for example:
```dockerfile
# syntax=docker/dockerfile:1
# GO_VERSION sets the Go version for the build
ARG GO_VERSION=1.22
# base-builder is the base stage for building the project
FROM golang:${GO_VERSION} AS base-builder
```
When you run `docker buildx build --call=outline`, the output includes the
descriptions, as follows:
```console
$ docker buildx build -q --call=outline .
TARGET: base-builder
DESCRIPTION: is the base stage for building the project
BUILD ARG VALUE DESCRIPTION
GO_VERSION 1.22 sets the Go version for the build
```
For more examples on how to write Dockerfile docstrings,
check out [the Dockerfile for Docker docs](https://github.com/docker/docs/blob/main/Dockerfile).
#### <a name="check"></a> Call: check (--check)
The `check` method evaluates build checks without executing the build. The
`--check` flag is a convenient shorthand for `--call=check`. Use the `check`
method to validate the build configuration before starting the build.
```console
$ docker buildx build -q --check https://github.com/docker/docs.git
WARNING: InvalidBaseImagePlatform
Base image wjdp/htmltest:v0.17.0 was pulled with platform "linux/amd64", expected "linux/arm64" for current build
Dockerfile:43
--------------------
41 | "#content/desktop/previous-versions/*.md"
42 |
43 | >>> FROM wjdp/htmltest:v${HTMLTEST_VERSION} AS test
44 | WORKDIR /test
45 | COPY --from=build /out ./public
--------------------
```
Using `--check` without specifying a target evaluates the entire Dockerfile.
If you want to evaluate a specific target, use the `--target` flag.
#### Call: outline
The `outline` method prints the name of the specified target (or the default
target, if `--target` isn't specified), and the build arguments that the target
consumes, along with their default values, if set.
The following example shows the default target `release` and its build arguments:
```console
$ docker buildx build -q --call=outline https://github.com/docker/docs.git
TARGET: release
DESCRIPTION: is an empty scratch image with only compiled assets
BUILD ARG VALUE DESCRIPTION
GO_VERSION 1.22 sets the Go version for the base stage
HUGO_VERSION 0.127.0
HUGO_ENV sets the hugo.Environment (production, development, preview)
DOCS_URL sets the base URL for the site
PAGEFIND_VERSION 1.1.0
```
This means that the `release` target is configurable using these build arguments:
```console
$ docker buildx build \
--build-arg GO_VERSION=1.22 \
--build-arg HUGO_VERSION=0.127.0 \
--build-arg HUGO_ENV=production \
--build-arg DOCS_URL=https://example.com \
--build-arg PAGEFIND_VERSION=1.1.0 \
--target release https://github.com/docker/docs.git
```
#### Call: targets
The `targets` method lists all the build targets in the Dockerfile. These are
the stages that you can build using the `--target` flag. It also indicates the
default target, which is the target that will be built when you don't specify a
target.
```console
$ docker buildx build -q --call=targets https://github.com/docker/docs.git
TARGET DESCRIPTION
base is the base stage with build dependencies
node installs Node.js dependencies
hugo downloads and extracts the Hugo binary
build-base is the base stage for building the site
dev is for local development with Docker Compose
build creates production builds with Hugo
lint lints markdown files
test validates HTML output and checks for broken links
update-modules downloads and vendors Hugo modules
vendor is an empty stage with only vendored Hugo modules
build-upstream builds an upstream project with a replacement module
validate-upstream validates HTML output for upstream builds
unused-media checks for unused graphics and other media
pagefind installs the Pagefind runtime
index generates a Pagefind index
test-go-redirects checks that the /go/ redirects are valid
release (default) is an empty scratch image with only compiled assets
```
### <a name="cache-to"></a> Export build cache to an external cache destination (--cache-to) ### <a name="cache-to"></a> Export build cache to an external cache destination (--cache-to)
```text ```text
@@ -525,27 +309,6 @@ $ docker buildx build --cache-to=type=s3,region=eu-west-1,bucket=mybucket .
More info about cache exporters and available attributes: https://github.com/moby/buildkit#export-cache More info about cache exporters and available attributes: https://github.com/moby/buildkit#export-cache
### <a name="cgroup-parent"></a> Use a custom parent cgroup (--cgroup-parent)
When you run `docker buildx build` with the `--cgroup-parent` option,
the daemon runs the containers used in the build with the
[corresponding `docker run` flag](container_run.md#cgroup-parent).
### <a name="file"></a> Specify a Dockerfile (-f, --file)
```console
$ docker buildx build -f <filepath> .
```
Specifies the filepath of the Dockerfile to use.
If unspecified, a file named `Dockerfile` at the root of the build context is used by default.
To read a Dockerfile from stdin, you can use `-` as the argument for `--file`.
```console
$ cat Dockerfile | docker buildx build -f - .
```
### <a name="load"></a> Load the single-platform build result to `docker images` (--load) ### <a name="load"></a> Load the single-platform build result to `docker images` (--load)
Shorthand for [`--output=type=docker`](#docker). Will automatically load the Shorthand for [`--output=type=docker`](#docker). Will automatically load the
@@ -566,7 +329,6 @@ $ cat metadata.json
{ {
"buildx.build.provenance": {}, "buildx.build.provenance": {},
"buildx.build.ref": "mybuilder/mybuilder0/0fjb6ubs52xx3vygf6fgdl611", "buildx.build.ref": "mybuilder/mybuilder0/0fjb6ubs52xx3vygf6fgdl611",
"buildx.build.warnings": {},
"containerimage.config.digest": "sha256:2937f66a9722f7f4a2df583de2f8cb97fc9196059a410e7f00072fc918930e66", "containerimage.config.digest": "sha256:2937f66a9722f7f4a2df583de2f8cb97fc9196059a410e7f00072fc918930e66",
"containerimage.descriptor": { "containerimage.descriptor": {
"annotations": { "annotations": {
@@ -586,26 +348,9 @@ $ cat metadata.json
> Build record [provenance](https://docs.docker.com/build/attestations/slsa-provenance/#provenance-attestation-example) > Build record [provenance](https://docs.docker.com/build/attestations/slsa-provenance/#provenance-attestation-example)
> (`buildx.build.provenance`) includes minimal provenance by default. Set the > (`buildx.build.provenance`) includes minimal provenance by default. Set the
> `BUILDX_METADATA_PROVENANCE` environment variable to customize this behavior: > `BUILDX_METADATA_PROVENANCE` environment variable to customize this behavior:
> > * `min` sets minimal provenance (default).
> - `min` sets minimal provenance (default). > * `max` sets full provenance.
> - `max` sets full provenance. > * `disabled`, `false` or `0` does not set any provenance.
> - `disabled`, `false` or `0` doesn't set any provenance.
### <a name="network"></a> Set the networking mode for the RUN instructions during build (--network)
Available options for the networking mode are:
- `default` (default): Run in the default network.
- `none`: Run with no network access.
- `host`: Run in the host's network environment.
Find more details in the [Dockerfile reference](https://docs.docker.com/reference/dockerfile/#run---network).
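For example, a build whose `RUN` instructions should have no network access could be invoked as:
```console
$ docker buildx build --network=none .
```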
> **Note**
>
> Build warnings (`buildx.build.warnings`) are not included by default. Set the
> `BUILDX_METADATA_WARNINGS` environment variable to `1` or `true` to
> include them.
### <a name="no-cache-filter"></a> Ignore build cache for specific stages (--no-cache-filter) ### <a name="no-cache-filter"></a> Ignore build cache for specific stages (--no-cache-filter)
@@ -668,19 +413,17 @@ The arguments for the `--no-cache-filter` flag must be names of stages.
-o, --output=[PATH,-,type=TYPE[,KEY=VALUE]
```
Sets the export action for the build result. The default output, when using the
`docker` [build driver](https://docs.docker.com/build/drivers/), is a container
image exported to the local image store. The `--output` flag makes this step
configurable, allowing export of results directly to the client's filesystem, an
OCI image tarball, a registry, and more.
Buildx with `docker` driver only supports the local, tarball, and image
[exporters](https://docs.docker.com/build/exporters/). The `docker-container`
driver supports all exporters.
If you only specify a filepath as the argument to `--output`, Buildx uses the
local exporter. If the value is `-`, Buildx uses the `tar` exporter and writes
the output to stdout.
```console
$ docker buildx build -o . .
@@ -691,17 +434,12 @@ $ docker buildx build -o type=docker,dest=- . > myimage.tar
$ docker buildx build -t tonistiigi/foo -o type=registry
```
You can export multiple outputs by repeating the flag (supported since BuildKit v0.13.0).
Supported exported types are:
- [`local`](#local)
- [`tar`](#tar)
- [`oci`](#oci)
- [`docker`](#docker)
- [`image`](#image)
- [`registry`](#registry)
#### `local`
The `local` export type writes all result files to a directory on the client. The
@@ -712,9 +450,6 @@ Attribute key:
- `dest` - destination directory where files will be written
For more information, see
[Local and tar exporters](https://docs.docker.com/build/exporters/local-tar/).
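As an illustration, the explicit form of the local exporter writing the result files to an `./out` directory might look like:
```console
$ docker buildx build -o type=local,dest=./out .
```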
#### `tar`
The `tar` export type writes all result files as a single tarball on the client.
@@ -724,9 +459,6 @@ Attribute key:
- `dest` - destination path where tarball will be written. “-” writes to stdout.
For more information, see
[Local and tar exporters](https://docs.docker.com/build/exporters/local-tar/).
#### `oci`
The `oci` export type writes the result image or manifest list as an [OCI image
@@ -737,9 +469,6 @@ Attribute key:
- `dest` - destination path where tarball will be written. “-” writes to stdout.
For more information, see
[OCI and Docker exporters](https://docs.docker.com/build/exporters/oci-docker/).
#### `docker`
The `docker` export type writes the single-platform result image as a [Docker image
@@ -756,9 +485,6 @@ Attribute keys:
the tar will be loaded automatically to the local image store.
- `context` - name for the Docker context where to import the result
For more information, see
[OCI and Docker exporters](https://docs.docker.com/build/exporters/oci-docker/).
#### `image`
The `image` exporter writes the build result as an image or a manifest list. When
@@ -770,16 +496,10 @@ Attribute keys:
- `name` - name (references) for the new image.
- `push` - Boolean to automatically push the image.
For more information, see
[Image and registry exporters](https://docs.docker.com/build/exporters/image-registry/).
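For illustration, with a placeholder registry name, building and pushing through the `image` exporter could look like:
```console
$ docker buildx build -o type=image,name=registry.example.com/myapp:latest,push=true .
```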
#### `registry`
The `registry` exporter is a shortcut for `type=image,push=true`.
For more information, see
[Image and registry exporters](https://docs.docker.com/build/exporters/image-registry/).
### <a name="platform"></a> Set the target platforms for the build (--platform) ### <a name="platform"></a> Set the target platforms for the build (--platform)
```text ```text
@@ -806,12 +526,13 @@ support for the specified platform. In a clean setup, you can only execute `RUN`
commands for your system architecture.
If your kernel supports [`binfmt_misc`](https://en.wikipedia.org/wiki/Binfmt_misc)
launchers for secondary architectures, buildx will pick them up automatically.
Docker Desktop releases come with `binfmt_misc` automatically configured for `arm64`
and `arm` architectures. You can see what runtime platforms your current builder
instance supports by running `docker buildx inspect --bootstrap`.
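For example, a multi-platform build for the architectures mentioned above might be invoked as follows (the tag and the `--push` destination are placeholders):
```console
$ docker buildx build --platform=linux/amd64,linux/arm64,linux/arm/v7 -t example/app:latest --push .
```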
Inside a `Dockerfile`, you can access the current platform value through
`TARGETPLATFORM` build argument. Refer to the [Dockerfile reference](https://docs.docker.com/reference/dockerfile/#automatic-platform-args-in-the-global-scope)
for the full description of automatic platform argument variants.
You can find the formatting definition for the platform specifier in the
@@ -829,8 +550,8 @@ $ docker buildx build --platform=darwin .
--progress=VALUE
```
Set type of progress output (`auto`, `plain`, `tty`, `rawjson`). Use `plain` to show container
output (default `auto`).
> **Note**
>
@@ -853,11 +574,8 @@ $ docker buildx build --load --progress=plain .
> **Note**
>
> Check also the [`BUILDKIT_COLORS`](https://docs.docker.com/build/building/variables/#buildkit_colors)
> environment variable for modifying the colors of the terminal output.
The `rawjson` output marshals the solve status events from BuildKit to JSON lines.
This mode is designed to be read by an external program.
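As a sketch, assuming the status stream is written to standard error like the other progress modes, the JSON lines could be captured for later processing:
```console
$ docker buildx build --progress=rawjson . 2> build-status.jsonl
```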
### <a name="provenance"></a> Create provenance attestations (--provenance) ### <a name="provenance"></a> Create provenance attestations (--provenance)
@@ -1004,46 +722,6 @@ $ ssh-add ~/.ssh/id_rsa
$ docker buildx build --ssh default=$SSH_AUTH_SOCK .
```
### <a name="tag"></a> Tag an image (-t, --tag)
```console
$ docker buildx build -t docker/apache:2.0 .
```
This example builds in the same way as the previous example, but it then tags the resulting
image. The repository name will be `docker/apache` and the tag `2.0`.
[Read more about valid tags](https://docs.docker.com/reference/cli/docker/image/tag/).
You can apply multiple tags to an image. For example, you can apply the `latest`
tag to a newly built image and add another tag that references a specific
version.
For example, to tag an image both as `docker/fedora-jboss:latest` and
`docker/fedora-jboss:v2.1`, use the following:
```console
$ docker buildx build -t docker/fedora-jboss:latest -t docker/fedora-jboss:v2.1 .
```
### <a name="target"></a> Specifying target build stage (--target)
When building a Dockerfile with multiple build stages, use the `--target`
option to specify an intermediate build stage by name as a final stage for the
resulting image. The builder skips commands after the target stage.
```dockerfile
FROM debian AS build-env
# ...
FROM alpine AS production-env
# ...
```
```console
$ docker buildx build -t mybuildimage --target build-env .
```
### <a name="ulimit"></a> Set ulimits (--ulimit) ### <a name="ulimit"></a> Set ulimits (--ulimit)
`--ulimit` overrides the default ulimits of build's containers when using `RUN` `--ulimit` overrides the default ulimits of build's containers when using `RUN`

@@ -11,17 +11,17 @@ Create a new builder instance
| Name                                      | Type          | Default | Description                                                            |
|:------------------------------------------|:--------------|:--------|:-----------------------------------------------------------------------|
| [`--append`](#append)                     | `bool`        |         | Append a node to builder instead of changing it                        |
| `--bootstrap`                             | `bool`        |         | Boot builder after creation                                            |
| [`--buildkitd-config`](#buildkitd-config) | `string`      |         | BuildKit daemon config file                                            |
| [`--buildkitd-flags`](#buildkitd-flags)   | `string`      |         | BuildKit daemon flags                                                  |
| [`--driver`](#driver)                     | `string`      |         | Driver to use (available: `docker-container`, `kubernetes`, `remote`)  |
| [`--driver-opt`](#driver-opt)             | `stringArray` |         | Options for the driver                                                 |
| [`--leave`](#leave)                       | `bool`        |         | Remove a node from builder instead of changing it                      |
| [`--name`](#name)                         | `string`      |         | Builder instance name                                                  |
| [`--node`](#node)                         | `string`      |         | Create/modify node with given name                                     |
| [`--platform`](#platform)                 | `stringArray` |         | Fixed platforms for current node                                       |
| [`--use`](#use)                           | `bool`        |         | Set the current builder instance                                       |
<!---MARKER_GEN_END-->

@@ -12,15 +12,15 @@ Start debugger (EXPERIMENTAL)
### Options
| Name              | Type     | Default | Description                                                                                                          |
|:------------------|:---------|:--------|:---------------------------------------------------------------------------------------------------------------------|
| `--builder`       | `string` |         | Override the configured builder instance                                                                            |
| `--detach`        | `bool`   | `true`  | Detach buildx server for the monitor (supported only on linux) (EXPERIMENTAL)                                       |
| `--invoke`        | `string` |         | Launch a monitor with executing specified command (EXPERIMENTAL)                                                    |
| `--on`            | `string` | `error` | When to launch the monitor ([always, error]) (EXPERIMENTAL)                                                         |
| `--progress`      | `string` | `auto`  | Set type of progress output (`auto`, `plain`, `tty`, `rawjson`) for the monitor. Use plain to show container output |
| `--root`          | `string` |         | Specify root directory of server to connect for the monitor (EXPERIMENTAL)                                          |
| `--server-config` | `string` |         | Specify buildx server config file for the monitor (used only when launching new server) (EXPERIMENTAL)              |
<!---MARKER_GEN_END-->

@@ -5,49 +5,48 @@ Start a build
### Aliases
`docker build`, `docker builder build`, `docker image build`, `docker buildx b`
### Options
| Name                | Type          | Default   | Description                                                                                          |
|:--------------------|:--------------|:----------|:------------------------------------------------------------------------------------------------------|
| `--add-host`        | `stringSlice` |           | Add a custom host-to-IP mapping (format: `host:ip`)                                                  |
| `--allow`           | `stringSlice` |           | Allow extra privileged entitlement (e.g., `network.host`, `security.insecure`)                       |
| `--annotation`      | `stringArray` |           | Add annotation to the image                                                                          |
| `--attest`          | `stringArray` |           | Attestation parameters (format: `type=sbom,generator=image`)                                         |
| `--build-arg`       | `stringArray` |           | Set build-time variables                                                                             |
| `--build-context`   | `stringArray` |           | Additional build contexts (e.g., name=path)                                                          |
| `--builder`         | `string`      |           | Override the configured builder instance                                                             |
| `--cache-from`      | `stringArray` |           | External cache sources (e.g., `user/app:cache`, `type=local,src=path/to/dir`)                        |
| `--cache-to`        | `stringArray` |           | Cache export destinations (e.g., `user/app:cache`, `type=local,dest=path/to/dir`)                    |
| `--call`            | `string`      | `build`   | Set method for evaluating build (`check`, `outline`, `targets`)                                      |
| `--cgroup-parent`   | `string`      |           | Set the parent cgroup for the `RUN` instructions during build                                        |
| `--check`           | `bool`        |           | Shorthand for `--call=check`                                                                         |
| `--detach`          | `bool`        |           | Detach buildx server (supported only on linux) (EXPERIMENTAL)                                        |
| `-f`, `--file`      | `string`      |           | Name of the Dockerfile (default: `PATH/Dockerfile`)                                                  |
| `--iidfile`         | `string`      |           | Write the image ID to a file                                                                         |
| `--label`           | `stringArray` |           | Set metadata for an image                                                                            |
| `--load`            | `bool`        |           | Shorthand for `--output=type=docker`                                                                 |
| `--metadata-file`   | `string`      |           | Write build result metadata to a file                                                                |
| `--network`         | `string`      | `default` | Set the networking mode for the `RUN` instructions during build                                      |
| `--no-cache`        | `bool`        |           | Do not use cache when building the image                                                             |
| `--no-cache-filter` | `stringArray` |           | Do not cache specified stages                                                                        |
| `-o`, `--output`    | `stringArray` |           | Output destination (format: `type=local,dest=path`)                                                  |
| `--platform`        | `stringArray` |           | Set target platform for build                                                                        |
| `--progress`        | `string`      | `auto`    | Set type of progress output (`auto`, `plain`, `tty`, `rawjson`). Use plain to show container output  |
| `--provenance`      | `string`      |           | Shorthand for `--attest=type=provenance`                                                             |
| `--pull`            | `bool`        |           | Always attempt to pull all referenced images                                                         |
| `--push`            | `bool`        |           | Shorthand for `--output=type=registry`                                                               |
| `-q`, `--quiet`     | `bool`        |           | Suppress the build output and print image ID on success                                              |
| `--root`            | `string`      |           | Specify root directory of server to connect (EXPERIMENTAL)                                           |
| `--sbom`            | `string`      |           | Shorthand for `--attest=type=sbom`                                                                   |
| `--secret`          | `stringArray` |           | Secret to expose to the build (format: `id=mysecret[,src=/local/secret]`)                            |
| `--server-config`   | `string`      |           | Specify buildx server config file (used only when launching new server) (EXPERIMENTAL)               |
| `--shm-size`        | `bytes`       | `0`       | Shared memory size for build containers                                                              |
| `--ssh`             | `stringArray` |           | SSH agent socket or keys to expose to the build (format: `default\|<id>[=<socket>\|<key>[,<key>]]`)  |
| `-t`, `--tag`       | `stringArray` |           | Name and optionally a tag (format: `name:tag`)                                                       |
| `--target`          | `string`      |           | Set the target build stage to build                                                                  |
| `--ulimit`          | `ulimit`      |           | Ulimit options                                                                                       |
<!---MARKER_GEN_END-->

@@ -5,11 +5,11 @@ Proxy current stdio streams to builder instance
### Options
| Name         | Type     | Default | Description                                                                                          |
|:-------------|:---------|:--------|:-------------------------------------------------------------------------------------------------------|
| `--builder`  | `string` |         | Override the configured builder instance                                                             |
| `--platform` | `string` |         | Target platform: this is used for node selection                                                     |
| `--progress` | `string` | `quiet` | Set type of progress output (`auto`, `plain`, `tty`, `rawjson`). Use plain to show container output  |
<!---MARKER_GEN_END-->

@@ -13,7 +13,7 @@ Disk usage
|:------------------------|:---------|:--------|:------------------------------------------|
| [`--builder`](#builder) | `string` |         | Override the configured builder instance  |
| `--filter`              | `filter` |         | Provide filter values                     |
| [`--verbose`](#verbose) | `bool`   |         | Provide a more verbose output             |
<!---MARKER_GEN_END-->

@@ -9,16 +9,15 @@ Create a new image based on source images
### Options
| Name                             | Type          | Default | Description                                                                                                                    |
|:----------------------------------|:--------------|:--------|:---------------------------------------------------------------------------------------------------------------------------------|
| [`--annotation`](#annotation)    | `stringArray` |         | Add annotation to the image                                                                                                    |
| [`--append`](#append)            | `bool`        |         | Append to existing manifest                                                                                                    |
| [`--builder`](#builder)          | `string`      |         | Override the configured builder instance                                                                                       |
| [`--dry-run`](#dry-run)          | `bool`        |         | Show final image instead of pushing                                                                                            |
| [`-f`](#file), [`--file`](#file) | `stringArray` |         | Read source descriptor from file                                                                                               |
| `--prefer-index`                 | `bool`        | `true`  | When only a single source is specified, prefer outputting an image index or manifest list instead of performing a carbon copy |
| `--progress`                     | `string`      | `auto`  | Set type of progress output (`auto`, `plain`, `tty`, `rawjson`). Use plain to show container output                           |
| [`-t`](#tag), [`--tag`](#tag)    | `stringArray` |         | Set reference for new image                                                                                                    |
<!---MARKER_GEN_END-->
@@ -27,13 +26,8 @@ Create a new image based on source images
Create a new manifest list based on source manifests. The source manifests can
be manifest lists or single platform distribution manifests and must already
exist in the registry where the new manifest is created.
If only one source is specified and that source is a manifest list or image index,
create performs a carbon copy. If one source is specified and that source is *not*
a list or index, the output will be a manifest list. However, you can disable this
behavior with `--prefer-index=false`, which attempts to preserve the source manifest
format in the output.
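For illustration, with hypothetical image names, combining two single-platform images into one manifest list could look like:
```console
$ docker buildx imagetools create -t example.com/app:latest example.com/app:amd64 example.com/app:arm64
```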
## Examples

@@ -13,7 +13,7 @@ Show details of an image in the registry
|:------------------------|:---------|:----------------|:-----------------------------------------------|
| [`--builder`](#builder) | `string` |                 | Override the configured builder instance       |
| [`--format`](#format)   | `string` | `{{.Manifest}}` | Format the output using the given Go template  |
| [`--raw`](#raw)         | `bool`   |                 | Show original, unformatted JSON manifest       |
<!---MARKER_GEN_END-->

@@ -11,7 +11,7 @@ Inspect current builder instance
| Name                        | Type     | Default | Description                                  |
|:----------------------------|:---------|:--------|:----------------------------------------------|
| [`--bootstrap`](#bootstrap) | `bool`   |         | Ensure builder has booted before inspecting  |
| [`--builder`](#builder)     | `string` |         | Override the configured builder instance     |

@@ -11,12 +11,12 @@ Remove build cache
| Name                     | Type     | Default | Description                                |
|:-------------------------|:---------|:--------|:--------------------------------------------|
| `-a`, `--all`            | `bool`   |         | Include internal/frontend images           |
| [`--builder`](#builder)  | `string` |         | Override the configured builder instance   |
| `--filter`               | `filter` |         | Provide filter values (e.g., `until=24h`)  |
| `-f`, `--force`          | `bool`   |         | Do not prompt for confirmation             |
| `--keep-storage`         | `bytes`  | `0`     | Amount of disk space to keep for cache     |
| `--verbose`              | `bool`   |         | Provide a more verbose output              |
<!---MARKER_GEN_END-->

@@ -11,11 +11,11 @@ Remove one or more builder instances
| Name                                | Type     | Default | Description                                |
|:-------------------------------------|:---------|:--------|:--------------------------------------------|
| [`--all-inactive`](#all-inactive)   | `bool`   |         | Remove all inactive builders               |
| [`--builder`](#builder)             | `string` |         | Override the configured builder instance   |
| [`-f`](#force), [`--force`](#force) | `bool`   |         | Do not prompt for confirmation             |
| [`--keep-daemon`](#keep-daemon)     | `bool`   |         | Keep the BuildKit daemon running           |
| [`--keep-state`](#keep-state)       | `bool`   |         | Keep BuildKit state                        |
<!---MARKER_GEN_END-->

@@ -12,8 +12,8 @@ Set the current builder instance
| Name                     | Type     | Default | Description                                 |
|:-------------------------|:---------|:--------|:---------------------------------------------|
| [`--builder`](#builder)  | `string` |         | Override the configured builder instance    |
| `--default`              | `bool`   |         | Set builder as default for current context  |
| `--global`               | `bool`   |         | Builder persists context changes            |
<!---MARKER_GEN_END-->

@@ -18,8 +18,9 @@ import (
"github.com/docker/buildx/util/imagetools" "github.com/docker/buildx/util/imagetools"
"github.com/docker/buildx/util/progress" "github.com/docker/buildx/util/progress"
"github.com/docker/cli/opts" "github.com/docker/cli/opts"
dockertypes "github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/container" "github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/image" imagetypes "github.com/docker/docker/api/types/image"
"github.com/docker/docker/api/types/mount" "github.com/docker/docker/api/types/mount"
"github.com/docker/docker/api/types/network" "github.com/docker/docker/api/types/network"
"github.com/docker/docker/api/types/system" "github.com/docker/docker/api/types/system"
@@ -76,7 +77,7 @@ func (d *Driver) Bootstrap(ctx context.Context, l progress.Logger) error {
return err
}
return sub.Wrap("starting container "+d.Name, func() error {
if err := d.start(ctx); err != nil { if err := d.start(ctx, sub); err != nil {
return err
}
return d.wait(ctx, sub)
@@ -95,7 +96,7 @@ func (d *Driver) create(ctx context.Context, l progress.SubLogger) error {
if err != nil {
return err
}
rc, err := d.DockerAPI.ImageCreate(ctx, imageName, image.CreateOptions{ rc, err := d.DockerAPI.ImageCreate(ctx, imageName, imagetypes.CreateOptions{
RegistryAuth: ra, RegistryAuth: ra,
}) })
if err != nil { if err != nil {
@@ -187,7 +188,7 @@ func (d *Driver) create(ctx context.Context, l progress.SubLogger) error {
if err := d.copyToContainer(ctx, d.InitConfig.Files); err != nil {
return err
}
if err := d.start(ctx); err != nil { if err := d.start(ctx, l); err != nil {
return err
}
}
@@ -202,12 +203,14 @@ func (d *Driver) wait(ctx context.Context, l progress.SubLogger) error {
bufStderr := &bytes.Buffer{}
if err := d.run(ctx, []string{"buildctl", "debug", "workers"}, bufStdout, bufStderr); err != nil {
if try > 15 {
d.copyLogs(context.TODO(), l) if err != nil {
if bufStdout.Len() != 0 { d.copyLogs(context.TODO(), l)
l.Log(1, bufStdout.Bytes()) if bufStdout.Len() != 0 {
} l.Log(1, bufStdout.Bytes())
if bufStderr.Len() != 0 { }
l.Log(2, bufStderr.Bytes()) if bufStderr.Len() != 0 {
l.Log(2, bufStderr.Bytes())
}
}
return err
}
@@ -255,16 +258,17 @@ func (d *Driver) copyToContainer(ctx context.Context, files map[string][]byte) e
defer srcArchive.Close()
baseDir := path.Dir(confutil.DefaultBuildKitConfigDir)
return d.DockerAPI.CopyToContainer(ctx, d.Name, baseDir, srcArchive, container.CopyToContainerOptions{}) return d.DockerAPI.CopyToContainer(ctx, d.Name, baseDir, srcArchive, dockertypes.CopyToContainerOptions{})
}
func (d *Driver) exec(ctx context.Context, cmd []string) (string, net.Conn, error) {
response, err := d.DockerAPI.ContainerExecCreate(ctx, d.Name, container.ExecOptions{ execConfig := dockertypes.ExecConfig{
Cmd: cmd,
AttachStdin: true,
AttachStdout: true,
AttachStderr: true,
}) }
response, err := d.DockerAPI.ContainerExecCreate(ctx, d.Name, execConfig)
if err != nil {
return "", nil, err
}
@@ -274,7 +278,7 @@ func (d *Driver) exec(ctx context.Context, cmd []string) (string, net.Conn, erro
return "", nil, errors.New("exec ID empty") return "", nil, errors.New("exec ID empty")
} }
resp, err := d.DockerAPI.ContainerExecAttach(ctx, execID, container.ExecStartOptions{}) resp, err := d.DockerAPI.ContainerExecAttach(ctx, execID, dockertypes.ExecStartCheck{})
if err != nil {
return "", nil, err
}
@@ -300,7 +304,7 @@ func (d *Driver) run(ctx context.Context, cmd []string, stdout, stderr io.Writer
return nil
}
func (d *Driver) start(ctx context.Context) error { func (d *Driver) start(ctx context.Context, l progress.SubLogger) error {
return d.DockerAPI.ContainerStart(ctx, d.Name, container.StartOptions{})
}

@@ -9,8 +9,8 @@ contexts:
cluster: test-cluster
user: test-user
namespace: zoinx
name: k3s name: test
current-context: k3s current-context: test
kind: Config
preferences: {}
users:

@@ -167,12 +167,11 @@ func NewKubernetesConfig(configPath string) clientcmd.ClientConfig {
// ConfigFromEndpoint loads kubernetes config from endpoint
func ConfigFromEndpoint(endpointName string, s store.Reader) (clientcmd.ClientConfig, error) {
if strings.HasPrefix(endpointName, "kubernetes://") {
rules := clientcmd.NewDefaultClientConfigLoadingRules()
u, _ := url.Parse(endpointName)
if kubeconfig := u.Query().Get("kubeconfig"); kubeconfig != "" {
rules.Precedence = append(rules.Precedence, kubeconfig) _ = os.Setenv(clientcmd.RecommendedConfigPathEnvVar, kubeconfig)
rules.ExplicitPath = kubeconfig
}
rules := clientcmd.NewDefaultClientConfigLoadingRules()
apiConfig, err := rules.Load()
if err != nil {
return nil, err

@@ -1,35 +1,20 @@
package context
import (
"os"
"testing" "testing"
"github.com/docker/cli/cli/command" "github.com/docker/cli/cli/command"
"github.com/docker/cli/cli/config"
"github.com/docker/cli/cli/context/store"
cliflags "github.com/docker/cli/cli/flags"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestDefaultContextInitializer(t *testing.T) {
t.Setenv("KUBECONFIG", "./fixtures/test-kubeconfig") os.Setenv("KUBECONFIG", "./fixtures/test-kubeconfig")
defer os.Unsetenv("KUBECONFIG")
ctx, err := command.ResolveDefaultContext(&cliflags.ClientOptions{}, command.DefaultContextStoreConfig())
require.NoError(t, err)
assert.Equal(t, "default", ctx.Meta.Name)
assert.Equal(t, "zoinx", ctx.Meta.Endpoints[KubernetesEndpoint].(EndpointMeta).DefaultNamespace)
}
func TestConfigFromEndpoint(t *testing.T) {
t.Setenv("KUBECONFIG", "./fixtures/test-kubeconfig")
cfg, err := ConfigFromEndpoint(
"kubernetes:///buildx-test-4c972a3f9d369614b40f28a281790c7e?deployment=buildkit-4c2ed3ed-970f-4f3d-a6df-a4fcbab4d5cf-d9d73&kubeconfig=.%2Ffixtures%2Fk3s-kubeconfig",
store.New(config.ContextStoreDir(), command.DefaultContextStoreConfig()),
)
require.NoError(t, err)
rawcfg, err := cfg.RawConfig()
require.NoError(t, err)
ctxcfg := "k3s"
if _, ok := rawcfg.Contexts[ctxcfg]; !ok {
t.Errorf("Context config %q not found", ctxcfg)
}
}

@@ -14,7 +14,6 @@ import (
"github.com/docker/buildx/store" "github.com/docker/buildx/store"
"github.com/docker/buildx/util/platformutil" "github.com/docker/buildx/util/platformutil"
"github.com/docker/buildx/util/progress" "github.com/docker/buildx/util/progress"
"github.com/docker/go-units"
"github.com/moby/buildkit/client" "github.com/moby/buildkit/client"
"github.com/pkg/errors" "github.com/pkg/errors"
appsv1 "k8s.io/api/apps/v1" appsv1 "k8s.io/api/apps/v1"
@@ -51,7 +50,6 @@ type Driver struct {
configMapClient clientcorev1.ConfigMapInterface
podChooser podchooser.PodChooser
defaultLoad bool
timeout time.Duration
}
func (d *Driver) IsMobyDriver() bool {
@@ -90,7 +88,7 @@ func (d *Driver) Bootstrap(ctx context.Context, l progress.Logger) error {
}
}
return sub.Wrap(
fmt.Sprintf("waiting for %d pods to be ready, timeout: %s", d.minReplicas, units.HumanDuration(d.timeout)), fmt.Sprintf("waiting for %d pods to be ready", d.minReplicas),
func() error {
return d.wait(ctx)
})
@@ -103,27 +101,22 @@ func (d *Driver) wait(ctx context.Context) error {
err error
depl *appsv1.Deployment
)
for try := 0; try < 100; try++ {
timeoutChan := time.After(d.timeout) depl, err = d.deploymentClient.Get(ctx, d.deployment.Name, metav1.GetOptions{})
ticker := time.NewTicker(100 * time.Millisecond) if err == nil {
defer ticker.Stop() if depl.Status.ReadyReplicas >= int32(d.minReplicas) {
return nil
for { }
err = errors.Errorf("expected %d replicas to be ready, got %d",
d.minReplicas, depl.Status.ReadyReplicas)
}
select {
case <-ctx.Done():
return ctx.Err()
case <-timeoutChan: case <-time.After(time.Duration(100+try*20) * time.Millisecond):
return err
case <-ticker.C:
depl, err = d.deploymentClient.Get(ctx, d.deployment.Name, metav1.GetOptions{})
if err == nil {
if depl.Status.ReadyReplicas >= int32(d.minReplicas) {
return nil
}
err = errors.Errorf("expected %d replicas to be ready, got %d", d.minReplicas, depl.Status.ReadyReplicas)
}
} }
} }
return err
} }
func (d *Driver) Info(ctx context.Context) (*driver.Info, error) {
