Mirror of https://gitea.com/Lydanne/buildx.git (synced 2025-08-14 15:55:54 +08:00)

Compare commits: v0.15.0-rc...v0.16.1 (136 commits)
Commit SHA1s (newest to oldest):

34c195271a, dd3bb69a1e, 9d8cf0bed3, fb773fa805, 10c9ff901c, 470e45e599, 2a2648b1db, ac930bda69, 6791ecb628, d717237e4f, ee642ecc4c, 06d96d665e, dc83501a5b, 0f74f9a794, 6d6adc11a1, 68076909b9, 7957b73a30, 1dceb49a27, b96ad59f64, 50aa895477, 74374ea418, 6bbe59697a, c51004e2e4, 8535c6b455, 153e5ed274, cc097db675, 35313e865f, 233b869c63, 7460f049f2, 8f4c8b094a, 8da28574b0, 7e49141c4e, 5ec703ba10, 1ffc6f1d58, f65631546d, 6fc19c4024, 5656c98133, 263a9ddaee, 1774aa0cf0, 7b80ad7069, c0c4d7172b, e498ba9c27, 2e7e7abe42, 048ef1fbf8, cbe7901667, f374f64d2f, 4be2259719, 6627f315cb, 19d838a3f4, 17878d641e, 63eb73d9cf, 59a0ffcf83, 2b17f277a1, ea7c8e83d2, 9358c45b46, cfb7fc4fb5, d4b112ab05, f7a32361ea, af902caeaa, 04000db8da, b8da14166c, c1f680df14, b6482ab6bb, 6f45b0ea06, 3971361ed2, 818045482e, f8e1746d0d, 92a6799514, 9358f84668, dbdd3601eb, a3c8a72b54, 4c3af9becf, d8c9ebde1f, 01a50aac42, f7bcafed21, e5ded4b2de, 6ef443de41, 076e19d0ce, 5599699d29, d155747029, 9cebd0c80f, 7b1ec7211d, 689fd74104, 0dfd315daa, 9b100c2552, 92aaaa8f67, 6111d9a00d, 310aaf1891, 6c7e65c789, 66b0abf078, 6efa26c2de, 5b726afa5e, 009f318bbd, 9f7c8ea3fb, be12199eb9, 94355517c4, cb1be7214a, f42a4a1e94, 4d7365018c, 3d0951b800, bcd04d5a64, b00001d8ac, 31187735de, 3373a27f1f, 56698805a9, 4c2e0c4307, fb6a3178c9, 8ca18dee2d, 917d2f4a0a, 366328ba6a, 5f822b36d3, e423d096a6, 927fb6731c, 314ca32446, 3b25e3fa5c, 41d369120b, 56ffe55f81, 6d5823beb1, c116af7b82, fb130243f8, 29c8107b85, ee3baa54f7, 9de95d81eb, d3a53189f7, 0496dae9d5, 40fcf992b1, 85c25f719c, 875e4cd52e, 24cedc6c0f, 59f52c9505, 1e916ae6c6, d342cb9d03, 9fdc99dc76, ab835fd904, 87efbd43b5, 9ceda78057
.github/labeler.yml (vendored, new file, +104 lines)

@@ -0,0 +1,104 @@
# Add 'area/project' label to changes in basic project documentation and .github folder, excluding .github/workflows
area/project:
  - all:
      - changed-files:
          - any-glob-to-any-file:
              - .github/**
              - LICENSE
              - AUTHORS
              - MAINTAINERS
              - PROJECT.md
              - README.md
              - .gitignore
              - codecov.yml
          - all-globs-to-all-files: '!.github/workflows/*'

# Add 'area/github-actions' label to changes in the .github/workflows folder
area/ci:
  - changed-files:
      - any-glob-to-any-file: '.github/workflows/**'

# Add 'area/bake' label to changes in the bake
area/bake:
  - changed-files:
      - any-glob-to-any-file: 'bake/**'

# Add 'area/bake/compose' label to changes in the bake+compose
area/bake/compose:
  - changed-files:
      - any-glob-to-any-file:
          - bake/compose.go
          - bake/compose_test.go

# Add 'area/build' label to changes in build files
area/build:
  - changed-files:
      - any-glob-to-any-file: 'build/**'

# Add 'area/builder' label to changes in builder files
area/builder:
  - changed-files:
      - any-glob-to-any-file: 'builder/**'

# Add 'area/cli' label to changes in the CLI
area/cli:
  - changed-files:
      - any-glob-to-any-file:
          - cmd/**
          - commands/**

# Add 'area/controller' label to changes in the controller
area/controller:
  - changed-files:
      - any-glob-to-any-file: 'controller/**'

# Add 'area/docs' label to markdown files in the docs folder
area/docs:
  - changed-files:
      - any-glob-to-any-file: 'docs/**/*.md'

# Add 'area/dependencies' label to changes in go dependency files
area/dependencies:
  - changed-files:
      - any-glob-to-any-file:
          - go.mod
          - go.sum
          - vendor/**

# Add 'area/driver' label to changes in the driver folder
area/driver:
  - changed-files:
      - any-glob-to-any-file: 'driver/**'

# Add 'area/driver/docker' label to changes in the docker driver
area/driver/docker:
  - changed-files:
      - any-glob-to-any-file: 'driver/docker/**'

# Add 'area/driver/docker-container' label to changes in the docker-container driver
area/driver/docker-container:
  - changed-files:
      - any-glob-to-any-file: 'driver/docker-container/**'

# Add 'area/driver/kubernetes' label to changes in the kubernetes driver
area/driver/kubernetes:
  - changed-files:
      - any-glob-to-any-file: 'driver/kubernetes/**'

# Add 'area/driver/remote' label to changes in the remote driver
area/driver/remote:
  - changed-files:
      - any-glob-to-any-file: 'driver/remote/**'

# Add 'area/hack' label to changes in the hack folder
area/hack:
  - changed-files:
      - any-glob-to-any-file: 'hack/**'

# Add 'area/tests' label to changes in test files
area/tests:
  - changed-files:
      - any-glob-to-any-file:
          - tests/**
          - '**/*_test.go'
.github/workflows/build.yml (vendored, 27 lines changed)

@@ -26,16 +26,17 @@ env:
   TEST_CACHE_SCOPE: "test"
   TESTFLAGS: "-v --parallel=6 --timeout=30m"
   GOTESTSUM_FORMAT: "standard-verbose"
-  GO_VERSION: "1.21"
+  GO_VERSION: "1.22"
   GOTESTSUM_VERSION: "v1.9.0" # same as one in Dockerfile

 jobs:
   test-integration:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     env:
       TESTFLAGS_DOCKER: "-v --parallel=1 --timeout=30m"
       TEST_IMAGE_BUILD: "0"
       TEST_IMAGE_ID: "buildx-tests"
+      TEST_COVERAGE: "1"
     strategy:
       fail-fast: false
       matrix:
@@ -43,9 +44,9 @@ jobs:
           - master
           - latest
           - buildx-stable-1
-          - v0.13.1
+          - v0.14.1
+          - v0.13.2
           - v0.12.5
-          - v0.11.6
         worker:
           - docker-container
           - remote
@@ -105,7 +106,7 @@ jobs:
           buildkitd-flags: --debug
       -
         name: Build test image
-        uses: docker/bake-action@v4
+        uses: docker/bake-action@v5
         with:
           targets: integration-test
          set: |
@@ -125,6 +126,7 @@ jobs:
           directory: ./bin/testreports
           flags: integration
           token: ${{ secrets.CODECOV_TOKEN }}
+          disable_file_fixes: true
       -
         name: Generate annotations
         if: always()
@@ -145,7 +147,7 @@ jobs:
       fail-fast: false
       matrix:
         os:
-          - ubuntu-22.04
+          - ubuntu-24.04
          - macos-12
          - windows-2022
    env:
@@ -197,6 +199,7 @@ jobs:
           env_vars: RUNNER_OS
           flags: unit
           token: ${{ secrets.CODECOV_TOKEN }}
+          disable_file_fixes: true
       -
         name: Generate annotations
         if: always()
@@ -212,7 +215,7 @@ jobs:
           path: ${{ env.TESTREPORTS_BASEDIR }}

   prepare-binaries:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     outputs:
       matrix: ${{ steps.platforms.outputs.matrix }}
     steps:
@@ -230,7 +233,7 @@ jobs:
           echo ${{ steps.platforms.outputs.matrix }}

   binaries:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     needs:
       - prepare-binaries
     strategy:
@@ -273,7 +276,7 @@ jobs:
           if-no-files-found: error

   bin-image:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     needs:
       - test-integration
       - test-unit
@@ -313,7 +316,7 @@ jobs:
           password: ${{ secrets.DOCKERPUBLICBOT_WRITE_PAT }}
       -
         name: Build and push image
-        uses: docker/bake-action@v4
+        uses: docker/bake-action@v5
         with:
          files: |
            ./docker-bake.hcl
@@ -326,7 +329,7 @@ jobs:
            *.cache-to=type=gha,scope=bin-image,mode=max

   release:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     needs:
       - test-integration
       - test-unit
@@ -356,7 +359,7 @@ jobs:
       -
         name: GitHub Release
         if: startsWith(github.ref, 'refs/tags/v')
-        uses: softprops/action-gh-release@69320dbe05506a9a39fc8ae11030b214ec2d1f87 # v2.0.5
+        uses: softprops/action-gh-release@a74c6b72af54cfa997e81df42d94703d6313a2d0 # v2.0.6
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
.github/workflows/codeql.yml (vendored, 4 lines changed)

@@ -13,11 +13,11 @@ permissions:
   security-events: write

 env:
-  GO_VERSION: "1.21"
+  GO_VERSION: "1.22"

 jobs:
   codeql:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     steps:
       -
         name: Checkout
.github/workflows/docs-release.yml (vendored, 6 lines changed)

@@ -12,7 +12,7 @@ on:

 jobs:
   open-pr:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     if: ${{ (github.event.release.prerelease != true || github.event.inputs.tag != '') && github.repository == 'docker/buildx' }}
     steps:
       -
@@ -36,7 +36,7 @@ jobs:
         uses: docker/setup-buildx-action@v3
       -
         name: Generate yaml
-        uses: docker/bake-action@v4
+        uses: docker/bake-action@v5
         with:
           source: ${{ github.server_url }}/${{ github.repository }}.git#${{ env.RELEASE_NAME }}
           targets: update-docs
@@ -57,7 +57,7 @@ jobs:
           VENDOR_MODULE: github.com/docker/buildx@${{ env.RELEASE_NAME }}
       -
         name: Create PR on docs repo
-        uses: peter-evans/create-pull-request@6d6857d36972b65feb161a90e484f2984215f83e # v6.0.5
+        uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6.1.0
         with:
           token: ${{ secrets.GHPAT_DOCS_DISPATCH }}
           push-to-fork: docker-tools-robot/docker.github.io
.github/workflows/docs-upstream.yml (vendored, 4 lines changed)

@@ -22,7 +22,7 @@ on:

 jobs:
   docs-yaml:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       -
         name: Checkout
@@ -34,7 +34,7 @@ jobs:
           version: latest
       -
         name: Build reference YAML docs
-        uses: docker/bake-action@v4
+        uses: docker/bake-action@v5
         with:
           targets: update-docs
           provenance: false
.github/workflows/e2e.yml (vendored, 9 lines changed)

@@ -22,7 +22,7 @@ env:

 jobs:
   build:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout
         uses: actions/checkout@v4
@@ -33,7 +33,7 @@ jobs:
           version: latest
       -
         name: Build
-        uses: docker/bake-action@v4
+        uses: docker/bake-action@v5
         with:
           targets: binaries
          set: |
@@ -84,6 +84,8 @@ jobs:
             endpoint: tcp://localhost:1234
           - driver: docker-container
             metadata-provenance: max
+          - driver: docker-container
+            metadata-warnings: true
         exclude:
           - driver: docker
             multi-node: mnode-true
@@ -134,6 +136,9 @@ jobs:
           if [ -n "${{ matrix.metadata-provenance }}" ]; then
             echo "BUILDX_METADATA_PROVENANCE=${{ matrix.metadata-provenance }}" >> $GITHUB_ENV
           fi
+          if [ -n "${{ matrix.metadata-warnings }}" ]; then
+            echo "BUILDX_METADATA_WARNINGS=${{ matrix.metadata-warnings }}" >> $GITHUB_ENV
+          fi
       -
         name: Install k3s
         if: matrix.driver == 'kubernetes'
.github/workflows/labeler.yml (vendored, new file, +19 lines)

@@ -0,0 +1,19 @@
name: labeler

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

on:
  pull_request_target:

jobs:
  labeler:
    permissions:
      contents: read
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      -
        name: Run
        uses: actions/labeler@v5
.github/workflows/validate.yml (vendored, 6 lines changed)

@@ -18,7 +18,7 @@ on:

 jobs:
   prepare:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     outputs:
       includes: ${{ steps.matrix.outputs.includes }}
     steps:
@@ -67,7 +67,7 @@ jobs:
           });

   validate:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     needs:
       - prepare
     strategy:
@@ -91,7 +91,7 @@ jobs:
           version: latest
       -
         name: Validate
-        uses: docker/bake-action@v4
+        uses: docker/bake-action@v5
         with:
           targets: ${{ matrix.target }}
          set: |
@@ -37,13 +37,18 @@ linters-settings:
     rules:
       main:
         deny:
-          # The io/ioutil package has been deprecated.
-          # https://go.dev/doc/go1.16#ioutil
+          - pkg: "github.com/containerd/containerd/errdefs"
+            desc: The containerd errdefs package was migrated to a separate module. Use github.com/containerd/errdefs instead.
+          - pkg: "github.com/containerd/containerd/log"
+            desc: The containerd log package was migrated to a separate module. Use github.com/containerd/log instead.
+          - pkg: "github.com/containerd/containerd/platforms"
+            desc: The containerd platforms package was migrated to a separate module. Use github.com/containerd/platforms instead.
           - pkg: "io/ioutil"
             desc: The io/ioutil package has been deprecated.
   forbidigo:
     forbid:
       - '^fmt\.Errorf(# use errors\.Errorf instead)?$'
+      - '^platforms\.DefaultString(# use platforms\.Format(platforms\.DefaultSpec()) instead\.)?$'
   gosec:
     excludes:
       - G204 # Audit use of command execution
Dockerfile (43 lines changed)

@@ -1,13 +1,13 @@
 # syntax=docker/dockerfile:1

-ARG GO_VERSION=1.21
+ARG GO_VERSION=1.22
 ARG XX_VERSION=1.4.0

 # for testing
-ARG DOCKER_VERSION=26.0.0
+ARG DOCKER_VERSION=27.0.3
 ARG GOTESTSUM_VERSION=v1.9.0
 ARG REGISTRY_VERSION=2.8.0
-ARG BUILDKIT_VERSION=v0.13.1
+ARG BUILDKIT_VERSION=v0.14.1
 ARG UNDOCK_VERSION=0.7.0

 FROM --platform=$BUILDPLATFORM tonistiigi/xx:${XX_VERSION} AS xx
@@ -27,10 +27,36 @@ WORKDIR /src

 FROM gobase AS gotestsum
 ARG GOTESTSUM_VERSION
-ENV GOFLAGS=
-RUN --mount=target=/root/.cache,type=cache \
-  GOBIN=/out/ go install "gotest.tools/gotestsum@${GOTESTSUM_VERSION}" && \
-  /out/gotestsum --version
+ENV GOFLAGS=""
+RUN --mount=target=/root/.cache,type=cache <<EOT
+  set -ex
+  go install "gotest.tools/gotestsum@${GOTESTSUM_VERSION}"
+  go install "github.com/wadey/gocovmerge@latest"
+  mkdir /out
+  /go/bin/gotestsum --version
+  mv /go/bin/gotestsum /out
+  mv /go/bin/gocovmerge /out
+EOT
+COPY --chmod=755 <<"EOF" /out/gotestsumandcover
+#!/bin/sh
+set -x
+if [ -z "$GO_TEST_COVERPROFILE" ]; then
+  exec gotestsum "$@"
+fi
+coverdir="$(dirname "$GO_TEST_COVERPROFILE")"
+mkdir -p "$coverdir/helpers"
+gotestsum "$@" "-coverprofile=$GO_TEST_COVERPROFILE"
+ecode=$?
+go tool covdata textfmt -i=$coverdir/helpers -o=$coverdir/helpers-report.txt
+gocovmerge "$coverdir/helpers-report.txt" "$GO_TEST_COVERPROFILE" > "$coverdir/merged-report.txt"
+mv "$coverdir/merged-report.txt" "$GO_TEST_COVERPROFILE"
+rm "$coverdir/helpers-report.txt"
+for f in "$coverdir/helpers"/*; do
+  rm "$f"
+done
+rmdir "$coverdir/helpers"
+exit $ecode
+EOF

 FROM gobase AS buildx-version
 RUN --mount=type=bind,target=. <<EOT
@@ -42,6 +68,7 @@ EOT

 FROM gobase AS buildx-build
 ARG TARGETPLATFORM
+ARG GO_EXTRA_FLAGS
 RUN --mount=type=bind,target=. \
     --mount=type=cache,target=/root/.cache \
     --mount=type=cache,target=/go/pkg/mod \
@@ -88,7 +115,7 @@ RUN apk add --no-cache \
     shadow-uidmap \
     xfsprogs \
     xz
-COPY --link --from=gotestsum /out/gotestsum /usr/bin/
+COPY --link --from=gotestsum /out /usr/bin/
 COPY --link --from=registry /bin/registry /usr/bin/
 COPY --link --from=docker-engine / /usr/bin/
 COPY --link --from=docker-cli / /usr/bin/
bake/bake.go (61 lines changed)

@@ -2,7 +2,6 @@ package bake

 import (
 	"context"
-	"encoding/csv"
 	"io"
 	"os"
 	"path"
@@ -27,6 +26,7 @@ import (
 	"github.com/moby/buildkit/client/llb"
 	"github.com/moby/buildkit/session/auth/authprovider"
 	"github.com/pkg/errors"
+	"github.com/tonistiigi/go-csvvalue"
 	"github.com/zclconf/go-cty/cty"
 	"github.com/zclconf/go-cty/cty/convert"
 )
@@ -177,7 +177,7 @@ func readWithProgress(r io.Reader, setStatus func(st *client.VertexStatus)) (dt
 }

 func ListTargets(files []File) ([]string, error) {
-	c, err := ParseFiles(files, nil)
+	c, _, err := ParseFiles(files, nil)
 	if err != nil {
 		return nil, err
 	}
@@ -192,7 +192,7 @@ func ListTargets(files []File) ([]string, error) {
 }

 func ReadTargets(ctx context.Context, files []File, targets, overrides []string, defaults map[string]string) (map[string]*Target, map[string]*Group, error) {
-	c, err := ParseFiles(files, defaults)
+	c, _, err := ParseFiles(files, defaults)
 	if err != nil {
 		return nil, nil, err
 	}
@@ -298,7 +298,7 @@ func sliceToMap(env []string) (res map[string]string) {
 	return
 }

-func ParseFiles(files []File, defaults map[string]string) (_ *Config, err error) {
+func ParseFiles(files []File, defaults map[string]string) (_ *Config, _ *hclparser.ParseMeta, err error) {
 	defer func() {
 		err = formatHCLError(err, files)
 	}()
@@ -310,7 +310,7 @@ func ParseFiles(files []File, defaults map[string]string) (_ *Config, err error)
 		isCompose, composeErr := validateComposeFile(f.Data, f.Name)
 		if isCompose {
 			if composeErr != nil {
-				return nil, composeErr
+				return nil, nil, composeErr
 			}
 			composeFiles = append(composeFiles, f)
 		}
@@ -318,13 +318,13 @@ func ParseFiles(files []File, defaults map[string]string) (_ *Config, err error)
 			hf, isHCL, err := ParseHCLFile(f.Data, f.Name)
 			if isHCL {
 				if err != nil {
-					return nil, err
+					return nil, nil, err
 				}
 				hclFiles = append(hclFiles, hf)
 			} else if composeErr != nil {
-				return nil, errors.Wrapf(err, "failed to parse %s: parsing yaml: %v, parsing hcl", f.Name, composeErr)
+				return nil, nil, errors.Wrapf(err, "failed to parse %s: parsing yaml: %v, parsing hcl", f.Name, composeErr)
 			} else {
-				return nil, err
+				return nil, nil, err
 			}
 		}
 	}
@@ -332,23 +332,24 @@ func ParseFiles(files []File, defaults map[string]string) (_ *Config, err error)
 	if len(composeFiles) > 0 {
 		cfg, cmperr := ParseComposeFiles(composeFiles)
 		if cmperr != nil {
-			return nil, errors.Wrap(cmperr, "failed to parse compose file")
+			return nil, nil, errors.Wrap(cmperr, "failed to parse compose file")
 		}
 		c = mergeConfig(c, *cfg)
 		c = dedupeConfig(c)
 	}

+	var pm hclparser.ParseMeta
 	if len(hclFiles) > 0 {
-		renamed, err := hclparser.Parse(hclparser.MergeFiles(hclFiles), hclparser.Opt{
+		res, err := hclparser.Parse(hclparser.MergeFiles(hclFiles), hclparser.Opt{
 			LookupVar:     os.LookupEnv,
 			Vars:          defaults,
 			ValidateLabel: validateTargetName,
 		}, &c)
 		if err.HasErrors() {
-			return nil, err
+			return nil, nil, err
 		}

-		for _, renamed := range renamed {
+		for _, renamed := range res.Renamed {
 			for oldName, newNames := range renamed {
 				newNames = dedupSlice(newNames)
 				if len(newNames) == 1 && oldName == newNames[0] {
@@ -361,9 +362,10 @@ func ParseFiles(files []File, defaults map[string]string) (_ *Config, err error)
 			}
 		}
 		c = dedupeConfig(c)
+		pm = *res
 	}

-	return &c, nil
+	return &c, &pm, nil
 }

 func dedupeConfig(c Config) Config {
@@ -388,7 +390,8 @@ func dedupeConfig(c Config) Config {
 }

 func ParseFile(dt []byte, fn string) (*Config, error) {
-	return ParseFiles([]File{{Data: dt, Name: fn}}, nil)
+	c, _, err := ParseFiles([]File{{Data: dt, Name: fn}}, nil)
+	return c, err
 }

 type Config struct {
@@ -491,7 +494,7 @@ func (c Config) loadLinks(name string, t *Target, m map[string]*Target, o map[st
 			if err != nil {
 				return err
 			}
-			t2.Outputs = nil
+			t2.Outputs = []string{"type=cacheonly"}
 			t2.linked = true
 			m[target] = t2
 		}
@@ -669,13 +672,15 @@ func (c Config) target(name string, visited map[string]*Target, overrides map[st
 }

 type Group struct {
-	Name    string   `json:"-" hcl:"name,label" cty:"name"`
-	Targets []string `json:"targets" hcl:"targets" cty:"targets"`
+	Name        string   `json:"-" hcl:"name,label" cty:"name"`
+	Description string   `json:"description,omitempty" hcl:"description,optional" cty:"description"`
+	Targets     []string `json:"targets" hcl:"targets" cty:"targets"`
 	// Target // TODO?
 }

 type Target struct {
-	Name string `json:"-" hcl:"name,label" cty:"name"`
+	Name        string `json:"-" hcl:"name,label" cty:"name"`
+	Description string `json:"description,omitempty" hcl:"description,optional" cty:"description"`

 	// Inherits is the only field that cannot be overridden with --set
 	Inherits []string `json:"inherits,omitempty" hcl:"inherits,optional" cty:"inherits"`
@@ -702,7 +707,8 @@ type Target struct {
 	NoCacheFilter []string `json:"no-cache-filter,omitempty" hcl:"no-cache-filter,optional" cty:"no-cache-filter"`
 	ShmSize       *string  `json:"shm-size,omitempty" hcl:"shm-size,optional"`
 	Ulimits       []string `json:"ulimits,omitempty" hcl:"ulimits,optional"`
-	// IMPORTANT: if you add more fields here, do not forget to update newOverrides and docs/bake-reference.md.
+	Call          *string  `json:"call,omitempty" hcl:"call,optional" cty:"call"`
+	// IMPORTANT: if you add more fields here, do not forget to update newOverrides/AddOverrides and docs/bake-reference.md.

 	// linked is a private field to mark a target used as a linked one
 	linked bool
@@ -776,6 +782,9 @@ func (t *Target) Merge(t2 *Target) {
 	if t2.Target != nil {
 		t.Target = t2.Target
 	}
+	if t2.Call != nil {
+		t.Call = t2.Call
+	}
 	if t2.Annotations != nil { // merge
 		t.Annotations = append(t.Annotations, t2.Annotations...)
 	}
@@ -819,6 +828,9 @@ func (t *Target) Merge(t2 *Target) {
 	if t2.Ulimits != nil { // merge
 		t.Ulimits = append(t.Ulimits, t2.Ulimits...)
 	}
+	if t2.Description != "" {
+		t.Description = t2.Description
+	}
 	t.Inherits = append(t.Inherits, t2.Inherits...)
 }

@@ -863,6 +875,8 @@ func (t *Target) AddOverrides(overrides map[string]Override) error {
 		t.CacheTo = o.ArrValue
 	case "target":
 		t.Target = &value
+	case "call":
+		t.Call = &value
 	case "secrets":
 		t.Secrets = o.ArrValue
 	case "ssh":
@@ -1298,6 +1312,12 @@ func toBuildOpt(t *Target, inp *Input) (*build.Options, error) {
 		bo.Target = *t.Target
 	}

+	if t.Call != nil {
+		bo.PrintFunc = &build.PrintFunc{
+			Name: *t.Call,
+		}
+	}
+
 	cacheImports, err := buildflags.ParseCacheEntry(t.CacheFrom)
 	if err != nil {
 		return nil, err
@@ -1393,8 +1413,7 @@ func removeAttestDupes(s []string) []string {
 }

 func parseOutput(str string) map[string]string {
-	csvReader := csv.NewReader(strings.NewReader(str))
-	fields, err := csvReader.Read()
+	fields, err := csvvalue.Fields(str, nil)
 	if err != nil {
 		return nil
 	}
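ParseFiles now returns the parsed config together with HCL parse metadata, so existing callers gain a second return value to accept or discard. A minimal sketch of a caller against the new signature (the HCL snippet and file name are made up for illustration):

```go
package main

import (
	"fmt"

	"github.com/docker/buildx/bake"
)

func main() {
	files := []bake.File{{
		Name: "docker-bake.hcl",
		Data: []byte(`target "app" { dockerfile = "Dockerfile" }`),
	}}
	// The second return value is *hclparser.ParseMeta; discard it with _ when
	// only the config is needed, as ListTargets and ReadTargets now do.
	cfg, _, err := bake.ParseFiles(files, nil)
	if err != nil {
		panic(err)
	}
	fmt.Println("targets parsed:", len(cfg.Targets))
}
```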
@@ -838,7 +838,8 @@ func TestReadContextFromTargetChain(t *testing.T) {

 	mid, ok := m["mid"]
 	require.True(t, ok)
-	require.Equal(t, 0, len(mid.Outputs))
+	require.Equal(t, 1, len(mid.Outputs))
+	require.Equal(t, "type=cacheonly", mid.Outputs[0])
 	require.Equal(t, 1, len(mid.Contexts))

 	base, ok := m["base"]
@@ -1528,7 +1529,7 @@ services:
       v2: "bar"
 `)

-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.foo"},
 		{Data: dt2, Name: "c2.bar"},
 	}, nil)
@@ -8,6 +8,7 @@ import (
 	"sort"
 	"strings"

+	"github.com/compose-spec/compose-go/v2/consts"
 	"github.com/compose-spec/compose-go/v2/dotenv"
 	"github.com/compose-spec/compose-go/v2/loader"
 	composetypes "github.com/compose-spec/compose-go/v2/types"
@@ -40,7 +41,11 @@ func ParseCompose(cfgs []composetypes.ConfigFile, envs map[string]string) (*Conf
 		ConfigFiles: cfgs,
 		Environment: envs,
 	}, func(options *loader.Options) {
-		options.SetProjectName("bake", false)
+		projectName := "bake"
+		if v, ok := envs[consts.ComposeProjectName]; ok && v != "" {
+			projectName = v
+		}
+		options.SetProjectName(projectName, false)
 		options.SkipNormalization = true
 		options.Profiles = []string{"*"}
 	})
@@ -758,6 +758,46 @@ services:
 	require.NoError(t, err)
 }

+func TestCgroup(t *testing.T) {
+	var dt = []byte(`
+services:
+  scratch:
+    build:
+      context: ./webapp
+      cgroup: private
+`)
+
+	_, err := ParseCompose([]composetypes.ConfigFile{{Content: dt}}, nil)
+	require.NoError(t, err)
+}
+
+func TestProjectName(t *testing.T) {
+	var dt = []byte(`
+services:
+  scratch:
+    build:
+      context: ./webapp
+      args:
+        PROJECT_NAME: ${COMPOSE_PROJECT_NAME}
+`)
+
+	t.Run("default", func(t *testing.T) {
+		c, err := ParseCompose([]composetypes.ConfigFile{{Content: dt}}, nil)
+		require.NoError(t, err)
+		require.Len(t, c.Targets, 1)
+		require.Len(t, c.Targets[0].Args, 1)
+		require.Equal(t, map[string]*string{"PROJECT_NAME": ptrstr("bake")}, c.Targets[0].Args)
+	})
+
+	t.Run("env", func(t *testing.T) {
+		c, err := ParseCompose([]composetypes.ConfigFile{{Content: dt}}, map[string]string{"COMPOSE_PROJECT_NAME": "foo"})
+		require.NoError(t, err)
+		require.Len(t, c.Targets, 1)
+		require.Len(t, c.Targets[0].Args, 1)
+		require.Equal(t, map[string]*string{"PROJECT_NAME": ptrstr("foo")}, c.Targets[0].Args)
+	})
+}
+
 // chdir changes the current working directory to the named directory,
 // and then restore the original working directory at the end of the test.
 func chdir(t *testing.T, dir string) {
@@ -273,7 +273,7 @@ func TestHCLMultiFileSharedVariables(t *testing.T) {
 }
 `)

-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 	}, nil)
@@ -285,7 +285,7 @@ func TestHCLMultiFileSharedVariables(t *testing.T) {

 	t.Setenv("FOO", "def")

-	c, err = ParseFiles([]File{
+	c, _, err = ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 	}, nil)
@@ -322,7 +322,7 @@ func TestHCLVarsWithVars(t *testing.T) {
 }
 `)

-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 	}, nil)
@@ -334,7 +334,7 @@ func TestHCLVarsWithVars(t *testing.T) {

 	t.Setenv("BASE", "new")

-	c, err = ParseFiles([]File{
+	c, _, err = ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 	}, nil)
@@ -612,7 +612,7 @@ func TestHCLMultiFileAttrs(t *testing.T) {
 		FOO="def"
 `)

-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 	}, nil)
@@ -623,7 +623,7 @@ func TestHCLMultiFileAttrs(t *testing.T) {

 	t.Setenv("FOO", "ghi")

-	c, err = ParseFiles([]File{
+	c, _, err = ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 	}, nil)
@@ -647,7 +647,7 @@ func TestHCLMultiFileGlobalAttrs(t *testing.T) {
 		FOO = "def"
 `)

-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 	}, nil)
@@ -830,7 +830,7 @@ func TestHCLRenameMultiFile(t *testing.T) {
 }
 `)

-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.hcl"},
 		{Data: dt3, Name: "c3.hcl"},
@@ -1050,7 +1050,7 @@ func TestHCLMatrixArgsOverride(t *testing.T) {
 }
 `)

-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "docker-bake.hcl"},
 	}, map[string]string{"ABC": "11,22,33"})
 	require.NoError(t, err)
@@ -1236,7 +1236,7 @@ services:
       v2: "bar"
 `)

-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 		{Data: dt2, Name: "c2.yml"},
 	}, nil)
@@ -1258,7 +1258,7 @@ func TestHCLBuiltinVars(t *testing.T) {
 }
 `)

-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{Data: dt, Name: "c1.hcl"},
 	}, map[string]string{
 		"BAKE_CMD_CONTEXT": "foo",
@@ -1272,7 +1272,7 @@ func TestHCLBuiltinVars(t *testing.T) {
 }

 func TestCombineHCLAndJSONTargets(t *testing.T) {
-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{
 			Name: "docker-bake.hcl",
 			Data: []byte(`
@@ -1348,7 +1348,7 @@ target "b" {
 }

 func TestCombineHCLAndJSONVars(t *testing.T) {
-	c, err := ParseFiles([]File{
+	c, _, err := ParseFiles([]File{
 		{
 			Name: "docker-bake.hcl",
 			Data: []byte(`
@@ -25,9 +25,11 @@ type Opt struct {
 }

 type variable struct {
-	Name    string         `json:"-" hcl:"name,label"`
-	Default *hcl.Attribute `json:"default,omitempty" hcl:"default,optional"`
-	Body    hcl.Body       `json:"-" hcl:",body"`
+	Name        string         `json:"-" hcl:"name,label"`
+	Default     *hcl.Attribute `json:"default,omitempty" hcl:"default,optional"`
+	Description string         `json:"description,omitempty" hcl:"description,optional"`
+	Body        hcl.Body       `json:"-" hcl:",body"`
+	Remain      hcl.Body       `json:"-" hcl:",remain"`
 }

 type functionDef struct {
@@ -534,7 +536,18 @@ func (p *parser) resolveBlockNames(block *hcl.Block) ([]string, error) {
 	return names, nil
 }

-func Parse(b hcl.Body, opt Opt, val interface{}) (map[string]map[string][]string, hcl.Diagnostics) {
+type Variable struct {
+	Name        string
+	Description string
+	Value       *string
+}
+
+type ParseMeta struct {
+	Renamed      map[string]map[string][]string
+	AllVariables []*Variable
+}
+
+func Parse(b hcl.Body, opt Opt, val interface{}) (*ParseMeta, hcl.Diagnostics) {
 	reserved := map[string]struct{}{}
 	schema, _ := gohcl.ImpliedBodySchema(val)
@@ -643,6 +656,7 @@ func Parse(b hcl.Body, opt Opt, val interface{}) (map[string]map[string][]string
 		}
 	}

+	vars := make([]*Variable, 0, len(p.vars))
 	for k := range p.vars {
 		if err := p.resolveValue(p.ectx, k); err != nil {
 			if diags, ok := err.(hcl.Diagnostics); ok {
@@ -651,6 +665,21 @@ func Parse(b hcl.Body, opt Opt, val interface{}) (map[string]map[string][]string
 			r := p.vars[k].Body.MissingItemRange()
 			return nil, wrapErrorDiagnostic("Invalid value", err, &r, &r)
 		}
+		v := &Variable{
+			Name:        p.vars[k].Name,
+			Description: p.vars[k].Description,
+		}
+		if vv := p.ectx.Variables[k]; !vv.IsNull() {
+			var s string
+			switch vv.Type() {
+			case cty.String:
+				s = vv.AsString()
+			case cty.Bool:
+				s = strconv.FormatBool(vv.True())
+			}
+			v.Value = &s
+		}
+		vars = append(vars, v)
 	}

 	for k := range p.funcs {
@@ -795,7 +824,10 @@ func Parse(b hcl.Body, opt Opt, val interface{}) (map[string]map[string][]string
 		}
 	}

-	return renamed, nil
+	return &ParseMeta{
+		Renamed:      renamed,
+		AllVariables: vars,
+	}, nil
 }

 // wrapErrorDiagnostic wraps an error into a hcl.Diagnostics object.
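The new Variable and ParseMeta types expose each variable's name, description, and resolved value to callers of the parser. A hedged sketch of reading that metadata through bake.ParseFiles (the variable name, value, and target are illustrative):

```go
package main

import (
	"fmt"

	"github.com/docker/buildx/bake"
)

func main() {
	hcl := []byte(`
variable "TAG" {
  default     = "latest"
  description = "tag applied to the app target"
}
target "app" {
  tags = ["example/app:${TAG}"]
}
`)
	_, meta, err := bake.ParseFiles([]bake.File{{Name: "docker-bake.hcl", Data: hcl}}, nil)
	if err != nil {
		panic(err)
	}
	for _, v := range meta.AllVariables {
		// Value stays nil when the variable resolves to null.
		val := "<null>"
		if v.Value != nil {
			val = *v.Value
		}
		fmt.Printf("%s=%s (%s)\n", v.Name, val, v.Description)
	}
}
```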
@@ -111,21 +111,19 @@ func (mb mergedBodies) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
 			diags = append(diags, thisDiags...)
 		}

-		if thisAttrs != nil {
-			for name, attr := range thisAttrs {
-				if existing := attrs[name]; existing != nil {
-					diags = diags.Append(&hcl.Diagnostic{
-						Severity: hcl.DiagError,
-						Summary:  "Duplicate argument",
-						Detail: fmt.Sprintf(
-							"Argument %q was already set at %s",
-							name, existing.NameRange.String(),
-						),
-						Subject: thisAttrs[name].NameRange.Ptr(),
-					})
-				}
-				attrs[name] = attr
+		for name, attr := range thisAttrs {
+			if existing := attrs[name]; existing != nil {
+				diags = diags.Append(&hcl.Diagnostic{
+					Severity: hcl.DiagError,
+					Summary:  "Duplicate argument",
+					Detail: fmt.Sprintf(
+						"Argument %q was already set at %s",
+						name, existing.NameRange.String(),
+					),
+					Subject: thisAttrs[name].NameRange.Ptr(),
+				})
 			}
+			attrs[name] = attr
 		}
 	}
@@ -18,6 +18,7 @@ import (
 	"github.com/distribution/reference"
 	"github.com/docker/buildx/builder"
 	"github.com/docker/buildx/driver"
+	"github.com/docker/buildx/util/confutil"
 	"github.com/docker/buildx/util/desktop"
 	"github.com/docker/buildx/util/dockerutil"
 	"github.com/docker/buildx/util/imagetools"
@@ -25,12 +26,13 @@ import (
 	"github.com/docker/buildx/util/resolver"
 	"github.com/docker/buildx/util/waitmap"
 	"github.com/docker/cli/opts"
-	imagetypes "github.com/docker/docker/api/types/image"
+	"github.com/docker/docker/api/types/image"
 	"github.com/docker/docker/pkg/jsonmessage"
 	"github.com/moby/buildkit/client"
 	"github.com/moby/buildkit/client/llb"
+	"github.com/moby/buildkit/exporter/containerimage/exptypes"
 	gateway "github.com/moby/buildkit/frontend/gateway/client"
 	"github.com/moby/buildkit/identity"
 	"github.com/moby/buildkit/session"
 	"github.com/moby/buildkit/solver/errdefs"
 	"github.com/moby/buildkit/solver/pb"
@@ -53,7 +55,7 @@

 const (
 	printFallbackImage     = "docker/dockerfile:1.5@sha256:dbbd5e059e8a07ff7ea6233b213b36aa516b4c53c645f1817a4dd18b83cbea56"
-	printLintFallbackImage = "docker.io/docker/dockerfile-upstream:1.8.0-rc2@sha256:515538ca94186029d466cf4c10c61b5147e849c592955e3a78922e24595c63a9"
+	printLintFallbackImage = "docker.io/docker/dockerfile-upstream:1.8.1@sha256:e87caa74dcb7d46cd820352bfea12591f3dba3ddc4285e19c7dcd13359f7cefd"
 )

 type Options struct {
@@ -82,7 +84,7 @@ type Options struct {
 	Session []session.Attachable
 	Linked  bool // Linked marks this target as exclusively linked (not requested by the user).
 	PrintFunc              *PrintFunc
-	WithProvenanceResponse bool
+	ProvenanceResponseMode confutil.MetadataProvenanceMode
 	SourcePolicy           *spb.Policy
 	GroupRef               string
 }
@@ -215,6 +217,9 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opt map[s
 		if err != nil {
 			logrus.WithError(err).Warn("current commit information was not captured by the build")
 		}
+		if opt.Ref == "" {
+			opt.Ref = identity.NewID()
+		}
 		var reqn []*reqForNode
 		for _, np := range drivers[k] {
 			if np.Node().Driver.IsMobyDriver() {
@@ -473,8 +478,8 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opt map[s
 					rr.ExporterResponse[k] = string(v)
 				}
 				rr.ExporterResponse["buildx.build.ref"] = buildRef
-				if opt.WithProvenanceResponse && node.Driver.HistoryAPISupported(ctx) {
-					if err := setRecordProvenance(ctx, c, rr, so.Ref, pw); err != nil {
+				if node.Driver.HistoryAPISupported(ctx) {
+					if err := setRecordProvenance(ctx, c, rr, so.Ref, opt.ProvenanceResponseMode, pw); err != nil {
 						return err
 					}
 				}
@@ -607,7 +612,12 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opt map[s
 			}
 		}

-		dt, desc, err := itpull.Combine(ctx, srcs, nil, false)
+		indexAnnotations, err := extractIndexAnnotations(opt.Exports)
+		if err != nil {
+			return err
+		}
+
+		dt, desc, err := itpull.Combine(ctx, srcs, indexAnnotations, false)
 		if err != nil {
 			return err
 		}
@@ -655,6 +665,27 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opt map[s
 	return resp, nil
 }

+func extractIndexAnnotations(exports []client.ExportEntry) (map[exptypes.AnnotationKey]string, error) {
+	annotations := map[exptypes.AnnotationKey]string{}
+	for _, exp := range exports {
+		for k, v := range exp.Attrs {
+			ak, ok, err := exptypes.ParseAnnotationKey(k)
+			if !ok {
+				continue
+			}
+			if err != nil {
+				return nil, err
+			}
+
+			switch ak.Type {
+			case exptypes.AnnotationIndex, exptypes.AnnotationManifestDescriptor:
+				annotations[ak] = v
+			}
+		}
+	}
+	return annotations, nil
+}
+
 func pushWithMoby(ctx context.Context, d *driver.DriverHandle, name string, l progress.SubLogger) error {
 	api := d.Config().DockerAPI
 	if api == nil {
@@ -665,7 +696,7 @@ func pushWithMoby(ctx context.Context, d *driver.DriverHandle, name string, l pr
 		return err
 	}

-	rc, err := api.ImagePush(ctx, name, imagetypes.PushOptions{
+	rc, err := api.ImagePush(ctx, name, image.PushOptions{
 		RegistryAuth: creds,
 	})
 	if err != nil {
@@ -744,11 +775,11 @@ func remoteDigestWithMoby(ctx context.Context, d *driver.DriverHandle, name stri
 	if err != nil {
 		return "", err
 	}
-	image, _, err := api.ImageInspectWithRaw(ctx, name)
+	img, _, err := api.ImageInspectWithRaw(ctx, name)
 	if err != nil {
 		return "", err
 	}
-	if len(image.RepoDigests) == 0 {
+	if len(img.RepoDigests) == 0 {
 		return "", nil
 	}
 	remoteImage, err := api.DistributionInspect(ctx, name, creds)
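extractIndexAnnotations above filters exporter attributes down to index and manifest-descriptor annotation keys before they are passed to itpull.Combine. A small sketch of the same key parsing in isolation; the attribute map is made up, and only ParseAnnotationKey and the two annotation types come from the diff:

```go
package main

import (
	"fmt"

	"github.com/moby/buildkit/exporter/containerimage/exptypes"
)

func main() {
	// Attributes as they might appear on an image export entry, e.g. from
	// --output type=image,annotation-index.org.opencontainers.image.title=app
	attrs := map[string]string{
		"annotation-index.org.opencontainers.image.title": "app",
		"push": "true", // not an annotation key, skipped below
	}
	for k, v := range attrs {
		ak, ok, err := exptypes.ParseAnnotationKey(k)
		if !ok || err != nil {
			// the real helper returns the error; this sketch just skips
			continue
		}
		switch ak.Type {
		case exptypes.AnnotationIndex, exptypes.AnnotationManifestDescriptor:
			fmt.Printf("index annotation %s=%s\n", k, v)
		}
	}
}
```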
@@ -5,7 +5,7 @@ import (
 	stderrors "errors"
 	"net"

-	"github.com/containerd/containerd/platforms"
+	"github.com/containerd/platforms"
 	"github.com/docker/buildx/builder"
 	"github.com/docker/buildx/util/progress"
 	v1 "github.com/opencontainers/image-spec/specs-go/v1"

@@ -5,7 +5,7 @@ import (
 	"fmt"
 	"sync"

-	"github.com/containerd/containerd/platforms"
+	"github.com/containerd/platforms"
 	"github.com/docker/buildx/builder"
 	"github.com/docker/buildx/driver"
 	"github.com/docker/buildx/util/progress"

@@ -5,7 +5,7 @@ import (
 	"sort"
 	"testing"

-	"github.com/containerd/containerd/platforms"
+	"github.com/containerd/platforms"
 	"github.com/docker/buildx/builder"
 	specs "github.com/opencontainers/image-spec/specs-go/v1"
 	"github.com/stretchr/testify/require"
@@ -15,29 +15,29 @@ func saveLocalState(so *client.SolveOpt, target string, opts Options, node build
 	}
 	lp := opts.Inputs.ContextPath
 	dp := opts.Inputs.DockerfilePath
-	if lp != "" || dp != "" {
-		if lp != "" {
-			lp, err = filepath.Abs(lp)
-			if err != nil {
-				return err
-			}
-		}
-		if dp != "" {
-			dp, err = filepath.Abs(dp)
-			if err != nil {
-				return err
-			}
-		}
-		l, err := localstate.New(configDir)
-		if err != nil {
-			return err
-		}
-		return l.SaveRef(node.Builder, node.Name, so.Ref, localstate.State{
-			Target:         target,
-			LocalPath:      lp,
-			DockerfilePath: dp,
-			GroupRef:       opts.GroupRef,
-		})
-	}
-	return nil
+	if lp == "" && dp == "" {
+		return nil
+	}
+	if lp != "" && !IsRemoteURL(lp) && lp != "-" {
+		lp, err = filepath.Abs(lp)
+		if err != nil {
+			return err
+		}
+	}
+	if dp != "" && !IsRemoteURL(lp) && lp != "-" && dp != "-" {
+		dp, err = filepath.Abs(dp)
+		if err != nil {
+			return err
+		}
+	}
+	l, err := localstate.New(configDir)
+	if err != nil {
+		return err
+	}
+	return l.SaveRef(node.Builder, node.Name, so.Ref, localstate.State{
+		Target:         target,
+		LocalPath:      lp,
+		DockerfilePath: dp,
+		GroupRef:       opts.GroupRef,
+	})
 }
build/opt.go (21 lines changed)

@@ -12,7 +12,7 @@ import (
 	"github.com/containerd/containerd/content"
 	"github.com/containerd/containerd/content/local"
-	"github.com/containerd/containerd/platforms"
+	"github.com/containerd/platforms"
 	"github.com/distribution/reference"
 	"github.com/docker/buildx/builder"
 	"github.com/docker/buildx/driver"
@@ -104,10 +104,6 @@ func toSolveOpt(ctx context.Context, node builder.Node, multiDriver bool, opt Op
 		SourcePolicy: opt.SourcePolicy,
 	}

-	if so.Ref == "" {
-		so.Ref = identity.NewID()
-	}
-
 	if opt.CgroupParent != "" {
 		so.FrontendAttrs["cgroup-parent"] = opt.CgroupParent
 	}
@@ -258,7 +254,7 @@
 		if e.Type == "docker" || e.Type == "image" || e.Type == "oci" {
 			// inline buildinfo attrs from build arg
 			if v, ok := opt.BuildArgs["BUILDKIT_INLINE_BUILDINFO_ATTRS"]; ok {
-				e.Attrs["buildinfo-attrs"] = v
+				opt.Exports[i].Attrs["buildinfo-attrs"] = v
 			}
 		}
 	}
@@ -272,11 +268,9 @@
 	}
 	defers = append(defers, releaseLoad)

-	if sharedKey := so.LocalDirs["context"]; sharedKey != "" {
-		if p, err := filepath.Abs(sharedKey); err == nil {
-			sharedKey = filepath.Base(p)
-		}
-		so.SharedKey = sharedKey + ":" + confutil.TryNodeIdentifier(configDir)
+	// add node identifier to shared key if one was specified
+	if so.SharedKey != "" {
+		so.SharedKey += ":" + confutil.TryNodeIdentifier(configDir)
 	}

 	if opt.Pull {
@@ -416,6 +410,11 @@ func loadInputs(ctx context.Context, d *driver.DriverHandle, inp Inputs, addVCSL
 	if err := setLocalMount("context", inp.ContextPath, target, addVCSLocalDir); err != nil {
 		return nil, err
 	}
+	sharedKey := inp.ContextPath
+	if p, err := filepath.Abs(sharedKey); err == nil {
+		sharedKey = filepath.Base(p)
+	}
+	target.SharedKey = sharedKey
 	switch inp.DockerfilePath {
 	case "-":
 		dockerfileReader = inp.InStream
@@ -29,8 +29,7 @@ type provenanceBuilder struct {
 	ID string `json:"id,omitempty"`
 }

-func setRecordProvenance(ctx context.Context, c *client.Client, sr *client.SolveResponse, ref string, pw progress.Writer) error {
-	mode := confutil.MetadataProvenance()
+func setRecordProvenance(ctx context.Context, c *client.Client, sr *client.SolveResponse, ref string, mode confutil.MetadataProvenanceMode, pw progress.Writer) error {
 	if mode == confutil.MetadataProvenanceModeDisabled {
 		return nil
 	}
@@ -2,7 +2,6 @@ package builder

 import (
 	"context"
-	"encoding/csv"
 	"encoding/json"
 	"net/url"
 	"os"
@@ -27,6 +26,7 @@ import (
 	"github.com/moby/buildkit/util/progress/progressui"
 	"github.com/pkg/errors"
 	"github.com/spf13/pflag"
+	"github.com/tonistiigi/go-csvvalue"
 	"golang.org/x/sync/errgroup"
 )

@@ -601,8 +601,7 @@ func csvToMap(in []string) (map[string]string, error) {
 	}
 	m := make(map[string]string, len(in))
 	for _, s := range in {
-		csvReader := csv.NewReader(strings.NewReader(s))
-		fields, err := csvReader.Read()
+		fields, err := csvvalue.Fields(s, nil)
 		if err != nil {
 			return nil, err
 		}
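Both parseOutput in bake and csvToMap here now split single CSV records with github.com/tonistiigi/go-csvvalue instead of building an encoding/csv reader per string. A minimal sketch of the replacement call (the input string is illustrative):

```go
package main

import (
	"fmt"

	csvvalue "github.com/tonistiigi/go-csvvalue"
)

func main() {
	// Fields parses one CSV record; the second argument is an optional
	// destination slice to reuse between calls (nil allocates a new one).
	fields, err := csvvalue.Fields(`type=image,"name=docker.io/library/app:latest",push=true`, nil)
	if err != nil {
		panic(err)
	}
	for _, f := range fields {
		fmt.Println(f)
	}
}
```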
@@ -6,7 +6,7 @@ import (
 	"sort"
 	"strings"

-	"github.com/containerd/containerd/platforms"
+	"github.com/containerd/platforms"
 	"github.com/docker/buildx/driver"
 	ctxkube "github.com/docker/buildx/driver/kubernetes/context"
 	"github.com/docker/buildx/store"
@@ -48,8 +48,9 @@ func (b *Builder) Nodes() []Node {
 type LoadNodesOption func(*loadNodesOptions)

 type loadNodesOptions struct {
-	data     bool
-	dialMeta map[string][]string
+	data      bool
+	dialMeta  map[string][]string
+	clientOpt []client.ClientOpt
 }

 func WithData() LoadNodesOption {
@@ -64,6 +65,12 @@ func WithDialMeta(dialMeta map[string][]string) LoadNodesOption {
 	}
 }

+func WithClientOpt(clientOpt ...client.ClientOpt) LoadNodesOption {
+	return func(o *loadNodesOptions) {
+		o.clientOpt = clientOpt
+	}
+}
+
 // LoadNodes loads and returns nodes for this builder.
 // TODO: this should be a method on a Node object and lazy load data for each driver.
 func (b *Builder) LoadNodes(ctx context.Context, opts ...LoadNodesOption) (_ []Node, err error) {
@@ -151,7 +158,7 @@
 		node.ImageOpt = imageopt

 		if lno.data {
-			if err := node.loadData(ctx); err != nil {
+			if err := node.loadData(ctx, lno.clientOpt...); err != nil {
 				node.Err = err
 			}
 		}
@@ -247,7 +254,7 @@ func (n *Node) MarshalJSON() ([]byte, error) {
 	})
 }

-func (n *Node) loadData(ctx context.Context) error {
+func (n *Node) loadData(ctx context.Context, clientOpt ...client.ClientOpt) error {
 	if n.Driver == nil {
 		return nil
 	}
@@ -257,7 +264,7 @@
 	}
 	n.DriverInfo = info
 	if n.DriverInfo.Status == driver.Running {
-		driverClient, err := n.Driver.Client(ctx)
+		driverClient, err := n.Driver.Client(ctx, clientOpt...)
 		if err != nil {
 			return err
 		}
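WithClientOpt threads extra BuildKit client options through LoadNodes into Node.loadData, so callers that request node data can influence how each driver client is created. A sketch of a call site, assuming b is an already-initialized *builder.Builder and opts a caller-supplied slice of client options (both are placeholders, not part of the diff):

```go
package builderutil // illustrative package name

import (
	"context"

	"github.com/docker/buildx/builder"
	"github.com/moby/buildkit/client"
)

// loadNodesWithClientOpts is a sketch: it loads node data and forwards the
// given BuildKit client options to every driver client dialed along the way.
func loadNodesWithClientOpts(ctx context.Context, b *builder.Builder, opts ...client.ClientOpt) ([]builder.Node, error) {
	return b.LoadNodes(ctx,
		builder.WithData(),             // also resolve driver info and platforms
		builder.WithClientOpt(opts...), // option introduced in this change set
	)
}
```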
@@ -1 +1,4 @@
 comment: false
+
+ignore:
+  - "**/*.pb.go"
commands/bake.go (303 lines changed)

@@ -1,20 +1,27 @@
 package commands

 import (
+	"bytes"
+	"cmp"
 	"context"
 	"encoding/json"
 	"fmt"
+	"io"
 	"os"
+	"slices"
 	"strings"
+	"text/tabwriter"

 	"github.com/containerd/console"
-	"github.com/containerd/containerd/platforms"
+	"github.com/containerd/platforms"
 	"github.com/docker/buildx/bake"
+	"github.com/docker/buildx/bake/hclparser"
 	"github.com/docker/buildx/build"
 	"github.com/docker/buildx/builder"
+	"github.com/docker/buildx/controller/pb"
 	"github.com/docker/buildx/localstate"
 	"github.com/docker/buildx/util/buildflags"
 	"github.com/docker/buildx/util/cobrautil"
 	"github.com/docker/buildx/util/cobrautil/completion"
 	"github.com/docker/buildx/util/confutil"
 	"github.com/docker/buildx/util/desktop"
@@ -22,6 +29,7 @@ import (
 	"github.com/docker/buildx/util/progress"
 	"github.com/docker/buildx/util/tracing"
 	"github.com/docker/cli/cli/command"
+	"github.com/moby/buildkit/client"
 	"github.com/moby/buildkit/identity"
 	"github.com/moby/buildkit/util/progress/progressui"
 	"github.com/pkg/errors"
@@ -29,16 +37,19 @@ import (
 )

 type bakeOptions struct {
-	files      []string
-	overrides  []string
-	printOnly  bool
-	sbom       string
-	provenance string
+	files       []string
+	overrides   []string
+	printOnly   bool
+	listTargets bool
+	listVars    bool
+	sbom        string
+	provenance  string

 	builder      string
 	metadataFile string
 	exportPush   bool
 	exportLoad   bool
+	callFunc     string
 }

 func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in bakeOptions, cFlags commonFlags) (err error) {
@@ -70,6 +81,11 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
 		targets = []string{"default"}
 	}

+	callFunc, err := buildflags.ParsePrintFunc(in.callFunc)
+	if err != nil {
+		return err
+	}
+
 	overrides := in.overrides
 	if in.exportPush {
 		overrides = append(overrides, "*.push=true")
@@ -77,6 +93,9 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
 	if in.exportLoad {
 		overrides = append(overrides, "*.load=true")
 	}
+	if callFunc != nil {
+		overrides = append(overrides, fmt.Sprintf("*.call=%s", callFunc.Name))
+	}
 	if cFlags.noCache != nil {
 		overrides = append(overrides, fmt.Sprintf("*.no-cache=%t", *cFlags.noCache))
 	}
@@ -123,22 +142,43 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
 	}

 	progressMode := progressui.DisplayMode(cFlags.progress)
-	printer, err := progress.NewPrinter(ctx2, os.Stderr, progressMode,
+
+	var printer *progress.Printer
+	printer, err = progress.NewPrinter(ctx2, os.Stderr, progressMode,
 		progress.WithDesc(progressTextDesc, progressConsoleDesc),
+		progress.WithOnClose(func() {
+			if p := printer; p != nil {
+				printWarnings(os.Stderr, p.Warnings(), progressMode)
+			}
+		}),
 	)
 	if err != nil {
 		return err
 	}

+	var resp map[string]*client.SolveResponse
+
 	defer func() {
+		if printer != nil {
 			err1 := printer.Wait()
 			if err == nil {
 				err = err1
 			}
-		if err == nil && progressMode != progressui.QuietMode && progressMode != progressui.RawJSONMode {
-			desktop.PrintBuildDetails(os.Stderr, printer.BuildRefs(), term)
+			if err != nil {
+				return
+			}
+			if progressMode != progressui.QuietMode && progressMode != progressui.RawJSONMode {
+				desktop.PrintBuildDetails(os.Stderr, printer.BuildRefs(), term)
+			}
+			if resp != nil && len(in.metadataFile) > 0 {
+				dt := make(map[string]interface{})
+				for t, r := range resp {
+					dt[t] = decodeExporterResponse(r.ExporterResponse)
+				}
+				if warnings := printer.Warnings(); len(warnings) > 0 && confutil.MetadataWarningsEnabled() {
+					dt["buildx.build.warnings"] = warnings
+				}
+				err = writeMetadataFile(in.metadataFile, dt)
+			}
 		}
 	}()
@@ -151,12 +191,32 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
 		return errors.New("couldn't find a bake definition")
 	}

-	tgts, grps, err := bake.ReadTargets(ctx, files, targets, overrides, map[string]string{
+	defaults := map[string]string{
 		// don't forget to update documentation if you add a new
 		// built-in variable: docs/bake-reference.md#built-in-variables
 		"BAKE_CMD_CONTEXT":    cmdContext,
-		"BAKE_LOCAL_PLATFORM": platforms.DefaultString(),
-	})
+		"BAKE_LOCAL_PLATFORM": platforms.Format(platforms.DefaultSpec()),
+	}
+
+	if in.listTargets || in.listVars {
+		cfg, pm, err := bake.ParseFiles(files, defaults)
+		if err != nil {
+			return err
+		}
+
+		err = printer.Wait()
+		printer = nil
+		if err != nil {
+			return err
+		}
+		if in.listTargets {
+			return printTargetList(dockerCli.Out(), cfg)
+		} else if in.listVars {
+			return printVars(dockerCli.Out(), pm.AllVariables)
+		}
+	}
+
+	tgts, grps, err := bake.ReadTargets(ctx, files, targets, overrides, defaults)
 	if err != nil {
 		return err
 	}
@@ -202,12 +262,27 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
 		return nil
 	}

+	for _, opt := range bo {
+		if opt.PrintFunc != nil {
+			cf, err := buildflags.ParsePrintFunc(opt.PrintFunc.Name)
+			if err != nil {
+				return err
+			}
+			opt.PrintFunc.Name = cf.Name
+		}
+	}
+
+	prm := confutil.MetadataProvenance()
+	if len(in.metadataFile) == 0 {
+		prm = confutil.MetadataProvenanceModeDisabled
+	}
+
 	groupRef := identity.NewID()
 	var refs []string
 	for k, b := range bo {
 		b.Ref = identity.NewID()
 		b.GroupRef = groupRef
-		b.WithProvenanceResponse = len(in.metadataFile) > 0
+		b.ProvenanceResponseMode = prm
 		refs = append(refs, b.Ref)
 		bo[k] = b
 	}
@@ -224,22 +299,122 @@ func runBake(ctx context.Context, dockerCli command.Cli, targets []string, in ba
 		return err
 	}

-	resp, err := build.Build(ctx, nodes, bo, dockerutil.NewClient(dockerCli), confutil.ConfigDir(dockerCli), printer)
+	resp, err = build.Build(ctx, nodes, bo, dockerutil.NewClient(dockerCli), confutil.ConfigDir(dockerCli), printer)
 	if err != nil {
 		return wrapBuildError(err, true)
 	}

-	if len(in.metadataFile) > 0 {
-		dt := make(map[string]interface{})
-		for t, r := range resp {
-			dt[t] = decodeExporterResponse(r.ExporterResponse)
-		}
-		if err := writeMetadataFile(in.metadataFile, dt); err != nil {
-			return err
-		}
-	}
-
-	return err
+	err = printer.Wait()
+	if err != nil {
+		return err
+	}
+
+	var callFormatJSON bool
+	var jsonResults = map[string]map[string]any{}
+	if callFunc != nil {
+		callFormatJSON = callFunc.Format == "json"
+	}
+	var sep bool
+	var exitCode int
+
+	names := make([]string, 0, len(bo))
+	for name := range bo {
+		names = append(names, name)
+	}
+	slices.Sort(names)
+
+	for _, name := range names {
+		req := bo[name]
+		if req.PrintFunc == nil {
+			continue
+		}
+
+		pf := &pb.PrintFunc{
+			Name:         req.PrintFunc.Name,
+			Format:       req.PrintFunc.Format,
+			IgnoreStatus: req.PrintFunc.IgnoreStatus,
+		}
+
+		if callFunc != nil {
+			pf.Format = callFunc.Format
+			pf.IgnoreStatus = callFunc.IgnoreStatus
+		}
+
+		var res map[string]string
+		if sp, ok := resp[name]; ok {
+			res = sp.ExporterResponse
+		}
+
+		if callFormatJSON {
+			jsonResults[name] = map[string]any{}
+			buf := &bytes.Buffer{}
+			if code, err := printResult(buf, pf, res); err != nil {
+				jsonResults[name]["error"] = err.Error()
+				exitCode = 1
+			} else if code != 0 && exitCode == 0 {
+				exitCode = code
+			}
+			m := map[string]*json.RawMessage{}
+			if err := json.Unmarshal(buf.Bytes(), &m); err == nil {
+				for k, v := range m {
+					jsonResults[name][k] = v
+				}
+			} else {
+				jsonResults[name][pf.Name] = json.RawMessage(buf.Bytes())
+			}
+		} else {
+			if sep {
+				fmt.Fprintln(dockerCli.Out())
+			} else {
+				sep = true
+			}
+			fmt.Fprintf(dockerCli.Out(), "%s\n", name)
+			if descr := tgts[name].Description; descr != "" {
+				fmt.Fprintf(dockerCli.Out(), "%s\n", descr)
+			}
+
+			fmt.Fprintln(dockerCli.Out())
+			if code, err := printResult(dockerCli.Out(), pf, res); err != nil {
+				fmt.Fprintf(dockerCli.Out(), "error: %v\n", err)
+				exitCode = 1
+			} else if code != 0 && exitCode == 0 {
+				exitCode = code
+			}
+		}
+	}
+	if callFormatJSON {
+		out := struct {
+			Group  map[string]*bake.Group    `json:"group,omitempty"`
+			Target map[string]map[string]any `json:"target"`
+		}{
+			Group:  grps,
+			Target: map[string]map[string]any{},
+		}
+
+		for name, def := range tgts {
+			out.Target[name] = map[string]any{
+				"build": def,
+			}
+			if res, ok := jsonResults[name]; ok {
+				printName := bo[name].PrintFunc.Name
+				if printName == "lint" {
+					printName = "check"
+				}
+				out.Target[name][printName] = res
+			}
+		}
+		dt, err := json.MarshalIndent(out, "", "  ")
+		if err != nil {
+			return err
+		}
+		fmt.Fprintln(dockerCli.Out(), string(dt))
+	}
+
+	if exitCode != 0 {
+		os.Exit(exitCode)
+	}
+
+	return nil
 }

 func bakeCmd(dockerCli command.Cli, rootOpts *rootOptions) *cobra.Command {
@@ -275,6 +450,18 @@ func bakeCmd(dockerCli command.Cli, rootOpts *rootOptions) *cobra.Command {
 	flags.StringVar(&options.sbom, "sbom", "", `Shorthand for "--set=*.attest=type=sbom"`)
 	flags.StringVar(&options.provenance, "provenance", "", `Shorthand for "--set=*.attest=type=provenance"`)
 	flags.StringArrayVar(&options.overrides, "set", nil, `Override target value (e.g., "targetpattern.key=value")`)
+	flags.StringVar(&options.callFunc, "call", "build", `Set method for evaluating build ("check", "outline", "targets")`)
||||
flags.VarPF(callAlias(&options.callFunc, "check"), "check", "", `Shorthand for "--call=check"`)
|
||||
flags.Lookup("check").NoOptDefVal = "true"
|
||||
|
||||
flags.BoolVar(&options.listTargets, "list-targets", false, "List available targets")
|
||||
cobrautil.MarkFlagsExperimental(flags, "list-targets")
|
||||
flags.MarkHidden("list-targets")
|
||||
|
||||
flags.BoolVar(&options.listVars, "list-variables", false, "List defined variables")
|
||||
cobrautil.MarkFlagsExperimental(flags, "list-variables")
|
||||
flags.MarkHidden("list-variables")
|
||||
|
||||
commonBuildFlags(&cFlags, flags)
|
||||
|
||||
@@ -331,3 +518,75 @@ func readBakeFiles(ctx context.Context, nodes []builder.Node, url string, names
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func printVars(w io.Writer, vars []*hclparser.Variable) error {
|
||||
slices.SortFunc(vars, func(a, b *hclparser.Variable) int {
|
||||
return cmp.Compare(a.Name, b.Name)
|
||||
})
|
||||
tw := tabwriter.NewWriter(w, 1, 8, 1, '\t', 0)
|
||||
defer tw.Flush()
|
||||
|
||||
tw.Write([]byte("VARIABLE\tVALUE\tDESCRIPTION\n"))
|
||||
|
||||
for _, v := range vars {
|
||||
var value string
|
||||
if v.Value != nil {
|
||||
value = *v.Value
|
||||
} else {
|
||||
value = "<null>"
|
||||
}
|
||||
fmt.Fprintf(tw, "%s\t%s\t%s\n", v.Name, value, v.Description)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func printTargetList(w io.Writer, cfg *bake.Config) error {
|
||||
tw := tabwriter.NewWriter(w, 1, 8, 1, '\t', 0)
|
||||
defer tw.Flush()
|
||||
|
||||
tw.Write([]byte("TARGET\tDESCRIPTION\n"))
|
||||
|
||||
type targetOrGroup struct {
|
||||
name string
|
||||
target *bake.Target
|
||||
group *bake.Group
|
||||
}
|
||||
|
||||
list := make([]targetOrGroup, 0, len(cfg.Targets)+len(cfg.Groups))
|
||||
for _, tgt := range cfg.Targets {
|
||||
list = append(list, targetOrGroup{name: tgt.Name, target: tgt})
|
||||
}
|
||||
for _, grp := range cfg.Groups {
|
||||
list = append(list, targetOrGroup{name: grp.Name, group: grp})
|
||||
}
|
||||
|
||||
slices.SortFunc(list, func(a, b targetOrGroup) int {
|
||||
return cmp.Compare(a.name, b.name)
|
||||
})
|
||||
|
||||
for _, tgt := range list {
|
||||
if strings.HasPrefix(tgt.name, "_") {
|
||||
// convention for a private target
|
||||
continue
|
||||
}
|
||||
var descr string
|
||||
if tgt.target != nil {
|
||||
descr = tgt.target.Description
|
||||
} else if tgt.group != nil {
|
||||
descr = tgt.group.Description
|
||||
|
||||
if len(tgt.group.Targets) > 0 {
|
||||
slices.Sort(tgt.group.Targets)
|
||||
names := strings.Join(tgt.group.Targets, ", ")
|
||||
if descr != "" {
|
||||
descr += " (" + names + ")"
|
||||
} else {
|
||||
descr = names
|
||||
}
|
||||
}
|
||||
}
|
||||
fmt.Fprintf(tw, "%s\t%s\n", tgt.name, descr)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
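The new `--list-targets` and `--list-variables` flags feed into the `printTargetList` and `printVars` helpers above. An illustrative invocation (the flags are registered as experimental and hidden, so they may only be visible with `BUILDX_EXPERIMENTAL=1`; this is an assumption, not part of the diff):

```console
$ BUILDX_EXPERIMENTAL=1 docker buildx bake --list-targets
$ BUILDX_EXPERIMENTAL=1 docker buildx bake --list-variables
```
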
@@ -5,12 +5,10 @@ import (
    "context"
    "crypto/sha256"
    "encoding/base64"
    "encoding/csv"
    "encoding/hex"
    "encoding/json"
    "fmt"
    "io"
    "log"
    "os"
    "path/filepath"
    "strconv"
@@ -39,7 +37,6 @@ import (
    "github.com/docker/buildx/util/osutil"
    "github.com/docker/buildx/util/progress"
    "github.com/docker/buildx/util/tracing"
    "github.com/docker/cli-docs-tool/annotation"
    "github.com/docker/cli/cli"
    "github.com/docker/cli/cli/command"
    dockeropts "github.com/docker/cli/opts"
@@ -59,6 +56,7 @@ import (
    "github.com/sirupsen/logrus"
    "github.com/spf13/cobra"
    "github.com/spf13/pflag"
    "github.com/tonistiigi/go-csvvalue"
    "go.opentelemetry.io/otel/attribute"
    "go.opentelemetry.io/otel/metric"
    "google.golang.org/grpc/codes"
@@ -206,7 +204,11 @@ func (o *buildOptions) toControllerOptions() (*controllerapi.BuildOptions, error
    return nil, err
}

opts.WithProvenanceResponse = opts.PrintFunc == nil && len(o.metadataFile) > 0
prm := confutil.MetadataProvenance()
if opts.PrintFunc != nil || len(o.metadataFile) == 0 {
    prm = confutil.MetadataProvenanceModeDisabled
}
opts.ProvenanceResponseMode = string(prm)

return &opts, nil
}
@@ -366,11 +368,17 @@ func runBuild(ctx context.Context, dockerCli command.Cli, options buildOptions)
    }
}
if opts.PrintFunc != nil {
    if err := printResult(opts.PrintFunc, resp.ExporterResponse); err != nil {
    if exitcode, err := printResult(dockerCli.Out(), opts.PrintFunc, resp.ExporterResponse); err != nil {
        return err
    } else if exitcode != 0 {
        os.Exit(exitcode)
    }
} else if options.metadataFile != "" {
    if err := writeMetadataFile(options.metadataFile, decodeExporterResponse(resp.ExporterResponse)); err != nil {
    dt := decodeExporterResponse(resp.ExporterResponse)
    if warnings := printer.Warnings(); len(warnings) > 0 && confutil.MetadataWarningsEnabled() {
        dt["buildx.build.warnings"] = warnings
    }
    if err := writeMetadataFile(options.metadataFile, dt); err != nil {
        return err
    }
}
@@ -522,9 +530,12 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions, debugConfig *debug.D

cmd := &cobra.Command{
    Use:     "build [OPTIONS] PATH | URL | -",
    Aliases: []string{"b"},
    Short:   "Start a build",
    Args:    cli.ExactArgs(1),
    Aliases: []string{"b"},
    Annotations: map[string]string{
        "aliases": "docker build, docker builder build, docker image build, docker buildx b",
    },
    RunE: func(cmd *cobra.Command, args []string) error {
        options.contextPath = args[0]
        options.builder = rootOpts.builder
@@ -563,7 +574,6 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions, debugConfig *debug.D
flags := cmd.Flags()

flags.StringSliceVar(&options.extraHosts, "add-host", []string{}, `Add a custom host-to-IP mapping (format: "host:ip")`)
flags.SetAnnotation("add-host", annotation.ExternalURL, []string{"https://docs.docker.com/reference/cli/docker/image/build/#add-host"})

flags.StringSliceVar(&options.allow, "allow", []string{}, `Allow extra privileged entitlement (e.g., "network.host", "security.insecure")`)

@@ -576,12 +586,10 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions, debugConfig *debug.D
flags.StringArrayVar(&options.cacheTo, "cache-to", []string{}, `Cache export destinations (e.g., "user/app:cache", "type=local,dest=path/to/dir")`)

flags.StringVar(&options.cgroupParent, "cgroup-parent", "", `Set the parent cgroup for the "RUN" instructions during build`)
flags.SetAnnotation("cgroup-parent", annotation.ExternalURL, []string{"https://docs.docker.com/reference/cli/docker/image/build/#cgroup-parent"})

flags.StringArrayVar(&options.contexts, "build-context", []string{}, "Additional build contexts (e.g., name=path)")

flags.StringVarP(&options.dockerfileName, "file", "f", "", `Name of the Dockerfile (default: "PATH/Dockerfile")`)
flags.SetAnnotation("file", annotation.ExternalURL, []string{"https://docs.docker.com/reference/cli/docker/image/build/#file"})

flags.StringVar(&options.imageIDFile, "iidfile", "", "Write the image ID to a file")

@@ -608,10 +616,8 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions, debugConfig *debug.D
flags.StringArrayVar(&options.ssh, "ssh", []string{}, `SSH agent socket or keys to expose to the build (format: "default|<id>[=<socket>|<key>[,<key>]]")`)

flags.StringArrayVarP(&options.tags, "tag", "t", []string{}, `Name and optionally a tag (format: "name:tag")`)
flags.SetAnnotation("tag", annotation.ExternalURL, []string{"https://docs.docker.com/reference/cli/docker/image/build/#tag"})

flags.StringVar(&options.target, "target", "", "Set the target build stage to build")
flags.SetAnnotation("target", annotation.ExternalURL, []string{"https://docs.docker.com/reference/cli/docker/image/build/#target"})

options.ulimits = dockeropts.NewUlimitOpt(nil)
flags.Var(options.ulimits, "ulimit", "Ulimit options")
@@ -629,7 +635,7 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions, debugConfig *debug.D
}

flags.StringVar(&options.printFunc, "call", "build", `Set method for evaluating build ("check", "outline", "targets")`)
flags.VarPF(callAlias(options, "check"), "check", "", `Shorthand for "--call=check"`)
flags.VarPF(callAlias(&options.printFunc, "check"), "check", "", `Shorthand for "--call=check"`)
flags.Lookup("check").NoOptDefVal = "true"

// hidden flags
@@ -829,7 +835,7 @@ func printWarnings(w io.Writer, warnings []client.VertexWarning, mode progressui
fmt.Fprintf(sb, "%d warnings found", len(warnings))
}
if logrus.GetLevel() < logrus.DebugLevel {
    fmt.Fprintf(sb, " (use --debug to expand)")
    fmt.Fprintf(sb, " (use docker --debug to expand)")
}
fmt.Fprintf(sb, ":\n")
fmt.Fprint(w, aec.Apply(sb.String(), aec.YellowF))
@@ -857,47 +863,78 @@ func printWarnings(w io.Writer, warnings []client.VertexWarning, mode progressui
}
}

func printResult(f *controllerapi.PrintFunc, res map[string]string) error {
func printResult(w io.Writer, f *controllerapi.PrintFunc, res map[string]string) (int, error) {
    switch f.Name {
    case "outline":
        return printValue(outline.PrintOutline, outline.SubrequestsOutlineDefinition.Version, f.Format, res)
        return 0, printValue(w, outline.PrintOutline, outline.SubrequestsOutlineDefinition.Version, f.Format, res)
    case "targets":
        return printValue(targets.PrintTargets, targets.SubrequestsTargetsDefinition.Version, f.Format, res)
        return 0, printValue(w, targets.PrintTargets, targets.SubrequestsTargetsDefinition.Version, f.Format, res)
    case "subrequests.describe":
        return printValue(subrequests.PrintDescribe, subrequests.SubrequestsDescribeDefinition.Version, f.Format, res)
        return 0, printValue(w, subrequests.PrintDescribe, subrequests.SubrequestsDescribeDefinition.Version, f.Format, res)
    case "lint":
        return printValue(lint.PrintLintViolations, lint.SubrequestLintDefinition.Version, f.Format, res)
        err := printValue(w, lint.PrintLintViolations, lint.SubrequestLintDefinition.Version, f.Format, res)
        if err != nil {
            return 0, err
        }

        lintResults := lint.LintResults{}
        if result, ok := res["result.json"]; ok {
            if err := json.Unmarshal([]byte(result), &lintResults); err != nil {
                return 0, err
            }
        }
        if lintResults.Error != nil {
            // Print the error message and the source
            // Normally, we would use `errdefs.WithSource` to attach the source to the
            // error and let the error be printed by the handling that's already in place,
            // but here we want to print the error in a way that's consistent with how
            // the lint warnings are printed via the `lint.PrintLintViolations` function,
            // which differs from the default error printing.
            if f.Format != "json" && len(lintResults.Warnings) > 0 {
                fmt.Fprintln(w)
            }
            lintBuf := bytes.NewBuffer([]byte(lintResults.Error.Message + "\n"))
            sourceInfo := lintResults.Sources[lintResults.Error.Location.SourceIndex]
            source := errdefs.Source{
                Info:   sourceInfo,
                Ranges: lintResults.Error.Location.Ranges,
            }
            source.Print(lintBuf)
            return 0, errors.New(lintBuf.String())
        } else if len(lintResults.Warnings) == 0 && f.Format != "json" {
            fmt.Fprintln(w, "Check complete, no warnings found.")
        }
    default:
        if dt, ok := res["result.json"]; ok && f.Format == "json" {
            fmt.Println(dt)
            fmt.Fprintln(w, dt)
        } else if dt, ok := res["result.txt"]; ok {
            fmt.Print(dt)
            fmt.Fprint(w, dt)
        } else {
            log.Printf("%s %+v", f, res)
            fmt.Fprintf(w, "%s %+v\n", f, res)
        }
    }
    if v, ok := res["result.statuscode"]; !f.IgnoreStatus && ok {
        if n, err := strconv.Atoi(v); err == nil && n != 0 {
            os.Exit(n)
            return n, nil
        }
    }
    return nil
    return 0, nil
}

type printFunc func([]byte, io.Writer) error

func printValue(printer printFunc, version string, format string, res map[string]string) error {
func printValue(w io.Writer, printer printFunc, version string, format string, res map[string]string) error {
    if format == "json" {
        fmt.Fprintln(os.Stdout, res["result.json"])
        fmt.Fprintln(w, res["result.json"])
        return nil
    }

    if res["version"] != "" && versions.LessThan(version, res["version"]) && res["result.txt"] != "" {
        // structure is too new and we don't know how to print it
        fmt.Fprint(os.Stdout, res["result.txt"])
        fmt.Fprint(w, res["result.txt"])
        return nil
    }
    return printer([]byte(res["result.json"]), os.Stdout)
    return printer([]byte(res["result.json"]), w)
}

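With `printResult` now returning the subrequest status code instead of calling `os.Exit` itself, the callers (`runBuild` here, and `runBake` above) decide when to terminate. A hedged sketch of the resulting CLI behavior, assuming a Dockerfile in the current directory that trips a check rule; the non-zero exit status shown is illustrative:

```console
$ docker buildx build --call=check .
$ echo $?
1
```
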
type invokeConfig struct {
@@ -947,9 +984,9 @@ func (cfg *invokeConfig) parseInvokeConfig(invoke, on string) error {
    return nil
}

csvReader := csv.NewReader(strings.NewReader(invoke))
csvReader.LazyQuotes = true
fields, err := csvReader.Read()
csvParser := csvvalue.NewParser()
csvParser.LazyQuotes = true
fields, err := csvParser.Fields(invoke, nil)
if err != nil {
    return err
}
@@ -1005,7 +1042,7 @@ func maybeJSONArray(v string) []string {
    return []string{v}
}

func callAlias(options *buildOptions, value string) cobrautil.BoolFuncValue {
func callAlias(target *string, value string) cobrautil.BoolFuncValue {
    return func(s string) error {
        v, err := strconv.ParseBool(s)
        if err != nil {
@@ -1013,7 +1050,7 @@ func callAlias(options *buildOptions, value string) cobrautil.BoolFuncValue {
        }

        if v {
            options.printFunc = value
            *target = value
        }
        return nil
    }
@@ -5,7 +5,7 @@ import (
    "net"
    "os"

    "github.com/containerd/containerd/platforms"
    "github.com/containerd/platforms"
    "github.com/docker/buildx/build"
    "github.com/docker/buildx/builder"
    "github.com/docker/buildx/util/progress"
@@ -9,6 +9,7 @@ import (

    "github.com/distribution/reference"
    "github.com/docker/buildx/builder"
    "github.com/docker/buildx/util/buildflags"
    "github.com/docker/buildx/util/cobrautil/completion"
    "github.com/docker/buildx/util/imagetools"
    "github.com/docker/buildx/util/progress"
@@ -154,7 +155,12 @@ func runCreate(ctx context.Context, dockerCli command.Cli, in createOptions, arg
    }
}

dt, desc, err := r.Combine(ctx, srcs, in.annotations, in.preferIndex)
annotations, err := buildflags.ParseAnnotations(in.annotations)
if err != nil {
    return errors.Wrapf(err, "failed to parse annotations")
}

dt, desc, err := r.Combine(ctx, srcs, annotations, in.preferIndex)
if err != nil {
    return err
}
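With annotations parsed through `buildflags.ParseAnnotations`, malformed values are rejected before the index is combined. An illustrative `imagetools create` call (image references and the annotation value are placeholders, not taken from the diff):

```console
$ docker buildx imagetools create \
    --annotation "index:org.opencontainers.image.authors=example" \
    --tag example/app:latest \
    example/app:amd64 example/app:arm64
```
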
@@ -21,7 +21,7 @@ import (
    "github.com/docker/cli/cli/command"
    "github.com/docker/cli/cli/config"
    dockeropts "github.com/docker/cli/opts"
    "github.com/docker/go-units"
    "github.com/docker/docker/api/types/container"
    "github.com/moby/buildkit/client"
    "github.com/moby/buildkit/session/auth/authprovider"
    "github.com/moby/buildkit/util/grpcerrors"
@@ -67,7 +67,7 @@ func RunBuild(ctx context.Context, dockerCli command.Cli, in controllerapi.Build
    Target:                 in.Target,
    Ulimits:                controllerUlimitOpt2DockerUlimit(in.Ulimits),
    GroupRef:               in.GroupRef,
    WithProvenanceResponse: in.WithProvenanceResponse,
    ProvenanceResponseMode: confutil.ParseMetadataProvenance(in.ProvenanceResponseMode),
}

platforms, err := platformutil.Parse(in.Platforms)
@@ -136,8 +136,9 @@ func RunBuild(ctx context.Context, dockerCli command.Cli, in controllerapi.Build

annotations, err := buildflags.ParseAnnotations(in.Annotations)
if err != nil {
    return nil, nil, err
    return nil, nil, errors.Wrap(err, "parse annotations")
}

for _, o := range outputs {
    for k, v := range annotations {
        o.Attrs[k.String()] = v
@@ -270,9 +271,9 @@ func controllerUlimitOpt2DockerUlimit(u *controllerapi.UlimitOpt) *dockeropts.Ul
if u == nil {
    return nil
}
values := make(map[string]*units.Ulimit)
values := make(map[string]*container.Ulimit)
for k, v := range u.Values {
    values[k] = &units.Ulimit{
    values[k] = &container.Ulimit{
        Name: v.Name,
        Hard: v.Hard,
        Soft: v.Soft,
@@ -302,7 +302,7 @@ type BuildOptions struct {
    Ref                    string   `protobuf:"bytes,29,opt,name=Ref,proto3" json:"Ref,omitempty"`
    GroupRef               string   `protobuf:"bytes,30,opt,name=GroupRef,proto3" json:"GroupRef,omitempty"`
    Annotations            []string `protobuf:"bytes,31,rep,name=Annotations,proto3" json:"Annotations,omitempty"`
    WithProvenanceResponse bool     `protobuf:"varint,32,opt,name=WithProvenanceResponse,proto3" json:"WithProvenanceResponse,omitempty"`
    ProvenanceResponseMode string   `protobuf:"bytes,32,opt,name=ProvenanceResponseMode,proto3" json:"ProvenanceResponseMode,omitempty"`
    XXX_NoUnkeyedLiteral   struct{} `json:"-"`
    XXX_unrecognized       []byte   `json:"-"`
    XXX_sizecache          int32    `json:"-"`
@@ -549,11 +549,11 @@ func (m *BuildOptions) GetAnnotations() []string {
    return nil
}

func (m *BuildOptions) GetWithProvenanceResponse() bool {
func (m *BuildOptions) GetProvenanceResponseMode() string {
    if m != nil {
        return m.WithProvenanceResponse
        return m.ProvenanceResponseMode
    }
    return false
    return ""
}

type ExportEntry struct {
@@ -2094,130 +2094,130 @@ func init() {
func init() { proto.RegisterFile("controller.proto", fileDescriptor_ed7f10298fa1d90f) }

var fileDescriptor_ed7f10298fa1d90f = []byte{
    // 1960 bytes of a gzipped FileDescriptorProto
    // [old gzipped descriptor bytes omitted]
    // 1957 bytes of a gzipped FileDescriptorProto
    // [new gzipped descriptor bytes omitted]
}

// Reference imports to suppress errors if they are not otherwise used.

@@ -80,7 +80,7 @@ message BuildOptions {
    string Ref = 29;
    string GroupRef = 30;
    repeated string Annotations = 31;
    bool WithProvenanceResponse = 32;
    string ProvenanceResponseMode = 32;
}

message ExportEntry {

@@ -7,6 +7,9 @@ variable "DOCS_FORMATS" {
variable "DESTDIR" {
  default = "./bin"
}
variable "TEST_COVERAGE" {
  default = null
}
variable "GOLANGCI_LINT_MULTIPLATFORM" {
  default = ""
}
@@ -192,6 +195,7 @@ variable "TEST_BUILDKIT_TAG" {
target "integration-test-base" {
  inherits = ["_common"]
  args = {
    GO_EXTRA_FLAGS = TEST_COVERAGE == "1" ? "-cover" : null
    HTTP_PROXY = HTTP_PROXY
    HTTPS_PROXY = HTTPS_PROXY
    NO_PROXY = NO_PROXY
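The new `TEST_COVERAGE` variable gates the `-cover` flag. Assuming it follows the usual bake convention of being overridable from the environment, coverage instrumentation can be enabled per invocation against this repository's own bake file (illustrative command):

```console
$ TEST_COVERAGE=1 docker buildx bake integration-test-base
```
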
@@ -16,15 +16,17 @@ Build from a file
| Name | Type | Default | Description |
|:---|:---|:---|:---|
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
| [`--call`](#call) | `string` | `build` | Set method for evaluating build (`check`, `outline`, `targets`) |
| [`--check`](#check) | `bool` | | Shorthand for `--call=check` |
| [`-f`](#file), [`--file`](#file) | `stringArray` | | Build definition file |
| `--load` | | | Shorthand for `--set=*.output=type=docker` |
| `--load` | `bool` | | Shorthand for `--set=*.output=type=docker` |
| [`--metadata-file`](#metadata-file) | `string` | | Write build result metadata to a file |
| [`--no-cache`](#no-cache) | | | Do not use cache when building the image |
| [`--print`](#print) | | | Print the options without building |
| [`--no-cache`](#no-cache) | `bool` | | Do not use cache when building the image |
| [`--print`](#print) | `bool` | | Print the options without building |
| [`--progress`](#progress) | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`, `rawjson`). Use plain to show container output |
| [`--provenance`](#provenance) | `string` | | Shorthand for `--set=*.attest=type=provenance` |
| [`--pull`](#pull) | | | Always attempt to pull all referenced images |
| `--push` | | | Shorthand for `--set=*.output=type=registry` |
| [`--pull`](#pull) | `bool` | | Always attempt to pull all referenced images |
| `--push` | `bool` | | Shorthand for `--set=*.output=type=registry` |
| [`--sbom`](#sbom) | `string` | | Shorthand for `--set=*.attest=type=sbom` |
| [`--set`](#set) | `stringArray` | | Override target value (e.g., `targetpattern.key=value`) |

@@ -51,6 +53,14 @@ guide for introduction to writing bake files.

Same as [`buildx --builder`](buildx.md#builder).

### <a name="call"></a> Invoke a frontend method (--call)

Same as [`build --call`](buildx_build.md#call).

#### <a name="check"></a> Call: check (--check)

Same as [`build --check`](buildx_build.md#check).

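For example, running the checks across every target in the bake definition (illustrative invocation):

```console
$ docker buildx bake --check
```
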
### <a name="file"></a> Specify a build definition file (-f, --file)

Use the `-f` / `--file` option to specify the build definition file to use.
@@ -119,6 +129,7 @@ $ cat metadata.json

```json
{
  "buildx.build.warnings": {},
  "db": {
    "buildx.build.provenance": {},
    "buildx.build.ref": "mybuilder/mybuilder0/0fjb6ubs52xx3vygf6fgdl611",
@@ -161,6 +172,12 @@ $ cat metadata.json
> * `max` sets full provenance.
> * `disabled`, `false` or `0` does not set any provenance.

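A minimal sketch of selecting the provenance level from the environment, assuming the `BUILDX_METADATA_PROVENANCE` variable that this note refers to (the variable name is not shown in the excerpt above):

```console
$ BUILDX_METADATA_PROVENANCE=max docker buildx bake --metadata-file metadata.json
```
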
> **Note**
>
> Build warnings (`buildx.build.warnings`) are not included by default. Set the
> `BUILDX_METADATA_WARNINGS` environment variable to `1` or `true` to
> include them.

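For example (illustrative invocation):

```console
$ BUILDX_METADATA_WARNINGS=1 docker buildx bake --metadata-file metadata.json
$ cat metadata.json
```
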
### <a name="no-cache"></a> Don't use cache when building the image (--no-cache)

Same as `build --no-cache`. Don't use cache when building the image.

@@ -9,49 +9,49 @@ Start a build
|
||||
|
||||
### Aliases
|
||||
|
||||
`docker buildx build`, `docker buildx b`
|
||||
`docker build`, `docker builder build`, `docker image build`, `docker buildx b`
|
||||
|
||||
### Options
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
|:---------------------------------------------------------------------------------------------------------------------------------------------------|:--------------|:----------|:----------------------------------------------------------------------------------------------------|
|
||||
| [`--add-host`](https://docs.docker.com/reference/cli/docker/image/build/#add-host) | `stringSlice` | | Add a custom host-to-IP mapping (format: `host:ip`) |
|
||||
| [`--allow`](#allow) | `stringSlice` | | Allow extra privileged entitlement (e.g., `network.host`, `security.insecure`) |
|
||||
| [`--annotation`](#annotation) | `stringArray` | | Add annotation to the image |
|
||||
| [`--attest`](#attest) | `stringArray` | | Attestation parameters (format: `type=sbom,generator=image`) |
|
||||
| [`--build-arg`](#build-arg) | `stringArray` | | Set build-time variables |
|
||||
| [`--build-context`](#build-context) | `stringArray` | | Additional build contexts (e.g., name=path) |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| [`--cache-from`](#cache-from) | `stringArray` | | External cache sources (e.g., `user/app:cache`, `type=local,src=path/to/dir`) |
|
||||
| [`--cache-to`](#cache-to) | `stringArray` | | Cache export destinations (e.g., `user/app:cache`, `type=local,dest=path/to/dir`) |
|
||||
| `--call` | `string` | `build` | Set method for evaluating build (`check`, `outline`, `targets`) |
|
||||
| [`--cgroup-parent`](https://docs.docker.com/reference/cli/docker/image/build/#cgroup-parent) | `string` | | Set the parent cgroup for the `RUN` instructions during build |
|
||||
| `--check` | | | Shorthand for `--call=check` |
|
||||
| `--detach` | | | Detach buildx server (supported only on linux) (EXPERIMENTAL) |
|
||||
| [`-f`](https://docs.docker.com/reference/cli/docker/image/build/#file), [`--file`](https://docs.docker.com/reference/cli/docker/image/build/#file) | `string` | | Name of the Dockerfile (default: `PATH/Dockerfile`) |
|
||||
| `--iidfile` | `string` | | Write the image ID to a file |
|
||||
| `--label` | `stringArray` | | Set metadata for an image |
|
||||
| [`--load`](#load) | | | Shorthand for `--output=type=docker` |
|
||||
| [`--metadata-file`](#metadata-file) | `string` | | Write build result metadata to a file |
|
||||
| `--network` | `string` | `default` | Set the networking mode for the `RUN` instructions during build |
|
||||
| `--no-cache` | | | Do not use cache when building the image |
|
||||
| [`--no-cache-filter`](#no-cache-filter) | `stringArray` | | Do not cache specified stages |
|
||||
| [`-o`](#output), [`--output`](#output) | `stringArray` | | Output destination (format: `type=local,dest=path`) |
|
||||
| [`--platform`](#platform) | `stringArray` | | Set target platform for build |
|
||||
| [`--progress`](#progress) | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`, `rawjson`). Use plain to show container output |
|
||||
| [`--provenance`](#provenance) | `string` | | Shorthand for `--attest=type=provenance` |
|
||||
| `--pull` | | | Always attempt to pull all referenced images |
|
||||
| [`--push`](#push) | | | Shorthand for `--output=type=registry` |
|
||||
| `-q`, `--quiet` | | | Suppress the build output and print image ID on success |
|
||||
| `--root` | `string` | | Specify root directory of server to connect (EXPERIMENTAL) |
|
||||
| [`--sbom`](#sbom) | `string` | | Shorthand for `--attest=type=sbom` |
|
||||
| [`--secret`](#secret) | `stringArray` | | Secret to expose to the build (format: `id=mysecret[,src=/local/secret]`) |
|
||||
| `--server-config` | `string` | | Specify buildx server config file (used only when launching new server) (EXPERIMENTAL) |
|
||||
| [`--shm-size`](#shm-size) | `bytes` | `0` | Shared memory size for build containers |
|
||||
| [`--ssh`](#ssh) | `stringArray` | | SSH agent socket or keys to expose to the build (format: `default\|<id>[=<socket>\|<key>[,<key>]]`) |
|
||||
| [`-t`](https://docs.docker.com/reference/cli/docker/image/build/#tag), [`--tag`](https://docs.docker.com/reference/cli/docker/image/build/#tag) | `stringArray` | | Name and optionally a tag (format: `name:tag`) |
|
||||
| [`--target`](https://docs.docker.com/reference/cli/docker/image/build/#target) | `string` | | Set the target build stage to build |
|
||||
| [`--ulimit`](#ulimit) | `ulimit` | | Ulimit options |
|
||||
| Name | Type | Default | Description |
|
||||
|:----------------------------------------|:--------------|:----------|:----------------------------------------------------------------------------------------------------|
|
||||
| [`--add-host`](#add-host) | `stringSlice` | | Add a custom host-to-IP mapping (format: `host:ip`) |
|
||||
| [`--allow`](#allow) | `stringSlice` | | Allow extra privileged entitlement (e.g., `network.host`, `security.insecure`) |
|
||||
| [`--annotation`](#annotation) | `stringArray` | | Add annotation to the image |
|
||||
| [`--attest`](#attest) | `stringArray` | | Attestation parameters (format: `type=sbom,generator=image`) |
|
||||
| [`--build-arg`](#build-arg) | `stringArray` | | Set build-time variables |
|
||||
| [`--build-context`](#build-context) | `stringArray` | | Additional build contexts (e.g., name=path) |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| [`--cache-from`](#cache-from) | `stringArray` | | External cache sources (e.g., `user/app:cache`, `type=local,src=path/to/dir`) |
|
||||
| [`--cache-to`](#cache-to) | `stringArray` | | Cache export destinations (e.g., `user/app:cache`, `type=local,dest=path/to/dir`) |
|
||||
| [`--call`](#call) | `string` | `build` | Set method for evaluating build (`check`, `outline`, `targets`) |
|
||||
| [`--cgroup-parent`](#cgroup-parent) | `string` | | Set the parent cgroup for the `RUN` instructions during build |
|
||||
| [`--check`](#check) | `bool` | | Shorthand for `--call=check` |
|
||||
| `--detach` | `bool` | | Detach buildx server (supported only on linux) (EXPERIMENTAL) |
|
||||
| [`-f`](#file), [`--file`](#file) | `string` | | Name of the Dockerfile (default: `PATH/Dockerfile`) |
|
||||
| `--iidfile` | `string` | | Write the image ID to a file |
|
||||
| `--label` | `stringArray` | | Set metadata for an image |
|
||||
| [`--load`](#load) | `bool` | | Shorthand for `--output=type=docker` |
|
||||
| [`--metadata-file`](#metadata-file) | `string` | | Write build result metadata to a file |
|
||||
| [`--network`](#network) | `string` | `default` | Set the networking mode for the `RUN` instructions during build |
|
||||
| `--no-cache` | `bool` | | Do not use cache when building the image |
|
||||
| [`--no-cache-filter`](#no-cache-filter) | `stringArray` | | Do not cache specified stages |
|
||||
| [`-o`](#output), [`--output`](#output) | `stringArray` | | Output destination (format: `type=local,dest=path`) |
|
||||
| [`--platform`](#platform) | `stringArray` | | Set target platform for build |
|
||||
| [`--progress`](#progress) | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`, `rawjson`). Use plain to show container output |
|
||||
| [`--provenance`](#provenance) | `string` | | Shorthand for `--attest=type=provenance` |
|
||||
| `--pull` | `bool` | | Always attempt to pull all referenced images |
|
||||
| [`--push`](#push) | `bool` | | Shorthand for `--output=type=registry` |
|
||||
| `-q`, `--quiet` | `bool` | | Suppress the build output and print image ID on success |
|
||||
| `--root` | `string` | | Specify root directory of server to connect (EXPERIMENTAL) |
|
||||
| [`--sbom`](#sbom) | `string` | | Shorthand for `--attest=type=sbom` |
|
||||
| [`--secret`](#secret) | `stringArray` | | Secret to expose to the build (format: `id=mysecret[,src=/local/secret]`) |
|
||||
| `--server-config` | `string` | | Specify buildx server config file (used only when launching new server) (EXPERIMENTAL) |
|
||||
| [`--shm-size`](#shm-size) | `bytes` | `0` | Shared memory size for build containers |
|
||||
| [`--ssh`](#ssh) | `stringArray` | | SSH agent socket or keys to expose to the build (format: `default\|<id>[=<socket>\|<key>[,<key>]]`) |
|
||||
| [`-t`](#tag), [`--tag`](#tag) | `stringArray` | | Name and optionally a tag (format: `name:tag`) |
|
||||
| [`--target`](#target) | `string` | | Set the target build stage to build |
|
||||
| [`--ulimit`](#ulimit) | `ulimit` | | Ulimit options |
<!---MARKER_GEN_END-->
|
||||
@@ -61,15 +61,36 @@ Flags marked with `[experimental]` need to be explicitly enabled by setting the
|
||||
|
||||
## Description
|
||||
|
||||
The `buildx build` command starts a build using BuildKit. This command is similar
|
||||
to the UI of `docker build` command and takes the same flags and arguments.
|
||||
|
||||
For documentation on most of these flags, refer to the [`docker build`
|
||||
documentation](https://docs.docker.com/reference/cli/docker/image/build/).
|
||||
This page describes a subset of the new flags.
|
||||
The `docker buildx build` command starts a build using BuildKit.
|
||||
|
||||
## Examples
|
||||
|
||||
### <a name="add-host"></a> Add entries to container hosts file (--add-host)
|
||||
|
||||
You can add other hosts into a build container's `/etc/hosts` file by using one
|
||||
or more `--add-host` flags. This example adds static addresses for hosts named
|
||||
`my-hostname` and `my_hostname_v6`:
|
||||
|
||||
```console
|
||||
$ docker buildx build --add-host my_hostname=8.8.8.8 --add-host my_hostname_v6=2001:4860:4860::8888 .
|
||||
```
|
||||
|
||||
If you need your build to connect to services running on the host, you can use
|
||||
the special `host-gateway` value for `--add-host`. In the following example,
|
||||
build containers resolve `host.docker.internal` to the host's gateway IP.
|
||||
|
||||
```console
|
||||
$ docker buildx build --add-host host.docker.internal=host-gateway .
|
||||
```
|
||||
|
||||
You can wrap an IPv6 address in square brackets.
|
||||
`=` and `:` are both valid separators.
|
||||
Both formats in the following example are valid:
|
||||
|
||||
```console
|
||||
$ docker buildx build --add-host my-hostname:10.180.0.1 --add-host my-hostname_v6=[2001:4860:4860::8888] .
|
||||
```
|
||||
|
||||
### <a name="annotation"></a> Create annotations (--annotation)
|
||||
|
||||
```text
|
||||
@@ -165,7 +186,40 @@ $ docker buildx build --allow security.insecure .
|
||||
|
||||
### <a name="build-arg"></a> Set build-time variables (--build-arg)
|
||||
|
||||
Same as [`docker build` command](https://docs.docker.com/reference/cli/docker/image/build/#build-arg).
|
||||
You can use `ENV` instructions in a Dockerfile to define variable values. These
|
||||
values persist in the built image. Often persistence isn't what you want. Users
|
||||
want to specify variables differently depending on which host they build an
|
||||
image on.
|
||||
|
||||
A good example is `http_proxy` or source versions for pulling intermediate
|
||||
files. The `ARG` instruction lets Dockerfile authors define values that users
|
||||
can set at build-time using the `--build-arg` flag:
|
||||
|
||||
```console
|
||||
$ docker buildx build --build-arg HTTP_PROXY=http://10.20.30.2:1234 --build-arg FTP_PROXY=http://40.50.60.5:4567 .
|
||||
```
|
||||
|
||||
This flag allows you to pass the build-time variables that are
|
||||
accessed like regular environment variables in the `RUN` instruction of the
|
||||
Dockerfile. These values don't persist in the intermediate or final images
|
||||
like `ENV` values do. You must add `--build-arg` for each build argument.
|
||||
|
||||
Using this flag doesn't alter the output you see when the build process echoes the `ARG` lines from the
Dockerfile.
|
||||
|
||||
For detailed information on using `ARG` and `ENV` instructions, see the
|
||||
[Dockerfile reference](https://docs.docker.com/reference/dockerfile/).
|
||||
|
||||
You can also use the `--build-arg` flag without a value, in which case the daemon
|
||||
propagates the value from the local environment into the Docker container it's building:
|
||||
|
||||
```console
|
||||
$ export HTTP_PROXY=http://10.20.30.2:1234
|
||||
$ docker buildx build --build-arg HTTP_PROXY .
|
||||
```
|
||||
|
||||
This example is similar to how `docker run -e` works. Refer to the [`docker run` documentation](container_run.md#env)
|
||||
for more information.
|
||||
|
||||
There are also useful built-in build arguments, such as:
|
||||
|
||||
@@ -271,6 +325,167 @@ $ docker buildx build --cache-from=type=s3,region=eu-west-1,bucket=mybucket .
|
||||
|
||||
More info about cache exporters and available attributes: https://github.com/moby/buildkit#export-cache
|
||||
|
||||
### <a name="call"></a> Invoke a frontend method (--call)
|
||||
|
||||
```text
|
||||
--call=[build|check|outline|targets]
|
||||
```
|
||||
|
||||
BuildKit frontends can support alternative modes of executions for builds,
|
||||
using frontend methods. Frontend methods are a way to change or extend the
|
||||
behavior of a build invocation, which lets you, for example, inspect, validate,
|
||||
or generate alternative outputs from a build.
|
||||
|
||||
The `--call` flag for `docker buildx build` lets you specify the frontend
|
||||
method that you want to execute. If this flag is unspecified, it defaults to
|
||||
executing the build and evaluating [build checks](https://docs.docker.com/reference/build-checks/).
|
||||
|
||||
For Dockerfiles, the available methods are:
|
||||
|
||||
| Command | Description |
|
||||
| ------------------------------ | ------------------------------------------------------------------------------------------------------------------- |
|
||||
| `build` (default) | Execute the build and evaluate build checks for the current build target. |
|
||||
| `check`                        | Evaluate build checks for either the entire Dockerfile or the selected target, without executing a build.            |
|
||||
| `outline` | Show the build arguments that you can set for a target, and their default values. |
|
||||
| `targets` | List all the build targets in the Dockerfile. |
|
||||
| `subrequests.describe` | List all the frontend methods that the current frontend supports. |
|
||||
|
||||
Note that other frontends may implement these or other methods.
|
||||
To see the list of available methods for the frontend you're using,
|
||||
use `--call=subrequests.describe`.
|
||||
|
||||
```console
|
||||
$ docker buildx build -q --call=subrequests.describe .
|
||||
|
||||
NAME VERSION DESCRIPTION
|
||||
outline 1.0.0 List all parameters current build target supports
|
||||
targets 1.0.0 List all targets current build supports
|
||||
subrequests.describe 1.0.0 List available subrequest types
|
||||
```
|
||||
|
||||
#### Descriptions
|
||||
|
||||
The [`--call=targets`](#call-targets) and [`--call=outline`](#call-outline)
|
||||
methods include descriptions for build targets and arguments, if available.
|
||||
Descriptions are generated from comments in the Dockerfile. A comment on the
line before a `FROM` instruction becomes the description of a build target, and
a comment before an `ARG` instruction becomes the description of a build argument.
The comment must lead with the name of the stage or argument, for example:
|
||||
|
||||
```dockerfile
|
||||
# syntax=docker/dockerfile:1
|
||||
|
||||
# GO_VERSION sets the Go version for the build
|
||||
ARG GO_VERSION=1.22
|
||||
|
||||
# base-builder is the base stage for building the project
|
||||
FROM golang:${GO_VERSION} AS base-builder
|
||||
```
|
||||
|
||||
When you run `docker buildx build --call=outline`, the output includes the
|
||||
descriptions, as follows:
|
||||
|
||||
```console
|
||||
$ docker buildx build -q --call=outline .
|
||||
|
||||
TARGET: base-builder
|
||||
DESCRIPTION: is the base stage for building the project
|
||||
|
||||
BUILD ARG VALUE DESCRIPTION
|
||||
GO_VERSION 1.22 sets the Go version for the build
|
||||
```
|
||||
|
||||
For more examples on how to write Dockerfile docstrings,
|
||||
check out [the Dockerfile for Docker docs](https://github.com/docker/docs/blob/main/Dockerfile).
|
||||
|
||||
#### <a name="check"></a> Call: check (--check)
|
||||
|
||||
The `check` method evaluates build checks without executing the build. The
|
||||
`--check` flag is a convenient shorthand for `--call=check`. Use the `check`
|
||||
method to validate the build configuration before starting the build.
|
||||
|
||||
```console
|
||||
$ docker buildx build -q --check https://github.com/docker/docs.git
|
||||
|
||||
WARNING: InvalidBaseImagePlatform
|
||||
Base image wjdp/htmltest:v0.17.0 was pulled with platform "linux/amd64", expected "linux/arm64" for current build
|
||||
Dockerfile:43
|
||||
--------------------
|
||||
41 | "#content/desktop/previous-versions/*.md"
|
||||
42 |
|
||||
43 | >>> FROM wjdp/htmltest:v${HTMLTEST_VERSION} AS test
|
||||
44 | WORKDIR /test
|
||||
45 | COPY --from=build /out ./public
|
||||
--------------------
|
||||
```
|
||||
|
||||
Using `--check` without specifying a target evaluates the entire Dockerfile.
If you want to evaluate a specific target, use the `--target` flag.
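
For example, to evaluate checks only for a specific stage, combine the two flags. The following is an illustrative invocation that assumes the `release` target from the repository shown above:

```console
$ docker buildx build -q --check --target release https://github.com/docker/docs.git
```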
|
||||
|
||||
#### Call: outline
|
||||
|
||||
The `outline` method prints the name of the specified target (or the default
|
||||
target, if `--target` isn't specified), and the build arguments that the target
|
||||
consumes, along with their default values, if set.
|
||||
|
||||
The following example shows the default target `release` and its build arguments:
|
||||
|
||||
```console
|
||||
$ docker buildx build -q --call=outline https://github.com/docker/docs.git
|
||||
|
||||
TARGET: release
|
||||
DESCRIPTION: is an empty scratch image with only compiled assets
|
||||
|
||||
BUILD ARG VALUE DESCRIPTION
|
||||
GO_VERSION 1.22 sets the Go version for the base stage
|
||||
HUGO_VERSION 0.127.0
|
||||
HUGO_ENV sets the hugo.Environment (production, development, preview)
|
||||
DOCS_URL sets the base URL for the site
|
||||
PAGEFIND_VERSION 1.1.0
|
||||
```
|
||||
|
||||
This means that the `release` target is configurable using these build arguments:
|
||||
|
||||
```console
|
||||
$ docker buildx build \
|
||||
--build-arg GO_VERSION=1.22 \
|
||||
--build-arg HUGO_VERSION=0.127.0 \
|
||||
--build-arg HUGO_ENV=production \
|
||||
--build-arg DOCS_URL=https://example.com \
|
||||
--build-arg PAGEFIND_VERSION=1.1.0 \
|
||||
--target release https://github.com/docker/docs.git
|
||||
```
|
||||
|
||||
#### Call: targets
|
||||
|
||||
The `targets` method lists all the build targets in the Dockerfile. These are
|
||||
the stages that you can build using the `--target` flag. It also indicates the
|
||||
default target, which is the target that will be built when you don't specify a
|
||||
target.
|
||||
|
||||
```console
|
||||
$ docker buildx build -q --call=targets https://github.com/docker/docs.git
|
||||
|
||||
TARGET DESCRIPTION
|
||||
base is the base stage with build dependencies
|
||||
node installs Node.js dependencies
|
||||
hugo downloads and extracts the Hugo binary
|
||||
build-base is the base stage for building the site
|
||||
dev is for local development with Docker Compose
|
||||
build creates production builds with Hugo
|
||||
lint lints markdown files
|
||||
test validates HTML output and checks for broken links
|
||||
update-modules downloads and vendors Hugo modules
|
||||
vendor is an empty stage with only vendored Hugo modules
|
||||
build-upstream builds an upstream project with a replacement module
|
||||
validate-upstream validates HTML output for upstream builds
|
||||
unused-media checks for unused graphics and other media
|
||||
pagefind installs the Pagefind runtime
|
||||
index generates a Pagefind index
|
||||
test-go-redirects checks that the /go/ redirects are valid
|
||||
release (default) is an empty scratch image with only compiled assets
|
||||
```
|
||||
|
||||
### <a name="cache-to"></a> Export build cache to an external cache destination (--cache-to)
|
||||
|
||||
```text
|
||||
@@ -310,6 +525,27 @@ $ docker buildx build --cache-to=type=s3,region=eu-west-1,bucket=mybucket .
|
||||
|
||||
More info about cache exporters and available attributes: https://github.com/moby/buildkit#export-cache
|
||||
|
||||
### <a name="cgroup-parent"></a> Use a custom parent cgroup (--cgroup-parent)
|
||||
|
||||
When you run `docker buildx build` with the `--cgroup-parent` option,
|
||||
the daemon runs the containers used in the build with the
|
||||
[corresponding `docker run` flag](container_run.md#cgroup-parent).
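
A minimal sketch, assuming a cgroup named `/my-build-cgroup` already exists on the daemon host:

```console
$ docker buildx build --cgroup-parent /my-build-cgroup .
```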
|
||||
|
||||
### <a name="file"></a> Specify a Dockerfile (-f, --file)
|
||||
|
||||
```console
|
||||
$ docker buildx build -f <filepath> .
|
||||
```
|
||||
|
||||
Specifies the filepath of the Dockerfile to use.
|
||||
If unspecified, a file named `Dockerfile` at the root of the build context is used by default.
|
||||
|
||||
To read a Dockerfile from stdin, you can use `-` as the argument for `--file`.
|
||||
|
||||
```console
|
||||
$ cat Dockerfile | docker buildx build -f - .
|
||||
```
|
||||
|
||||
### <a name="load"></a> Load the single-platform build result to `docker images` (--load)
|
||||
|
||||
Shorthand for [`--output=type=docker`](#docker). Will automatically load the
|
||||
@@ -330,6 +566,7 @@ $ cat metadata.json
|
||||
{
|
||||
"buildx.build.provenance": {},
|
||||
"buildx.build.ref": "mybuilder/mybuilder0/0fjb6ubs52xx3vygf6fgdl611",
|
||||
"buildx.build.warnings": {},
|
||||
"containerimage.config.digest": "sha256:2937f66a9722f7f4a2df583de2f8cb97fc9196059a410e7f00072fc918930e66",
|
||||
"containerimage.descriptor": {
|
||||
"annotations": {
|
||||
@@ -349,9 +586,26 @@ $ cat metadata.json
|
||||
> Build record [provenance](https://docs.docker.com/build/attestations/slsa-provenance/#provenance-attestation-example)
|
||||
> (`buildx.build.provenance`) includes minimal provenance by default. Set the
|
||||
> `BUILDX_METADATA_PROVENANCE` environment variable to customize this behavior:
|
||||
> * `min` sets minimal provenance (default).
|
||||
> * `max` sets full provenance.
|
||||
> * `disabled`, `false` or `0` does not set any provenance.
|
||||
>
|
||||
> - `min` sets minimal provenance (default).
|
||||
> - `max` sets full provenance.
|
||||
> - `disabled`, `false` or `0` doesn't set any provenance.
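
For example, to record full provenance in the metadata file (illustrative invocation):

```console
$ BUILDX_METADATA_PROVENANCE=max docker buildx build --metadata-file metadata.json .
```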
|
||||
|
||||
### <a name="network"></a> Set the networking mode for the RUN instructions during build (--network)
|
||||
|
||||
Available options for the networking mode are:
|
||||
|
||||
- `default` (default): Run in the default network.
|
||||
- `none`: Run with no network access.
|
||||
- `host`: Run in the host’s network environment.
|
||||
|
||||
Find more details in the [Dockerfile reference](https://docs.docker.com/reference/dockerfile/#run---network).
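
For example, to run the build's `RUN` instructions without network access (illustrative invocation):

```console
$ docker buildx build --network=none .
```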
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> Build warnings (`buildx.build.warnings`) are not included by default. Set the
|
||||
> `BUILDX_METADATA_WARNINGS` environment variable to `1` or `true` to
|
||||
> include them.
|
||||
|
||||
### <a name="no-cache-filter"></a> Ignore build cache for specific stages (--no-cache-filter)
|
||||
|
||||
@@ -414,17 +668,19 @@ The arguments for the `--no-cache-filter` flag must be names of stages.
|
||||
-o, --output=[PATH,-,type=TYPE[,KEY=VALUE]
|
||||
```
|
||||
|
||||
Sets the export action for the build result. In `docker build` all builds finish
|
||||
by creating a container image and exporting it to `docker images`. `buildx` makes
|
||||
this step configurable allowing results to be exported directly to the client,
|
||||
OCI image tarballs, registry etc.
|
||||
Sets the export action for the build result. The default output, when using the
`docker` [build driver](https://docs.docker.com/build/drivers/), is a container
image exported to the local image store. The `--output` flag makes this step
configurable, allowing export of results directly to the client's filesystem, an
OCI image tarball, a registry, and more.
|
||||
|
||||
Buildx with `docker` driver currently only supports local, tarball exporter and
|
||||
image exporter. `docker-container` driver supports all the exporters.
|
||||
Buildx with `docker` driver only supports the local, tarball, and image
|
||||
[exporters](https://docs.docker.com/build/exporters/). The `docker-container`
|
||||
driver supports all exporters.
|
||||
|
||||
If just the path is specified as a value, `buildx` will use the local exporter
|
||||
with this path as the destination. If the value is "-", `buildx` will use `tar`
|
||||
exporter and write to `stdout`.
|
||||
If you only specify a filepath as the argument to `--output`, Buildx uses the
|
||||
local exporter. If the value is `-`, Buildx uses the `tar` exporter and writes
|
||||
the output to stdout.
|
||||
|
||||
```console
|
||||
$ docker buildx build -o . .
|
||||
@@ -435,12 +691,17 @@ $ docker buildx build -o type=docker,dest=- . > myimage.tar
|
||||
$ docker buildx build -t tonistiigi/foo -o type=registry
|
||||
```
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> Since BuildKit v0.13.0 multiple outputs can be specified by repeating the flag.
|
||||
You can export multiple outputs by repeating the flag.
|
||||
|
||||
Supported exported types are:
|
||||
|
||||
- [`local`](#local)
|
||||
- [`tar`](#tar)
|
||||
- [`oci`](#oci)
|
||||
- [`docker`](#docker)
|
||||
- [`image`](#image)
|
||||
- [`registry`](#registry)
|
||||
|
||||
#### `local`
|
||||
|
||||
The `local` export type writes all result files to a directory on the client. The
|
||||
@@ -451,6 +712,9 @@ Attribute key:
|
||||
|
||||
- `dest` - destination directory where files will be written
|
||||
|
||||
For more information, see
|
||||
[Local and tar exporters](https://docs.docker.com/build/exporters/local-tar/).
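
For example, the following writes the build result to a local directory; `./build-output` is an illustrative path:

```console
$ docker buildx build --output type=local,dest=./build-output .
```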
|
||||
|
||||
#### `tar`
|
||||
|
||||
The `tar` export type writes all result files as a single tarball on the client.
|
||||
@@ -460,6 +724,9 @@ Attribute key:
|
||||
|
||||
- `dest` - destination path where tarball will be written. “-” writes to stdout.
|
||||
|
||||
For more information, see
|
||||
[Local and tar exporters](https://docs.docker.com/build/exporters/local-tar/).
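
For example, to write the result as a single tarball (the destination path is illustrative):

```console
$ docker buildx build --output type=tar,dest=result.tar .
```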
|
||||
|
||||
#### `oci`
|
||||
|
||||
The `oci` export type writes the result image or manifest list as an [OCI image
|
||||
@@ -470,6 +737,9 @@ Attribute key:
|
||||
|
||||
- `dest` - destination path where tarball will be written. “-” writes to stdout.
|
||||
|
||||
For more information, see
|
||||
[OCI and Docker exporters](https://docs.docker.com/build/exporters/oci-docker/).
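
For example, assuming a builder that supports the OCI exporter, such as one using the `docker-container` driver (the destination path is illustrative):

```console
$ docker buildx build --output type=oci,dest=image-oci.tar .
```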
|
||||
|
||||
#### `docker`
|
||||
|
||||
The `docker` export type writes the single-platform result image as a [Docker image
|
||||
@@ -486,6 +756,9 @@ Attribute keys:
|
||||
the tar will be loaded automatically to the local image store.
|
||||
- `context` - name for the Docker context where to import the result
|
||||
|
||||
For more information, see
|
||||
[OCI and Docker exporters](https://docs.docker.com/build/exporters/oci-docker/).
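
For example, the following loads the single-platform result into the local image store; the tag is illustrative:

```console
$ docker buildx build -t myimage:latest --output type=docker .
```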
|
||||
|
||||
#### `image`
|
||||
|
||||
The `image` exporter writes the build result as an image or a manifest list. When
|
||||
@@ -497,10 +770,16 @@ Attribute keys:
|
||||
- `name` - name (references) for the new image.
|
||||
- `push` - Boolean to automatically push the image.
|
||||
|
||||
For more information, see
|
||||
[Image and registry exporters](https://docs.docker.com/build/exporters/image-registry/).
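
For example, using the `name` and `push` attribute keys described above; the registry and image name are illustrative:

```console
$ docker buildx build --output type=image,name=registry.example.com/myapp:latest,push=true .
```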
|
||||
|
||||
#### `registry`
|
||||
|
||||
The `registry` exporter is a shortcut for `type=image,push=true`.
|
||||
|
||||
For more information, see
|
||||
[Image and registry exporters](https://docs.docker.com/build/exporters/image-registry/).
|
||||
|
||||
### <a name="platform"></a> Set the target platforms for the build (--platform)
|
||||
|
||||
```text
|
||||
@@ -527,13 +806,12 @@ support for the specified platform. In a clean setup, you can only execute `RUN`
|
||||
commands for your system architecture.
|
||||
If your kernel supports [`binfmt_misc`](https://en.wikipedia.org/wiki/Binfmt_misc)
|
||||
launchers for secondary architectures, buildx will pick them up automatically.
|
||||
Docker desktop releases come with `binfmt_misc` automatically configured for `arm64`
|
||||
Docker Desktop releases come with `binfmt_misc` automatically configured for `arm64`
|
||||
and `arm` architectures. You can see what runtime platforms your current builder
|
||||
instance supports by running `docker buildx inspect --bootstrap`.
|
||||
|
||||
Inside a `Dockerfile`, you can access the current platform value through
|
||||
`TARGETPLATFORM` build argument. Refer to the [`docker build`
|
||||
documentation](https://docs.docker.com/reference/dockerfile/#automatic-platform-args-in-the-global-scope)
|
||||
`TARGETPLATFORM` build argument. Refer to the [Dockerfile reference](https://docs.docker.com/reference/dockerfile/#automatic-platform-args-in-the-global-scope)
|
||||
for the full description of automatic platform argument variants.
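
A minimal illustrative Dockerfile that consumes the automatic platform arguments (the base image and stage name are placeholders):

```dockerfile
# syntax=docker/dockerfile:1
FROM --platform=$BUILDPLATFORM alpine AS report
# Automatic platform ARGs must be redeclared to be visible inside the stage
ARG BUILDPLATFORM
ARG TARGETPLATFORM
RUN echo "building on $BUILDPLATFORM, targeting $TARGETPLATFORM"
```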
|
||||
|
||||
You can find the formatting definition for the platform specifier in the
|
||||
@@ -726,6 +1004,46 @@ $ ssh-add ~/.ssh/id_rsa
|
||||
$ docker buildx build --ssh default=$SSH_AUTH_SOCK .
|
||||
```
|
||||
|
||||
### <a name="tag"></a> Tag an image (-t, --tag)
|
||||
|
||||
```console
|
||||
$ docker buildx build -t docker/apache:2.0 .
|
||||
```
|
||||
|
||||
This example builds in the same way as the previous example, but it then tags the resulting
image. The repository name will be `docker/apache` and the tag `2.0`.
|
||||
|
||||
[Read more about valid tags](https://docs.docker.com/reference/cli/docker/image/tag/).
|
||||
|
||||
You can apply multiple tags to an image. For example, you can apply the `latest`
|
||||
tag to a newly built image and add another tag that references a specific
|
||||
version.
|
||||
|
||||
For example, to tag an image both as `docker/fedora-jboss:latest` and
|
||||
`docker/fedora-jboss:v2.1`, use the following:
|
||||
|
||||
```console
|
||||
$ docker buildx build -t docker/fedora-jboss:latest -t docker/fedora-jboss:v2.1 .
|
||||
```
|
||||
|
||||
### <a name="target"></a> Specifying target build stage (--target)
|
||||
|
||||
When building a Dockerfile with multiple build stages, use the `--target`
|
||||
option to specify an intermediate build stage by name as a final stage for the
|
||||
resulting image. The builder skips commands after the target stage.
|
||||
|
||||
```dockerfile
|
||||
FROM debian AS build-env
|
||||
# ...
|
||||
|
||||
FROM alpine AS production-env
|
||||
# ...
|
||||
```
|
||||
|
||||
```console
|
||||
$ docker buildx build -t mybuildimage --target build-env .
|
||||
```
|
||||
|
||||
### <a name="ulimit"></a> Set ulimits (--ulimit)
|
||||
|
||||
`--ulimit` overrides the default ulimits of build's containers when using `RUN`
|
||||
|
@@ -11,17 +11,17 @@ Create a new builder instance
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
|:------------------------------------------|:--------------|:--------|:----------------------------------------------------------------------|
|
||||
| [`--append`](#append) | | | Append a node to builder instead of changing it |
|
||||
| `--bootstrap` | | | Boot builder after creation |
|
||||
| [`--append`](#append) | `bool` | | Append a node to builder instead of changing it |
|
||||
| `--bootstrap` | `bool` | | Boot builder after creation |
|
||||
| [`--buildkitd-config`](#buildkitd-config) | `string` | | BuildKit daemon config file |
|
||||
| [`--buildkitd-flags`](#buildkitd-flags) | `string` | | BuildKit daemon flags |
|
||||
| [`--driver`](#driver) | `string` | | Driver to use (available: `docker-container`, `kubernetes`, `remote`) |
|
||||
| [`--driver-opt`](#driver-opt) | `stringArray` | | Options for the driver |
|
||||
| [`--leave`](#leave) | | | Remove a node from builder instead of changing it |
|
||||
| [`--leave`](#leave) | `bool` | | Remove a node from builder instead of changing it |
|
||||
| [`--name`](#name) | `string` | | Builder instance name |
|
||||
| [`--node`](#node) | `string` | | Create/modify node with given name |
|
||||
| [`--platform`](#platform) | `stringArray` | | Fixed platforms for current node |
|
||||
| [`--use`](#use) | | | Set the current builder instance |
|
||||
| [`--use`](#use) | `bool` | | Set the current builder instance |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
|
@@ -5,49 +5,49 @@ Start a build
|
||||
|
||||
### Aliases
|
||||
|
||||
`docker buildx debug build`, `docker buildx debug b`
|
||||
`docker build`, `docker builder build`, `docker image build`, `docker buildx b`
|
||||
|
||||
### Options
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
|:---------------------------------------------------------------------------------------------------------------------------------------------------|:--------------|:----------|:----------------------------------------------------------------------------------------------------|
|
||||
| [`--add-host`](https://docs.docker.com/reference/cli/docker/image/build/#add-host) | `stringSlice` | | Add a custom host-to-IP mapping (format: `host:ip`) |
|
||||
| `--allow` | `stringSlice` | | Allow extra privileged entitlement (e.g., `network.host`, `security.insecure`) |
|
||||
| `--annotation` | `stringArray` | | Add annotation to the image |
|
||||
| `--attest` | `stringArray` | | Attestation parameters (format: `type=sbom,generator=image`) |
|
||||
| `--build-arg` | `stringArray` | | Set build-time variables |
|
||||
| `--build-context` | `stringArray` | | Additional build contexts (e.g., name=path) |
|
||||
| `--builder` | `string` | | Override the configured builder instance |
|
||||
| `--cache-from` | `stringArray` | | External cache sources (e.g., `user/app:cache`, `type=local,src=path/to/dir`) |
|
||||
| `--cache-to` | `stringArray` | | Cache export destinations (e.g., `user/app:cache`, `type=local,dest=path/to/dir`) |
|
||||
| `--call` | `string` | `build` | Set method for evaluating build (`check`, `outline`, `targets`) |
|
||||
| [`--cgroup-parent`](https://docs.docker.com/reference/cli/docker/image/build/#cgroup-parent) | `string` | | Set the parent cgroup for the `RUN` instructions during build |
|
||||
| `--check` | | | Shorthand for `--call=check` |
|
||||
| `--detach` | | | Detach buildx server (supported only on linux) (EXPERIMENTAL) |
|
||||
| [`-f`](https://docs.docker.com/reference/cli/docker/image/build/#file), [`--file`](https://docs.docker.com/reference/cli/docker/image/build/#file) | `string` | | Name of the Dockerfile (default: `PATH/Dockerfile`) |
|
||||
| `--iidfile` | `string` | | Write the image ID to a file |
|
||||
| `--label` | `stringArray` | | Set metadata for an image |
|
||||
| `--load` | | | Shorthand for `--output=type=docker` |
|
||||
| `--metadata-file` | `string` | | Write build result metadata to a file |
|
||||
| `--network` | `string` | `default` | Set the networking mode for the `RUN` instructions during build |
|
||||
| `--no-cache` | | | Do not use cache when building the image |
|
||||
| `--no-cache-filter` | `stringArray` | | Do not cache specified stages |
|
||||
| `-o`, `--output` | `stringArray` | | Output destination (format: `type=local,dest=path`) |
|
||||
| `--platform` | `stringArray` | | Set target platform for build |
|
||||
| `--progress` | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`, `rawjson`). Use plain to show container output |
|
||||
| `--provenance` | `string` | | Shorthand for `--attest=type=provenance` |
|
||||
| `--pull` | | | Always attempt to pull all referenced images |
|
||||
| `--push` | | | Shorthand for `--output=type=registry` |
|
||||
| `-q`, `--quiet` | | | Suppress the build output and print image ID on success |
|
||||
| `--root` | `string` | | Specify root directory of server to connect (EXPERIMENTAL) |
|
||||
| `--sbom` | `string` | | Shorthand for `--attest=type=sbom` |
|
||||
| `--secret` | `stringArray` | | Secret to expose to the build (format: `id=mysecret[,src=/local/secret]`) |
|
||||
| `--server-config` | `string` | | Specify buildx server config file (used only when launching new server) (EXPERIMENTAL) |
|
||||
| `--shm-size` | `bytes` | `0` | Shared memory size for build containers |
|
||||
| `--ssh` | `stringArray` | | SSH agent socket or keys to expose to the build (format: `default\|<id>[=<socket>\|<key>[,<key>]]`) |
|
||||
| [`-t`](https://docs.docker.com/reference/cli/docker/image/build/#tag), [`--tag`](https://docs.docker.com/reference/cli/docker/image/build/#tag) | `stringArray` | | Name and optionally a tag (format: `name:tag`) |
|
||||
| [`--target`](https://docs.docker.com/reference/cli/docker/image/build/#target) | `string` | | Set the target build stage to build |
|
||||
| `--ulimit` | `ulimit` | | Ulimit options |
|
||||
| Name | Type | Default | Description |
|
||||
|:--------------------|:--------------|:----------|:----------------------------------------------------------------------------------------------------|
|
||||
| `--add-host` | `stringSlice` | | Add a custom host-to-IP mapping (format: `host:ip`) |
|
||||
| `--allow` | `stringSlice` | | Allow extra privileged entitlement (e.g., `network.host`, `security.insecure`) |
|
||||
| `--annotation` | `stringArray` | | Add annotation to the image |
|
||||
| `--attest` | `stringArray` | | Attestation parameters (format: `type=sbom,generator=image`) |
|
||||
| `--build-arg` | `stringArray` | | Set build-time variables |
|
||||
| `--build-context` | `stringArray` | | Additional build contexts (e.g., name=path) |
|
||||
| `--builder` | `string` | | Override the configured builder instance |
|
||||
| `--cache-from` | `stringArray` | | External cache sources (e.g., `user/app:cache`, `type=local,src=path/to/dir`) |
|
||||
| `--cache-to` | `stringArray` | | Cache export destinations (e.g., `user/app:cache`, `type=local,dest=path/to/dir`) |
|
||||
| `--call` | `string` | `build` | Set method for evaluating build (`check`, `outline`, `targets`) |
|
||||
| `--cgroup-parent` | `string` | | Set the parent cgroup for the `RUN` instructions during build |
|
||||
| `--check` | `bool` | | Shorthand for `--call=check` |
|
||||
| `--detach` | `bool` | | Detach buildx server (supported only on linux) (EXPERIMENTAL) |
|
||||
| `-f`, `--file` | `string` | | Name of the Dockerfile (default: `PATH/Dockerfile`) |
|
||||
| `--iidfile` | `string` | | Write the image ID to a file |
|
||||
| `--label` | `stringArray` | | Set metadata for an image |
|
||||
| `--load` | `bool` | | Shorthand for `--output=type=docker` |
|
||||
| `--metadata-file` | `string` | | Write build result metadata to a file |
|
||||
| `--network` | `string` | `default` | Set the networking mode for the `RUN` instructions during build |
|
||||
| `--no-cache` | `bool` | | Do not use cache when building the image |
|
||||
| `--no-cache-filter` | `stringArray` | | Do not cache specified stages |
|
||||
| `-o`, `--output` | `stringArray` | | Output destination (format: `type=local,dest=path`) |
|
||||
| `--platform` | `stringArray` | | Set target platform for build |
|
||||
| `--progress` | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`, `rawjson`). Use plain to show container output |
|
||||
| `--provenance` | `string` | | Shorthand for `--attest=type=provenance` |
|
||||
| `--pull` | `bool` | | Always attempt to pull all referenced images |
|
||||
| `--push` | `bool` | | Shorthand for `--output=type=registry` |
|
||||
| `-q`, `--quiet` | `bool` | | Suppress the build output and print image ID on success |
|
||||
| `--root` | `string` | | Specify root directory of server to connect (EXPERIMENTAL) |
|
||||
| `--sbom` | `string` | | Shorthand for `--attest=type=sbom` |
|
||||
| `--secret` | `stringArray` | | Secret to expose to the build (format: `id=mysecret[,src=/local/secret]`) |
|
||||
| `--server-config` | `string` | | Specify buildx server config file (used only when launching new server) (EXPERIMENTAL) |
|
||||
| `--shm-size` | `bytes` | `0` | Shared memory size for build containers |
|
||||
| `--ssh` | `stringArray` | | SSH agent socket or keys to expose to the build (format: `default\|<id>[=<socket>\|<key>[,<key>]]`) |
|
||||
| `-t`, `--tag` | `stringArray` | | Name and optionally a tag (format: `name:tag`) |
|
||||
| `--target` | `string` | | Set the target build stage to build |
|
||||
| `--ulimit` | `ulimit` | | Ulimit options |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
|
@@ -13,7 +13,7 @@ Disk usage
|
||||
|:------------------------|:---------|:--------|:-----------------------------------------|
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| `--filter` | `filter` | | Provide filter values |
|
||||
| [`--verbose`](#verbose) | | | Provide a more verbose output |
|
||||
| [`--verbose`](#verbose) | `bool` | | Provide a more verbose output |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
|
@@ -12,9 +12,9 @@ Create a new image based on source images
|
||||
| Name | Type | Default | Description |
|
||||
|:---------------------------------|:--------------|:--------|:------------------------------------------------------------------------------------------------------------------------------|
|
||||
| [`--annotation`](#annotation) | `stringArray` | | Add annotation to the image |
|
||||
| [`--append`](#append) | | | Append to existing manifest |
|
||||
| [`--append`](#append) | `bool` | | Append to existing manifest |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| [`--dry-run`](#dry-run) | | | Show final image instead of pushing |
|
||||
| [`--dry-run`](#dry-run) | `bool` | | Show final image instead of pushing |
|
||||
| [`-f`](#file), [`--file`](#file) | `stringArray` | | Read source descriptor from file |
|
||||
| `--prefer-index` | `bool` | `true` | When only a single source is specified, prefer outputting an image index or manifest list instead of performing a carbon copy |
|
||||
| `--progress` | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`, `rawjson`). Use plain to show container output |
|
||||
|
@@ -13,7 +13,7 @@ Show details of an image in the registry
|
||||
|:------------------------|:---------|:----------------|:----------------------------------------------|
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| [`--format`](#format) | `string` | `{{.Manifest}}` | Format the output using the given Go template |
|
||||
| [`--raw`](#raw) | | | Show original, unformatted JSON manifest |
|
||||
| [`--raw`](#raw) | `bool` | | Show original, unformatted JSON manifest |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
|
@@ -11,7 +11,7 @@ Inspect current builder instance
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
|:----------------------------|:---------|:--------|:--------------------------------------------|
|
||||
| [`--bootstrap`](#bootstrap) | | | Ensure builder has booted before inspecting |
|
||||
| [`--bootstrap`](#bootstrap) | `bool` | | Ensure builder has booted before inspecting |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
|
||||
|
||||
|
@@ -11,12 +11,12 @@ Remove build cache
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
|:------------------------|:---------|:--------|:------------------------------------------|
|
||||
| `-a`, `--all` | | | Include internal/frontend images |
|
||||
| `-a`, `--all` | `bool` | | Include internal/frontend images |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| `--filter` | `filter` | | Provide filter values (e.g., `until=24h`) |
|
||||
| `-f`, `--force` | | | Do not prompt for confirmation |
|
||||
| `-f`, `--force` | `bool` | | Do not prompt for confirmation |
|
||||
| `--keep-storage` | `bytes` | `0` | Amount of disk space to keep for cache |
|
||||
| `--verbose` | | | Provide a more verbose output |
|
||||
| `--verbose` | `bool` | | Provide a more verbose output |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
|
@@ -11,11 +11,11 @@ Remove one or more builder instances
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
|:------------------------------------|:---------|:--------|:-----------------------------------------|
|
||||
| [`--all-inactive`](#all-inactive) | | | Remove all inactive builders |
|
||||
| [`--all-inactive`](#all-inactive) | `bool` | | Remove all inactive builders |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| [`-f`](#force), [`--force`](#force) | | | Do not prompt for confirmation |
|
||||
| [`--keep-daemon`](#keep-daemon) | | | Keep the BuildKit daemon running |
|
||||
| [`--keep-state`](#keep-state) | | | Keep BuildKit state |
|
||||
| [`-f`](#force), [`--force`](#force) | `bool` | | Do not prompt for confirmation |
|
||||
| [`--keep-daemon`](#keep-daemon) | `bool` | | Keep the BuildKit daemon running |
|
||||
| [`--keep-state`](#keep-state) | `bool` | | Keep BuildKit state |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
|
@@ -12,8 +12,8 @@ Set the current builder instance
|
||||
| Name | Type | Default | Description |
|
||||
|:------------------------|:---------|:--------|:-------------------------------------------|
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| `--default` | | | Set builder as default for current context |
|
||||
| `--global` | | | Builder persists context changes |
|
||||
| `--default` | `bool` | | Set builder as default for current context |
|
||||
| `--global` | `bool` | | Builder persists context changes |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
|
@@ -18,9 +18,8 @@ import (
|
||||
"github.com/docker/buildx/util/imagetools"
|
||||
"github.com/docker/buildx/util/progress"
|
||||
"github.com/docker/cli/opts"
|
||||
dockertypes "github.com/docker/docker/api/types"
|
||||
"github.com/docker/docker/api/types/container"
|
||||
imagetypes "github.com/docker/docker/api/types/image"
|
||||
"github.com/docker/docker/api/types/image"
|
||||
"github.com/docker/docker/api/types/mount"
|
||||
"github.com/docker/docker/api/types/network"
|
||||
"github.com/docker/docker/api/types/system"
|
||||
@@ -96,7 +95,7 @@ func (d *Driver) create(ctx context.Context, l progress.SubLogger) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rc, err := d.DockerAPI.ImageCreate(ctx, imageName, imagetypes.CreateOptions{
|
||||
rc, err := d.DockerAPI.ImageCreate(ctx, imageName, image.CreateOptions{
|
||||
RegistryAuth: ra,
|
||||
})
|
||||
if err != nil {
|
||||
@@ -256,17 +255,16 @@ func (d *Driver) copyToContainer(ctx context.Context, files map[string][]byte) e
|
||||
defer srcArchive.Close()
|
||||
|
||||
baseDir := path.Dir(confutil.DefaultBuildKitConfigDir)
|
||||
return d.DockerAPI.CopyToContainer(ctx, d.Name, baseDir, srcArchive, dockertypes.CopyToContainerOptions{})
|
||||
return d.DockerAPI.CopyToContainer(ctx, d.Name, baseDir, srcArchive, container.CopyToContainerOptions{})
|
||||
}
|
||||
|
||||
func (d *Driver) exec(ctx context.Context, cmd []string) (string, net.Conn, error) {
|
||||
execConfig := dockertypes.ExecConfig{
|
||||
response, err := d.DockerAPI.ContainerExecCreate(ctx, d.Name, container.ExecOptions{
|
||||
Cmd: cmd,
|
||||
AttachStdin: true,
|
||||
AttachStdout: true,
|
||||
AttachStderr: true,
|
||||
}
|
||||
response, err := d.DockerAPI.ContainerExecCreate(ctx, d.Name, execConfig)
|
||||
})
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
@@ -276,7 +274,7 @@ func (d *Driver) exec(ctx context.Context, cmd []string) (string, net.Conn, erro
|
||||
return "", nil, errors.New("exec ID empty")
|
||||
}
|
||||
|
||||
resp, err := d.DockerAPI.ContainerExecAttach(ctx, execID, dockertypes.ExecStartCheck{})
|
||||
resp, err := d.DockerAPI.ContainerExecAttach(ctx, execID, container.ExecStartOptions{})
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
|
@@ -155,9 +155,9 @@ type DriverHandle struct {
|
||||
historyAPISupported bool
|
||||
}
|
||||
|
||||
func (d *DriverHandle) Client(ctx context.Context) (*client.Client, error) {
|
||||
func (d *DriverHandle) Client(ctx context.Context, opt ...client.ClientOpt) (*client.Client, error) {
|
||||
d.once.Do(func() {
|
||||
d.client, d.err = d.Driver.Client(ctx, d.getClientOptions()...)
|
||||
d.client, d.err = d.Driver.Client(ctx, append(d.getClientOptions(), opt...)...)
|
||||
})
|
||||
return d.client, d.err
|
||||
}
|
||||
|
@@ -7,6 +7,7 @@ import (
|
||||
"net"
|
||||
"os"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/docker/buildx/driver"
|
||||
@@ -14,6 +15,7 @@ import (
|
||||
"github.com/docker/buildx/util/progress"
|
||||
"github.com/moby/buildkit/client"
|
||||
"github.com/moby/buildkit/client/connhelper"
|
||||
"github.com/moby/buildkit/util/tracing/delegated"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
@@ -25,6 +27,11 @@ type Driver struct {
|
||||
// https://github.com/docker/docs/blob/main/content/build/drivers/remote.md
|
||||
*tlsOpts
|
||||
defaultLoad bool
|
||||
|
||||
// remote driver caches the client because its Bootstrap/Info methods reuse it internally
|
||||
clientOnce sync.Once
|
||||
client *client.Client
|
||||
err error
|
||||
}
|
||||
|
||||
type tlsOpts struct {
|
||||
@@ -78,12 +85,18 @@ func (d *Driver) Rm(ctx context.Context, force, rmVolume, rmDaemon bool) error {
|
||||
}
|
||||
|
||||
func (d *Driver) Client(ctx context.Context, opts ...client.ClientOpt) (*client.Client, error) {
|
||||
opts = append([]client.ClientOpt{
|
||||
client.WithContextDialer(func(ctx context.Context, _ string) (net.Conn, error) {
|
||||
return d.Dial(ctx)
|
||||
}),
|
||||
}, opts...)
|
||||
return client.New(ctx, "", opts...)
|
||||
d.clientOnce.Do(func() {
|
||||
opts = append([]client.ClientOpt{
|
||||
client.WithContextDialer(func(ctx context.Context, _ string) (net.Conn, error) {
|
||||
return d.Dial(ctx)
|
||||
}),
|
||||
client.WithTracerDelegate(delegated.DefaultExporter),
|
||||
}, opts...)
|
||||
c, err := client.New(ctx, "", opts...)
|
||||
d.client = c
|
||||
d.err = err
|
||||
})
|
||||
return d.client, d.err
|
||||
}
|
||||
|
||||
func (d *Driver) Dial(ctx context.Context) (net.Conn, error) {
|
||||
|
go.mod
@@ -1,24 +1,26 @@
|
||||
module github.com/docker/buildx
|
||||
|
||||
go 1.21
|
||||
go 1.21.0
|
||||
|
||||
require (
|
||||
github.com/Masterminds/semver/v3 v3.2.1
|
||||
github.com/Microsoft/go-winio v0.6.2
|
||||
github.com/aws/aws-sdk-go-v2/config v1.26.6
|
||||
github.com/compose-spec/compose-go/v2 v2.1.2
|
||||
github.com/compose-spec/compose-go/v2 v2.1.3
|
||||
github.com/containerd/console v1.0.4
|
||||
github.com/containerd/containerd v1.7.18
|
||||
github.com/containerd/containerd v1.7.19
|
||||
github.com/containerd/continuity v0.4.3
|
||||
github.com/containerd/errdefs v0.1.0
|
||||
github.com/containerd/log v0.1.0
|
||||
github.com/containerd/platforms v0.2.1
|
||||
github.com/containerd/typeurl/v2 v2.1.1
|
||||
github.com/creack/pty v1.1.18
|
||||
github.com/distribution/reference v0.5.0
|
||||
github.com/docker/cli v26.1.4+incompatible
|
||||
github.com/docker/cli-docs-tool v0.7.0
|
||||
github.com/docker/docker v26.1.4+incompatible
|
||||
github.com/creack/pty v1.1.21
|
||||
github.com/distribution/reference v0.6.0
|
||||
github.com/docker/cli v27.0.3+incompatible
|
||||
github.com/docker/cli-docs-tool v0.8.0
|
||||
github.com/docker/docker v27.0.3+incompatible
|
||||
github.com/docker/go-units v0.5.0
|
||||
github.com/gofrs/flock v0.8.1
|
||||
github.com/gofrs/flock v0.12.0
|
||||
github.com/gogo/protobuf v1.3.2
|
||||
github.com/golang/protobuf v1.5.4
|
||||
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510
|
||||
@@ -26,7 +28,8 @@ require (
|
||||
github.com/hashicorp/go-cty-funcs v0.0.0-20230405223818-a090f58aa992
|
||||
github.com/hashicorp/hcl/v2 v2.20.1
|
||||
github.com/in-toto/in-toto-golang v0.5.0
|
||||
github.com/moby/buildkit v0.14.0-rc2
|
||||
github.com/mitchellh/hashstructure/v2 v2.0.2
|
||||
github.com/moby/buildkit v0.15.0
|
||||
github.com/moby/sys/mountinfo v0.7.1
|
||||
github.com/moby/sys/signal v0.7.0
|
||||
github.com/morikuni/aec v1.0.0
|
||||
@@ -36,19 +39,21 @@ require (
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b
|
||||
github.com/sirupsen/logrus v1.9.3
|
||||
github.com/spf13/cobra v1.8.0
|
||||
github.com/spf13/cobra v1.8.1
|
||||
github.com/spf13/pflag v1.0.5
|
||||
github.com/stretchr/testify v1.8.4
|
||||
github.com/stretchr/testify v1.9.0
|
||||
github.com/tonistiigi/fsutil v0.0.0-20240424095704-91a3fc46842c
|
||||
github.com/tonistiigi/go-csvvalue v0.0.0-20240710180619-ddb21b71c0b4
|
||||
github.com/zclconf/go-cty v1.14.4
|
||||
go.opentelemetry.io/otel v1.21.0
|
||||
go.opentelemetry.io/otel/metric v1.21.0
|
||||
go.opentelemetry.io/otel/sdk v1.21.0
|
||||
go.opentelemetry.io/otel/trace v1.21.0
|
||||
golang.org/x/mod v0.17.0
|
||||
golang.org/x/sync v0.6.0
|
||||
golang.org/x/sys v0.18.0
|
||||
golang.org/x/term v0.18.0
|
||||
golang.org/x/sync v0.7.0
|
||||
golang.org/x/sys v0.21.0
|
||||
golang.org/x/term v0.20.0
|
||||
golang.org/x/text v0.15.0
|
||||
google.golang.org/grpc v1.59.0
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
k8s.io/api v0.29.2
|
||||
@@ -59,7 +64,6 @@ require (
|
||||
require (
|
||||
github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 // indirect
|
||||
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect
|
||||
github.com/Microsoft/hcsshim v0.11.5 // indirect
|
||||
github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d // indirect
|
||||
github.com/agext/levenshtein v1.2.3 // indirect
|
||||
github.com/apparentlymart/go-cidr v1.0.1 // indirect
|
||||
@@ -79,18 +83,19 @@ require (
|
||||
github.com/beorn7/perks v1.0.1 // indirect
|
||||
github.com/cenkalti/backoff/v4 v4.2.1 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.2.0 // indirect
|
||||
github.com/containerd/errdefs v0.1.0 // indirect
|
||||
github.com/containerd/ttrpc v1.2.4 // indirect
|
||||
github.com/containerd/containerd/api v1.7.19 // indirect
|
||||
github.com/containerd/ttrpc v1.2.5 // indirect
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.4 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/docker/distribution v2.8.2+incompatible // indirect
|
||||
github.com/docker/docker-credential-helpers v0.8.0 // indirect
|
||||
github.com/docker/docker-credential-helpers v0.8.2 // indirect
|
||||
github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c // indirect
|
||||
github.com/docker/go-connections v0.5.0 // indirect
|
||||
github.com/docker/go-metrics v0.0.1 // indirect
|
||||
github.com/emicklei/go-restful/v3 v3.11.0 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/fvbommel/sortorder v1.0.1 // indirect
|
||||
github.com/go-logr/logr v1.3.0 // indirect
|
||||
github.com/go-logr/logr v1.4.1 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/go-openapi/jsonpointer v0.19.6 // indirect
|
||||
github.com/go-openapi/jsonreference v0.20.2 // indirect
|
||||
@@ -110,16 +115,14 @@ require (
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/klauspost/compress v1.17.4 // indirect
|
||||
github.com/klauspost/compress v1.17.9 // indirect
|
||||
github.com/mailru/easyjson v0.7.7 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.15 // indirect
|
||||
github.com/mattn/go-shellwords v1.0.12 // indirect
|
||||
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
|
||||
github.com/miekg/pkcs11 v1.1.1 // indirect
|
||||
github.com/mitchellh/copystructure v1.2.0 // indirect
|
||||
github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect
|
||||
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||
github.com/mitchellh/reflectwalk v1.0.2 // indirect
|
||||
github.com/moby/docker-image-spec v1.3.1 // indirect
|
||||
github.com/moby/locker v1.0.1 // indirect
|
||||
github.com/moby/patternmatcher v0.6.0 // indirect
|
||||
@@ -135,8 +138,9 @@ require (
|
||||
github.com/prometheus/client_golang v1.17.0 // indirect
|
||||
github.com/prometheus/client_model v0.5.0 // indirect
|
||||
github.com/prometheus/common v0.44.0 // indirect
|
||||
github.com/prometheus/procfs v0.12.0 // indirect
|
||||
github.com/prometheus/procfs v0.15.1 // indirect
|
||||
github.com/rivo/uniseg v0.2.0 // indirect
|
||||
github.com/russross/blackfriday/v2 v2.1.0 // indirect
|
||||
github.com/secure-systems-lab/go-securesystemslib v0.4.0 // indirect
|
||||
github.com/shibumi/go-pathspec v1.3.0 // indirect
|
||||
github.com/theupdateframework/notary v0.7.0 // indirect
|
||||
@@ -156,11 +160,10 @@ require (
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0 // indirect
|
||||
go.opentelemetry.io/otel/sdk/metric v1.21.0 // indirect
|
||||
go.opentelemetry.io/proto/otlp v1.0.0 // indirect
|
||||
golang.org/x/crypto v0.21.0 // indirect
|
||||
golang.org/x/crypto v0.23.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3 // indirect
|
||||
golang.org/x/net v0.23.0 // indirect
|
||||
golang.org/x/net v0.25.0 // indirect
|
||||
golang.org/x/oauth2 v0.11.0 // indirect
|
||||
golang.org/x/text v0.14.0 // indirect
|
||||
golang.org/x/time v0.3.0 // indirect
|
||||
golang.org/x/tools v0.17.0 // indirect
|
||||
google.golang.org/appengine v1.6.7 // indirect
|
||||
|
go.sum
@@ -15,8 +15,8 @@ github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0
|
||||
github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
|
||||
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||
github.com/Microsoft/hcsshim v0.11.5 h1:haEcLNpj9Ka1gd3B3tAEs9CpE0c+1IhoL59w/exYU38=
|
||||
github.com/Microsoft/hcsshim v0.11.5/go.mod h1:MV8xMfmECjl5HdO7U/3/hFVnkmSBjAjmA09d4bExKcU=
|
||||
github.com/Microsoft/hcsshim v0.11.7 h1:vl/nj3Bar/CvJSYo7gIQPyRWc9f3c6IeSNavBTSZNZQ=
|
||||
github.com/Microsoft/hcsshim v0.11.7/go.mod h1:MV8xMfmECjl5HdO7U/3/hFVnkmSBjAjmA09d4bExKcU=
|
||||
github.com/Shopify/logrus-bugsnag v0.0.0-20170309145241-6dbc35f2c30d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ=
|
||||
github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d h1:UrqY+r/OJnIp5u0s1SbQ8dVfLCZJsnvazdBP5hS4iRs=
|
||||
github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ=
|
||||
@@ -84,14 +84,16 @@ github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4 h1:/inchEIKaYC1Akx+H+g
|
||||
github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
|
||||
github.com/codahale/rfc6979 v0.0.0-20141003034818-6a90f24967eb h1:EDmT6Q9Zs+SbUoc7Ik9EfrFqcylYqgPZ9ANSbTAntnE=
|
||||
github.com/codahale/rfc6979 v0.0.0-20141003034818-6a90f24967eb/go.mod h1:ZjrT6AXHbDs86ZSdt/osfBi5qfexBrKUdONk989Wnk4=
|
||||
github.com/compose-spec/compose-go/v2 v2.1.2 h1:N2XmNYg5jHNBaU+4/zSAe2UrZLq7Kkp1eSsOHfAHbxQ=
|
||||
github.com/compose-spec/compose-go/v2 v2.1.2/go.mod h1:NJGRGazJfh0tD7d13h66KDVvyOHK49Wil2CIhoffiD0=
|
||||
github.com/compose-spec/compose-go/v2 v2.1.3 h1:bD67uqLuL/XgkAK6ir3xZvNLFPxPScEi1KW7R5esrLE=
|
||||
github.com/compose-spec/compose-go/v2 v2.1.3/go.mod h1:lFN0DrMxIncJGYAXTfWuajfwj5haBJqrBkarHcnjJKc=
|
||||
github.com/containerd/cgroups v1.1.0 h1:v8rEWFl6EoqHB+swVNjVoCJE8o3jX7e8nqBGPLaDFBM=
|
||||
github.com/containerd/cgroups v1.1.0/go.mod h1:6ppBcbh/NOOUU+dMKrykgaBnK9lCIBxHqJDGwsa1mIw=
|
||||
github.com/containerd/console v1.0.4 h1:F2g4+oChYvBTsASRTz8NP6iIAi97J3TtSAsLbIFn4ro=
|
||||
github.com/containerd/console v1.0.4/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk=
|
||||
github.com/containerd/containerd v1.7.18 h1:jqjZTQNfXGoEaZdW1WwPU0RqSn1Bm2Ay/KJPUuO8nao=
|
||||
github.com/containerd/containerd v1.7.18/go.mod h1:IYEk9/IO6wAPUz2bCMVUbsfXjzw5UNP5fLz4PsUygQ4=
|
||||
github.com/containerd/containerd v1.7.19 h1:/xQ4XRJ0tamDkdzrrBAUy/LE5nCcxFKdBm4EcPrSMEE=
|
||||
github.com/containerd/containerd v1.7.19/go.mod h1:h4FtNYUUMB4Phr6v+xG89RYKj9XccvbNSCKjdufCrkc=
|
||||
github.com/containerd/containerd/api v1.7.19 h1:VWbJL+8Ap4Ju2mx9c9qS1uFSB1OVYr5JJrW2yT5vFoA=
|
||||
github.com/containerd/containerd/api v1.7.19/go.mod h1:fwGavl3LNwAV5ilJ0sbrABL44AQxmNjDRcwheXDb6Ig=
|
||||
github.com/containerd/continuity v0.4.3 h1:6HVkalIp+2u1ZLH1J/pYX2oBVXlJZvh1X1A7bEZ9Su8=
|
||||
github.com/containerd/continuity v0.4.3/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ=
|
||||
github.com/containerd/errdefs v0.1.0 h1:m0wCRBiu1WJT/Fr+iOoQHMQS/eP5myQ8lCv4Dz5ZURM=
|
||||
@@ -102,34 +104,37 @@ github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
|
||||
github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
|
||||
github.com/containerd/nydus-snapshotter v0.13.7 h1:x7DHvGnzJOu1ZPwPYkeOPk5MjZZYbdddygEjaSDoFTk=
|
||||
github.com/containerd/nydus-snapshotter v0.13.7/go.mod h1:VPVKQ3jmHFIcUIV2yiQ1kImZuBFS3GXDohKs9mRABVE=
|
||||
github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A=
|
||||
github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw=
|
||||
github.com/containerd/stargz-snapshotter v0.15.1 h1:fpsP4kf/Z4n2EYnU0WT8ZCE3eiKDwikDhL6VwxIlgeA=
|
||||
github.com/containerd/stargz-snapshotter/estargz v0.15.1 h1:eXJjw9RbkLFgioVaTG+G/ZW/0kEe2oEKCdS/ZxIyoCU=
|
||||
github.com/containerd/stargz-snapshotter/estargz v0.15.1/go.mod h1:gr2RNwukQ/S9Nv33Lt6UC7xEx58C+LHRdoqbEKjz1Kk=
|
||||
github.com/containerd/ttrpc v1.2.4 h1:eQCQK4h9dxDmpOb9QOOMh2NHTfzroH1IkmHiKZi05Oo=
|
||||
github.com/containerd/ttrpc v1.2.4/go.mod h1:ojvb8SJBSch0XkqNO0L0YX/5NxR3UnVk2LzFKBK0upc=
|
||||
github.com/containerd/ttrpc v1.2.5 h1:IFckT1EFQoFBMG4c3sMdT8EP3/aKfumK1msY+Ze4oLU=
|
||||
github.com/containerd/ttrpc v1.2.5/go.mod h1:YCXHsb32f+Sq5/72xHubdiJRQY9inL4a4ZQrAbN1q9o=
|
||||
github.com/containerd/typeurl/v2 v2.1.1 h1:3Q4Pt7i8nYwy2KmQWIw2+1hTvwTE/6w9FqcttATPO/4=
|
||||
github.com/containerd/typeurl/v2 v2.1.1/go.mod h1:IDp2JFvbwZ31H8dQbEIY7sDl2L3o3HZj1hsSQlywkQ0=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.4 h1:wfIWP927BUkWJb2NmU/kNDYIBTh/ziUX91+lVfRxZq4=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY=
|
||||
github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
|
||||
github.com/creack/pty v1.1.21 h1:1/QdRyBaHHJP61QkWMXlOIBfsgdDeeKfK8SYVUWJKf0=
|
||||
github.com/creack/pty v1.1.21/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/denisenkom/go-mssqldb v0.0.0-20191128021309-1d7a30a10f73/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU=
|
||||
github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0=
|
||||
github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
|
||||
github.com/docker/cli v26.1.4+incompatible h1:I8PHdc0MtxEADqYJZvhBrW9bo8gawKwwenxRM7/rLu8=
|
||||
github.com/docker/cli v26.1.4+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
|
||||
github.com/docker/cli-docs-tool v0.7.0 h1:M2Da98Unz2kz3A5d4yeSGbhyOge2mfYSNjAFt01Rw0M=
|
||||
github.com/docker/cli-docs-tool v0.7.0/go.mod h1:zMjqTFCU361PRh8apiXzeAZ1Q/xupbIwTusYpzCXS/o=
|
||||
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
|
||||
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
|
||||
github.com/docker/cli v27.0.3+incompatible h1:usGs0/BoBW8MWxGeEtqPMkzOY56jZ6kYlSN5BLDioCQ=
|
||||
github.com/docker/cli v27.0.3+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
|
||||
github.com/docker/cli-docs-tool v0.8.0 h1:YcDWl7rQJC3lJ7WVZRwSs3bc9nka97QLWfyJQli8yJU=
|
||||
github.com/docker/cli-docs-tool v0.8.0/go.mod h1:8TQQ3E7mOXoYUs811LiPdUnAhXrcVsBIrW21a5pUbdk=
|
||||
github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
|
||||
github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8=
|
||||
github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
|
||||
github.com/docker/docker v26.1.4+incompatible h1:vuTpXDuoga+Z38m1OZHzl7NKisKWaWlhjQk7IDPSLsU=
|
||||
github.com/docker/docker v26.1.4+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/docker-credential-helpers v0.8.0 h1:YQFtbBQb4VrpoPxhFuzEBPQ9E16qz5SpHLS+uswaCp8=
|
||||
github.com/docker/docker-credential-helpers v0.8.0/go.mod h1:UGFXcuoQ5TxPiB54nHOZ32AWRqQdECoh/Mg0AlEYb40=
|
||||
github.com/docker/docker v27.0.3+incompatible h1:aBGI9TeQ4MPlhquTQKq9XbK79rKFVwXNUAYz9aXyEBE=
|
||||
github.com/docker/docker v27.0.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/docker-credential-helpers v0.8.2 h1:bX3YxiGzFP5sOXWc3bTPEXdEaZSeVMrFgOr3T+zrFAo=
|
||||
github.com/docker/docker-credential-helpers v0.8.2/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M=
|
||||
github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0=
|
||||
github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c/go.mod h1:CADgU4DSXK5QUlFslkQu2yW2TKzFZcXq/leZfM0UH5Q=
|
||||
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
|
||||
@@ -159,8 +164,9 @@ github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2
|
||||
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
|
||||
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
|
||||
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||
github.com/go-logr/logr v1.3.0 h1:2y3SDp0ZXuc6/cjLSZ+Q3ir+QB9T/iG5yYRXqsagWSY=
|
||||
github.com/go-logr/logr v1.3.0/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
|
||||
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||
github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE=
|
||||
@@ -178,8 +184,8 @@ github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
|
||||
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
|
||||
github.com/go-viper/mapstructure/v2 v2.0.0 h1:dhn8MZ1gZ0mzeodTG3jt5Vj/o87xZKuNAprG2mQfMfc=
|
||||
github.com/go-viper/mapstructure/v2 v2.0.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
|
||||
github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw=
|
||||
github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
|
||||
github.com/gofrs/flock v0.12.0 h1:xHW8t8GPAiGtqz7KxiSqfOEXwpOaqhpYZrTE2MQBgXY=
|
||||
github.com/gofrs/flock v0.12.0/go.mod h1:FirDy1Ing0mI2+kB6wk+vyyAH+e6xiE+EYA0jnzV9jc=
|
||||
github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0=
|
||||
github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4=
|
||||
github.com/gogo/protobuf v1.0.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
|
||||
@@ -212,8 +218,8 @@ github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
|
||||
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/pprof v0.0.0-20230323073829-e72429f035bd h1:r8yyd+DJDmsUhGrRBxH5Pj7KeFK5l+Y3FsgT8keqKtk=
|
||||
github.com/google/pprof v0.0.0-20230323073829-e72429f035bd/go.mod h1:79YE0hCXdHag9sBkw2o+N/YnZtTkXi0UT9Nnixa5eYk=
|
||||
github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6 h1:k7nVchz72niMH6YLQNvHSdIE7iqsQxK1P41mySCvssg=
|
||||
github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw=
|
||||
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
|
||||
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
|
||||
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
@@ -263,8 +269,8 @@ github.com/juju/loggo v0.0.0-20190526231331-6e530bcce5d8/go.mod h1:vgyd7OREkbtVE
|
||||
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
|
||||
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4=
|
||||
github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
|
||||
github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA=
|
||||
github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
|
||||
@@ -292,18 +298,16 @@ github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfr
|
||||
github.com/miekg/pkcs11 v1.0.2/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs=
|
||||
github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU=
|
||||
github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs=
|
||||
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
|
||||
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
|
||||
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||
github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM=
|
||||
github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo=
|
||||
github.com/mitchellh/hashstructure/v2 v2.0.2 h1:vGKWl0YJqUNxE8d+h8f6NJLcCJrgbhC4NcD46KavDd4=
|
||||
github.com/mitchellh/hashstructure/v2 v2.0.2/go.mod h1:MG3aRVU/N29oo/V/IhBX8GR/zz4kQkprJgF2EVszyDE=
|
||||
github.com/mitchellh/mapstructure v0.0.0-20150613213606-2caf8efc9366/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
|
||||
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
|
||||
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
|
||||
github.com/moby/buildkit v0.14.0-rc2 h1:qvl0hOKeyAWReOkksNtstQjPNaAD4jN3Dvq4r7slqYM=
|
||||
github.com/moby/buildkit v0.14.0-rc2/go.mod h1:/ZJNHNVso1nf063XlDhEkNEcRNW19utVpUKixCUo9Ks=
|
||||
github.com/moby/buildkit v0.15.0 h1:vnZLThPr9JU6SvItctKoa6NfgPZ8oUApg/TCOaa/SVs=
|
||||
github.com/moby/buildkit v0.15.0/go.mod h1:oN9S+8I7wF26vrqn9NuAF6dFSyGTfXvtiu9o1NlnnH4=
|
||||
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
|
||||
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
|
||||
github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg=
|
||||
@@ -352,8 +356,8 @@ github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3I
|
||||
github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
|
||||
github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
|
||||
github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
|
||||
github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg=
|
||||
github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
|
||||
github.com/opencontainers/runtime-spec v1.2.0 h1:z97+pHb3uELt/yiAWD691HNHQIF07bE7dzrbT927iTk=
|
||||
github.com/opencontainers/runtime-spec v1.2.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
|
||||
github.com/opencontainers/selinux v1.11.0 h1:+5Zbo97w3Lbmb3PeqQtpmTkMwsW5nRI3YaLpt7tQ7oU=
|
||||
github.com/opencontainers/selinux v1.11.0/go.mod h1:E5dMC3VPuVvVHDYmi78qvhJp8+M586T4DlDRYpFkyec=
|
||||
github.com/opentracing/opentracing-go v1.1.0 h1:pWlfV3Bxv7k65HYwkikxat0+s3pV4bsqf19k25Ur8rU=
|
||||
@@ -386,12 +390,13 @@ github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R
|
||||
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
||||
github.com/prometheus/procfs v0.0.3/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ=
|
||||
github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo=
|
||||
github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo=
|
||||
github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc=
|
||||
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
|
||||
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
|
||||
github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
|
||||
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE=
|
||||
github.com/secure-systems-lab/go-securesystemslib v0.4.0/go.mod h1:FGBZgq2tXWICsxWQW1msNf49F0Pf2Op5Htayx335Qbs=
|
||||
@@ -409,8 +414,8 @@ github.com/spdx/tools-golang v0.5.3/go.mod h1:/ETOahiAo96Ob0/RAIBmFZw6XN0yTnyr/u
|
||||
github.com/spf13/cast v0.0.0-20150508191742-4d07383ffe94 h1:JmfC365KywYwHB946TTiQWEb8kqPY+pybPLoGE9GgVk=
|
||||
github.com/spf13/cast v0.0.0-20150508191742-4d07383ffe94/go.mod h1:r2rcYCSwa1IExKTDiTfzaxqT2FNHs8hODu4LnUfgKEg=
|
||||
github.com/spf13/cobra v0.0.1/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
|
||||
github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0=
|
||||
github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho=
|
||||
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
|
||||
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
|
||||
github.com/spf13/jwalterweatherman v0.0.0-20141219030609-3d60171a6431 h1:XTHrT015sxHyJ5FnQ0AeemSspZWaDq7DoTRW0EVsDCE=
|
||||
github.com/spf13/jwalterweatherman v0.0.0-20141219030609-3d60171a6431/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
|
||||
github.com/spf13/pflag v1.0.0/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
|
||||
@@ -431,12 +436,14 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4DzbAiAiEL3c=
|
||||
github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw=
|
||||
github.com/tonistiigi/fsutil v0.0.0-20240424095704-91a3fc46842c h1:+6wg/4ORAbnSoGDzg2Q1i3CeMcT/jjhye/ZfnBHy7/M=
|
||||
github.com/tonistiigi/fsutil v0.0.0-20240424095704-91a3fc46842c/go.mod h1:vbbYqJlnswsbJqWUcJN8fKtBhnEgldDrcagTgnBVKKM=
|
||||
github.com/tonistiigi/go-csvvalue v0.0.0-20240710180619-ddb21b71c0b4 h1:7I5c2Ig/5FgqkYOh/N87NzoyI9U15qUPXhDD8uCupv8=
|
||||
github.com/tonistiigi/go-csvvalue v0.0.0-20240710180619-ddb21b71c0b4/go.mod h1:278M4p8WsNh3n4a1eqiFcV2FGk7wE5fwUpUom9mK9lE=
|
||||
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/v/cCndK0AMpt1wiVFb/YYmqB3/QG0=
|
||||
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk=
|
||||
github.com/tonistiigi/vt100 v0.0.0-20240514184818-90bafcd6abab h1:H6aJ0yKQ0gF49Qb2z5hI1UHxSQt4JMyxebFR15KnApw=
|
||||
@@ -500,8 +507,8 @@ golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPh
|
||||
golang.org/x/crypto v0.0.0-20200422194213-44a606286825/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20201117144127-c1f2f97bffc9/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
|
||||
golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
|
||||
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
|
||||
golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI=
|
||||
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||
golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3 h1:hNQpMuAJe5CtcUqCXaWga3FHu+kQvCqcsoVaQgSV60o=
|
||||
golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08=
|
||||
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
@@ -516,8 +523,8 @@ golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLL
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs=
|
||||
golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
|
||||
golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/oauth2 v0.11.0 h1:vPL4xzxBM4niKCW6g9whtaWVXTJf1U5e4aZxxFx/gbU=
|
||||
golang.org/x/oauth2 v0.11.0/go.mod h1:LdF7O/8bLR/qWK9DrpXmbHLTouvRHK0SgJl0GmDBchk=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@@ -526,8 +533,8 @@ golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJ
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
|
||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@@ -542,16 +549,16 @@ golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
|
||||
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws=
|
||||
golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
||||
golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8=
|
||||
golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
|
||||
golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw=
|
||||
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=
|
||||
golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
|
@@ -1,6 +1,6 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
|
||||
ARG GO_VERSION=1.21
|
||||
ARG GO_VERSION=1.22
|
||||
ARG FORMATS=md,yaml
|
||||
|
||||
FROM golang:${GO_VERSION}-alpine AS docsgen
|
||||
|
@@ -5,7 +5,7 @@
|
||||
# Copyright The Buildx Authors.
|
||||
# Licensed under the Apache License, Version 2.0
|
||||
|
||||
ARG GO_VERSION="1.21"
|
||||
ARG GO_VERSION="1.22"
|
||||
ARG PROTOC_VERSION="3.11.4"
|
||||
|
||||
# protoc is dynamically linked to glibc so can't use alpine base
|
||||
|
@@ -1,6 +1,6 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
|
||||
ARG GO_VERSION=1.21
|
||||
ARG GO_VERSION=1.22
|
||||
ARG XX_VERSION=1.3.0
|
||||
ARG GOLANGCI_LINT_VERSION=1.57.2
|
||||
ARG GOPLS_VERSION=v0.20.0
|
||||
|
@@ -1,7 +1,7 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
|
||||
ARG GO_VERSION=1.21
|
||||
ARG MODOUTDATED_VERSION=v0.8.0
|
||||
ARG GO_VERSION=1.22
|
||||
ARG MODOUTDATED_VERSION=v0.9.0
|
||||
|
||||
FROM golang:${GO_VERSION}-alpine AS base
|
||||
RUN apk add --no-cache git rsync
|
||||
@@ -41,5 +41,5 @@ FROM psampaz/go-mod-outdated:${MODOUTDATED_VERSION} AS go-mod-outdated
|
||||
FROM base AS outdated
|
||||
RUN --mount=target=.,ro \
|
||||
--mount=target=/go/pkg/mod,type=cache \
|
||||
--mount=from=go-mod-outdated,source=/home/go-mod-outdated,target=/usr/bin/go-mod-outdated \
|
||||
--mount=from=go-mod-outdated,source=/usr/bin/go-mod-outdated,target=/usr/bin/go-mod-outdated \
|
||||
go list -mod=readonly -u -m -json all | go-mod-outdated -update -direct
|
||||
|
hack/test (23 changes)
@@ -4,6 +4,7 @@ set -eu -o pipefail
|
||||
|
||||
: "${BUILDX_CMD=docker buildx}"
|
||||
|
||||
: "${TEST_COVERAGE=}"
|
||||
: "${TEST_IMAGE_BUILD=1}"
|
||||
: "${TEST_IMAGE_ID=buildx-tests}"
|
||||
|
||||
@@ -22,7 +23,7 @@ testReportsDir="$(pwd)/bin/testreports"
|
||||
mkdir -p "$testReportsDir"
|
||||
testReportsVol="-v $testReportsDir:/testreports"
|
||||
gotestsumArgs="--format=standard-verbose --jsonfile=/testreports/go-test-report$TEST_REPORT_SUFFIX.json --junitfile=/testreports/junit-report$TEST_REPORT_SUFFIX.xml"
|
||||
gotestArgs="-mod=vendor -coverprofile=/testreports/coverage-report$TEST_REPORT_SUFFIX.txt -covermode=atomic"
|
||||
gotestArgs="-mod=vendor"
|
||||
|
||||
cacheVolume="buildx-test-cache"
|
||||
if ! docker container inspect "$cacheVolume" >/dev/null 2>/dev/null; then
|
||||
@@ -32,5 +33,23 @@ if [ "$TEST_KEEP_CACHE" != "1" ]; then
|
||||
trap 'docker rm -v $cacheVolume' EXIT
|
||||
fi
|
||||
|
||||
cid=$(docker create --rm -v /tmp $testReportsVol --volumes-from=$cacheVolume -e GITHUB_REF -e TEST_DOCKERD -e TEST_BUILDKIT_IMAGE -e TEST_BUILDKIT_TAG -e TEST_BUILDX_EXPERIMENTAL -e SKIP_INTEGRATION_TESTS -e GOTESTSUM_FORMAT ${BUILDKIT_INTEGRATION_SNAPSHOTTER:+"-eBUILDKIT_INTEGRATION_SNAPSHOTTER"} -e BUILDKIT_REGISTRY_MIRROR_DIR=/root/.cache/registry --privileged $TEST_IMAGE_ID gotestsum $gotestsumArgs --packages="${TESTPKGS:-./...}" -- $gotestArgs ${TESTFLAGS:--v})
|
||||
if [ "$TEST_COVERAGE" = "1" ]; then
|
||||
export GO_TEST_COVERPROFILE="/testreports/coverage-report$TEST_REPORT_SUFFIX.txt"
|
||||
fi
|
||||
|
||||
cid=$(docker create --rm --privileged \
|
||||
-v /tmp $testReportsVol \
|
||||
--volumes-from=$cacheVolume \
|
||||
-e GITHUB_REF \
|
||||
-e TEST_DOCKERD \
|
||||
-e TEST_BUILDKIT_IMAGE \
|
||||
-e TEST_BUILDKIT_TAG \
|
||||
-e TEST_BUILDX_EXPERIMENTAL \
|
||||
-e SKIP_INTEGRATION_TESTS \
|
||||
-e GOTESTSUM_FORMAT \
|
||||
-e GO_TEST_COVERPROFILE \
|
||||
${BUILDKIT_INTEGRATION_SNAPSHOTTER:+"-eBUILDKIT_INTEGRATION_SNAPSHOTTER"} \
|
||||
-e BUILDKIT_REGISTRY_MIRROR_DIR=/root/.cache/registry \
|
||||
$TEST_IMAGE_ID gotestsumandcover $gotestsumArgs --packages="${TESTPKGS:-./...}" -- $gotestArgs ${TESTFLAGS:--v})
|
||||
|
||||
docker start -a -i $cid
|
||||
|
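Taken together with the tests/helpers.go hunk further down, the intent of this rewrite appears to be: running the suite as TEST_COVERAGE=1 ./hack/test exports GO_TEST_COVERPROFILE into the test container, the gotestsumandcover entrypoint consumes it for the Go test coverage profile, and the helpers reuse the same path to derive a GOCOVERDIR for spawned buildx processes (assuming those binaries are built with coverage instrumentation). With TEST_COVERAGE unset, the coverage flags are dropped entirely, where previously -coverprofile was always passed.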
@@ -109,21 +109,21 @@ buildxCmd inspect --bootstrap --builder="${builderName}"
|
||||
|
||||
# create dockerfile
|
||||
cat > "${dockerfile}" <<EOL
|
||||
FROM busybox as build
|
||||
fRom busybox as build
|
||||
ARG TARGETPLATFORM
|
||||
ARG BUILDPLATFORM
|
||||
RUN echo "I am running on \$BUILDPLATFORM, building for \$TARGETPLATFORM" > /log
|
||||
|
||||
FROM busybox AS log
|
||||
FROM busybox As log
|
||||
COPY --from=build /log /log
|
||||
RUN cat /log
|
||||
RUN uname -a
|
||||
|
||||
FROM busybox AS hello
|
||||
FROm busybox AS hello
|
||||
RUN echo hello > /hello
|
||||
|
||||
FROM scratch
|
||||
COPY --from=log /log /log
|
||||
CoPY --from=log /log /log
|
||||
COPY --from=hello /hello /hello
|
||||
EOL
|
||||
|
||||
|
@@ -21,9 +21,10 @@ const (
|
||||
type State struct {
|
||||
// Target is the name of the invoked target (default if empty)
|
||||
Target string
|
||||
// LocalPath is the absolute path to the context
|
||||
// LocalPath is the absolute path to the context or remote context
|
||||
LocalPath string
|
||||
// DockerfilePath is the absolute path to the Dockerfile
|
||||
// DockerfilePath is the absolute path to the Dockerfile or relative if
|
||||
// context is remote
|
||||
DockerfilePath string
|
||||
// GroupRef is the ref of the state group that this ref belongs to
|
||||
GroupRef string `json:",omitempty"`
|
||||
|
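The updated comments are exercised by the new local-state tests in tests/build.go further down. A minimal, compilable sketch of reading a ref back through the same localstate API those tests use (the config directory and build ref are hypothetical inputs):

```go
package example

import (
	"fmt"
	"strings"

	"github.com/docker/buildx/localstate"
)

// readBuildRef looks up a build's saved state. configDir is the BUILDX_CONFIG
// directory and buildRef the "builder/node/id" value reported under the
// buildx.build.ref metadata key; both are placeholders here.
func readBuildRef(configDir, buildRef string) error {
	ls, err := localstate.New(configDir)
	if err != nil {
		return err
	}
	parts := strings.Split(buildRef, "/")
	if len(parts) != 3 {
		return fmt.Errorf("unexpected build ref %q", buildRef)
	}
	ref, err := ls.ReadRef(parts[0], parts[1], parts[2])
	if err != nil {
		return err
	}
	// Per the updated comments: LocalPath is the absolute context directory for
	// a local build, or the remote URL for a git context; DockerfilePath is
	// absolute for local contexts, relative for remote ones, and "-" for stdin.
	fmt.Println(ref.LocalPath, ref.DockerfilePath)
	return nil
}
```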
@@ -4,7 +4,7 @@ import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/containerd/containerd/platforms"
|
||||
"github.com/containerd/platforms"
|
||||
"github.com/docker/buildx/util/confutil"
|
||||
"github.com/docker/buildx/util/platformutil"
|
||||
specs "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
|
tests/bake.go (356 changes)
@@ -1,14 +1,18 @@
|
||||
package tests
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/containerd/continuity/fs/fstest"
|
||||
"github.com/docker/buildx/bake"
|
||||
"github.com/docker/buildx/util/gitutil"
|
||||
"github.com/moby/buildkit/client"
|
||||
"github.com/moby/buildkit/identity"
|
||||
provenancetypes "github.com/moby/buildkit/solver/llbsolver/provenance/types"
|
||||
"github.com/moby/buildkit/util/contentutil"
|
||||
@@ -26,6 +30,7 @@ func bakeCmd(sb integration.Sandbox, opts ...cmdOpt) (string, error) {
|
||||
}
|
||||
|
||||
var bakeTests = []func(t *testing.T, sb integration.Sandbox){
|
||||
testBakePrint,
|
||||
testBakeLocal,
|
||||
testBakeLocalMulti,
|
||||
testBakeRemote,
|
||||
@@ -42,9 +47,58 @@ var bakeTests = []func(t *testing.T, sb integration.Sandbox){
|
||||
testBakeEmpty,
|
||||
testBakeShmSize,
|
||||
testBakeUlimits,
|
||||
testBakeMetadata,
|
||||
testBakeMetadataProvenance,
|
||||
testBakeMetadataWarnings,
|
||||
testBakeMetadataWarningsDedup,
|
||||
testBakeMultiExporters,
|
||||
testBakeLoadPush,
|
||||
testListTargets,
|
||||
testListVariables,
|
||||
testBakeCallCheck,
|
||||
testBakeCallCheckFlag,
|
||||
}
|
||||
|
||||
func testBakePrint(t *testing.T, sb integration.Sandbox) {
|
||||
dockerfile := []byte(`
|
||||
FROM busybox
|
||||
ARG HELLO
|
||||
RUN echo "Hello ${HELLO}"
|
||||
`)
|
||||
bakefile := []byte(`
|
||||
target "build" {
|
||||
args = {
|
||||
HELLO = "foo"
|
||||
}
|
||||
}
|
||||
`)
|
||||
dir := tmpdir(
|
||||
t,
|
||||
fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
|
||||
fstest.CreateFile("Dockerfile", dockerfile, 0600),
|
||||
)
|
||||
|
||||
cmd := buildxCmd(sb, withDir(dir), withArgs("bake", "--print", "build"))
|
||||
stdout := bytes.Buffer{}
|
||||
stderr := bytes.Buffer{}
|
||||
cmd.Stdout = &stdout
|
||||
cmd.Stderr = &stderr
|
||||
require.NoError(t, cmd.Run(), stdout.String(), stderr.String())
|
||||
|
||||
var def struct {
|
||||
Group map[string]*bake.Group `json:"group,omitempty"`
|
||||
Target map[string]*bake.Target `json:"target"`
|
||||
}
|
||||
require.NoError(t, json.Unmarshal(stdout.Bytes(), &def))
|
||||
|
||||
require.Len(t, def.Group, 1)
|
||||
require.Contains(t, def.Group, "default")
|
||||
|
||||
require.Equal(t, []string{"build"}, def.Group["default"].Targets)
|
||||
require.Len(t, def.Target, 1)
|
||||
require.Contains(t, def.Target, "build")
|
||||
require.Equal(t, ".", *def.Target["build"].Context)
|
||||
require.Equal(t, "Dockerfile", *def.Target["build"].Dockerfile)
|
||||
require.Equal(t, map[string]*string{"HELLO": ptrstr("foo")}, def.Target["build"].Args)
|
||||
}
|
||||
|
||||
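From the assertions above, the --print output for this fixture should look roughly like the following (reconstructed from the expectations, not captured from a real run):

```json
{
  "group": {
    "default": { "targets": ["build"] }
  },
  "target": {
    "build": {
      "context": ".",
      "dockerfile": "Dockerfile",
      "args": { "HELLO": "foo" }
    }
  }
}
```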
func testBakeLocal(t *testing.T, sb integration.Sandbox) {
|
||||
@@ -633,19 +687,22 @@ target "default" {
|
||||
require.Contains(t, string(dt), `1024`)
|
||||
}
|
||||
|
||||
func testBakeMetadata(t *testing.T, sb integration.Sandbox) {
|
||||
func testBakeMetadataProvenance(t *testing.T, sb integration.Sandbox) {
|
||||
t.Run("default", func(t *testing.T) {
|
||||
bakeMetadataProvenance(t, sb, "")
|
||||
})
|
||||
t.Run("max", func(t *testing.T) {
|
||||
bakeMetadata(t, sb, "max")
|
||||
bakeMetadataProvenance(t, sb, "max")
|
||||
})
|
||||
t.Run("min", func(t *testing.T) {
|
||||
bakeMetadata(t, sb, "min")
|
||||
bakeMetadataProvenance(t, sb, "min")
|
||||
})
|
||||
t.Run("disabled", func(t *testing.T) {
|
||||
bakeMetadata(t, sb, "disabled")
|
||||
bakeMetadataProvenance(t, sb, "disabled")
|
||||
})
|
||||
}
|
||||
|
||||
func bakeMetadata(t *testing.T, sb integration.Sandbox, metadataMode string) {
|
||||
func bakeMetadataProvenance(t *testing.T, sb integration.Sandbox, metadataMode string) {
|
||||
dockerfile := []byte(`
|
||||
FROM scratch
|
||||
COPY foo /foo
|
||||
@@ -676,7 +733,7 @@ target "default" {
|
||||
withEnv("BUILDX_METADATA_PROVENANCE="+metadataMode),
|
||||
)
|
||||
out, err := cmd.CombinedOutput()
|
||||
require.NoError(t, err, out)
|
||||
require.NoError(t, err, string(out))
|
||||
|
||||
dt, err := os.ReadFile(filepath.Join(dirDest, "md.json"))
|
||||
require.NoError(t, err)
|
||||
@@ -706,6 +763,130 @@ target "default" {
|
||||
require.Equal(t, provenancetypes.BuildKitBuildType, prv.BuildType)
|
||||
}
|
||||
|
||||
func testBakeMetadataWarnings(t *testing.T, sb integration.Sandbox) {
|
||||
t.Run("default", func(t *testing.T) {
|
||||
bakeMetadataWarnings(t, sb, "")
|
||||
})
|
||||
t.Run("true", func(t *testing.T) {
|
||||
bakeMetadataWarnings(t, sb, "true")
|
||||
})
|
||||
t.Run("false", func(t *testing.T) {
|
||||
bakeMetadataWarnings(t, sb, "false")
|
||||
})
|
||||
}
|
||||
|
||||
func bakeMetadataWarnings(t *testing.T, sb integration.Sandbox, mode string) {
|
||||
dockerfile := []byte(`
|
||||
frOM busybox as base
|
||||
cOpy Dockerfile .
|
||||
from scratch
|
||||
COPy --from=base \
|
||||
/Dockerfile \
|
||||
/
|
||||
`)
|
||||
bakefile := []byte(`
|
||||
target "default" {
|
||||
}
|
||||
`)
|
||||
dir := tmpdir(
|
||||
t,
|
||||
fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
|
||||
fstest.CreateFile("Dockerfile", dockerfile, 0600),
|
||||
)
|
||||
|
||||
dirDest := t.TempDir()
|
||||
|
||||
cmd := buildxCmd(
|
||||
sb,
|
||||
withDir(dir),
|
||||
withArgs("bake", "--metadata-file", filepath.Join(dirDest, "md.json"), "--set", "*.output=type=cacheonly"),
|
||||
withEnv("BUILDX_METADATA_WARNINGS="+mode),
|
||||
)
|
||||
out, err := cmd.CombinedOutput()
|
||||
require.NoError(t, err, string(out))
|
||||
|
||||
dt, err := os.ReadFile(filepath.Join(dirDest, "md.json"))
|
||||
require.NoError(t, err)
|
||||
|
||||
type mdT struct {
|
||||
BuildWarnings []client.VertexWarning `json:"buildx.build.warnings"`
|
||||
Default struct {
|
||||
BuildRef string `json:"buildx.build.ref"`
|
||||
} `json:"default"`
|
||||
}
|
||||
var md mdT
|
||||
err = json.Unmarshal(dt, &md)
|
||||
require.NoError(t, err, string(dt))
|
||||
|
||||
require.NotEmpty(t, md.Default.BuildRef, string(dt))
|
||||
if mode == "" || mode == "false" {
|
||||
require.Empty(t, md.BuildWarnings, string(dt))
|
||||
return
|
||||
}
|
||||
|
||||
skipNoCompatBuildKit(t, sb, ">= 0.14.0-0", "lint")
|
||||
require.Len(t, md.BuildWarnings, 3, string(dt))
|
||||
}
|
||||
|
||||
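In short, the mode passed through BUILDX_METADATA_WARNINGS controls whether bake embeds build warnings in the metadata file: unset or "false" leaves buildx.build.warnings empty, while "true" records the three lint findings produced by the deliberately mixed-case Dockerfile alongside the per-target buildx.build.ref entries (the lint subrequest needs BuildKit >= 0.14.0, hence the skip).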
func testBakeMetadataWarningsDedup(t *testing.T, sb integration.Sandbox) {
|
||||
dockerfile := []byte(`
|
||||
frOM busybox as base
|
||||
cOpy Dockerfile .
|
||||
from scratch
|
||||
COPy --from=base \
|
||||
/Dockerfile \
|
||||
/
|
||||
`)
|
||||
bakefile := []byte(`
|
||||
group "default" {
|
||||
targets = ["base", "def"]
|
||||
}
|
||||
target "base" {
|
||||
target = "base"
|
||||
}
|
||||
target "def" {
|
||||
}
|
||||
`)
|
||||
dir := tmpdir(
|
||||
t,
|
||||
fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
|
||||
fstest.CreateFile("Dockerfile", dockerfile, 0600),
|
||||
)
|
||||
|
||||
dirDest := t.TempDir()
|
||||
|
||||
cmd := buildxCmd(
|
||||
sb,
|
||||
withDir(dir),
|
||||
withArgs("bake", "--metadata-file", filepath.Join(dirDest, "md.json"), "--set", "*.output=type=cacheonly"),
|
||||
withEnv("BUILDX_METADATA_WARNINGS=true"),
|
||||
)
|
||||
out, err := cmd.CombinedOutput()
|
||||
require.NoError(t, err, string(out))
|
||||
|
||||
dt, err := os.ReadFile(filepath.Join(dirDest, "md.json"))
|
||||
require.NoError(t, err)
|
||||
|
||||
type mdT struct {
|
||||
BuildWarnings []client.VertexWarning `json:"buildx.build.warnings"`
|
||||
Base struct {
|
||||
BuildRef string `json:"buildx.build.ref"`
|
||||
} `json:"base"`
|
||||
Def struct {
|
||||
BuildRef string `json:"buildx.build.ref"`
|
||||
} `json:"def"`
|
||||
}
|
||||
var md mdT
|
||||
err = json.Unmarshal(dt, &md)
|
||||
require.NoError(t, err, string(dt))
|
||||
|
||||
require.NotEmpty(t, md.Base.BuildRef, string(dt))
|
||||
require.NotEmpty(t, md.Def.BuildRef, string(dt))
|
||||
|
||||
skipNoCompatBuildKit(t, sb, ">= 0.14.0-0", "lint")
|
||||
require.Len(t, md.BuildWarnings, 3, string(dt))
|
||||
}
|
||||
|
||||
func testBakeMultiExporters(t *testing.T, sb integration.Sandbox) {
|
||||
if !isDockerContainerWorker(sb) {
|
||||
t.Skip("only testing with docker-container worker")
|
||||
@@ -821,3 +1002,164 @@ target "default" {
|
||||
|
||||
// TODO: test metadata file when supported by multi exporters https://github.com/docker/buildx/issues/2181
|
||||
}
|
||||
|
||||
func testListTargets(t *testing.T, sb integration.Sandbox) {
|
||||
bakefile := []byte(`
|
||||
target "foo" {
|
||||
description = "This builds foo"
|
||||
}
|
||||
target "abc" {
|
||||
}
|
||||
`)
|
||||
dir := tmpdir(
|
||||
t,
|
||||
fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
|
||||
)
|
||||
|
||||
out, err := bakeCmd(
|
||||
sb,
|
||||
withDir(dir),
|
||||
withArgs("--list-targets"),
|
||||
)
|
||||
require.NoError(t, err, out)
|
||||
|
||||
require.Equal(t, "TARGET\tDESCRIPTION\nabc\t\nfoo\tThis builds foo", strings.TrimSpace(out))
|
||||
}
|
||||
|
||||
func testListVariables(t *testing.T, sb integration.Sandbox) {
|
||||
bakefile := []byte(`
|
||||
variable "foo" {
|
||||
default = "bar"
|
||||
description = "This is foo"
|
||||
}
|
||||
variable "abc" {
|
||||
default = null
|
||||
}
|
||||
variable "def" {
|
||||
}
|
||||
target "default" {
|
||||
}
|
||||
`)
|
||||
dir := tmpdir(
|
||||
t,
|
||||
fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
|
||||
)
|
||||
|
||||
out, err := bakeCmd(
|
||||
sb,
|
||||
withDir(dir),
|
||||
withArgs("--list-variables"),
|
||||
)
|
||||
require.NoError(t, err, out)
|
||||
|
||||
require.Equal(t, "VARIABLE\tVALUE\tDESCRIPTION\nabc\t\t<null>\t\ndef\t\t\t\nfoo\t\tbar\tThis is foo", strings.TrimSpace(out))
|
||||
}
|
||||
|
||||
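Rendered with the tab stops resolved, the two expected strings correspond to output along these lines (layout approximate):

```
$ docker buildx bake --list-targets
TARGET   DESCRIPTION
abc
foo      This builds foo

$ docker buildx bake --list-variables
VARIABLE   VALUE    DESCRIPTION
abc        <null>
def
foo        bar      This is foo
```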
func testBakeCallCheck(t *testing.T, sb integration.Sandbox) {
|
||||
dockerfile := []byte(`
|
||||
FROM scratch
|
||||
COPy foo /foo
|
||||
`)
|
||||
bakefile := []byte(`
|
||||
target "validate" {
|
||||
call = "check"
|
||||
}
|
||||
`)
|
||||
dir := tmpdir(
|
||||
t,
|
||||
fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
|
||||
fstest.CreateFile("Dockerfile", dockerfile, 0600),
|
||||
)
|
||||
|
||||
out, err := bakeCmd(
|
||||
sb,
|
||||
withDir(dir),
|
||||
withArgs("validate"),
|
||||
)
|
||||
require.Error(t, err, out)
|
||||
|
||||
require.Contains(t, out, "validate")
|
||||
require.Contains(t, out, "ConsistentInstructionCasing")
|
||||
}
|
||||
|
||||
func testBakeCallCheckFlag(t *testing.T, sb integration.Sandbox) {
|
||||
dockerfile := []byte(`
|
||||
FROM scratch
|
||||
COPy foo /foo
|
||||
`)
|
||||
dockerfile2 := []byte(`
|
||||
FROM scratch
|
||||
COPY foo$BAR /foo
|
||||
`)
|
||||
bakefile := []byte(`
|
||||
target "build" {
|
||||
dockerfile = "a.Dockerfile"
|
||||
}
|
||||
|
||||
target "another" {
|
||||
dockerfile = "b.Dockerfile"
|
||||
}
|
||||
`)
|
||||
dir := tmpdir(
|
||||
t,
|
||||
fstest.CreateFile("docker-bake.hcl", bakefile, 0600),
|
||||
fstest.CreateFile("a.Dockerfile", dockerfile, 0600),
|
||||
fstest.CreateFile("b.Dockerfile", dockerfile2, 0600),
|
||||
)
|
||||
|
||||
out, err := bakeCmd(
|
||||
sb,
|
||||
withDir(dir),
|
||||
withArgs("build", "another", "--check"),
|
||||
)
|
||||
require.Error(t, err, out)
|
||||
|
||||
require.Contains(t, out, "build")
|
||||
require.Contains(t, out, "ConsistentInstructionCasing")
|
||||
|
||||
require.Contains(t, out, "another")
|
||||
require.Contains(t, out, "UndefinedVar")
|
||||
|
||||
cmd := buildxCmd(
|
||||
sb,
|
||||
withDir(dir),
|
||||
withArgs("bake", "--progress=quiet", "build", "another", "--call", "check,format=json"),
|
||||
)
|
||||
outB, err := cmd.Output()
|
||||
require.Error(t, err, string(outB))
|
||||
|
||||
var res map[string]any
|
||||
err = json.Unmarshal(outB, &res)
|
||||
require.NoError(t, err, out)
|
||||
|
||||
targets, ok := res["target"].(map[string]any)
|
||||
require.True(t, ok)
|
||||
|
||||
build, ok := targets["build"].(map[string]any)
|
||||
require.True(t, ok)
|
||||
|
||||
_, ok = build["build"]
|
||||
require.True(t, ok)
|
||||
|
||||
check, ok := build["check"].(map[string]any)
|
||||
require.True(t, ok)
|
||||
|
||||
warnings, ok := check["warnings"].([]any)
|
||||
require.True(t, ok)
|
||||
|
||||
require.Len(t, warnings, 1)
|
||||
|
||||
another, ok := targets["another"].(map[string]any)
|
||||
require.True(t, ok)
|
||||
|
||||
_, ok = another["build"]
|
||||
require.True(t, ok)
|
||||
|
||||
check, ok = another["check"].(map[string]any)
|
||||
require.True(t, ok)
|
||||
|
||||
warnings, ok = check["warnings"].([]any)
|
||||
require.True(t, ok)
|
||||
|
||||
require.Len(t, warnings, 1)
|
||||
}
|
||||
|
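Between the bake-file attribute (call = "check" in the previous test) and the CLI flags used here, the same evaluation can be requested three ways: per target in the bake definition, with the --check shorthand, or with --call check,format=json. In every case a check that produces findings makes the command exit non-zero (hence require.Error above), and with format=json the result is keyed per target, roughly {"target": {"<name>": {"build": {...}, "check": {"warnings": [...]}}}}, matching the type assertions in this test.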
tests/build.go (272 changes)
@@ -13,9 +13,12 @@ import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/containerd/containerd/platforms"
|
||||
"github.com/containerd/continuity/fs/fstest"
|
||||
"github.com/containerd/platforms"
|
||||
"github.com/creack/pty"
|
||||
"github.com/docker/buildx/localstate"
|
||||
"github.com/docker/buildx/util/gitutil"
|
||||
"github.com/moby/buildkit/client"
|
||||
"github.com/moby/buildkit/frontend/subrequests/lint"
|
||||
"github.com/moby/buildkit/frontend/subrequests/outline"
|
||||
"github.com/moby/buildkit/frontend/subrequests/targets"
|
||||
@@ -40,7 +43,12 @@ func buildCmd(sb integration.Sandbox, opts ...cmdOpt) (string, error) {
|
||||
|
||||
var buildTests = []func(t *testing.T, sb integration.Sandbox){
|
||||
testBuild,
|
||||
testBuildAlias,
|
||||
testBuildStdin,
|
||||
testBuildRemote,
|
||||
testBuildLocalState,
|
||||
testBuildLocalStateStdin,
|
||||
testBuildLocalStateRemote,
|
||||
testImageIDOutput,
|
||||
testBuildLocalExport,
|
||||
testBuildRegistryExport,
|
||||
@@ -59,12 +67,13 @@ var buildTests = []func(t *testing.T, sb integration.Sandbox){
|
||||
testBuildNetworkModeBridge,
|
||||
testBuildShmSize,
|
||||
testBuildUlimit,
|
||||
testBuildMetadata,
|
||||
testBuildMetadataProvenance,
|
||||
testBuildMetadataWarnings,
|
||||
testBuildMultiExporters,
|
||||
testBuildLoadPush,
|
||||
testBuildSecret,
|
||||
testBuildDefaultLoad,
|
||||
testBuildPrint,
|
||||
testBuildCall,
|
||||
}
|
||||
|
||||
func testBuild(t *testing.T, sb integration.Sandbox) {
|
||||
@@ -73,6 +82,13 @@ func testBuild(t *testing.T, sb integration.Sandbox) {
|
||||
require.NoError(t, err, string(out))
|
||||
}
|
||||
|
||||
func testBuildAlias(t *testing.T, sb integration.Sandbox) {
|
||||
dir := createTestProject(t)
|
||||
cmd := buildxCmd(sb, withDir(dir), withArgs("b", dir))
|
||||
out, err := cmd.CombinedOutput()
|
||||
require.NoError(t, err, string(out))
|
||||
}
|
||||
|
||||
func testBuildStdin(t *testing.T, sb integration.Sandbox) {
|
||||
dockerfile := []byte(`
|
||||
FROM busybox:latest AS base
|
||||
@@ -93,6 +109,170 @@ COPY --from=base /etc/bar /bar
|
||||
require.NoError(t, err, string(out))
|
||||
}
|
||||
|
||||
func testBuildRemote(t *testing.T, sb integration.Sandbox) {
|
||||
dockerfile := []byte(`
|
||||
FROM busybox:latest
|
||||
COPY foo /foo
|
||||
`)
|
||||
dir := tmpdir(
|
||||
t,
|
||||
fstest.CreateFile("Dockerfile", dockerfile, 0600),
|
||||
fstest.CreateFile("foo", []byte("foo"), 0600),
|
||||
)
|
||||
dirDest := t.TempDir()
|
||||
|
||||
git, err := gitutil.New(gitutil.WithWorkingDir(dir))
|
||||
require.NoError(t, err)
|
||||
|
||||
gitutil.GitInit(git, t)
|
||||
gitutil.GitAdd(git, t, "Dockerfile", "foo")
|
||||
gitutil.GitCommit(git, t, "initial commit")
|
||||
addr := gitutil.GitServeHTTP(git, t)
|
||||
|
||||
out, err := buildCmd(sb, withDir(dir), withArgs("--output=type=local,dest="+dirDest, addr))
|
||||
require.NoError(t, err, out)
|
||||
require.FileExists(t, filepath.Join(dirDest, "foo"))
|
||||
}
|
||||
|
||||
func testBuildLocalState(t *testing.T, sb integration.Sandbox) {
|
||||
dockerfile := []byte(`
|
||||
FROM busybox:latest AS base
|
||||
COPY foo /etc/foo
|
||||
RUN cp /etc/foo /etc/bar
|
||||
|
||||
FROM scratch
|
||||
COPY --from=base /etc/bar /bar
|
||||
`)
|
||||
dir := tmpdir(
|
||||
t,
|
||||
fstest.CreateFile("build.Dockerfile", dockerfile, 0600),
|
||||
fstest.CreateFile("foo", []byte("foo"), 0600),
|
||||
)
|
||||
|
||||
out, err := buildCmd(sb, withDir(dir), withArgs(
|
||||
"-f", "build.Dockerfile",
|
||||
"--metadata-file", filepath.Join(dir, "md.json"),
|
||||
".",
|
||||
))
|
||||
require.NoError(t, err, out)
|
||||
|
||||
dt, err := os.ReadFile(filepath.Join(dir, "md.json"))
|
||||
require.NoError(t, err)
|
||||
|
||||
type mdT struct {
|
||||
BuildRef string `json:"buildx.build.ref"`
|
||||
}
|
||||
var md mdT
|
||||
err = json.Unmarshal(dt, &md)
|
||||
require.NoError(t, err)
|
||||
|
||||
ls, err := localstate.New(buildxConfig(sb))
|
||||
require.NoError(t, err)
|
||||
|
||||
refParts := strings.Split(md.BuildRef, "/")
|
||||
require.Len(t, refParts, 3)
|
||||
|
||||
ref, err := ls.ReadRef(refParts[0], refParts[1], refParts[2])
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, ref)
|
||||
require.DirExists(t, ref.LocalPath)
|
||||
require.FileExists(t, ref.DockerfilePath)
|
||||
}
|
||||
|
||||
func testBuildLocalStateStdin(t *testing.T, sb integration.Sandbox) {
|
||||
dockerfile := []byte(`
|
||||
FROM busybox:latest AS base
|
||||
COPY foo /etc/foo
|
||||
RUN cp /etc/foo /etc/bar
|
||||
|
||||
FROM scratch
|
||||
COPY --from=base /etc/bar /bar
|
||||
`)
|
||||
dir := tmpdir(
|
||||
t,
|
||||
fstest.CreateFile("foo", []byte("foo"), 0600),
|
||||
)
|
||||
|
||||
cmd := buildxCmd(sb, withDir(dir), withArgs("build", "--progress=quiet", "--metadata-file", filepath.Join(dir, "md.json"), "-f-", dir))
|
||||
cmd.Stdin = bytes.NewReader(dockerfile)
|
||||
out, err := cmd.CombinedOutput()
|
||||
require.NoError(t, err, string(out))
|
||||
|
||||
dt, err := os.ReadFile(filepath.Join(dir, "md.json"))
|
||||
require.NoError(t, err)
|
||||
|
||||
type mdT struct {
|
||||
BuildRef string `json:"buildx.build.ref"`
|
||||
}
|
||||
var md mdT
|
||||
err = json.Unmarshal(dt, &md)
|
||||
require.NoError(t, err)
|
||||
|
||||
ls, err := localstate.New(buildxConfig(sb))
|
||||
require.NoError(t, err)
|
||||
|
||||
refParts := strings.Split(md.BuildRef, "/")
|
||||
require.Len(t, refParts, 3)
|
||||
|
||||
ref, err := ls.ReadRef(refParts[0], refParts[1], refParts[2])
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, ref)
|
||||
require.DirExists(t, ref.LocalPath)
|
||||
require.Equal(t, "-", ref.DockerfilePath)
|
||||
}
|
||||
|
||||
func testBuildLocalStateRemote(t *testing.T, sb integration.Sandbox) {
|
||||
dockerfile := []byte(`
|
||||
FROM busybox:latest
|
||||
COPY foo /foo
|
||||
`)
|
||||
dir := tmpdir(
|
||||
t,
|
||||
fstest.CreateFile("build.Dockerfile", dockerfile, 0600),
|
||||
fstest.CreateFile("foo", []byte("foo"), 0600),
|
||||
)
|
||||
dirDest := t.TempDir()
|
||||
|
||||
git, err := gitutil.New(gitutil.WithWorkingDir(dir))
|
||||
require.NoError(t, err)
|
||||
|
||||
gitutil.GitInit(git, t)
|
||||
gitutil.GitAdd(git, t, "build.Dockerfile", "foo")
|
||||
gitutil.GitCommit(git, t, "initial commit")
|
||||
addr := gitutil.GitServeHTTP(git, t)
|
||||
|
||||
out, err := buildCmd(sb, withDir(dir), withArgs(
|
||||
"-f", "build.Dockerfile",
|
||||
"--metadata-file", filepath.Join(dirDest, "md.json"),
|
||||
"--output", "type=local,dest="+dirDest,
|
||||
addr,
|
||||
))
|
||||
require.NoError(t, err, out)
|
||||
require.FileExists(t, filepath.Join(dirDest, "foo"))
|
||||
|
||||
dt, err := os.ReadFile(filepath.Join(dirDest, "md.json"))
|
||||
require.NoError(t, err)
|
||||
|
||||
type mdT struct {
|
||||
BuildRef string `json:"buildx.build.ref"`
|
||||
}
|
||||
var md mdT
|
||||
err = json.Unmarshal(dt, &md)
|
||||
require.NoError(t, err)
|
||||
|
||||
ls, err := localstate.New(buildxConfig(sb))
|
||||
require.NoError(t, err)
|
||||
|
||||
refParts := strings.Split(md.BuildRef, "/")
|
||||
require.Len(t, refParts, 3)
|
||||
|
||||
ref, err := ls.ReadRef(refParts[0], refParts[1], refParts[2])
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, ref)
|
||||
require.Equal(t, addr, ref.LocalPath)
|
||||
require.Equal(t, "build.Dockerfile", ref.DockerfilePath)
|
||||
}
|
||||
|
||||
func testBuildLocalExport(t *testing.T, sb integration.Sandbox) {
|
||||
dir := createTestProject(t)
|
||||
out, err := buildCmd(sb, withArgs(fmt.Sprintf("--output=type=local,dest=%s/result", dir), dir))
|
||||
@@ -371,7 +551,7 @@ func testBuildAnnotations(t *testing.T, sb integration.Sandbox) {
|
||||
"--annotation", "example1=www",
|
||||
"--annotation", "index:example2=xxx",
|
||||
"--annotation", "manifest:example3=yyy",
|
||||
"--annotation", "manifest-descriptor[" + platforms.DefaultString() + "]:example4=zzz",
|
||||
"--annotation", "manifest-descriptor[" + platforms.Format(platforms.DefaultSpec()) + "]:example4=zzz",
|
||||
}
|
||||
out, err := buildCmd(sb, withArgs(annotations...), withArgs(fmt.Sprintf("--output=type=image,name=%s,push=true", target), dir))
|
||||
require.NoError(t, err, string(out))
|
||||
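A small sketch of how the platform-scoped annotation key above can be built, assuming the standalone github.com/containerd/platforms module this diff migrates to keeps the DefaultSpec/Format helpers of the old containerd package:

```go
package example

import "github.com/containerd/platforms"

// defaultAnnotationFlag reproduces the platform-scoped value passed above,
// e.g. "manifest-descriptor[linux/amd64]:example4=zzz" on an amd64 host.
func defaultAnnotationFlag() string {
	return "manifest-descriptor[" + platforms.Format(platforms.DefaultSpec()) + "]:example4=zzz"
}
```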
@@ -560,19 +740,22 @@ COPY --from=build /ulimit /
|
||||
require.Contains(t, string(dt), `1024`)
|
||||
}
|
||||
|
||||
func testBuildMetadata(t *testing.T, sb integration.Sandbox) {
|
||||
func testBuildMetadataProvenance(t *testing.T, sb integration.Sandbox) {
|
||||
t.Run("default", func(t *testing.T) {
|
||||
buildMetadataProvenance(t, sb, "")
|
||||
})
|
||||
t.Run("max", func(t *testing.T) {
|
||||
buildMetadata(t, sb, "max")
|
||||
buildMetadataProvenance(t, sb, "max")
|
||||
})
|
||||
t.Run("min", func(t *testing.T) {
|
||||
buildMetadata(t, sb, "min")
|
||||
buildMetadataProvenance(t, sb, "min")
|
||||
})
|
||||
t.Run("disabled", func(t *testing.T) {
|
||||
buildMetadata(t, sb, "disabled")
|
||||
buildMetadataProvenance(t, sb, "disabled")
|
||||
})
|
||||
}
|
||||
|
||||
func buildMetadata(t *testing.T, sb integration.Sandbox, metadataMode string) {
|
||||
func buildMetadataProvenance(t *testing.T, sb integration.Sandbox, metadataMode string) {
|
||||
dir := createTestProject(t)
|
||||
dirDest := t.TempDir()
|
||||
|
||||
@@ -616,6 +799,61 @@ func buildMetadata(t *testing.T, sb integration.Sandbox, metadataMode string) {
|
||||
require.Equal(t, provenancetypes.BuildKitBuildType, prv.BuildType)
|
||||
}
|
||||
|
||||
func testBuildMetadataWarnings(t *testing.T, sb integration.Sandbox) {
|
||||
t.Run("default", func(t *testing.T) {
|
||||
buildMetadataWarnings(t, sb, "")
|
||||
})
|
||||
t.Run("true", func(t *testing.T) {
|
||||
buildMetadataWarnings(t, sb, "true")
|
||||
})
|
||||
t.Run("false", func(t *testing.T) {
|
||||
buildMetadataWarnings(t, sb, "false")
|
||||
})
|
||||
}
|
||||
|
||||
func buildMetadataWarnings(t *testing.T, sb integration.Sandbox, mode string) {
|
||||
dockerfile := []byte(`
|
||||
frOM busybox as base
|
||||
cOpy Dockerfile .
|
||||
from scratch
|
||||
COPy --from=base \
|
||||
/Dockerfile \
|
||||
/
|
||||
`)
|
||||
dir := tmpdir(
|
||||
t,
|
||||
fstest.CreateFile("Dockerfile", dockerfile, 0600),
|
||||
)
|
||||
|
||||
cmd := buildxCmd(
|
||||
sb,
|
||||
withArgs("build", "--metadata-file", filepath.Join(dir, "md.json"), dir),
|
||||
withEnv("BUILDX_METADATA_WARNINGS="+mode),
|
||||
)
|
||||
out, err := cmd.CombinedOutput()
|
||||
require.NoError(t, err, string(out))
|
||||
|
||||
dt, err := os.ReadFile(filepath.Join(dir, "md.json"))
|
||||
require.NoError(t, err)
|
||||
|
||||
type mdT struct {
|
||||
BuildRef string `json:"buildx.build.ref"`
|
||||
BuildWarnings []client.VertexWarning `json:"buildx.build.warnings"`
|
||||
}
|
||||
var md mdT
|
||||
err = json.Unmarshal(dt, &md)
|
||||
require.NoError(t, err, string(dt))
|
||||
|
||||
require.NotEmpty(t, md.BuildRef, string(dt))
|
||||
if mode == "" || mode == "false" {
|
||||
require.Empty(t, md.BuildWarnings, string(dt))
|
||||
return
|
||||
}
|
||||
|
||||
skipNoCompatBuildKit(t, sb, ">= 0.14.0-0", "lint")
|
||||
require.Len(t, md.BuildWarnings, 3, string(dt))
|
||||
}
|
||||
|
||||
func testBuildMultiExporters(t *testing.T, sb integration.Sandbox) {
|
||||
if !isDockerContainerWorker(sb) {
|
||||
t.Skip("only testing with docker-container worker")
|
||||
@@ -797,12 +1035,8 @@ func testBuildDefaultLoad(t *testing.T, sb integration.Sandbox) {
|
||||
require.NoError(t, cmd.Run())
|
||||
}
|
||||
|
||||
func testBuildPrint(t *testing.T, sb integration.Sandbox) {
|
||||
if !isExperimental() {
|
||||
t.Skip("experimental mode required, skipping")
|
||||
}
|
||||
|
||||
t.Run("lint", func(t *testing.T) {
|
||||
func testBuildCall(t *testing.T, sb integration.Sandbox) {
|
||||
t.Run("check", func(t *testing.T) {
|
||||
dockerfile := []byte(`
|
||||
frOM busybox as base
|
||||
cOpy Dockerfile .
|
||||
@@ -816,12 +1050,12 @@ COPy --from=base \
|
||||
fstest.CreateFile("Dockerfile", dockerfile, 0600),
|
||||
)
|
||||
|
||||
cmd := buildxCmd(sb, withArgs("build", "--print=lint,format=json", dir))
|
||||
cmd := buildxCmd(sb, withArgs("build", "--call=check,format=json", dir))
|
||||
stdout := bytes.Buffer{}
|
||||
stderr := bytes.Buffer{}
|
||||
cmd.Stdout = &stdout
|
||||
cmd.Stderr = &stderr
|
||||
require.NoError(t, cmd.Run(), stdout.String(), stderr.String())
|
||||
require.Error(t, cmd.Run(), stdout.String(), stderr.String())
|
||||
|
||||
var res lint.LintResults
|
||||
require.NoError(t, json.Unmarshal(stdout.Bytes(), &res))
|
||||
@@ -851,7 +1085,7 @@ FROM second
|
||||
fstest.CreateFile("Dockerfile", dockerfile, 0600),
|
||||
)
|
||||
|
||||
cmd := buildxCmd(sb, withArgs("build", "--build-arg=BAR=678", "--target=target", "--print=outline,format=json", dir))
|
||||
cmd := buildxCmd(sb, withArgs("build", "--build-arg=BAR=678", "--target=target", "--call=outline,format=json", dir))
|
||||
stdout := bytes.Buffer{}
|
||||
stderr := bytes.Buffer{}
|
||||
cmd.Stdout = &stdout
|
||||
@@ -901,7 +1135,7 @@ FROM second AS binary
|
||||
fstest.CreateFile("Dockerfile", dockerfile, 0600),
|
||||
)
|
||||
|
||||
cmd := buildxCmd(sb, withArgs("build", "--print=targets,format=json", dir))
|
||||
cmd := buildxCmd(sb, withArgs("build", "--call=targets,format=json", dir))
|
||||
stdout := bytes.Buffer{}
|
||||
stderr := bytes.Buffer{}
|
||||
cmd.Stdout = &stdout
|
||||
|
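Across these hunks the frontend subrequests move from --print to --call: lint becomes check, while outline and targets keep their names under the new flag. A check that reports findings now also fails the command (the assertion flips from require.NoError to require.Error), so in CLI terms docker buildx build --call=check,format=json . replaces --print=lint,format=json and is expected to exit non-zero when warnings are present.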
@@ -6,8 +6,8 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/containerd/containerd/images"
|
||||
"github.com/containerd/containerd/platforms"
|
||||
"github.com/containerd/continuity/fs/fstest"
|
||||
"github.com/containerd/platforms"
|
||||
"github.com/moby/buildkit/util/testutil/integration"
|
||||
ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
"github.com/pkg/errors"
|
||||
|
@@ -4,6 +4,7 @@ import (
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
@@ -56,7 +57,7 @@ func buildxCmd(sb integration.Sandbox, opts ...cmdOpt) *exec.Cmd {
|
||||
|
||||
if builder := sb.Address(); builder != "" {
|
||||
cmd.Env = append(cmd.Env,
|
||||
"BUILDX_CONFIG=/tmp/buildx-"+builder,
|
||||
"BUILDX_CONFIG="+buildxConfig(sb),
|
||||
"BUILDX_BUILDER="+builder,
|
||||
)
|
||||
}
|
||||
@@ -66,6 +67,10 @@ func buildxCmd(sb integration.Sandbox, opts ...cmdOpt) *exec.Cmd {
|
||||
if isExperimental() {
|
||||
cmd.Env = append(cmd.Env, "BUILDX_EXPERIMENTAL=1")
|
||||
}
|
||||
if v := os.Getenv("GO_TEST_COVERPROFILE"); v != "" {
|
||||
coverDir := filepath.Join(filepath.Dir(v), "helpers")
|
||||
cmd.Env = append(cmd.Env, "GOCOVERDIR="+coverDir)
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
||||
@@ -82,6 +87,13 @@ func dockerCmd(sb integration.Sandbox, opts ...cmdOpt) *exec.Cmd {
|
||||
return cmd
|
||||
}
|
||||
|
||||
func buildxConfig(sb integration.Sandbox) string {
|
||||
if builder := sb.Address(); builder != "" {
|
||||
return "/tmp/buildx-" + builder
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func isMobyWorker(sb integration.Sandbox) bool {
|
||||
name, hasFeature := driverName(sb.Name())
|
||||
return name == "docker" && !hasFeature
|
||||
@@ -197,3 +209,12 @@ func skipNoCompatBuildKit(t *testing.T, sb integration.Sandbox, constraint strin
|
||||
t.Skipf("buildkit version %s does not match %s constraint (%s)", buildkitVersion(t, sb), constraint, msg)
|
||||
}
|
||||
}
|
||||
|
||||
func ptrstr(s interface{}) *string {
|
||||
var n *string
|
||||
if reflect.ValueOf(s).Kind() == reflect.String {
|
||||
ss := s.(string)
|
||||
n = &ss
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
@@ -1,13 +1,13 @@
|
||||
package buildflags
|
||||
|
||||
import (
|
||||
"encoding/csv"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
controllerapi "github.com/docker/buildx/controller/pb"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/tonistiigi/go-csvvalue"
|
||||
)
|
||||
|
||||
func CanonicalizeAttest(attestType string, in string) string {
|
||||
@@ -45,8 +45,7 @@ func ParseAttest(in string) (*controllerapi.Attest, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
csvReader := csv.NewReader(strings.NewReader(in))
|
||||
fields, err := csvReader.Read()
|
||||
fields, err := csvvalue.Fields(in, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
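All of the buildflags parsers in this change drop the per-value encoding/csv reader in favor of github.com/tonistiigi/go-csvvalue. A minimal sketch of the new call as it is used in this diff (the second argument is an optional destination slice, nil here):

```go
package example

import "github.com/tonistiigi/go-csvvalue"

// splitFlagValue splits one comma-separated flag value such as
// "type=provenance,mode=max" into fields, mirroring what ParseAttest,
// ParseCacheEntry and ParseExports now do.
func splitFlagValue(in string) ([]string, error) {
	return csvvalue.Fields(in, nil)
}
```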
@@ -2,20 +2,19 @@ package buildflags
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/csv"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
awsconfig "github.com/aws/aws-sdk-go-v2/config"
|
||||
controllerapi "github.com/docker/buildx/controller/pb"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/tonistiigi/go-csvvalue"
|
||||
)
|
||||
|
||||
func ParseCacheEntry(in []string) ([]*controllerapi.CacheOptionsEntry, error) {
|
||||
outs := make([]*controllerapi.CacheOptionsEntry, 0, len(in))
|
||||
for _, in := range in {
|
||||
csvReader := csv.NewReader(strings.NewReader(in))
|
||||
fields, err := csvReader.Read()
|
||||
fields, err := csvvalue.Fields(in, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
@@ -1,16 +1,16 @@
|
||||
package buildflags
|
||||
|
||||
import (
|
||||
"encoding/csv"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/containerd/containerd/platforms"
|
||||
"github.com/containerd/platforms"
|
||||
controllerapi "github.com/docker/buildx/controller/pb"
|
||||
"github.com/moby/buildkit/client"
|
||||
"github.com/moby/buildkit/exporter/containerimage/exptypes"
|
||||
ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/tonistiigi/go-csvvalue"
|
||||
)
|
||||
|
||||
func ParseExports(inp []string) ([]*controllerapi.ExportEntry, error) {
|
||||
@@ -19,8 +19,7 @@ func ParseExports(inp []string) ([]*controllerapi.ExportEntry, error) {
|
||||
return nil, nil
|
||||
}
|
||||
for _, s := range inp {
|
||||
csvReader := csv.NewReader(strings.NewReader(s))
|
||||
fields, err := csvReader.Read()
|
||||
fields, err := csvvalue.Fields(s, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -81,7 +80,10 @@ func ParseExports(inp []string) ([]*controllerapi.ExportEntry, error) {
|
||||
|
||||
func ParseAnnotations(inp []string) (map[exptypes.AnnotationKey]string, error) {
|
||||
// TODO: use buildkit's annotation parser once it supports setting custom prefix and ":" separator
|
||||
annotationRegexp := regexp.MustCompile(`^(?:([a-z-]+)(?:\[([A-Za-z0-9_/-]+)\])?:)?(\S+)$`)
|
||||
|
||||
// type followed by optional platform specifier in square brackets
|
||||
annotationTypeRegexp := regexp.MustCompile(`^([a-z-]+)(?:\[([A-Za-z0-9_/-]+)\])?$`)
|
||||
|
||||
annotations := make(map[exptypes.AnnotationKey]string)
|
||||
for _, inp := range inp {
|
||||
k, v, ok := strings.Cut(inp, "=")
|
||||
@@ -89,34 +91,54 @@ func ParseAnnotations(inp []string) (map[exptypes.AnnotationKey]string, error) {
|
||||
return nil, errors.Errorf("invalid annotation %q, expected key=value", inp)
|
||||
}
|
||||
|
||||
groups := annotationRegexp.FindStringSubmatch(k)
|
||||
if groups == nil {
|
||||
return nil, errors.Errorf("invalid annotation format, expected <type>:<key>=<value>, got %q", inp)
|
||||
types, key, ok := strings.Cut(k, ":")
|
||||
if !ok {
|
||||
// no types specified, swap Cut outputs
|
||||
key = types
|
||||
|
||||
ak := exptypes.AnnotationKey{Key: key}
|
||||
annotations[ak] = v
|
||||
continue
|
||||
}
|
||||
|
||||
typ, platform, key := groups[1], groups[2], groups[3]
|
||||
switch typ {
|
||||
case "":
|
||||
case exptypes.AnnotationIndex, exptypes.AnnotationIndexDescriptor, exptypes.AnnotationManifest, exptypes.AnnotationManifestDescriptor:
|
||||
default:
|
||||
return nil, errors.Errorf("unknown annotation type %q", typ)
|
||||
}
|
||||
|
||||
var ociPlatform *ocispecs.Platform
|
||||
if platform != "" {
|
||||
p, err := platforms.Parse(platform)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "invalid platform %q", platform)
|
||||
typesSplit := strings.Split(types, ",")
|
||||
for _, typeAndPlatform := range typesSplit {
|
||||
groups := annotationTypeRegexp.FindStringSubmatch(typeAndPlatform)
|
||||
if groups == nil {
|
||||
return nil, errors.Errorf(
|
||||
"invalid annotation type %q, expected type and optional platform in square brackets",
|
||||
typeAndPlatform)
|
||||
}
|
||||
ociPlatform = &p
|
||||
|
||||
typ, platform := groups[1], groups[2]
|
||||
|
||||
switch typ {
|
||||
case "":
|
||||
case exptypes.AnnotationIndex,
|
||||
exptypes.AnnotationIndexDescriptor,
|
||||
exptypes.AnnotationManifest,
|
||||
exptypes.AnnotationManifestDescriptor:
|
||||
default:
|
||||
return nil, errors.Errorf("unknown annotation type %q", typ)
|
||||
}
|
||||
|
||||
var ociPlatform *ocispecs.Platform
|
||||
if platform != "" {
|
||||
p, err := platforms.Parse(platform)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "invalid platform %q", platform)
|
||||
}
|
||||
ociPlatform = &p
|
||||
}
|
||||
|
||||
ak := exptypes.AnnotationKey{
|
||||
Type: typ,
|
||||
Platform: ociPlatform,
|
||||
Key: key,
|
||||
}
|
||||
annotations[ak] = v
|
||||
}
|
||||
|
||||
ak := exptypes.AnnotationKey{
|
||||
Type: typ,
|
||||
Platform: ociPlatform,
|
||||
Key: key,
|
||||
}
|
||||
annotations[ak] = v
|
||||
}
|
||||
return annotations, nil
|
||||
}
|
||||
|
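The rewritten ParseAnnotations above accepts a comma-separated list of annotation types, each with an optional platform suffix in square brackets, and fans the value out to one exptypes.AnnotationKey per type. A hedged usage sketch; the key and value are illustrative, not taken from this changeset:

```go
package main

import (
	"fmt"

	"github.com/docker/buildx/util/buildflags"
)

func main() {
	// One flag value, two annotation types; "manifest" is additionally
	// scoped to linux/amd64 via the square-bracket platform suffix.
	ann, err := buildflags.ParseAnnotations([]string{
		"index,manifest[linux/amd64]:org.opencontainers.image.title=demo",
	})
	if err != nil {
		panic(err)
	}
	for k, v := range ann {
		fmt.Printf("%s=%s\n", k.String(), v)
	}
}
```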
119
util/buildflags/export_test.go
Normal file
@@ -0,0 +1,119 @@
|
||||
package buildflags
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"slices"
|
||||
"testing"
|
||||
|
||||
"github.com/moby/buildkit/exporter/containerimage/exptypes"
|
||||
ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestParseAnnotations(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
in []string
|
||||
want map[exptypes.AnnotationKey]string
|
||||
wantErr string
|
||||
}{
|
||||
{
|
||||
name: "basic",
|
||||
in: []string{"a=b"},
|
||||
want: map[exptypes.AnnotationKey]string{
|
||||
{Key: "a"}: "b",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "reverse-DNS key",
|
||||
in: []string{"com.example=a"},
|
||||
want: map[exptypes.AnnotationKey]string{
|
||||
{Key: "com.example"}: "a",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "specify type",
|
||||
in: []string{"manifest:com.example=a"},
|
||||
want: map[exptypes.AnnotationKey]string{
|
||||
{Type: "manifest", Key: "com.example"}: "a",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "specify bad type",
|
||||
in: []string{"bad:com.example=a"},
|
||||
wantErr: "unknown annotation type",
|
||||
},
|
||||
{
|
||||
name: "specify type and platform",
|
||||
in: []string{"manifest[plat/form]:com.example=a"},
|
||||
want: map[exptypes.AnnotationKey]string{
|
||||
{
|
||||
Type: "manifest",
|
||||
Platform: &ocispecs.Platform{
|
||||
OS: "plat",
|
||||
Architecture: "form",
|
||||
},
|
||||
Key: "com.example",
|
||||
}: "a",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "specify multiple types",
|
||||
in: []string{"index,manifest:com.example=a"},
|
||||
want: map[exptypes.AnnotationKey]string{
|
||||
{Type: "index", Key: "com.example"}: "a",
|
||||
{Type: "manifest", Key: "com.example"}: "a",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "specify multiple types and platform",
|
||||
in: []string{"index,manifest[plat/form]:com.example=a"},
|
||||
want: map[exptypes.AnnotationKey]string{
|
||||
{Type: "index", Key: "com.example"}: "a",
|
||||
{
|
||||
Type: "manifest",
|
||||
Platform: &ocispecs.Platform{
|
||||
OS: "plat",
|
||||
Architecture: "form",
|
||||
},
|
||||
Key: "com.example",
|
||||
}: "a",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
got, err := ParseAnnotations(test.in)
|
||||
if test.wantErr != "" {
|
||||
require.ErrorContains(t, err, test.wantErr)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
// Can't compare maps with pointer in their keys, need to extract and sort the map entries
|
||||
wantKVs := entries(test.want)
|
||||
gotKVs := entries(got)
|
||||
|
||||
assert.Equal(t, wantKVs, gotKVs)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
type kv struct {
|
||||
Key exptypes.AnnotationKey
|
||||
Val string
|
||||
}
|
||||
|
||||
func entries(in map[exptypes.AnnotationKey]string) []kv {
|
||||
var out []kv
|
||||
for k, v := range in {
|
||||
out = append(out, kv{k, v})
|
||||
}
|
||||
|
||||
sortFunc := func(a, b kv) int { return cmp.Compare(a.Key.String(), b.Key.String()) }
|
||||
slices.SortFunc(out, sortFunc)
|
||||
|
||||
return out
|
||||
}
|
@@ -1,12 +1,12 @@
|
||||
package buildflags
|
||||
|
||||
import (
|
||||
"encoding/csv"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
controllerapi "github.com/docker/buildx/controller/pb"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/tonistiigi/go-csvvalue"
|
||||
)
|
||||
|
||||
const defaultPrintFunc = "build"
|
||||
@@ -16,8 +16,7 @@ func ParsePrintFunc(str string) (*controllerapi.PrintFunc, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
csvReader := csv.NewReader(strings.NewReader(str))
|
||||
fields, err := csvReader.Read()
|
||||
fields, err := csvvalue.Fields(str, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
@@ -1,11 +1,11 @@
|
||||
package buildflags
|
||||
|
||||
import (
|
||||
"encoding/csv"
|
||||
"strings"
|
||||
|
||||
controllerapi "github.com/docker/buildx/controller/pb"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/tonistiigi/go-csvvalue"
|
||||
)
|
||||
|
||||
func ParseSecretSpecs(sl []string) ([]*controllerapi.Secret, error) {
|
||||
@@ -21,8 +21,7 @@ func ParseSecretSpecs(sl []string) ([]*controllerapi.Secret, error) {
|
||||
}
|
||||
|
||||
func parseSecret(value string) (*controllerapi.Secret, error) {
|
||||
csvReader := csv.NewReader(strings.NewReader(value))
|
||||
fields, err := csvReader.Read()
|
||||
fields, err := csvvalue.Fields(value, nil)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to parse csv secret")
|
||||
}
|
||||
|
@@ -5,22 +5,28 @@ import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// MetadataProvenanceMode is the type for setting provenance in the metdata file
|
||||
type MetadataProvenanceMode int
|
||||
// MetadataProvenanceMode is the type for setting provenance in the metadata
|
||||
// file
|
||||
type MetadataProvenanceMode string
|
||||
|
||||
const (
|
||||
// MetadataProvenanceModeMin sets minimal provenance (default)
|
||||
MetadataProvenanceModeMin MetadataProvenanceMode = iota
|
||||
MetadataProvenanceModeMin MetadataProvenanceMode = "min"
|
||||
// MetadataProvenanceModeMax sets full provenance
|
||||
MetadataProvenanceModeMax
|
||||
MetadataProvenanceModeMax MetadataProvenanceMode = "max"
|
||||
// MetadataProvenanceModeDisabled doesn't set provenance
|
||||
MetadataProvenanceModeDisabled
|
||||
MetadataProvenanceModeDisabled MetadataProvenanceMode = "disabled"
|
||||
)
|
||||
|
||||
// MetadataProvenance returns the provenance mode to set in the metadata file
|
||||
// MetadataProvenance returns the metadata provenance mode from
|
||||
// BUILDX_METADATA_PROVENANCE environment variable
|
||||
func MetadataProvenance() MetadataProvenanceMode {
|
||||
bmp := os.Getenv("BUILDX_METADATA_PROVENANCE")
|
||||
switch bmp {
|
||||
return ParseMetadataProvenance(os.Getenv("BUILDX_METADATA_PROVENANCE"))
|
||||
}
|
||||
|
||||
// ParseMetadataProvenance parses the metadata provenance mode from a string
|
||||
func ParseMetadataProvenance(inp string) MetadataProvenanceMode {
|
||||
switch inp {
|
||||
case "min":
|
||||
return MetadataProvenanceModeMin
|
||||
case "max":
|
||||
@@ -28,8 +34,17 @@ func MetadataProvenance() MetadataProvenanceMode {
|
||||
case "disabled":
|
||||
return MetadataProvenanceModeDisabled
|
||||
}
|
||||
if ok, err := strconv.ParseBool(bmp); err == nil && !ok {
|
||||
if ok, err := strconv.ParseBool(inp); err == nil && !ok {
|
||||
return MetadataProvenanceModeDisabled
|
||||
}
|
||||
return MetadataProvenanceModeMin
|
||||
}
|
||||
|
||||
// MetadataWarningsEnabled returns whether metadata warnings are enabled from
|
||||
// BUILDX_METADATA_WARNINGS environment variable (default false)
|
||||
func MetadataWarningsEnabled() bool {
|
||||
if ok, err := strconv.ParseBool(os.Getenv("BUILDX_METADATA_WARNINGS")); err == nil {
|
||||
return ok
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
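MetadataProvenanceMode above changes from an iota-based int to a string type, and the string handling moves into the new ParseMetadataProvenance helper so it can be reused outside the environment lookup. A small sketch of the fallback behavior; the github.com/docker/buildx/util/confutil import path is an assumption, since the file path is not visible in this hunk:

```go
package main

import (
	"fmt"

	// Assumed import path; the hunk above does not show the file location.
	"github.com/docker/buildx/util/confutil"
)

func main() {
	// "min", "max" and "disabled" map directly; a boolean false disables;
	// anything else (including an unset variable) falls back to min.
	for _, v := range []string{"min", "max", "disabled", "false", ""} {
		fmt.Printf("%q -> %s\n", v, confutil.ParseMetadataProvenance(v))
	}
}
```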
@@ -9,12 +9,11 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/containerd/containerd/content"
|
||||
"github.com/containerd/containerd/errdefs"
|
||||
"github.com/containerd/containerd/images"
|
||||
"github.com/containerd/containerd/platforms"
|
||||
"github.com/containerd/containerd/remotes"
|
||||
"github.com/containerd/errdefs"
|
||||
"github.com/containerd/platforms"
|
||||
"github.com/distribution/reference"
|
||||
"github.com/docker/buildx/util/buildflags"
|
||||
"github.com/moby/buildkit/exporter/containerimage/exptypes"
|
||||
"github.com/moby/buildkit/util/contentutil"
|
||||
"github.com/opencontainers/go-digest"
|
||||
@@ -29,7 +28,7 @@ type Source struct {
|
||||
Ref reference.Named
|
||||
}
|
||||
|
||||
func (r *Resolver) Combine(ctx context.Context, srcs []*Source, ann []string, preferIndex bool) ([]byte, ocispec.Descriptor, error) {
|
||||
func (r *Resolver) Combine(ctx context.Context, srcs []*Source, ann map[exptypes.AnnotationKey]string, preferIndex bool) ([]byte, ocispec.Descriptor, error) {
|
||||
eg, ctx := errgroup.WithContext(ctx)
|
||||
|
||||
dts := make([][]byte, len(srcs))
|
||||
@@ -152,11 +151,7 @@ func (r *Resolver) Combine(ctx context.Context, srcs []*Source, ann []string, pr
|
||||
// annotations are only allowed on OCI indexes
|
||||
indexAnnotation := make(map[string]string)
|
||||
if mt == ocispec.MediaTypeImageIndex {
|
||||
annotations, err := buildflags.ParseAnnotations(ann)
|
||||
if err != nil {
|
||||
return nil, ocispec.Descriptor{}, err
|
||||
}
|
||||
for k, v := range annotations {
|
||||
for k, v := range ann {
|
||||
switch k.Type {
|
||||
case exptypes.AnnotationIndex:
|
||||
indexAnnotation[k.Key] = v
|
||||
|
@@ -13,8 +13,8 @@ import (
|
||||
|
||||
"github.com/containerd/containerd/content"
|
||||
"github.com/containerd/containerd/images"
|
||||
"github.com/containerd/containerd/platforms"
|
||||
"github.com/containerd/containerd/remotes"
|
||||
"github.com/containerd/platforms"
|
||||
"github.com/distribution/reference"
|
||||
intoto "github.com/in-toto/in-toto-golang/in_toto"
|
||||
"github.com/moby/buildkit/util/contentutil"
|
||||
|
@@ -11,7 +11,7 @@ import (
|
||||
"text/template"
|
||||
|
||||
"github.com/containerd/containerd/images"
|
||||
"github.com/containerd/containerd/platforms"
|
||||
"github.com/containerd/platforms"
|
||||
"github.com/distribution/reference"
|
||||
"github.com/opencontainers/go-digest"
|
||||
ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
|
@@ -3,7 +3,7 @@ package platformutil
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/containerd/containerd/platforms"
|
||||
"github.com/containerd/platforms"
|
||||
specs "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
)
|
||||
|
||||
|
@@ -13,6 +13,8 @@ import (
|
||||
"github.com/opencontainers/go-digest"
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
"go.opentelemetry.io/otel/metric"
|
||||
"golang.org/x/text/cases"
|
||||
"golang.org/x/text/language"
|
||||
)
|
||||
|
||||
type metricWriter struct {
|
||||
@@ -446,14 +448,22 @@ func newLintMetricRecorder(meter metric.Meter, attrs attribute.Set) *lintMetricR
|
||||
return mr
|
||||
}
|
||||
|
||||
func kebabToCamel(s string) string {
|
||||
words := strings.Split(s, "-")
|
||||
for i, word := range words {
|
||||
words[i] = cases.Title(language.English).String(word)
|
||||
}
|
||||
return strings.Join(words, "")
|
||||
}
|
||||
|
||||
func (mr *lintMetricRecorder) Record(ss *client.SolveStatus) {
|
||||
for _, warning := range ss.Warnings {
|
||||
m := reLintMessage.FindSubmatch(warning.Short)
|
||||
if m == nil {
|
||||
m := reLintMessage.FindSubmatch([]byte(warning.URL))
|
||||
if len(m) < 2 {
|
||||
continue
|
||||
}
|
||||
|
||||
ruleName := string(m[1])
|
||||
ruleName := kebabToCamel(string(m[1]))
|
||||
mr.Count.Add(context.Background(), 1,
|
||||
metric.WithAttributeSet(mr.Attributes),
|
||||
metric.WithAttributes(
|
||||
@@ -464,6 +474,6 @@ func (mr *lintMetricRecorder) Record(ss *client.SolveStatus) {
|
||||
}
|
||||
|
||||
var (
|
||||
reLintMessage = regexp.MustCompile(`^Lint Rule '(\w+)':`)
|
||||
reLintMessage = regexp.MustCompile(`^https://docs\.docker\.com/go/dockerfile/rule/([\w|-]+)/`)
|
||||
lintRuleNameProperty = attribute.Key("lint.rule.name")
|
||||
)
|
||||
|
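The lint metric recorder above now derives the rule name from the warning's documentation URL instead of the short message, and converts it from kebab-case to CamelCase before recording it. A standalone sketch of that extraction, reusing the same regexp and kebabToCamel helper shown in the diff (the example URL is illustrative):

```go
package main

import (
	"fmt"
	"regexp"
	"strings"

	"golang.org/x/text/cases"
	"golang.org/x/text/language"
)

// Copied from the diff above: matches the rule slug in the docs URL.
var reLintMessage = regexp.MustCompile(`^https://docs\.docker\.com/go/dockerfile/rule/([\w|-]+)/`)

// Copied from the diff above: "stage-name-casing" -> "StageNameCasing".
func kebabToCamel(s string) string {
	words := strings.Split(s, "-")
	for i, word := range words {
		words[i] = cases.Title(language.English).String(word)
	}
	return strings.Join(words, "")
}

func main() {
	url := "https://docs.docker.com/go/dockerfile/rule/stage-name-casing/"
	if m := reLintMessage.FindSubmatch([]byte(url)); len(m) >= 2 {
		fmt.Println(kebabToCamel(string(m[1]))) // StageNameCasing
	}
}
```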
@@ -7,6 +7,7 @@ import (
|
||||
|
||||
"github.com/containerd/console"
|
||||
"github.com/docker/buildx/util/logutil"
|
||||
"github.com/mitchellh/hashstructure/v2"
|
||||
"github.com/moby/buildkit/client"
|
||||
"github.com/moby/buildkit/util/progress/progressui"
|
||||
"github.com/opencontainers/go-digest"
|
||||
@@ -18,9 +19,10 @@ import (
|
||||
type Printer struct {
|
||||
status chan *client.SolveStatus
|
||||
|
||||
ready chan struct{}
|
||||
done chan struct{}
|
||||
paused chan struct{}
|
||||
ready chan struct{}
|
||||
done chan struct{}
|
||||
paused chan struct{}
|
||||
closeOnce sync.Once
|
||||
|
||||
err error
|
||||
warnings []client.VertexWarning
|
||||
@@ -35,8 +37,10 @@ type Printer struct {
|
||||
}
|
||||
|
||||
func (p *Printer) Wait() error {
|
||||
close(p.status)
|
||||
<-p.done
|
||||
p.closeOnce.Do(func() {
|
||||
close(p.status)
|
||||
<-p.done
|
||||
})
|
||||
return p.err
|
||||
}
|
||||
|
||||
@@ -58,7 +62,7 @@ func (p *Printer) Write(s *client.SolveStatus) {
|
||||
}
|
||||
|
||||
func (p *Printer) Warnings() []client.VertexWarning {
|
||||
return p.warnings
|
||||
return dedupWarnings(p.warnings)
|
||||
}
|
||||
|
||||
func (p *Printer) ValidateLogSource(dgst digest.Digest, v interface{}) bool {
|
||||
@@ -184,3 +188,26 @@ func WithOnClose(onclose func()) PrinterOpt {
|
||||
opt.onclose = onclose
|
||||
}
|
||||
}
|
||||
|
||||
func dedupWarnings(inp []client.VertexWarning) []client.VertexWarning {
|
||||
m := make(map[uint64]client.VertexWarning)
|
||||
for _, w := range inp {
|
||||
wcp := w
|
||||
wcp.Vertex = ""
|
||||
if wcp.SourceInfo != nil {
|
||||
wcp.SourceInfo.Definition = nil
|
||||
}
|
||||
h, err := hashstructure.Hash(wcp, hashstructure.FormatV2, nil)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
if _, ok := m[h]; !ok {
|
||||
m[h] = w
|
||||
}
|
||||
}
|
||||
res := make([]client.VertexWarning, 0, len(m))
|
||||
for _, w := range m {
|
||||
res = append(res, w)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
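dedupWarnings above collapses duplicate warnings by hashing them after clearing per-vertex fields, so the same warning reported from different vertices is surfaced once. A standalone sketch of the same idea on a simplified struct, assuming only the hashstructure/v2 API already used in the diff:

```go
package main

import (
	"fmt"

	"github.com/mitchellh/hashstructure/v2"
)

type warning struct {
	Vertex string
	Short  string
}

func main() {
	in := []warning{{"v1", "legacy ENV syntax"}, {"v2", "legacy ENV syntax"}}
	seen := map[uint64]warning{}
	for _, w := range in {
		wcp := w
		wcp.Vertex = "" // ignore vertex identity, as dedupWarnings does
		h, err := hashstructure.Hash(wcp, hashstructure.FormatV2, nil)
		if err != nil {
			continue
		}
		if _, ok := seen[h]; !ok {
			seen[h] = w
		}
	}
	fmt.Println(len(seen)) // 1
}
```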
59
vendor/github.com/Microsoft/hcsshim/osversion/osversion_windows.go
generated
vendored
@@ -1,59 +0,0 @@
|
||||
package osversion
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sync"
|
||||
|
||||
"golang.org/x/sys/windows"
|
||||
)
|
||||
|
||||
// OSVersion is a wrapper for Windows version information
|
||||
// https://msdn.microsoft.com/en-us/library/windows/desktop/ms724439(v=vs.85).aspx
|
||||
type OSVersion struct {
|
||||
Version uint32
|
||||
MajorVersion uint8
|
||||
MinorVersion uint8
|
||||
Build uint16
|
||||
}
|
||||
|
||||
var (
|
||||
osv OSVersion
|
||||
once sync.Once
|
||||
)
|
||||
|
||||
// Get gets the operating system version on Windows.
|
||||
// The calling application must be manifested to get the correct version information.
|
||||
func Get() OSVersion {
|
||||
once.Do(func() {
|
||||
var err error
|
||||
osv = OSVersion{}
|
||||
osv.Version, err = windows.GetVersion()
|
||||
if err != nil {
|
||||
// GetVersion never fails.
|
||||
panic(err)
|
||||
}
|
||||
osv.MajorVersion = uint8(osv.Version & 0xFF)
|
||||
osv.MinorVersion = uint8(osv.Version >> 8 & 0xFF)
|
||||
osv.Build = uint16(osv.Version >> 16)
|
||||
})
|
||||
return osv
|
||||
}
|
||||
|
||||
// Build gets the build-number on Windows
|
||||
// The calling application must be manifested to get the correct version information.
|
||||
func Build() uint16 {
|
||||
return Get().Build
|
||||
}
|
||||
|
||||
// String returns the OSVersion formatted as a string. It implements the
|
||||
// [fmt.Stringer] interface.
|
||||
func (osv OSVersion) String() string {
|
||||
return fmt.Sprintf("%d.%d.%d", osv.MajorVersion, osv.MinorVersion, osv.Build)
|
||||
}
|
||||
|
||||
// ToString returns the OSVersion formatted as a string.
|
||||
//
|
||||
// Deprecated: use [OSVersion.String].
|
||||
func (osv OSVersion) ToString() string {
|
||||
return osv.String()
|
||||
}
|
35
vendor/github.com/Microsoft/hcsshim/osversion/platform_compat_windows.go
generated
vendored
@@ -1,35 +0,0 @@
|
||||
package osversion
|
||||
|
||||
// List of stable ABI compliant ltsc releases
|
||||
// Note: List must be sorted in ascending order
|
||||
var compatLTSCReleases = []uint16{
|
||||
V21H2Server,
|
||||
}
|
||||
|
||||
// CheckHostAndContainerCompat checks if given host and container
|
||||
// OS versions are compatible.
|
||||
// It includes support for stable ABI compliant versions as well.
|
||||
// Every release after WS 2022 will support the previous ltsc
|
||||
// container image. Stable ABI is in preview mode for windows 11 client.
|
||||
// Refer: https://learn.microsoft.com/en-us/virtualization/windowscontainers/deploy-containers/version-compatibility?tabs=windows-server-2022%2Cwindows-10#windows-server-host-os-compatibility
|
||||
func CheckHostAndContainerCompat(host, ctr OSVersion) bool {
|
||||
// check major minor versions of host and guest
|
||||
if host.MajorVersion != ctr.MajorVersion ||
|
||||
host.MinorVersion != ctr.MinorVersion {
|
||||
return false
|
||||
}
|
||||
|
||||
// If host is < WS 2022, exact version match is required
|
||||
if host.Build < V21H2Server {
|
||||
return host.Build == ctr.Build
|
||||
}
|
||||
|
||||
var supportedLtscRelease uint16
|
||||
for i := len(compatLTSCReleases) - 1; i >= 0; i-- {
|
||||
if host.Build >= compatLTSCReleases[i] {
|
||||
supportedLtscRelease = compatLTSCReleases[i]
|
||||
break
|
||||
}
|
||||
}
|
||||
return ctr.Build >= supportedLtscRelease && ctr.Build <= host.Build
|
||||
}
|
84
vendor/github.com/Microsoft/hcsshim/osversion/windowsbuilds.go
generated
vendored
@@ -1,84 +0,0 @@
|
||||
package osversion
|
||||
|
||||
// Windows Client and Server build numbers.
|
||||
//
|
||||
// See:
|
||||
// https://learn.microsoft.com/en-us/windows/release-health/release-information
|
||||
// https://learn.microsoft.com/en-us/windows/release-health/windows-server-release-info
|
||||
// https://learn.microsoft.com/en-us/windows/release-health/windows11-release-information
|
||||
const (
|
||||
// RS1 (version 1607, codename "Redstone 1") corresponds to Windows Server
|
||||
// 2016 (ltsc2016) and Windows 10 (Anniversary Update).
|
||||
RS1 = 14393
|
||||
// V1607 (version 1607, codename "Redstone 1") is an alias for [RS1].
|
||||
V1607 = RS1
|
||||
// LTSC2016 (Windows Server 2016) is an alias for [RS1].
|
||||
LTSC2016 = RS1
|
||||
|
||||
// RS2 (version 1703, codename "Redstone 2") was a client-only update, and
|
||||
// corresponds to Windows 10 (Creators Update).
|
||||
RS2 = 15063
|
||||
// V1703 (version 1703, codename "Redstone 2") is an alias for [RS2].
|
||||
V1703 = RS2
|
||||
|
||||
// RS3 (version 1709, codename "Redstone 3") corresponds to Windows Server
|
||||
// 1709 (Semi-Annual Channel (SAC)), and Windows 10 (Fall Creators Update).
|
||||
RS3 = 16299
|
||||
// V1709 (version 1709, codename "Redstone 3") is an alias for [RS3].
|
||||
V1709 = RS3
|
||||
|
||||
// RS4 (version 1803, codename "Redstone 4") corresponds to Windows Server
|
||||
// 1803 (Semi-Annual Channel (SAC)), and Windows 10 (April 2018 Update).
|
||||
RS4 = 17134
|
||||
// V1803 (version 1803, codename "Redstone 4") is an alias for [RS4].
|
||||
V1803 = RS4
|
||||
|
||||
// RS5 (version 1809, codename "Redstone 5") corresponds to Windows Server
|
||||
// 2019 (ltsc2019), and Windows 10 (October 2018 Update).
|
||||
RS5 = 17763
|
||||
// V1809 (version 1809, codename "Redstone 5") is an alias for [RS5].
|
||||
V1809 = RS5
|
||||
// LTSC2019 (Windows Server 2019) is an alias for [RS5].
|
||||
LTSC2019 = RS5
|
||||
|
||||
// V19H1 (version 1903, codename 19H1) corresponds to Windows Server 1903 (semi-annual
|
||||
// channel).
|
||||
V19H1 = 18362
|
||||
// V1903 (version 1903) is an alias for [V19H1].
|
||||
V1903 = V19H1
|
||||
|
||||
// V19H2 (version 1909, codename 19H2) corresponds to Windows Server 1909 (semi-annual
|
||||
// channel).
|
||||
V19H2 = 18363
|
||||
// V1909 (version 1909) is an alias for [V19H2].
|
||||
V1909 = V19H2
|
||||
|
||||
// V20H1 (version 2004, codename 20H1) corresponds to Windows Server 2004 (semi-annual
|
||||
// channel).
|
||||
V20H1 = 19041
|
||||
// V2004 (version 2004) is an alias for [V20H1].
|
||||
V2004 = V20H1
|
||||
|
||||
// V20H2 corresponds to Windows Server 20H2 (semi-annual channel).
|
||||
V20H2 = 19042
|
||||
|
||||
// V21H1 corresponds to Windows Server 21H1 (semi-annual channel).
|
||||
V21H1 = 19043
|
||||
|
||||
// V21H2Win10 corresponds to Windows 10 (November 2021 Update).
|
||||
V21H2Win10 = 19044
|
||||
|
||||
// V21H2Server corresponds to Windows Server 2022 (ltsc2022).
|
||||
V21H2Server = 20348
|
||||
// LTSC2022 (Windows Server 2022) is an alias for [V21H2Server]
|
||||
LTSC2022 = V21H2Server
|
||||
|
||||
// V21H2Win11 corresponds to Windows 11 (original release).
|
||||
V21H2Win11 = 22000
|
||||
|
||||
// V22H2Win10 corresponds to Windows 10 (2022 Update).
|
||||
V22H2Win10 = 19045
|
||||
|
||||
// V22H2Win11 corresponds to Windows 11 (2022 Update).
|
||||
V22H2Win11 = 22621
|
||||
)
|
8
vendor/github.com/compose-spec/compose-go/v2/types/config.go
generated
vendored
@@ -100,7 +100,13 @@ type Secrets map[string]SecretConfig
|
||||
type Configs map[string]ConfigObjConfig
|
||||
|
||||
// Extensions is a map of custom extension
|
||||
type Extensions map[string]interface{}
|
||||
type Extensions map[string]any
|
||||
|
||||
func (e Extensions) DeepCopy(t Extensions) {
|
||||
for k, v := range e {
|
||||
t[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
// MarshalJSON makes Config implement json.Marshaler
|
||||
func (c Config) MarshalJSON() ([]byte, error) {
|
||||
|
2067
vendor/github.com/compose-spec/compose-go/v2/types/derived.gen.go
generated
vendored
Normal file
File diff suppressed because it is too large
11
vendor/github.com/compose-spec/compose-go/v2/types/project.go
generated
vendored
@@ -29,7 +29,6 @@ import (
|
||||
"github.com/compose-spec/compose-go/v2/errdefs"
|
||||
"github.com/compose-spec/compose-go/v2/utils"
|
||||
"github.com/distribution/reference"
|
||||
"github.com/mitchellh/copystructure"
|
||||
godigest "github.com/opencontainers/go-digest"
|
||||
"golang.org/x/exp/maps"
|
||||
"golang.org/x/sync/errgroup"
|
||||
@@ -646,11 +645,13 @@ func (p Project) WithServicesEnvironmentResolved(discardEnvFiles bool) (*Project
|
||||
}
|
||||
|
||||
func (p *Project) deepCopy() *Project {
|
||||
instance, err := copystructure.Copy(p)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
if p == nil {
|
||||
return nil
|
||||
}
|
||||
return instance.(*Project)
|
||||
n := &Project{}
|
||||
deriveDeepCopyProject(n, p)
|
||||
return n
|
||||
|
||||
}
|
||||
|
||||
// WithServicesTransform applies a transformation to project services and return a new project with transformation results
|
||||
|
22
vendor/github.com/compose-spec/compose-go/v2/types/types.go
generated
vendored
@@ -24,7 +24,6 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/docker/go-connections/nat"
|
||||
"github.com/mitchellh/copystructure"
|
||||
)
|
||||
|
||||
// ServiceConfig is the configuration of one service
|
||||
@@ -194,11 +193,12 @@ func (s *ServiceConfig) SetScale(scale int) {
|
||||
}
|
||||
|
||||
func (s *ServiceConfig) deepCopy() *ServiceConfig {
|
||||
instance, err := copystructure.Copy(s)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
if s == nil {
|
||||
return nil
|
||||
}
|
||||
return instance.(*ServiceConfig)
|
||||
n := &ServiceConfig{}
|
||||
deriveDeepCopyService(n, s)
|
||||
return n
|
||||
}
|
||||
|
||||
const (
|
||||
@@ -698,7 +698,7 @@ type NetworkConfig struct {
|
||||
Internal bool `yaml:"internal,omitempty" json:"internal,omitempty"`
|
||||
Attachable bool `yaml:"attachable,omitempty" json:"attachable,omitempty"`
|
||||
Labels Labels `yaml:"labels,omitempty" json:"labels,omitempty"`
|
||||
EnableIPv6 bool `yaml:"enable_ipv6,omitempty" json:"enable_ipv6,omitempty"`
|
||||
EnableIPv6 *bool `yaml:"enable_ipv6,omitempty" json:"enable_ipv6,omitempty"`
|
||||
Extensions Extensions `yaml:"#extensions,inline,omitempty" json:"-"`
|
||||
}
|
||||
|
||||
@@ -711,11 +711,11 @@ type IPAMConfig struct {
|
||||
|
||||
// IPAMPool for a network
|
||||
type IPAMPool struct {
|
||||
Subnet string `yaml:"subnet,omitempty" json:"subnet,omitempty"`
|
||||
Gateway string `yaml:"gateway,omitempty" json:"gateway,omitempty"`
|
||||
IPRange string `yaml:"ip_range,omitempty" json:"ip_range,omitempty"`
|
||||
AuxiliaryAddresses Mapping `yaml:"aux_addresses,omitempty" json:"aux_addresses,omitempty"`
|
||||
Extensions map[string]interface{} `yaml:",inline" json:"-"`
|
||||
Subnet string `yaml:"subnet,omitempty" json:"subnet,omitempty"`
|
||||
Gateway string `yaml:"gateway,omitempty" json:"gateway,omitempty"`
|
||||
IPRange string `yaml:"ip_range,omitempty" json:"ip_range,omitempty"`
|
||||
AuxiliaryAddresses Mapping `yaml:"aux_addresses,omitempty" json:"aux_addresses,omitempty"`
|
||||
Extensions Extensions `yaml:",inline" json:"-"`
|
||||
}
|
||||
|
||||
// VolumeConfig for a volume
|
||||
|
191
vendor/github.com/containerd/containerd/api/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,191 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
https://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
Copyright The containerd Authors
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
https://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
116
vendor/github.com/containerd/containerd/errdefs/errdefs_deprecated.go
generated
vendored
@@ -1,116 +0,0 @@
|
||||
/*
|
||||
Copyright The containerd Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
// Package errdefs defines the common errors used throughout containerd
|
||||
// packages.
|
||||
//
|
||||
// Use with fmt.Errorf to add context to an error.
|
||||
//
|
||||
// To detect an error class, use the IsXXX functions to tell whether an error
|
||||
// is of a certain type.
|
||||
//
|
||||
// The functions ToGRPC and FromGRPC can be used to map server-side and
|
||||
// client-side errors to the correct types.
|
||||
package errdefs
|
||||
|
||||
import (
|
||||
"github.com/containerd/errdefs"
|
||||
)
|
||||
|
||||
// Definitions of common error types used throughout containerd. All containerd
|
||||
// errors returned by most packages will map into one of these errors classes.
|
||||
// Packages should return errors of these types when they want to instruct a
|
||||
// client to take a particular action.
|
||||
//
|
||||
// For the most part, we just try to provide local grpc errors. Most conditions
|
||||
// map very well to those defined by grpc.
|
||||
var (
|
||||
ErrUnknown = errdefs.ErrUnknown
|
||||
ErrInvalidArgument = errdefs.ErrInvalidArgument
|
||||
ErrNotFound = errdefs.ErrNotFound
|
||||
ErrAlreadyExists = errdefs.ErrAlreadyExists
|
||||
ErrFailedPrecondition = errdefs.ErrFailedPrecondition
|
||||
ErrUnavailable = errdefs.ErrUnavailable
|
||||
ErrNotImplemented = errdefs.ErrNotImplemented
|
||||
)
|
||||
|
||||
// IsInvalidArgument returns true if the error is due to an invalid argument
|
||||
func IsInvalidArgument(err error) bool {
|
||||
return errdefs.IsInvalidArgument(err)
|
||||
}
|
||||
|
||||
// IsNotFound returns true if the error is due to a missing object
|
||||
func IsNotFound(err error) bool {
|
||||
return errdefs.IsNotFound(err)
|
||||
}
|
||||
|
||||
// IsAlreadyExists returns true if the error is due to an already existing
|
||||
// metadata item
|
||||
func IsAlreadyExists(err error) bool {
|
||||
return errdefs.IsAlreadyExists(err)
|
||||
}
|
||||
|
||||
// IsFailedPrecondition returns true if an operation could not proceed to the
|
||||
// lack of a particular condition
|
||||
func IsFailedPrecondition(err error) bool {
|
||||
return errdefs.IsFailedPrecondition(err)
|
||||
}
|
||||
|
||||
// IsUnavailable returns true if the error is due to a resource being unavailable
|
||||
func IsUnavailable(err error) bool {
|
||||
return errdefs.IsUnavailable(err)
|
||||
}
|
||||
|
||||
// IsNotImplemented returns true if the error is due to not being implemented
|
||||
func IsNotImplemented(err error) bool {
|
||||
return errdefs.IsNotImplemented(err)
|
||||
}
|
||||
|
||||
// IsCanceled returns true if the error is due to `context.Canceled`.
|
||||
func IsCanceled(err error) bool {
|
||||
return errdefs.IsCanceled(err)
|
||||
}
|
||||
|
||||
// IsDeadlineExceeded returns true if the error is due to
|
||||
// `context.DeadlineExceeded`.
|
||||
func IsDeadlineExceeded(err error) bool {
|
||||
return errdefs.IsDeadlineExceeded(err)
|
||||
}
|
||||
|
||||
// ToGRPC will attempt to map the backend containerd error into a grpc error,
|
||||
// using the original error message as a description.
|
||||
//
|
||||
// Further information may be extracted from certain errors depending on their
|
||||
// type.
|
||||
//
|
||||
// If the error is unmapped, the original error will be returned to be handled
|
||||
// by the regular grpc error handling stack.
|
||||
func ToGRPC(err error) error {
|
||||
return errdefs.ToGRPC(err)
|
||||
}
|
||||
|
||||
// ToGRPCf maps the error to grpc error codes, assembling the formatting string
|
||||
// and combining it with the target error string.
|
||||
//
|
||||
// This is equivalent to errdefs.ToGRPC(fmt.Errorf("%s: %w", fmt.Sprintf(format, args...), err))
|
||||
func ToGRPCf(err error, format string, args ...interface{}) error {
|
||||
return errdefs.ToGRPCf(err, format, args...)
|
||||
}
|
||||
|
||||
// FromGRPC returns the underlying error from a grpc service based on the grpc error code
|
||||
func FromGRPC(err error) error {
|
||||
return errdefs.FromGRPC(err)
|
||||
}
|
2
vendor/github.com/containerd/containerd/images/archive/exporter.go
generated
vendored
@@ -29,9 +29,9 @@ import (
|
||||
"github.com/containerd/containerd/content"
|
||||
"github.com/containerd/containerd/images"
|
||||
"github.com/containerd/containerd/labels"
|
||||
"github.com/containerd/containerd/platforms"
|
||||
"github.com/containerd/errdefs"
|
||||
"github.com/containerd/log"
|
||||
"github.com/containerd/platforms"
|
||||
digest "github.com/opencontainers/go-digest"
|
||||
ocispecs "github.com/opencontainers/image-spec/specs-go"
|
||||
ocispec "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
|
2
vendor/github.com/containerd/containerd/images/archive/importer.go
generated
vendored
@@ -31,9 +31,9 @@ import (
|
||||
"github.com/containerd/containerd/content"
|
||||
"github.com/containerd/containerd/images"
|
||||
"github.com/containerd/containerd/labels"
|
||||
"github.com/containerd/containerd/platforms"
|
||||
"github.com/containerd/errdefs"
|
||||
"github.com/containerd/log"
|
||||
"github.com/containerd/platforms"
|
||||
digest "github.com/opencontainers/go-digest"
|
||||
specs "github.com/opencontainers/image-spec/specs-go"
|
||||
ocispec "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
|
2
vendor/github.com/containerd/containerd/images/archive/reference.go
generated
vendored
@@ -21,7 +21,7 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/containerd/containerd/reference"
|
||||
distref "github.com/containerd/containerd/reference/docker"
|
||||
distref "github.com/distribution/reference"
|
||||
"github.com/opencontainers/go-digest"
|
||||
)
|
||||
|
||||
|
2
vendor/github.com/containerd/containerd/images/handlers.go
generated
vendored
@@ -23,8 +23,8 @@ import (
|
||||
"sort"
|
||||
|
||||
"github.com/containerd/containerd/content"
|
||||
"github.com/containerd/containerd/platforms"
|
||||
"github.com/containerd/errdefs"
|
||||
"github.com/containerd/platforms"
|
||||
ocispec "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
"golang.org/x/sync/errgroup"
|
||||
"golang.org/x/sync/semaphore"
|
||||
|
2
vendor/github.com/containerd/containerd/images/image.go
generated
vendored
@@ -24,9 +24,9 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/containerd/containerd/content"
|
||||
"github.com/containerd/containerd/platforms"
|
||||
"github.com/containerd/errdefs"
|
||||
"github.com/containerd/log"
|
||||
"github.com/containerd/platforms"
|
||||
digest "github.com/opencontainers/go-digest"
|
||||
ocispec "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
)
|
||||
|
58
vendor/github.com/containerd/containerd/reference/docker/helpers.go
generated
vendored
@@ -1,58 +0,0 @@
|
||||
/*
|
||||
Copyright The containerd Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
package docker
|
||||
|
||||
import "path"
|
||||
|
||||
// IsNameOnly returns true if reference only contains a repo name.
|
||||
func IsNameOnly(ref Named) bool {
|
||||
if _, ok := ref.(NamedTagged); ok {
|
||||
return false
|
||||
}
|
||||
if _, ok := ref.(Canonical); ok {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// FamiliarName returns the familiar name string
|
||||
// for the given named, familiarizing if needed.
|
||||
func FamiliarName(ref Named) string {
|
||||
if nn, ok := ref.(normalizedNamed); ok {
|
||||
return nn.Familiar().Name()
|
||||
}
|
||||
return ref.Name()
|
||||
}
|
||||
|
||||
// FamiliarString returns the familiar string representation
|
||||
// for the given reference, familiarizing if needed.
|
||||
func FamiliarString(ref Reference) string {
|
||||
if nn, ok := ref.(normalizedNamed); ok {
|
||||
return nn.Familiar().String()
|
||||
}
|
||||
return ref.String()
|
||||
}
|
||||
|
||||
// FamiliarMatch reports whether ref matches the specified pattern.
|
||||
// See https://godoc.org/path#Match for supported patterns.
|
||||
func FamiliarMatch(pattern string, ref Reference) (bool, error) {
|
||||
matched, err := path.Match(pattern, FamiliarString(ref))
|
||||
if namedRef, isNamed := ref.(Named); isNamed && !matched {
|
||||
matched, _ = path.Match(pattern, FamiliarName(namedRef))
|
||||
}
|
||||
return matched, err
|
||||
}
|
196
vendor/github.com/containerd/containerd/reference/docker/normalize.go
generated
vendored
@@ -1,196 +0,0 @@
|
||||
/*
|
||||
Copyright The containerd Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
package docker
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/opencontainers/go-digest"
|
||||
)
|
||||
|
||||
var (
|
||||
legacyDefaultDomain = "index.docker.io"
|
||||
defaultDomain = "docker.io"
|
||||
officialRepoName = "library"
|
||||
defaultTag = "latest"
|
||||
)
|
||||
|
||||
// normalizedNamed represents a name which has been
|
||||
// normalized and has a familiar form. A familiar name
|
||||
// is what is used in Docker UI. An example normalized
|
||||
// name is "docker.io/library/ubuntu" and corresponding
|
||||
// familiar name of "ubuntu".
|
||||
type normalizedNamed interface {
|
||||
Named
|
||||
Familiar() Named
|
||||
}
|
||||
|
||||
// ParseNormalizedNamed parses a string into a named reference
|
||||
// transforming a familiar name from Docker UI to a fully
|
||||
// qualified reference. If the value may be an identifier
|
||||
// use ParseAnyReference.
|
||||
func ParseNormalizedNamed(s string) (Named, error) {
|
||||
if ok := anchoredIdentifierRegexp.MatchString(s); ok {
|
||||
return nil, fmt.Errorf("invalid repository name (%s), cannot specify 64-byte hexadecimal strings", s)
|
||||
}
|
||||
domain, remainder := splitDockerDomain(s)
|
||||
var remoteName string
|
||||
if tagSep := strings.IndexRune(remainder, ':'); tagSep > -1 {
|
||||
remoteName = remainder[:tagSep]
|
||||
} else {
|
||||
remoteName = remainder
|
||||
}
|
||||
if strings.ToLower(remoteName) != remoteName {
|
||||
return nil, fmt.Errorf("invalid reference format: repository name (%s) must be lowercase", remoteName)
|
||||
}
|
||||
|
||||
ref, err := Parse(domain + "/" + remainder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
named, isNamed := ref.(Named)
|
||||
if !isNamed {
|
||||
return nil, fmt.Errorf("reference %s has no name", ref.String())
|
||||
}
|
||||
return named, nil
|
||||
}
|
||||
|
||||
// ParseDockerRef normalizes the image reference following the docker convention. This is added
|
||||
// mainly for backward compatibility.
|
||||
// The reference returned can only be either tagged or digested. For reference contains both tag
|
||||
// and digest, the function returns digested reference, e.g. docker.io/library/busybox:latest@
|
||||
// sha256:7cc4b5aefd1d0cadf8d97d4350462ba51c694ebca145b08d7d41b41acc8db5aa will be returned as
|
||||
// docker.io/library/busybox@sha256:7cc4b5aefd1d0cadf8d97d4350462ba51c694ebca145b08d7d41b41acc8db5aa.
|
||||
func ParseDockerRef(ref string) (Named, error) {
|
||||
named, err := ParseNormalizedNamed(ref)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if _, ok := named.(NamedTagged); ok {
|
||||
if canonical, ok := named.(Canonical); ok {
|
||||
// The reference is both tagged and digested, only
|
||||
// return digested.
|
||||
newNamed, err := WithName(canonical.Name())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
newCanonical, err := WithDigest(newNamed, canonical.Digest())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newCanonical, nil
|
||||
}
|
||||
}
|
||||
return TagNameOnly(named), nil
|
||||
}
|
||||
|
||||
// splitDockerDomain splits a repository name to domain and remotename string.
|
||||
// If no valid domain is found, the default domain is used. Repository name
|
||||
// needs to be already validated before.
|
||||
func splitDockerDomain(name string) (domain, remainder string) {
|
||||
i := strings.IndexRune(name, '/')
|
||||
if i == -1 || (!strings.ContainsAny(name[:i], ".:") && name[:i] != "localhost" && strings.ToLower(name[:i]) == name[:i]) {
|
||||
domain, remainder = defaultDomain, name
|
||||
} else {
|
||||
domain, remainder = name[:i], name[i+1:]
|
||||
}
|
||||
if domain == legacyDefaultDomain {
|
||||
domain = defaultDomain
|
||||
}
|
||||
if domain == defaultDomain && !strings.ContainsRune(remainder, '/') {
|
||||
remainder = officialRepoName + "/" + remainder
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// familiarizeName returns a shortened version of the name familiar
// to the Docker UI. Familiar names have the default domain
// "docker.io" and "library/" repository prefix removed.
// For example, "docker.io/library/redis" will have the familiar
// name "redis" and "docker.io/dmcgowan/myapp" will be "dmcgowan/myapp".
// Returns a familiarized named only reference.
func familiarizeName(named namedRepository) repository {
	repo := repository{
		domain: named.Domain(),
		path:   named.Path(),
	}

	if repo.domain == defaultDomain {
		repo.domain = ""
		// Handle official repositories which have the pattern "library/<official repo name>"
		if split := strings.Split(repo.path, "/"); len(split) == 2 && split[0] == officialRepoName {
			repo.path = split[1]
		}
	}
	return repo
}

func (r reference) Familiar() Named {
	return reference{
		namedRepository: familiarizeName(r.namedRepository),
		tag:             r.tag,
		digest:          r.digest,
	}
}

func (r repository) Familiar() Named {
	return familiarizeName(r)
}

func (t taggedReference) Familiar() Named {
	return taggedReference{
		namedRepository: familiarizeName(t.namedRepository),
		tag:             t.tag,
	}
}

func (c canonicalReference) Familiar() Named {
	return canonicalReference{
		namedRepository: familiarizeName(c.namedRepository),
		digest:          c.digest,
	}
}

// TagNameOnly adds the default tag "latest" to a reference if it only has
// a repo name.
func TagNameOnly(ref Named) Named {
	if IsNameOnly(ref) {
		namedTagged, err := WithTag(ref, defaultTag)
		if err != nil {
			// Default tag must be valid, to create a NamedTagged
			// type with non-validated input the WithTag function
			// should be used instead
			panic(err)
		}
		return namedTagged
	}
	return ref
}

// ParseAnyReference parses a reference string as a possible identifier,
// full digest, or familiar name.
func ParseAnyReference(ref string) (Reference, error) {
	if ok := anchoredIdentifierRegexp.MatchString(ref); ok {
		return digestReference("sha256:" + ref), nil
	}
	if dgst, err := digest.Parse(ref); err == nil {
		return digestReference(dgst), nil
	}

	return ParseNormalizedNamed(ref)
}
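A short sketch of the three inputs ParseAnyReference accepts (bare 64-hex identifier, full digest, familiar name), under the same assumed import path; the identifier is a placeholder, not a real image ID.

package main

import (
	"fmt"
	"strings"

	refdocker "github.com/containerd/containerd/reference/docker"
)

func main() {
	id := strings.Repeat("a", 64) // placeholder identifier
	for _, s := range []string{id, "sha256:" + id, "alpine:3.20"} {
		ref, err := refdocker.ParseAnyReference(s)
		if err != nil {
			panic(err)
		}
		fmt.Println(ref.String())
	}
	// The first two print the same "sha256:<64 hex>" digest string; the
	// familiar name prints as "docker.io/library/alpine:3.20".
}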
453 vendor/github.com/containerd/containerd/reference/docker/reference.go (generated, vendored)
@@ -1,453 +0,0 @@
|
||||
/*
|
||||
Copyright The containerd Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
// Package docker provides a general type to represent any way of referencing images within the registry.
|
||||
// Its main purpose is to abstract tags and digests (content-addressable hash).
|
||||
//
|
||||
// Grammar
|
||||
//
|
||||
// reference := name [ ":" tag ] [ "@" digest ]
|
||||
// name := [domain '/'] path-component ['/' path-component]*
|
||||
// domain := host [':' port-number]
|
||||
// host := domain-name | IPv4address | \[ IPv6address \] ; rfc3986 appendix-A
|
||||
// domain-name := domain-component ['.' domain-component]*
|
||||
// domain-component := /([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9])/
|
||||
// port-number := /[0-9]+/
|
||||
// path-component := alpha-numeric [separator alpha-numeric]*
|
||||
// alpha-numeric := /[a-z0-9]+/
|
||||
// separator := /[_.]|__|[-]*/
|
||||
//
|
||||
// tag := /[\w][\w.-]{0,127}/
|
||||
//
|
||||
// digest := digest-algorithm ":" digest-hex
|
||||
// digest-algorithm := digest-algorithm-component [ digest-algorithm-separator digest-algorithm-component ]*
|
||||
// digest-algorithm-separator := /[+.-_]/
|
||||
// digest-algorithm-component := /[A-Za-z][A-Za-z0-9]*/
|
||||
// digest-hex := /[0-9a-fA-F]{32,}/ ; At least 128 bit digest value
|
||||
//
|
||||
// identifier := /[a-f0-9]{64}/
|
||||
// short-identifier := /[a-f0-9]{6,64}/
|
||||
package docker
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/opencontainers/go-digest"
|
||||
)
|
||||
|
||||
const (
|
||||
// NameTotalLengthMax is the maximum total number of characters in a repository name.
|
||||
NameTotalLengthMax = 255
|
||||
)
|
||||
|
||||
var (
|
||||
// ErrReferenceInvalidFormat represents an error while trying to parse a string as a reference.
|
||||
ErrReferenceInvalidFormat = errors.New("invalid reference format")
|
||||
|
||||
// ErrTagInvalidFormat represents an error while trying to parse a string as a tag.
|
||||
ErrTagInvalidFormat = errors.New("invalid tag format")
|
||||
|
||||
// ErrDigestInvalidFormat represents an error while trying to parse a string as a tag.
|
||||
ErrDigestInvalidFormat = errors.New("invalid digest format")
|
||||
|
||||
// ErrNameContainsUppercase is returned for invalid repository names that contain uppercase characters.
|
||||
ErrNameContainsUppercase = errors.New("repository name must be lowercase")
|
||||
|
||||
// ErrNameEmpty is returned for empty, invalid repository names.
|
||||
ErrNameEmpty = errors.New("repository name must have at least one component")
|
||||
|
||||
// ErrNameTooLong is returned when a repository name is longer than NameTotalLengthMax.
|
||||
ErrNameTooLong = fmt.Errorf("repository name must not be more than %v characters", NameTotalLengthMax)
|
||||
|
||||
// ErrNameNotCanonical is returned when a name is not canonical.
|
||||
ErrNameNotCanonical = errors.New("repository name must be canonical")
|
||||
)
|
||||
|
||||
// Reference is an opaque object reference identifier that may include
|
||||
// modifiers such as a hostname, name, tag, and digest.
|
||||
type Reference interface {
|
||||
// String returns the full reference
|
||||
String() string
|
||||
}
|
||||
|
||||
// Field provides a wrapper type for resolving correct reference types when
|
||||
// working with encoding.
|
||||
type Field struct {
|
||||
reference Reference
|
||||
}
|
||||
|
||||
// AsField wraps a reference in a Field for encoding.
|
||||
func AsField(reference Reference) Field {
|
||||
return Field{reference}
|
||||
}
|
||||
|
||||
// Reference unwraps the reference type from the field to
|
||||
// return the Reference object. This object should be
|
||||
// of the appropriate type to further check for different
|
||||
// reference types.
|
||||
func (f Field) Reference() Reference {
|
||||
return f.reference
|
||||
}
|
||||
|
||||
// MarshalText serializes the field to byte text which
|
||||
// is the string of the reference.
|
||||
func (f Field) MarshalText() (p []byte, err error) {
|
||||
return []byte(f.reference.String()), nil
|
||||
}
|
||||
|
||||
// UnmarshalText parses text bytes by invoking the
|
||||
// reference parser to ensure the appropriately
|
||||
// typed reference object is wrapped by field.
|
||||
func (f *Field) UnmarshalText(p []byte) error {
|
||||
r, err := Parse(string(p))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
f.reference = r
|
||||
return nil
|
||||
}
|
||||
|
||||
// Named is an object with a full name
|
||||
type Named interface {
|
||||
Reference
|
||||
Name() string
|
||||
}
|
||||
|
||||
// Tagged is an object which has a tag
|
||||
type Tagged interface {
|
||||
Reference
|
||||
Tag() string
|
||||
}
|
||||
|
||||
// NamedTagged is an object including a name and tag.
|
||||
type NamedTagged interface {
|
||||
Named
|
||||
Tag() string
|
||||
}
|
||||
|
||||
// Digested is an object which has a digest
|
||||
// in which it can be referenced by
|
||||
type Digested interface {
|
||||
Reference
|
||||
Digest() digest.Digest
|
||||
}
|
||||
|
||||
// Canonical reference is an object with a fully unique
|
||||
// name including a name with domain and digest
|
||||
type Canonical interface {
|
||||
Named
|
||||
Digest() digest.Digest
|
||||
}
|
||||
|
||||
// namedRepository is a reference to a repository with a name.
|
||||
// A namedRepository has both domain and path components.
|
||||
type namedRepository interface {
|
||||
Named
|
||||
Domain() string
|
||||
Path() string
|
||||
}
|
||||
|
||||
// Domain returns the domain part of the Named reference
|
||||
func Domain(named Named) string {
|
||||
if r, ok := named.(namedRepository); ok {
|
||||
return r.Domain()
|
||||
}
|
||||
domain, _ := splitDomain(named.Name())
|
||||
return domain
|
||||
}
|
||||
|
||||
// Path returns the name without the domain part of the Named reference
|
||||
func Path(named Named) (name string) {
|
||||
if r, ok := named.(namedRepository); ok {
|
||||
return r.Path()
|
||||
}
|
||||
_, path := splitDomain(named.Name())
|
||||
return path
|
||||
}
|
||||
|
||||
func splitDomain(name string) (string, string) {
|
||||
match := anchoredNameRegexp.FindStringSubmatch(name)
|
||||
if len(match) != 3 {
|
||||
return "", name
|
||||
}
|
||||
return match[1], match[2]
|
||||
}
|
||||
|
||||
// SplitHostname splits a named reference into a
|
||||
// hostname and name string. If no valid hostname is
|
||||
// found, the hostname is empty and the full value
|
||||
// is returned as name
|
||||
// DEPRECATED: Use Domain or Path
|
||||
func SplitHostname(named Named) (string, string) {
|
||||
if r, ok := named.(namedRepository); ok {
|
||||
return r.Domain(), r.Path()
|
||||
}
|
||||
return splitDomain(named.Name())
|
||||
}
|
||||
|
||||
// Parse parses s and returns a syntactically valid Reference.
|
||||
// If an error was encountered it is returned, along with a nil Reference.
|
||||
// NOTE: Parse will not handle short digests.
|
||||
func Parse(s string) (Reference, error) {
|
||||
matches := ReferenceRegexp.FindStringSubmatch(s)
|
||||
if matches == nil {
|
||||
if s == "" {
|
||||
return nil, ErrNameEmpty
|
||||
}
|
||||
if ReferenceRegexp.FindStringSubmatch(strings.ToLower(s)) != nil {
|
||||
return nil, ErrNameContainsUppercase
|
||||
}
|
||||
return nil, ErrReferenceInvalidFormat
|
||||
}
|
||||
|
||||
if len(matches[1]) > NameTotalLengthMax {
|
||||
return nil, ErrNameTooLong
|
||||
}
|
||||
|
||||
var repo repository
|
||||
|
||||
nameMatch := anchoredNameRegexp.FindStringSubmatch(matches[1])
|
||||
if len(nameMatch) == 3 {
|
||||
repo.domain = nameMatch[1]
|
||||
repo.path = nameMatch[2]
|
||||
} else {
|
||||
repo.domain = ""
|
||||
repo.path = matches[1]
|
||||
}
|
||||
|
||||
ref := reference{
|
||||
namedRepository: repo,
|
||||
tag: matches[2],
|
||||
}
|
||||
if matches[3] != "" {
|
||||
var err error
|
||||
ref.digest, err = digest.Parse(matches[3])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
r := getBestReferenceType(ref)
|
||||
if r == nil {
|
||||
return nil, ErrNameEmpty
|
||||
}
|
||||
|
||||
return r, nil
|
||||
}
|
||||
|
||||
// ParseNamed parses s and returns a syntactically valid reference implementing
|
||||
// the Named interface. The reference must have a name and be in the canonical
|
||||
// form, otherwise an error is returned.
|
||||
// If an error was encountered it is returned, along with a nil Reference.
|
||||
// NOTE: ParseNamed will not handle short digests.
|
||||
func ParseNamed(s string) (Named, error) {
|
||||
named, err := ParseNormalizedNamed(s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if named.String() != s {
|
||||
return nil, ErrNameNotCanonical
|
||||
}
|
||||
return named, nil
|
||||
}
|
||||
|
||||
// WithName returns a named object representing the given string. If the input
|
||||
// is invalid ErrReferenceInvalidFormat will be returned.
|
||||
func WithName(name string) (Named, error) {
|
||||
if len(name) > NameTotalLengthMax {
|
||||
return nil, ErrNameTooLong
|
||||
}
|
||||
|
||||
match := anchoredNameRegexp.FindStringSubmatch(name)
|
||||
if match == nil || len(match) != 3 {
|
||||
return nil, ErrReferenceInvalidFormat
|
||||
}
|
||||
return repository{
|
||||
domain: match[1],
|
||||
path: match[2],
|
||||
}, nil
|
||||
}
|
||||
|
||||
// WithTag combines the name from "name" and the tag from "tag" to form a
|
||||
// reference incorporating both the name and the tag.
|
||||
func WithTag(name Named, tag string) (NamedTagged, error) {
|
||||
if !anchoredTagRegexp.MatchString(tag) {
|
||||
return nil, ErrTagInvalidFormat
|
||||
}
|
||||
var repo repository
|
||||
if r, ok := name.(namedRepository); ok {
|
||||
repo.domain = r.Domain()
|
||||
repo.path = r.Path()
|
||||
} else {
|
||||
repo.path = name.Name()
|
||||
}
|
||||
if canonical, ok := name.(Canonical); ok {
|
||||
return reference{
|
||||
namedRepository: repo,
|
||||
tag: tag,
|
||||
digest: canonical.Digest(),
|
||||
}, nil
|
||||
}
|
||||
return taggedReference{
|
||||
namedRepository: repo,
|
||||
tag: tag,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// WithDigest combines the name from "name" and the digest from "digest" to form
|
||||
// a reference incorporating both the name and the digest.
|
||||
func WithDigest(name Named, digest digest.Digest) (Canonical, error) {
|
||||
if !anchoredDigestRegexp.MatchString(digest.String()) {
|
||||
return nil, ErrDigestInvalidFormat
|
||||
}
|
||||
var repo repository
|
||||
if r, ok := name.(namedRepository); ok {
|
||||
repo.domain = r.Domain()
|
||||
repo.path = r.Path()
|
||||
} else {
|
||||
repo.path = name.Name()
|
||||
}
|
||||
if tagged, ok := name.(Tagged); ok {
|
||||
return reference{
|
||||
namedRepository: repo,
|
||||
tag: tagged.Tag(),
|
||||
digest: digest,
|
||||
}, nil
|
||||
}
|
||||
return canonicalReference{
|
||||
namedRepository: repo,
|
||||
digest: digest,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// TrimNamed removes any tag or digest from the named reference.
|
||||
func TrimNamed(ref Named) Named {
|
||||
repo := repository{}
|
||||
if r, ok := ref.(namedRepository); ok {
|
||||
repo.domain, repo.path = r.Domain(), r.Path()
|
||||
} else {
|
||||
repo.domain, repo.path = splitDomain(ref.Name())
|
||||
}
|
||||
return repo
|
||||
}
|
||||
|
||||
func getBestReferenceType(ref reference) Reference {
|
||||
if ref.Name() == "" {
|
||||
// Allow digest only references
|
||||
if ref.digest != "" {
|
||||
return digestReference(ref.digest)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if ref.tag == "" {
|
||||
if ref.digest != "" {
|
||||
return canonicalReference{
|
||||
namedRepository: ref.namedRepository,
|
||||
digest: ref.digest,
|
||||
}
|
||||
}
|
||||
return ref.namedRepository
|
||||
}
|
||||
if ref.digest == "" {
|
||||
return taggedReference{
|
||||
namedRepository: ref.namedRepository,
|
||||
tag: ref.tag,
|
||||
}
|
||||
}
|
||||
|
||||
return ref
|
||||
}
|
||||
|
||||
type reference struct {
|
||||
namedRepository
|
||||
tag string
|
||||
digest digest.Digest
|
||||
}
|
||||
|
||||
func (r reference) String() string {
|
||||
return r.Name() + ":" + r.tag + "@" + r.digest.String()
|
||||
}
|
||||
|
||||
func (r reference) Tag() string {
|
||||
return r.tag
|
||||
}
|
||||
|
||||
func (r reference) Digest() digest.Digest {
|
||||
return r.digest
|
||||
}
|
||||
|
||||
type repository struct {
|
||||
domain string
|
||||
path string
|
||||
}
|
||||
|
||||
func (r repository) String() string {
|
||||
return r.Name()
|
||||
}
|
||||
|
||||
func (r repository) Name() string {
|
||||
if r.domain == "" {
|
||||
return r.path
|
||||
}
|
||||
return r.domain + "/" + r.path
|
||||
}
|
||||
|
||||
func (r repository) Domain() string {
|
||||
return r.domain
|
||||
}
|
||||
|
||||
func (r repository) Path() string {
|
||||
return r.path
|
||||
}
|
||||
|
||||
type digestReference digest.Digest
|
||||
|
||||
func (d digestReference) String() string {
|
||||
return digest.Digest(d).String()
|
||||
}
|
||||
|
||||
func (d digestReference) Digest() digest.Digest {
|
||||
return digest.Digest(d)
|
||||
}
|
||||
|
||||
type taggedReference struct {
|
||||
namedRepository
|
||||
tag string
|
||||
}
|
||||
|
||||
func (t taggedReference) String() string {
|
||||
return t.Name() + ":" + t.tag
|
||||
}
|
||||
|
||||
func (t taggedReference) Tag() string {
|
||||
return t.tag
|
||||
}
|
||||
|
||||
type canonicalReference struct {
|
||||
namedRepository
|
||||
digest digest.Digest
|
||||
}
|
||||
|
||||
func (c canonicalReference) String() string {
|
||||
return c.Name() + "@" + c.digest.String()
|
||||
}
|
||||
|
||||
func (c canonicalReference) Digest() digest.Digest {
|
||||
return c.digest
|
||||
}
|
191 vendor/github.com/containerd/containerd/reference/docker/regexp.go (generated, vendored)
@@ -1,191 +0,0 @@
|
||||
/*
|
||||
Copyright The containerd Authors.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
package docker
|
||||
|
||||
import "regexp"
|
||||
|
||||
var (
|
||||
// alphaNumeric defines the alpha numeric atom, typically a
|
||||
// component of names. This only allows lower case characters and digits.
|
||||
alphaNumeric = `[a-z0-9]+`
|
||||
|
||||
// separator defines the separators allowed to be embedded in name
|
||||
// components. This allow one period, one or two underscore and multiple
|
||||
// dashes. Repeated dashes and underscores are intentionally treated
|
||||
// differently. In order to support valid hostnames as name components,
|
||||
// supporting repeated dash was added. Additionally double underscore is
|
||||
// now allowed as a separator to loosen the restriction for previously
|
||||
// supported names.
|
||||
separator = `(?:[._]|__|[-]*)`
|
||||
|
||||
// nameComponent restricts registry path component names to start
|
||||
// with at least one letter or number, with following parts able to be
|
||||
// separated by one period, one or two underscore and multiple dashes.
|
||||
nameComponent = expression(
|
||||
alphaNumeric,
|
||||
optional(repeated(separator, alphaNumeric)))
|
||||
|
||||
// domainNameComponent restricts the registry domain component of a
|
||||
// repository name to start with a component as defined by DomainRegexp.
|
||||
domainNameComponent = `(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9])`
|
||||
|
||||
// ipv6address are enclosed between square brackets and may be represented
|
||||
// in many ways, see rfc5952. Only IPv6 in compressed or uncompressed format
|
||||
// are allowed, IPv6 zone identifiers (rfc6874) or Special addresses such as
|
||||
// IPv4-Mapped are deliberately excluded.
|
||||
ipv6address = expression(
|
||||
literal(`[`), `(?:[a-fA-F0-9:]+)`, literal(`]`),
|
||||
)
|
||||
|
||||
// domainName defines the structure of potential domain components
|
||||
// that may be part of image names. This is purposely a subset of what is
|
||||
// allowed by DNS to ensure backwards compatibility with Docker image
|
||||
// names. This includes IPv4 addresses on decimal format.
|
||||
domainName = expression(
|
||||
domainNameComponent,
|
||||
optional(repeated(literal(`.`), domainNameComponent)),
|
||||
)
|
||||
|
||||
// host defines the structure of potential domains based on the URI
|
||||
// Host subcomponent on rfc3986. It may be a subset of DNS domain name,
|
||||
// or an IPv4 address in decimal format, or an IPv6 address between square
|
||||
// brackets (excluding zone identifiers as defined by rfc6874 or special
|
||||
// addresses such as IPv4-Mapped).
|
||||
host = `(?:` + domainName + `|` + ipv6address + `)`
|
||||
|
||||
// allowed by the URI Host subcomponent on rfc3986 to ensure backwards
|
||||
// compatibility with Docker image names.
|
||||
domain = expression(
|
||||
host,
|
||||
optional(literal(`:`), `[0-9]+`))
|
||||
|
||||
// DomainRegexp defines the structure of potential domain components
|
||||
// that may be part of image names. This is purposely a subset of what is
|
||||
// allowed by DNS to ensure backwards compatibility with Docker image
|
||||
// names.
|
||||
DomainRegexp = regexp.MustCompile(domain)
|
||||
|
||||
tag = `[\w][\w.-]{0,127}`
|
||||
// TagRegexp matches valid tag names. From docker/docker:graph/tags.go.
|
||||
TagRegexp = regexp.MustCompile(tag)
|
||||
|
||||
anchoredTag = anchored(tag)
|
||||
// anchoredTagRegexp matches valid tag names, anchored at the start and
|
||||
// end of the matched string.
|
||||
anchoredTagRegexp = regexp.MustCompile(anchoredTag)
|
||||
|
||||
digestPat = `[A-Za-z][A-Za-z0-9]*(?:[-_+.][A-Za-z][A-Za-z0-9]*)*[:][[:xdigit:]]{32,}`
|
||||
// DigestRegexp matches valid digests.
|
||||
DigestRegexp = regexp.MustCompile(digestPat)
|
||||
|
||||
anchoredDigest = anchored(digestPat)
|
||||
// anchoredDigestRegexp matches valid digests, anchored at the start and
|
||||
// end of the matched string.
|
||||
anchoredDigestRegexp = regexp.MustCompile(anchoredDigest)
|
||||
|
||||
namePat = expression(
|
||||
optional(domain, literal(`/`)),
|
||||
nameComponent,
|
||||
optional(repeated(literal(`/`), nameComponent)))
|
||||
// NameRegexp is the format for the name component of references. The
|
||||
// regexp has capturing groups for the domain and name part omitting
|
||||
// the separating forward slash from either.
|
||||
NameRegexp = regexp.MustCompile(namePat)
|
||||
|
||||
anchoredName = anchored(
|
||||
optional(capture(domain), literal(`/`)),
|
||||
capture(nameComponent,
|
||||
optional(repeated(literal(`/`), nameComponent))))
|
||||
// anchoredNameRegexp is used to parse a name value, capturing the
|
||||
// domain and trailing components.
|
||||
anchoredNameRegexp = regexp.MustCompile(anchoredName)
|
||||
|
||||
referencePat = anchored(capture(namePat),
|
||||
optional(literal(":"), capture(tag)),
|
||||
optional(literal("@"), capture(digestPat)))
|
||||
// ReferenceRegexp is the full supported format of a reference. The regexp
|
||||
// is anchored and has capturing groups for name, tag, and digest
|
||||
// components.
|
||||
ReferenceRegexp = regexp.MustCompile(referencePat)
|
||||
|
||||
identifier = `([a-f0-9]{64})`
|
||||
// IdentifierRegexp is the format for string identifier used as a
|
||||
// content addressable identifier using sha256. These identifiers
|
||||
// are like digests without the algorithm, since sha256 is used.
|
||||
IdentifierRegexp = regexp.MustCompile(identifier)
|
||||
|
||||
shortIdentifier = `([a-f0-9]{6,64})`
|
||||
// ShortIdentifierRegexp is the format used to represent a prefix
|
||||
// of an identifier. A prefix may be used to match a sha256 identifier
|
||||
// within a list of trusted identifiers.
|
||||
ShortIdentifierRegexp = regexp.MustCompile(shortIdentifier)
|
||||
|
||||
anchoredIdentifier = anchored(identifier)
|
||||
// anchoredIdentifierRegexp is used to check or match an
|
||||
// identifier value, anchored at start and end of string.
|
||||
anchoredIdentifierRegexp = regexp.MustCompile(anchoredIdentifier)
|
||||
)
|
||||
|
||||
// literal compiles s into a literal regular expression, escaping any regexp
|
||||
// reserved characters.
|
||||
func literal(s string) string {
|
||||
re := regexp.MustCompile(regexp.QuoteMeta(s))
|
||||
|
||||
if _, complete := re.LiteralPrefix(); !complete {
|
||||
panic("must be a literal")
|
||||
}
|
||||
|
||||
return re.String()
|
||||
}
|
||||
|
||||
// expression defines a full expression, where each regular expression must
|
||||
// follow the previous.
|
||||
func expression(res ...string) string {
|
||||
var s string
|
||||
for _, re := range res {
|
||||
s += re
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
// optional wraps the expression in a non-capturing group and makes the
|
||||
// production optional.
|
||||
func optional(res ...string) string {
|
||||
return group(expression(res...)) + `?`
|
||||
}
|
||||
|
||||
// repeated wraps the regexp in a non-capturing group to get one or more
|
||||
// matches.
|
||||
func repeated(res ...string) string {
|
||||
return group(expression(res...)) + `+`
|
||||
}
|
||||
|
||||
// group wraps the regexp in a non-capturing group.
|
||||
func group(res ...string) string {
|
||||
return `(?:` + expression(res...) + `)`
|
||||
}
|
||||
|
||||
// capture wraps the expression in a capturing group.
|
||||
func capture(res ...string) string {
|
||||
return `(` + expression(res...) + `)`
|
||||
}
|
||||
|
||||
// anchored anchors the regular expression by adding start and end delimiters.
|
||||
func anchored(res ...string) string {
|
||||
return `^` + expression(res...) + `$`
|
||||
}
|
73 vendor/github.com/containerd/containerd/reference/docker/sort.go (generated, vendored)
@@ -1,73 +0,0 @@
/*
   Copyright The containerd Authors.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/

package docker

import (
	"sort"
)

// Sort sorts string references preferring higher information references
// The precedence is as follows:
// 1. Name + Tag + Digest
// 2. Name + Tag
// 3. Name + Digest
// 4. Name
// 5. Digest
// 6. Parse error
func Sort(references []string) []string {
	var prefs []Reference
	var bad []string

	for _, ref := range references {
		pref, err := ParseAnyReference(ref)
		if err != nil {
			bad = append(bad, ref)
		} else {
			prefs = append(prefs, pref)
		}
	}
	sort.Slice(prefs, func(a, b int) bool {
		ar := refRank(prefs[a])
		br := refRank(prefs[b])
		if ar == br {
			return prefs[a].String() < prefs[b].String()
		}
		return ar < br
	})
	sort.Strings(bad)
	var refs []string
	for _, pref := range prefs {
		refs = append(refs, pref.String())
	}
	return append(refs, bad...)
}

func refRank(ref Reference) uint8 {
	if _, ok := ref.(Named); ok {
		if _, ok = ref.(Tagged); ok {
			if _, ok = ref.(Digested); ok {
				return 1
			}
			return 2
		}
		if _, ok = ref.(Digested); ok {
			return 3
		}
		return 4
	}
	return 5
}
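A hedged illustration of Sort's precedence (import path assumed as above). Inputs are normalized via ParseAnyReference, so the output uses fully qualified forms, and unparsable strings are kept at the end.

package main

import (
	"fmt"

	refdocker "github.com/containerd/containerd/reference/docker"
)

func main() {
	sorted := refdocker.Sort([]string{
		"busybox",          // name only (rank 4)
		"busybox:1.36",     // name + tag (rank 2)
		"not a valid ref!", // parse error, sorted last
	})
	fmt.Println(sorted)
	// Expected roughly:
	// [docker.io/library/busybox:1.36 docker.io/library/busybox not a valid ref!]
}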
39 vendor/github.com/containerd/containerd/reference/reference.go (generated, vendored)
@@ -18,7 +18,6 @@ package reference
 
 import (
 	"errors"
 	"fmt"
-	"net/url"
 	"path"
 	"regexp"
@@ -136,8 +135,12 @@ func (r Spec) Hostname() string {
 // Digest returns the digest portion of the reference spec. This may be a
 // partial or invalid digest, which may be used to lookup a complete digest.
 func (r Spec) Digest() digest.Digest {
-	_, dgst := SplitObject(r.Object)
-	return dgst
+	i := strings.Index(r.Object, "@")
+
+	if i < 0 {
+		return ""
+	}
+	return digest.Digest(r.Object[i+1:])
 }
@@ -146,21 +149,31 @@ func (r Spec) String() string {
 		return r.Locator
 	}
 	if r.Object[:1] == "@" {
-		return fmt.Sprintf("%v%v", r.Locator, r.Object)
+		return r.Locator + r.Object
 	}

-	return fmt.Sprintf("%v:%v", r.Locator, r.Object)
+	return r.Locator + ":" + r.Object
 }

-// SplitObject provides two parts of the object spec, delimited by an `@`
-// symbol.
+// SplitObject provides two parts of the object spec, delimited by an "@"
+// symbol. It does not perform any validation on correctness of the values
+// returned, and it's the callers' responsibility to validate the result.
 //
-// Either may be empty and it is the callers job to validate them
-// appropriately.
+// If an "@" delimiter is found, it returns the part *including* the "@"
+// delimiter as "tag", and the part after the "@" as digest.
+//
+// The example below produces "docker.io/library/ubuntu:latest@" and
+// "sha256:deadbeef";
+//
+//	t, d := SplitObject("docker.io/library/ubuntu:latest@sha256:deadbeef")
+//	fmt.Println(t) // docker.io/library/ubuntu:latest@
+//	fmt.Println(d) // sha256:deadbeef
+//
+// Deprecated: use [Parse] and [Spec.Digest] instead.
 func SplitObject(obj string) (tag string, dgst digest.Digest) {
-	parts := strings.SplitAfterN(obj, "@", 2)
-	if len(parts) < 2 {
-		return parts[0], ""
+	if i := strings.Index(obj, "@"); i >= 0 {
+		// Offset by one so preserve the "@" in the tag returned.
+		return obj[:i+1], digest.Digest(obj[i+1:])
 	}
-	return parts[0], digest.Digest(parts[1])
+	return obj, ""
 }
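A short sketch contrasting Spec.Digest with the now-deprecated SplitObject, reusing the example string from the doc comment above; the Spec literal is constructed by hand for illustration.

package main

import (
	"fmt"

	"github.com/containerd/containerd/reference"
)

func main() {
	tag, dgst := reference.SplitObject("docker.io/library/ubuntu:latest@sha256:deadbeef")
	fmt.Println(tag, dgst) // docker.io/library/ubuntu:latest@ sha256:deadbeef

	// Spec.Digest extracts the digest directly from the object part.
	spec := reference.Spec{Locator: "docker.io/library/ubuntu", Object: "latest@sha256:deadbeef"}
	fmt.Println(spec.Digest()) // sha256:deadbeef
}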
7 vendor/github.com/containerd/containerd/remotes/docker/fetcher_fuzz.go (generated, vendored)
@@ -25,8 +25,6 @@ import (
 	"net/http"
 	"net/http/httptest"
 	"net/url"
-
-	refDocker "github.com/containerd/containerd/reference/docker"
 )

 func FuzzFetcher(data []byte) int {
@@ -74,8 +72,3 @@ func FuzzFetcher(data []byte) int {
 	}
 	return 1
 }
-
-func FuzzParseDockerRef(data []byte) int {
-	_, _ = refDocker.ParseDockerRef(string(data))
-	return 1
-}
2 vendor/github.com/containerd/containerd/remotes/handlers.go (generated, vendored)
@@ -28,9 +28,9 @@ import (
 	"github.com/containerd/containerd/content"
 	"github.com/containerd/containerd/images"
 	"github.com/containerd/containerd/labels"
-	"github.com/containerd/containerd/platforms"
 	"github.com/containerd/errdefs"
 	"github.com/containerd/log"
+	"github.com/containerd/platforms"
 	ocispec "github.com/opencontainers/image-spec/specs-go/v1"
 	"golang.org/x/sync/semaphore"
 )
2 vendor/github.com/containerd/containerd/version/version.go (generated, vendored)
@@ -23,7 +23,7 @@ var (
 	Package = "github.com/containerd/containerd"

 	// Version holds the complete version number. Filled in at linking time.
-	Version = "1.7.18+unknown"
+	Version = "1.7.19+unknown"

 	// Revision is filled with the VCS (e.g. git) revision being used to build
 	// the program at linking time.
1 vendor/github.com/containerd/platforms/.gitattributes (generated, vendored, new file)
@@ -0,0 +1 @@
*.go text eol=lf
Some files were not shown because too many files have changed in this diff.