mirror of
https://gitea.com/Lydanne/buildx.git
synced 2025-08-16 16:55:57 +08:00
Compare commits
80 Commits
v0.10.0-rc
...
v0.10.2
Author | SHA1 | Date | |
---|---|---|---|
![]() |
00ed17df6d | ||
![]() |
cfb71fab97 | ||
![]() |
f62342768b | ||
![]() |
7776652a4d | ||
![]() |
5a4f80f3ce | ||
![]() |
b5ea79e277 | ||
![]() |
481796f84f | ||
![]() |
0090d49e57 | ||
![]() |
389ac0c3d1 | ||
![]() |
2bb8ce2f57 | ||
![]() |
65cea456fd | ||
![]() |
f7bd5b99da | ||
![]() |
8c14407fa2 | ||
![]() |
5245a2b3ff | ||
![]() |
44d99d4573 | ||
![]() |
14942a266e | ||
![]() |
123febf107 | ||
![]() |
3f5f7c5228 | ||
![]() |
6d935625a6 | ||
![]() |
e640dc6041 | ||
![]() |
08244b12b5 | ||
![]() |
78d8b926db | ||
![]() |
19291d900e | ||
![]() |
ed9b4a7169 | ||
![]() |
033d5629c0 | ||
![]() |
7cd5add568 | ||
![]() |
2a000096fa | ||
![]() |
b7781447d7 | ||
![]() |
f6ba0a23f8 | ||
![]() |
bf4b95fc3a | ||
![]() |
467586dc8d | ||
![]() |
8764628976 | ||
![]() |
583fe71740 | ||
![]() |
9fb3ff1a27 | ||
![]() |
9d4f38c5fa | ||
![]() |
793082f543 | ||
![]() |
fe6f697205 | ||
![]() |
fd3fb752d3 | ||
![]() |
7fcea64eb4 | ||
![]() |
05e0ce4953 | ||
![]() |
f8d9d1e776 | ||
![]() |
8a7a221a7f | ||
![]() |
e4db8d2a21 | ||
![]() |
7394853ddf | ||
![]() |
a8be6b576b | ||
![]() |
8b960ededd | ||
![]() |
4735a71fbd | ||
![]() |
37fce8cc06 | ||
![]() |
82476ab039 | ||
![]() |
88852e2330 | ||
![]() |
6369c50614 | ||
![]() |
a22d0a35a4 | ||
![]() |
c93c02df85 | ||
![]() |
e584c6e1a7 | ||
![]() |
64e4c19971 | ||
![]() |
551b8f6785 | ||
![]() |
fbbe1c1b91 | ||
![]() |
1a85745bf1 | ||
![]() |
0d1fea8134 | ||
![]() |
19417e76e7 | ||
![]() |
53d88a79ef | ||
![]() |
4c21b7e680 | ||
![]() |
a8f689c223 | ||
![]() |
ba8e3f9bc5 | ||
![]() |
477200d1f9 | ||
![]() |
662738a7e5 | ||
![]() |
f992b77535 | ||
![]() |
21b2f135b5 | ||
![]() |
71e6be5d99 | ||
![]() |
df8e7d0a9a | ||
![]() |
64422a48d9 | ||
![]() |
04f9c62772 | ||
![]() |
2185d07f05 | ||
![]() |
a49d28e00e | ||
![]() |
629128c497 | ||
![]() |
b741d26eb5 | ||
![]() |
cf8fa4a404 | ||
![]() |
fe76a1b179 | ||
![]() |
df4957307f | ||
![]() |
99ac7f5f9e |
47
.github/workflows/build.yml
vendored
47
.github/workflows/build.yml
vendored
@@ -21,12 +21,14 @@ on:
|
||||
- 'docs/**'
|
||||
|
||||
env:
|
||||
BUILDX_VERSION: "v0.10.0"
|
||||
BUILDKIT_IMAGE: "moby/buildkit:v0.11.1"
|
||||
REPO_SLUG: "docker/buildx-bin"
|
||||
DESTDIR: "./bin"
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
@@ -35,7 +37,9 @@ jobs:
|
||||
name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
version: latest
|
||||
version: ${{ env.BUILDX_VERSION }}
|
||||
driver-opts: image=${{ env.BUILDKIT_IMAGE }}
|
||||
buildkitd-flags: --debug
|
||||
-
|
||||
name: Test
|
||||
uses: docker/bake-action@v2
|
||||
@@ -51,7 +55,7 @@ jobs:
|
||||
directory: ${{ env.DESTDIR }}/coverage
|
||||
|
||||
prepare:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
outputs:
|
||||
matrix: ${{ steps.platforms.outputs.matrix }}
|
||||
steps:
|
||||
@@ -62,14 +66,14 @@ jobs:
|
||||
name: Create matrix
|
||||
id: platforms
|
||||
run: |
|
||||
echo ::set-output name=matrix::$(docker buildx bake binaries-cross --print | jq -cr '.target."binaries-cross".platforms')
|
||||
echo "matrix=$(docker buildx bake binaries-cross --print | jq -cr '.target."binaries-cross".platforms')" >>${GITHUB_OUTPUT}
|
||||
-
|
||||
name: Show matrix
|
||||
run: |
|
||||
echo ${{ steps.platforms.outputs.matrix }}
|
||||
|
||||
binaries:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
needs:
|
||||
- prepare
|
||||
strategy:
|
||||
@@ -92,26 +96,27 @@ jobs:
|
||||
name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
version: latest
|
||||
version: ${{ env.BUILDX_VERSION }}
|
||||
driver-opts: image=${{ env.BUILDKIT_IMAGE }}
|
||||
buildkitd-flags: --debug
|
||||
-
|
||||
name: Build
|
||||
uses: docker/bake-action@v2
|
||||
with:
|
||||
targets: release
|
||||
set: |
|
||||
*.platform=${{ matrix.platform }}
|
||||
*.cache-from=type=gha,scope=binaries-${{ env.PLATFORM_PAIR }}
|
||||
*.cache-to=type=gha,scope=binaries-${{ env.PLATFORM_PAIR }},mode=max
|
||||
run: |
|
||||
make release
|
||||
env:
|
||||
PLATFORMS: ${{ matrix.platform }}
|
||||
CACHE_FROM: type=gha,scope=binaries-${{ env.PLATFORM_PAIR }}
|
||||
CACHE_TO: type=gha,scope=binaries-${{ env.PLATFORM_PAIR }},mode=max
|
||||
-
|
||||
name: Upload artifacts
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: buildx
|
||||
path: ${{ env.DESTDIR }}/release/*
|
||||
path: ${{ env.DESTDIR }}/*
|
||||
if-no-files-found: error
|
||||
|
||||
bin-image:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
steps:
|
||||
-
|
||||
@@ -124,7 +129,9 @@ jobs:
|
||||
name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
version: latest
|
||||
version: ${{ env.BUILDX_VERSION }}
|
||||
driver-opts: image=${{ env.BUILDKIT_IMAGE }}
|
||||
buildkitd-flags: --debug
|
||||
-
|
||||
name: Docker meta
|
||||
id: meta
|
||||
@@ -156,9 +163,11 @@ jobs:
|
||||
set: |
|
||||
*.cache-from=type=gha,scope=bin-image
|
||||
*.cache-to=type=gha,scope=bin-image,mode=max
|
||||
*.attest=type=sbom
|
||||
*.attest=type=provenance,mode=max,builder-id=https://github.com/${{ env.GITHUB_REPOSITORY }}/actions/runs/${{ env.GITHUB_RUN_ID }}
|
||||
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
needs:
|
||||
- binaries
|
||||
steps:
|
||||
@@ -193,7 +202,7 @@ jobs:
|
||||
files: ${{ env.DESTDIR }}/*
|
||||
|
||||
buildkit-edge:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
continue-on-error: true
|
||||
steps:
|
||||
-
|
||||
@@ -206,7 +215,7 @@ jobs:
|
||||
name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
version: latest
|
||||
version: ${{ env.BUILDX_VERSION }}
|
||||
driver-opts: image=moby/buildkit:master
|
||||
buildkitd-flags: --debug
|
||||
-
|
||||
|
6
.github/workflows/docs-release.yml
vendored
6
.github/workflows/docs-release.yml
vendored
@@ -2,11 +2,13 @@ name: docs-release
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [ released ]
|
||||
types:
|
||||
- released
|
||||
|
||||
jobs:
|
||||
open-pr:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
if: "!github.event.release.prerelease"
|
||||
steps:
|
||||
-
|
||||
name: Checkout docs repo
|
||||
|
2
.github/workflows/docs-upstream.yml
vendored
2
.github/workflows/docs-upstream.yml
vendored
@@ -22,7 +22,7 @@ on:
|
||||
|
||||
jobs:
|
||||
docs-yaml:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
|
70
.github/workflows/e2e.yml
vendored
70
.github/workflows/e2e.yml
vendored
@@ -20,10 +20,11 @@ on:
|
||||
|
||||
env:
|
||||
DESTDIR: "./bin"
|
||||
K3S_VERSION: "v1.21.2-k3s1"
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-20.04
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
@@ -133,20 +134,67 @@ jobs:
|
||||
-
|
||||
name: Install k3s
|
||||
if: matrix.driver == 'kubernetes'
|
||||
uses: debianmaster/actions-k3s@b9cf3f599fd118699a3c8a0d18a2f2bda6cf4ce4
|
||||
id: k3s
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
version: v1.21.2-k3s1
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
|
||||
let wait = function(milliseconds) {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (typeof(milliseconds) !== 'number') {
|
||||
throw new Error('milleseconds not a number');
|
||||
}
|
||||
setTimeout(() => resolve("done!"), milliseconds)
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
const kubeconfig="/tmp/buildkit-k3s/kubeconfig.yaml";
|
||||
core.info(`storing kubeconfig in ${kubeconfig}`);
|
||||
|
||||
await exec.exec('docker', ["run", "-d",
|
||||
"--privileged",
|
||||
"--name=buildkit-k3s",
|
||||
"-e", "K3S_KUBECONFIG_OUTPUT="+kubeconfig,
|
||||
"-e", "K3S_KUBECONFIG_MODE=666",
|
||||
"-v", "/tmp/buildkit-k3s:/tmp/buildkit-k3s",
|
||||
"-p", "6443:6443",
|
||||
"-p", "80:80",
|
||||
"-p", "443:443",
|
||||
"-p", "8080:8080",
|
||||
"rancher/k3s:${{ env.K3S_VERSION }}", "server"
|
||||
]);
|
||||
await wait(10000);
|
||||
|
||||
core.exportVariable('KUBECONFIG', kubeconfig);
|
||||
|
||||
let nodeName;
|
||||
for (let count = 1; count <= 5; count++) {
|
||||
try {
|
||||
const nodeNameOutput = await exec.getExecOutput("kubectl get nodes --no-headers -oname");
|
||||
nodeName = nodeNameOutput.stdout
|
||||
} catch (error) {
|
||||
core.info(`Unable to resolve node name (${error.message}). Attempt ${count} of 5.`)
|
||||
} finally {
|
||||
if (nodeName) {
|
||||
break;
|
||||
}
|
||||
await wait(5000);
|
||||
}
|
||||
}
|
||||
if (!nodeName) {
|
||||
throw new Error(`Unable to resolve node name after 5 attempts.`);
|
||||
}
|
||||
|
||||
await exec.exec(`kubectl wait --for=condition=Ready ${nodeName}`);
|
||||
} catch (error) {
|
||||
core.setFailed(error.message);
|
||||
}
|
||||
-
|
||||
name: Config k3s
|
||||
name: Print KUBECONFIG
|
||||
if: matrix.driver == 'kubernetes'
|
||||
run: |
|
||||
(set -x ; cat ${{ steps.k3s.outputs.kubeconfig }})
|
||||
-
|
||||
name: Check k3s nodes
|
||||
if: matrix.driver == 'kubernetes'
|
||||
run: |
|
||||
kubectl get nodes
|
||||
yq ${{ env.KUBECONFIG }}
|
||||
-
|
||||
name: Launch remote buildkitd
|
||||
if: matrix.driver == 'remote'
|
||||
|
2
.github/workflows/validate.yml
vendored
2
.github/workflows/validate.yml
vendored
@@ -19,7 +19,7 @@ on:
|
||||
|
||||
jobs:
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# syntax=docker/dockerfile:1.4
|
||||
# syntax=docker/dockerfile-upstream:1.5.0
|
||||
|
||||
ARG GO_VERSION=1.19
|
||||
ARG XX_VERSION=1.1.2
|
||||
@@ -58,6 +58,8 @@ FROM scratch AS binaries-windows
|
||||
COPY --link --from=buildx-build /usr/bin/docker-buildx /buildx.exe
|
||||
|
||||
FROM binaries-$TARGETOS AS binaries
|
||||
# enable scanning for this stage
|
||||
ARG BUILDKIT_SBOM_SCAN_STAGE=true
|
||||
|
||||
# Release
|
||||
FROM --platform=$BUILDPLATFORM alpine AS releaser
|
||||
|
@@ -2,7 +2,7 @@
|
||||
|
||||
[](https://github.com/docker/buildx/releases/latest)
|
||||
[](https://pkg.go.dev/github.com/docker/buildx)
|
||||
[](https://github.com/docker/buildx/actions?query=workflow%3Abuild)
|
||||
[](https://github.com/docker/buildx/actions?query=workflow%3Abuild)
|
||||
[](https://goreportcard.com/report/github.com/docker/buildx)
|
||||
[](https://codecov.io/gh/docker/buildx)
|
||||
|
||||
@@ -147,7 +147,7 @@ To remove this alias, run [`docker buildx uninstall`](docs/reference/buildx_unin
|
||||
# Buildx 0.6+
|
||||
$ docker buildx bake "https://github.com/docker/buildx.git"
|
||||
$ mkdir -p ~/.docker/cli-plugins
|
||||
$ mv ./bin/buildx ~/.docker/cli-plugins/docker-buildx
|
||||
$ mv ./bin/build/buildx ~/.docker/cli-plugins/docker-buildx
|
||||
|
||||
# Docker 19.03+
|
||||
$ DOCKER_BUILDKIT=1 docker build --platform=local -o . "https://github.com/docker/buildx.git"
|
||||
|
87
bake/bake.go
87
bake/bake.go
@@ -140,6 +140,19 @@ func ReadTargets(ctx context.Context, files []File, targets, overrides []string,
|
||||
}
|
||||
}
|
||||
|
||||
// Propagate SOURCE_DATE_EPOCH from the client env.
|
||||
// The logic is purposely duplicated from `build/build`.go for keeping this visible in `bake --print`.
|
||||
if v := os.Getenv("SOURCE_DATE_EPOCH"); v != "" {
|
||||
for _, f := range m {
|
||||
if f.Args == nil {
|
||||
f.Args = make(map[string]*string)
|
||||
}
|
||||
if _, ok := f.Args["SOURCE_DATE_EPOCH"]; !ok {
|
||||
f.Args["SOURCE_DATE_EPOCH"] = &v
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return m, n, nil
|
||||
}
|
||||
|
||||
@@ -559,24 +572,24 @@ type Target struct {
|
||||
Attest []string `json:"attest,omitempty" hcl:"attest,optional" cty:"attest"`
|
||||
Inherits []string `json:"inherits,omitempty" hcl:"inherits,optional" cty:"inherits"`
|
||||
|
||||
Context *string `json:"context,omitempty" hcl:"context,optional" cty:"context"`
|
||||
Contexts map[string]string `json:"contexts,omitempty" hcl:"contexts,optional" cty:"contexts"`
|
||||
Dockerfile *string `json:"dockerfile,omitempty" hcl:"dockerfile,optional" cty:"dockerfile"`
|
||||
DockerfileInline *string `json:"dockerfile-inline,omitempty" hcl:"dockerfile-inline,optional" cty:"dockerfile-inline"`
|
||||
Args map[string]string `json:"args,omitempty" hcl:"args,optional" cty:"args"`
|
||||
Labels map[string]string `json:"labels,omitempty" hcl:"labels,optional" cty:"labels"`
|
||||
Tags []string `json:"tags,omitempty" hcl:"tags,optional" cty:"tags"`
|
||||
CacheFrom []string `json:"cache-from,omitempty" hcl:"cache-from,optional" cty:"cache-from"`
|
||||
CacheTo []string `json:"cache-to,omitempty" hcl:"cache-to,optional" cty:"cache-to"`
|
||||
Target *string `json:"target,omitempty" hcl:"target,optional" cty:"target"`
|
||||
Secrets []string `json:"secret,omitempty" hcl:"secret,optional" cty:"secret"`
|
||||
SSH []string `json:"ssh,omitempty" hcl:"ssh,optional" cty:"ssh"`
|
||||
Platforms []string `json:"platforms,omitempty" hcl:"platforms,optional" cty:"platforms"`
|
||||
Outputs []string `json:"output,omitempty" hcl:"output,optional" cty:"output"`
|
||||
Pull *bool `json:"pull,omitempty" hcl:"pull,optional" cty:"pull"`
|
||||
NoCache *bool `json:"no-cache,omitempty" hcl:"no-cache,optional" cty:"no-cache"`
|
||||
NetworkMode *string `json:"-" hcl:"-" cty:"-"`
|
||||
NoCacheFilter []string `json:"no-cache-filter,omitempty" hcl:"no-cache-filter,optional" cty:"no-cache-filter"`
|
||||
Context *string `json:"context,omitempty" hcl:"context,optional" cty:"context"`
|
||||
Contexts map[string]string `json:"contexts,omitempty" hcl:"contexts,optional" cty:"contexts"`
|
||||
Dockerfile *string `json:"dockerfile,omitempty" hcl:"dockerfile,optional" cty:"dockerfile"`
|
||||
DockerfileInline *string `json:"dockerfile-inline,omitempty" hcl:"dockerfile-inline,optional" cty:"dockerfile-inline"`
|
||||
Args map[string]*string `json:"args,omitempty" hcl:"args,optional" cty:"args"`
|
||||
Labels map[string]*string `json:"labels,omitempty" hcl:"labels,optional" cty:"labels"`
|
||||
Tags []string `json:"tags,omitempty" hcl:"tags,optional" cty:"tags"`
|
||||
CacheFrom []string `json:"cache-from,omitempty" hcl:"cache-from,optional" cty:"cache-from"`
|
||||
CacheTo []string `json:"cache-to,omitempty" hcl:"cache-to,optional" cty:"cache-to"`
|
||||
Target *string `json:"target,omitempty" hcl:"target,optional" cty:"target"`
|
||||
Secrets []string `json:"secret,omitempty" hcl:"secret,optional" cty:"secret"`
|
||||
SSH []string `json:"ssh,omitempty" hcl:"ssh,optional" cty:"ssh"`
|
||||
Platforms []string `json:"platforms,omitempty" hcl:"platforms,optional" cty:"platforms"`
|
||||
Outputs []string `json:"output,omitempty" hcl:"output,optional" cty:"output"`
|
||||
Pull *bool `json:"pull,omitempty" hcl:"pull,optional" cty:"pull"`
|
||||
NoCache *bool `json:"no-cache,omitempty" hcl:"no-cache,optional" cty:"no-cache"`
|
||||
NetworkMode *string `json:"-" hcl:"-" cty:"-"`
|
||||
NoCacheFilter []string `json:"no-cache-filter,omitempty" hcl:"no-cache-filter,optional" cty:"no-cache-filter"`
|
||||
// IMPORTANT: if you add more fields here, do not forget to update newOverrides and docs/manuals/bake/file-definition.md.
|
||||
|
||||
// linked is a private field to mark a target used as a linked one
|
||||
@@ -615,8 +628,11 @@ func (t *Target) Merge(t2 *Target) {
|
||||
t.DockerfileInline = t2.DockerfileInline
|
||||
}
|
||||
for k, v := range t2.Args {
|
||||
if v == nil {
|
||||
continue
|
||||
}
|
||||
if t.Args == nil {
|
||||
t.Args = map[string]string{}
|
||||
t.Args = map[string]*string{}
|
||||
}
|
||||
t.Args[k] = v
|
||||
}
|
||||
@@ -627,8 +643,11 @@ func (t *Target) Merge(t2 *Target) {
|
||||
t.Contexts[k] = v
|
||||
}
|
||||
for k, v := range t2.Labels {
|
||||
if v == nil {
|
||||
continue
|
||||
}
|
||||
if t.Labels == nil {
|
||||
t.Labels = map[string]string{}
|
||||
t.Labels = map[string]*string{}
|
||||
}
|
||||
t.Labels[k] = v
|
||||
}
|
||||
@@ -688,9 +707,9 @@ func (t *Target) AddOverrides(overrides map[string]Override) error {
|
||||
return errors.Errorf("args require name")
|
||||
}
|
||||
if t.Args == nil {
|
||||
t.Args = map[string]string{}
|
||||
t.Args = map[string]*string{}
|
||||
}
|
||||
t.Args[keys[1]] = value
|
||||
t.Args[keys[1]] = &value
|
||||
case "contexts":
|
||||
if len(keys) != 2 {
|
||||
return errors.Errorf("contexts require name")
|
||||
@@ -704,9 +723,9 @@ func (t *Target) AddOverrides(overrides map[string]Override) error {
|
||||
return errors.Errorf("labels require name")
|
||||
}
|
||||
if t.Labels == nil {
|
||||
t.Labels = map[string]string{}
|
||||
t.Labels = map[string]*string{}
|
||||
}
|
||||
t.Labels[keys[1]] = value
|
||||
t.Labels[keys[1]] = &value
|
||||
case "tags":
|
||||
t.Tags = o.ArrValue
|
||||
case "cache-from":
|
||||
@@ -882,6 +901,22 @@ func toBuildOpt(t *Target, inp *Input) (*build.Options, error) {
|
||||
dockerfilePath = path.Join(contextPath, dockerfilePath)
|
||||
}
|
||||
|
||||
args := map[string]string{}
|
||||
for k, v := range t.Args {
|
||||
if v == nil {
|
||||
continue
|
||||
}
|
||||
args[k] = *v
|
||||
}
|
||||
|
||||
labels := map[string]string{}
|
||||
for k, v := range t.Labels {
|
||||
if v == nil {
|
||||
continue
|
||||
}
|
||||
labels[k] = *v
|
||||
}
|
||||
|
||||
noCache := false
|
||||
if t.NoCache != nil {
|
||||
noCache = *t.NoCache
|
||||
@@ -922,8 +957,8 @@ func toBuildOpt(t *Target, inp *Input) (*build.Options, error) {
|
||||
bo := &build.Options{
|
||||
Inputs: bi,
|
||||
Tags: t.Tags,
|
||||
BuildArgs: t.Args,
|
||||
Labels: t.Labels,
|
||||
BuildArgs: args,
|
||||
Labels: labels,
|
||||
NoCache: noCache,
|
||||
NoCacheFilter: t.NoCacheFilter,
|
||||
Pull: pull,
|
||||
|
@@ -2,7 +2,6 @@ package bake
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"sort"
|
||||
"strings"
|
||||
"testing"
|
||||
@@ -42,7 +41,7 @@ target "webapp" {
|
||||
|
||||
require.Equal(t, "Dockerfile.webapp", *m["webapp"].Dockerfile)
|
||||
require.Equal(t, ".", *m["webapp"].Context)
|
||||
require.Equal(t, "webDEP", m["webapp"].Args["VAR_INHERITED"])
|
||||
require.Equal(t, ptrstr("webDEP"), m["webapp"].Args["VAR_INHERITED"])
|
||||
require.Equal(t, true, *m["webapp"].NoCache)
|
||||
require.Nil(t, m["webapp"].Pull)
|
||||
|
||||
@@ -58,8 +57,7 @@ target "webapp" {
|
||||
|
||||
t.Run("ArgsOverrides", func(t *testing.T) {
|
||||
t.Run("leaf", func(t *testing.T) {
|
||||
os.Setenv("VAR_FROMENV"+t.Name(), "fromEnv")
|
||||
defer os.Unsetenv("VAR_FROM_ENV" + t.Name())
|
||||
t.Setenv("VAR_FROMENV"+t.Name(), "fromEnv")
|
||||
|
||||
m, g, err := ReadTargets(ctx, []File{fp}, []string{"webapp"}, []string{
|
||||
"webapp.args.VAR_UNSET",
|
||||
@@ -80,12 +78,12 @@ target "webapp" {
|
||||
_, isSet = m["webapp"].Args["VAR_EMPTY"]
|
||||
require.True(t, isSet, m["webapp"].Args["VAR_EMPTY"])
|
||||
|
||||
require.Equal(t, m["webapp"].Args["VAR_SET"], "bananas")
|
||||
require.Equal(t, ptrstr("bananas"), m["webapp"].Args["VAR_SET"])
|
||||
|
||||
require.Equal(t, m["webapp"].Args["VAR_FROMENV"+t.Name()], "fromEnv")
|
||||
require.Equal(t, ptrstr("fromEnv"), m["webapp"].Args["VAR_FROMENV"+t.Name()])
|
||||
|
||||
require.Equal(t, m["webapp"].Args["VAR_BOTH"], "webapp")
|
||||
require.Equal(t, m["webapp"].Args["VAR_INHERITED"], "override")
|
||||
require.Equal(t, ptrstr("webapp"), m["webapp"].Args["VAR_BOTH"])
|
||||
require.Equal(t, ptrstr("override"), m["webapp"].Args["VAR_INHERITED"])
|
||||
|
||||
require.Equal(t, 1, len(g))
|
||||
require.Equal(t, []string{"webapp"}, g["default"].Targets)
|
||||
@@ -99,8 +97,8 @@ target "webapp" {
|
||||
}, nil)
|
||||
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, m["webapp"].Args["VAR_INHERITED"], "override")
|
||||
require.Equal(t, m["webapp"].Args["VAR_BOTH"], "webapp")
|
||||
require.Equal(t, ptrstr("override"), m["webapp"].Args["VAR_INHERITED"])
|
||||
require.Equal(t, ptrstr("webapp"), m["webapp"].Args["VAR_BOTH"])
|
||||
require.Equal(t, 1, len(g))
|
||||
require.Equal(t, []string{"webapp"}, g["default"].Targets)
|
||||
})
|
||||
@@ -139,9 +137,9 @@ target "webapp" {
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 2, len(m))
|
||||
require.Equal(t, "foo", *m["webapp"].Dockerfile)
|
||||
require.Equal(t, "webDEP", m["webapp"].Args["VAR_INHERITED"])
|
||||
require.Equal(t, ptrstr("webDEP"), m["webapp"].Args["VAR_INHERITED"])
|
||||
require.Equal(t, "foo", *m["webDEP"].Dockerfile)
|
||||
require.Equal(t, "webDEP", m["webDEP"].Args["VAR_INHERITED"])
|
||||
require.Equal(t, ptrstr("webDEP"), m["webDEP"].Args["VAR_INHERITED"])
|
||||
require.Equal(t, 1, len(g))
|
||||
sort.Strings(g["default"].Targets)
|
||||
require.Equal(t, []string{"webDEP", "webapp"}, g["default"].Targets)
|
||||
@@ -173,7 +171,7 @@ target "webapp" {
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(m))
|
||||
require.Equal(t, "foo", *m["webapp"].Dockerfile)
|
||||
require.Equal(t, "webDEP", m["webapp"].Args["VAR_INHERITED"])
|
||||
require.Equal(t, ptrstr("webDEP"), m["webapp"].Args["VAR_INHERITED"])
|
||||
require.Equal(t, 1, len(g))
|
||||
require.Equal(t, []string{"webapp"}, g["default"].Targets)
|
||||
},
|
||||
@@ -300,8 +298,8 @@ services:
|
||||
require.True(t, ok)
|
||||
require.Equal(t, "Dockerfile.webapp", *m["webapp"].Dockerfile)
|
||||
require.Equal(t, ".", *m["webapp"].Context)
|
||||
require.Equal(t, "1", m["webapp"].Args["buildno"])
|
||||
require.Equal(t, "12", m["webapp"].Args["buildno2"])
|
||||
require.Equal(t, ptrstr("1"), m["webapp"].Args["buildno"])
|
||||
require.Equal(t, ptrstr("12"), m["webapp"].Args["buildno2"])
|
||||
|
||||
require.Equal(t, 1, len(g))
|
||||
sort.Strings(g["default"].Targets)
|
||||
@@ -344,7 +342,7 @@ services:
|
||||
_, ok := m["web_app"]
|
||||
require.True(t, ok)
|
||||
require.Equal(t, "Dockerfile.webapp", *m["web_app"].Dockerfile)
|
||||
require.Equal(t, "1", m["web_app"].Args["buildno"])
|
||||
require.Equal(t, ptrstr("1"), m["web_app"].Args["buildno"])
|
||||
|
||||
m, _, err = ReadTargets(ctx, []File{fp2}, []string{"web_app"}, nil, nil)
|
||||
require.NoError(t, err)
|
||||
@@ -352,7 +350,7 @@ services:
|
||||
_, ok = m["web_app"]
|
||||
require.True(t, ok)
|
||||
require.Equal(t, "Dockerfile", *m["web_app"].Dockerfile)
|
||||
require.Equal(t, "12", m["web_app"].Args["buildno2"])
|
||||
require.Equal(t, ptrstr("12"), m["web_app"].Args["buildno2"])
|
||||
|
||||
m, g, err := ReadTargets(ctx, []File{fp, fp2}, []string{"default"}, nil, nil)
|
||||
require.NoError(t, err)
|
||||
@@ -361,8 +359,8 @@ services:
|
||||
require.True(t, ok)
|
||||
require.Equal(t, "Dockerfile.webapp", *m["web_app"].Dockerfile)
|
||||
require.Equal(t, ".", *m["web_app"].Context)
|
||||
require.Equal(t, "1", m["web_app"].Args["buildno"])
|
||||
require.Equal(t, "12", m["web_app"].Args["buildno2"])
|
||||
require.Equal(t, ptrstr("1"), m["web_app"].Args["buildno"])
|
||||
require.Equal(t, ptrstr("12"), m["web_app"].Args["buildno2"])
|
||||
|
||||
require.Equal(t, 1, len(g))
|
||||
sort.Strings(g["default"].Targets)
|
||||
@@ -999,22 +997,22 @@ target "d" {
|
||||
cases := []struct {
|
||||
name string
|
||||
overrides []string
|
||||
want map[string]string
|
||||
want map[string]*string
|
||||
}{
|
||||
{
|
||||
name: "nested simple",
|
||||
overrides: nil,
|
||||
want: map[string]string{"bar": "234", "baz": "890", "foo": "123"},
|
||||
want: map[string]*string{"bar": ptrstr("234"), "baz": ptrstr("890"), "foo": ptrstr("123")},
|
||||
},
|
||||
{
|
||||
name: "nested with overrides first",
|
||||
overrides: []string{"a.args.foo=321", "b.args.bar=432"},
|
||||
want: map[string]string{"bar": "234", "baz": "890", "foo": "321"},
|
||||
want: map[string]*string{"bar": ptrstr("234"), "baz": ptrstr("890"), "foo": ptrstr("321")},
|
||||
},
|
||||
{
|
||||
name: "nested with overrides last",
|
||||
overrides: []string{"a.args.foo=321", "c.args.bar=432"},
|
||||
want: map[string]string{"bar": "432", "baz": "890", "foo": "321"},
|
||||
want: map[string]*string{"bar": ptrstr("432"), "baz": ptrstr("890"), "foo": ptrstr("321")},
|
||||
},
|
||||
}
|
||||
for _, tt := range cases {
|
||||
@@ -1067,26 +1065,26 @@ group "default" {
|
||||
cases := []struct {
|
||||
name string
|
||||
overrides []string
|
||||
wantch1 map[string]string
|
||||
wantch2 map[string]string
|
||||
wantch1 map[string]*string
|
||||
wantch2 map[string]*string
|
||||
}{
|
||||
{
|
||||
name: "nested simple",
|
||||
overrides: nil,
|
||||
wantch1: map[string]string{"BAR": "fuu", "FOO": "bar"},
|
||||
wantch2: map[string]string{"BAR": "fuu", "FOO": "bar", "FOO2": "bar2"},
|
||||
wantch1: map[string]*string{"BAR": ptrstr("fuu"), "FOO": ptrstr("bar")},
|
||||
wantch2: map[string]*string{"BAR": ptrstr("fuu"), "FOO": ptrstr("bar"), "FOO2": ptrstr("bar2")},
|
||||
},
|
||||
{
|
||||
name: "nested with overrides first",
|
||||
overrides: []string{"grandparent.args.BAR=fii", "child1.args.FOO=baaar"},
|
||||
wantch1: map[string]string{"BAR": "fii", "FOO": "baaar"},
|
||||
wantch2: map[string]string{"BAR": "fii", "FOO": "bar", "FOO2": "bar2"},
|
||||
wantch1: map[string]*string{"BAR": ptrstr("fii"), "FOO": ptrstr("baaar")},
|
||||
wantch2: map[string]*string{"BAR": ptrstr("fii"), "FOO": ptrstr("bar"), "FOO2": ptrstr("bar2")},
|
||||
},
|
||||
{
|
||||
name: "nested with overrides last",
|
||||
overrides: []string{"grandparent.args.BAR=fii", "child2.args.FOO=baaar"},
|
||||
wantch1: map[string]string{"BAR": "fii", "FOO": "bar"},
|
||||
wantch2: map[string]string{"BAR": "fii", "FOO": "baaar", "FOO2": "bar2"},
|
||||
wantch1: map[string]*string{"BAR": ptrstr("fii"), "FOO": ptrstr("bar")},
|
||||
wantch2: map[string]*string{"BAR": ptrstr("fii"), "FOO": ptrstr("baaar"), "FOO2": ptrstr("bar2")},
|
||||
},
|
||||
}
|
||||
for _, tt := range cases {
|
||||
@@ -1285,8 +1283,78 @@ services:
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, "app", c.Targets[0].Name)
|
||||
require.Equal(t, "foo", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, "bar", c.Targets[0].Args["v2"])
|
||||
require.Equal(t, ptrstr("foo"), c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("bar"), c.Targets[0].Args["v2"])
|
||||
require.Equal(t, "dir", *c.Targets[0].Context)
|
||||
require.Equal(t, "Dockerfile-alternate", *c.Targets[0].Dockerfile)
|
||||
}
|
||||
|
||||
func TestHCLNullVars(t *testing.T) {
|
||||
fp := File{
|
||||
Name: "docker-bake.hcl",
|
||||
Data: []byte(
|
||||
`variable "FOO" {
|
||||
default = null
|
||||
}
|
||||
variable "BAR" {
|
||||
default = null
|
||||
}
|
||||
target "default" {
|
||||
args = {
|
||||
foo = FOO
|
||||
bar = "baz"
|
||||
}
|
||||
labels = {
|
||||
"com.docker.app.bar" = BAR
|
||||
"com.docker.app.baz" = "foo"
|
||||
}
|
||||
}`),
|
||||
}
|
||||
|
||||
ctx := context.TODO()
|
||||
m, _, err := ReadTargets(ctx, []File{fp}, []string{"default"}, nil, nil)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, 1, len(m))
|
||||
_, ok := m["default"]
|
||||
require.True(t, ok)
|
||||
|
||||
_, err = TargetsToBuildOpt(m, &Input{})
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, map[string]*string{"bar": ptrstr("baz")}, m["default"].Args)
|
||||
require.Equal(t, map[string]*string{"com.docker.app.baz": ptrstr("foo")}, m["default"].Labels)
|
||||
}
|
||||
|
||||
func TestJSONNullVars(t *testing.T) {
|
||||
fp := File{
|
||||
Name: "docker-bake.json",
|
||||
Data: []byte(
|
||||
`{
|
||||
"variable": {
|
||||
"FOO": {
|
||||
"default": null
|
||||
}
|
||||
},
|
||||
"target": {
|
||||
"default": {
|
||||
"args": {
|
||||
"foo": "${FOO}",
|
||||
"bar": "baz"
|
||||
}
|
||||
}
|
||||
}
|
||||
}`),
|
||||
}
|
||||
|
||||
ctx := context.TODO()
|
||||
m, _, err := ReadTargets(ctx, []File{fp}, []string{"default"}, nil, nil)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, 1, len(m))
|
||||
_, ok := m["default"]
|
||||
require.True(t, ok)
|
||||
|
||||
_, err = TargetsToBuildOpt(m, &Input{})
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, map[string]*string{"bar": ptrstr("baz")}, m["default"].Args)
|
||||
}
|
||||
|
@@ -75,13 +75,19 @@ func ParseCompose(cfgs []compose.ConfigFile, envs map[string]string) (*Config, e
|
||||
secrets = append(secrets, secret)
|
||||
}
|
||||
|
||||
// compose does not support nil values for labels
|
||||
labels := map[string]*string{}
|
||||
for k, v := range s.Build.Labels {
|
||||
labels[k] = &v
|
||||
}
|
||||
|
||||
g.Targets = append(g.Targets, targetName)
|
||||
t := &Target{
|
||||
Name: targetName,
|
||||
Context: contextPathP,
|
||||
Dockerfile: dockerfilePathP,
|
||||
Tags: s.Build.Tags,
|
||||
Labels: s.Build.Labels,
|
||||
Labels: labels,
|
||||
Args: flatten(s.Build.Args.Resolve(func(val string) (string, bool) {
|
||||
if val, ok := s.Environment[val]; ok && val != nil {
|
||||
return *val, true
|
||||
@@ -193,16 +199,16 @@ func loadDotEnv(curenv map[string]string, workingDir string) (map[string]string,
|
||||
return curenv, nil
|
||||
}
|
||||
|
||||
func flatten(in compose.MappingWithEquals) compose.Mapping {
|
||||
func flatten(in compose.MappingWithEquals) map[string]*string {
|
||||
if len(in) == 0 {
|
||||
return nil
|
||||
}
|
||||
out := compose.Mapping{}
|
||||
out := map[string]*string{}
|
||||
for k, v := range in {
|
||||
if v == nil {
|
||||
continue
|
||||
}
|
||||
out[k] = *v
|
||||
out[k] = v
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
@@ -60,7 +60,7 @@ secrets:
|
||||
require.Equal(t, "./dir", *c.Targets[1].Context)
|
||||
require.Equal(t, "Dockerfile-alternate", *c.Targets[1].Dockerfile)
|
||||
require.Equal(t, 1, len(c.Targets[1].Args))
|
||||
require.Equal(t, "123", c.Targets[1].Args["buildno"])
|
||||
require.Equal(t, ptrstr("123"), c.Targets[1].Args["buildno"])
|
||||
require.Equal(t, []string{"type=local,src=path/to/cache"}, c.Targets[1].CacheFrom)
|
||||
require.Equal(t, []string{"type=local,dest=path/to/cache"}, c.Targets[1].CacheTo)
|
||||
require.Equal(t, "none", *c.Targets[1].NetworkMode)
|
||||
@@ -149,18 +149,15 @@ services:
|
||||
BRB: FOO
|
||||
`)
|
||||
|
||||
os.Setenv("FOO", "bar")
|
||||
defer os.Unsetenv("FOO")
|
||||
os.Setenv("BAR", "foo")
|
||||
defer os.Unsetenv("BAR")
|
||||
os.Setenv("ZZZ_BAR", "zzz_foo")
|
||||
defer os.Unsetenv("ZZZ_BAR")
|
||||
t.Setenv("FOO", "bar")
|
||||
t.Setenv("BAR", "foo")
|
||||
t.Setenv("ZZZ_BAR", "zzz_foo")
|
||||
|
||||
c, err := ParseCompose([]compose.ConfigFile{{Content: dt}}, sliceToMap(os.Environ()))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, "bar", c.Targets[0].Args["FOO"])
|
||||
require.Equal(t, "zzz_foo", c.Targets[0].Args["BAR"])
|
||||
require.Equal(t, "FOO", c.Targets[0].Args["BRB"])
|
||||
require.Equal(t, ptrstr("bar"), c.Targets[0].Args["FOO"])
|
||||
require.Equal(t, ptrstr("zzz_foo"), c.Targets[0].Args["BAR"])
|
||||
require.Equal(t, ptrstr("FOO"), c.Targets[0].Args["BRB"])
|
||||
}
|
||||
|
||||
func TestInconsistentComposeFile(t *testing.T) {
|
||||
@@ -308,7 +305,7 @@ services:
|
||||
sort.Slice(c.Targets, func(i, j int) bool {
|
||||
return c.Targets[i].Name < c.Targets[j].Name
|
||||
})
|
||||
require.Equal(t, map[string]string{"CT_ECR": "foo", "CT_TAG": "bar"}, c.Targets[0].Args)
|
||||
require.Equal(t, map[string]*string{"CT_ECR": ptrstr("foo"), "CT_TAG": ptrstr("bar")}, c.Targets[0].Args)
|
||||
require.Equal(t, []string{"ct-addon:baz", "ct-addon:foo", "ct-addon:alp"}, c.Targets[0].Tags)
|
||||
require.Equal(t, []string{"linux/amd64", "linux/arm64"}, c.Targets[0].Platforms)
|
||||
require.Equal(t, []string{"user/app:cache", "type=local,src=path/to/cache"}, c.Targets[0].CacheFrom)
|
||||
@@ -381,7 +378,7 @@ services:
|
||||
|
||||
c, err := ParseCompose([]compose.ConfigFile{{Content: dt}}, nil)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, map[string]string{"CT_ECR": "foo", "FOO": "bsdf -csdf", "NODE_ENV": "test"}, c.Targets[0].Args)
|
||||
require.Equal(t, map[string]*string{"CT_ECR": ptrstr("foo"), "FOO": ptrstr("bsdf -csdf"), "NODE_ENV": ptrstr("test")}, c.Targets[0].Args)
|
||||
}
|
||||
|
||||
func TestDotEnv(t *testing.T) {
|
||||
@@ -405,7 +402,7 @@ services:
|
||||
Data: dt,
|
||||
}})
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, map[string]string{"FOO": "bar"}, c.Targets[0].Args)
|
||||
require.Equal(t, map[string]*string{"FOO": ptrstr("bar")}, c.Targets[0].Args)
|
||||
}
|
||||
|
||||
func TestPorts(t *testing.T) {
|
||||
@@ -629,6 +626,22 @@ target "default" {
|
||||
}
|
||||
}
|
||||
|
||||
func TestComposeNullArgs(t *testing.T) {
|
||||
var dt = []byte(`
|
||||
services:
|
||||
scratch:
|
||||
build:
|
||||
context: .
|
||||
args:
|
||||
FOO: null
|
||||
bar: "baz"
|
||||
`)
|
||||
|
||||
c, err := ParseCompose([]compose.ConfigFile{{Content: dt}}, nil)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, map[string]*string{"bar": ptrstr("baz")}, c.Targets[0].Args)
|
||||
}
|
||||
|
||||
// chdir changes the current working directory to the named directory,
|
||||
// and then restore the original working directory at the end of the test.
|
||||
func chdir(t *testing.T, dir string) {
|
||||
|
147
bake/hcl_test.go
147
bake/hcl_test.go
@@ -1,7 +1,7 @@
|
||||
package bake
|
||||
|
||||
import (
|
||||
"os"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
@@ -54,7 +54,7 @@ func TestHCLBasic(t *testing.T) {
|
||||
|
||||
require.Equal(t, c.Targets[1].Name, "webapp")
|
||||
require.Equal(t, 1, len(c.Targets[1].Args))
|
||||
require.Equal(t, "123", c.Targets[1].Args["buildno"])
|
||||
require.Equal(t, ptrstr("123"), c.Targets[1].Args["buildno"])
|
||||
|
||||
require.Equal(t, c.Targets[2].Name, "cross")
|
||||
require.Equal(t, 2, len(c.Targets[2].Platforms))
|
||||
@@ -62,7 +62,7 @@ func TestHCLBasic(t *testing.T) {
|
||||
|
||||
require.Equal(t, c.Targets[3].Name, "webapp-plus")
|
||||
require.Equal(t, 1, len(c.Targets[3].Args))
|
||||
require.Equal(t, map[string]string{"IAMCROSS": "true"}, c.Targets[3].Args)
|
||||
require.Equal(t, map[string]*string{"IAMCROSS": ptrstr("true")}, c.Targets[3].Args)
|
||||
}
|
||||
|
||||
func TestHCLBasicInJSON(t *testing.T) {
|
||||
@@ -114,7 +114,7 @@ func TestHCLBasicInJSON(t *testing.T) {
|
||||
|
||||
require.Equal(t, c.Targets[1].Name, "webapp")
|
||||
require.Equal(t, 1, len(c.Targets[1].Args))
|
||||
require.Equal(t, "123", c.Targets[1].Args["buildno"])
|
||||
require.Equal(t, ptrstr("123"), c.Targets[1].Args["buildno"])
|
||||
|
||||
require.Equal(t, c.Targets[2].Name, "cross")
|
||||
require.Equal(t, 2, len(c.Targets[2].Platforms))
|
||||
@@ -122,7 +122,7 @@ func TestHCLBasicInJSON(t *testing.T) {
|
||||
|
||||
require.Equal(t, c.Targets[3].Name, "webapp-plus")
|
||||
require.Equal(t, 1, len(c.Targets[3].Args))
|
||||
require.Equal(t, map[string]string{"IAMCROSS": "true"}, c.Targets[3].Args)
|
||||
require.Equal(t, map[string]*string{"IAMCROSS": ptrstr("true")}, c.Targets[3].Args)
|
||||
}
|
||||
|
||||
func TestHCLWithFunctions(t *testing.T) {
|
||||
@@ -147,7 +147,7 @@ func TestHCLWithFunctions(t *testing.T) {
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "webapp")
|
||||
require.Equal(t, "124", c.Targets[0].Args["buildno"])
|
||||
require.Equal(t, ptrstr("124"), c.Targets[0].Args["buildno"])
|
||||
}
|
||||
|
||||
func TestHCLWithUserDefinedFunctions(t *testing.T) {
|
||||
@@ -177,7 +177,7 @@ func TestHCLWithUserDefinedFunctions(t *testing.T) {
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "webapp")
|
||||
require.Equal(t, "124", c.Targets[0].Args["buildno"])
|
||||
require.Equal(t, ptrstr("124"), c.Targets[0].Args["buildno"])
|
||||
}
|
||||
|
||||
func TestHCLWithVariables(t *testing.T) {
|
||||
@@ -206,9 +206,9 @@ func TestHCLWithVariables(t *testing.T) {
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "webapp")
|
||||
require.Equal(t, "123", c.Targets[0].Args["buildno"])
|
||||
require.Equal(t, ptrstr("123"), c.Targets[0].Args["buildno"])
|
||||
|
||||
os.Setenv("BUILD_NUMBER", "456")
|
||||
t.Setenv("BUILD_NUMBER", "456")
|
||||
|
||||
c, err = ParseFile(dt, "docker-bake.hcl")
|
||||
require.NoError(t, err)
|
||||
@@ -219,7 +219,7 @@ func TestHCLWithVariables(t *testing.T) {
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "webapp")
|
||||
require.Equal(t, "456", c.Targets[0].Args["buildno"])
|
||||
require.Equal(t, ptrstr("456"), c.Targets[0].Args["buildno"])
|
||||
}
|
||||
|
||||
func TestHCLWithVariablesInFunctions(t *testing.T) {
|
||||
@@ -244,7 +244,7 @@ func TestHCLWithVariablesInFunctions(t *testing.T) {
|
||||
require.Equal(t, c.Targets[0].Name, "webapp")
|
||||
require.Equal(t, []string{"user/repo:v1"}, c.Targets[0].Tags)
|
||||
|
||||
os.Setenv("REPO", "docker/buildx")
|
||||
t.Setenv("REPO", "docker/buildx")
|
||||
|
||||
c, err = ParseFile(dt, "docker-bake.hcl")
|
||||
require.NoError(t, err)
|
||||
@@ -280,10 +280,10 @@ func TestHCLMultiFileSharedVariables(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, "pre-abc", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, "abc-post", c.Targets[0].Args["v2"])
|
||||
require.Equal(t, ptrstr("pre-abc"), c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("abc-post"), c.Targets[0].Args["v2"])
|
||||
|
||||
os.Setenv("FOO", "def")
|
||||
t.Setenv("FOO", "def")
|
||||
|
||||
c, err = ParseFiles([]File{
|
||||
{Data: dt, Name: "c1.hcl"},
|
||||
@@ -293,12 +293,11 @@ func TestHCLMultiFileSharedVariables(t *testing.T) {
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, "pre-def", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, "def-post", c.Targets[0].Args["v2"])
|
||||
require.Equal(t, ptrstr("pre-def"), c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("def-post"), c.Targets[0].Args["v2"])
|
||||
}
|
||||
|
||||
func TestHCLVarsWithVars(t *testing.T) {
|
||||
os.Unsetenv("FOO")
|
||||
dt := []byte(`
|
||||
variable "FOO" {
|
||||
default = upper("${BASE}def")
|
||||
@@ -330,10 +329,10 @@ func TestHCLVarsWithVars(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, "pre--ABCDEF-", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, "ABCDEF-post", c.Targets[0].Args["v2"])
|
||||
require.Equal(t, ptrstr("pre--ABCDEF-"), c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("ABCDEF-post"), c.Targets[0].Args["v2"])
|
||||
|
||||
os.Setenv("BASE", "new")
|
||||
t.Setenv("BASE", "new")
|
||||
|
||||
c, err = ParseFiles([]File{
|
||||
{Data: dt, Name: "c1.hcl"},
|
||||
@@ -343,12 +342,11 @@ func TestHCLVarsWithVars(t *testing.T) {
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, "pre--NEWDEF-", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, "NEWDEF-post", c.Targets[0].Args["v2"])
|
||||
require.Equal(t, ptrstr("pre--NEWDEF-"), c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("NEWDEF-post"), c.Targets[0].Args["v2"])
|
||||
}
|
||||
|
||||
func TestHCLTypedVariables(t *testing.T) {
|
||||
os.Unsetenv("FOO")
|
||||
dt := []byte(`
|
||||
variable "FOO" {
|
||||
default = 3
|
||||
@@ -369,33 +367,80 @@ func TestHCLTypedVariables(t *testing.T) {
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, "lower", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, "yes", c.Targets[0].Args["v2"])
|
||||
require.Equal(t, ptrstr("lower"), c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("yes"), c.Targets[0].Args["v2"])
|
||||
|
||||
os.Setenv("FOO", "5.1")
|
||||
os.Setenv("IS_FOO", "0")
|
||||
t.Setenv("FOO", "5.1")
|
||||
t.Setenv("IS_FOO", "0")
|
||||
|
||||
c, err = ParseFile(dt, "docker-bake.hcl")
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, "higher", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, "no", c.Targets[0].Args["v2"])
|
||||
require.Equal(t, ptrstr("higher"), c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("no"), c.Targets[0].Args["v2"])
|
||||
|
||||
os.Setenv("FOO", "NaN")
|
||||
t.Setenv("FOO", "NaN")
|
||||
_, err = ParseFile(dt, "docker-bake.hcl")
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), "failed to parse FOO as number")
|
||||
|
||||
os.Setenv("FOO", "0")
|
||||
os.Setenv("IS_FOO", "maybe")
|
||||
t.Setenv("FOO", "0")
|
||||
t.Setenv("IS_FOO", "maybe")
|
||||
|
||||
_, err = ParseFile(dt, "docker-bake.hcl")
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), "failed to parse IS_FOO as bool")
|
||||
}
|
||||
|
||||
func TestHCLNullVariables(t *testing.T) {
|
||||
dt := []byte(`
|
||||
variable "FOO" {
|
||||
default = null
|
||||
}
|
||||
target "default" {
|
||||
args = {
|
||||
foo = FOO
|
||||
}
|
||||
}`)
|
||||
|
||||
c, err := ParseFile(dt, "docker-bake.hcl")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, ptrstr(nil), c.Targets[0].Args["foo"])
|
||||
|
||||
t.Setenv("FOO", "bar")
|
||||
c, err = ParseFile(dt, "docker-bake.hcl")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, ptrstr("bar"), c.Targets[0].Args["foo"])
|
||||
}
|
||||
|
||||
func TestJSONNullVariables(t *testing.T) {
|
||||
dt := []byte(`{
|
||||
"variable": {
|
||||
"FOO": {
|
||||
"default": null
|
||||
}
|
||||
},
|
||||
"target": {
|
||||
"default": {
|
||||
"args": {
|
||||
"foo": "${FOO}"
|
||||
}
|
||||
}
|
||||
}
|
||||
}`)
|
||||
|
||||
c, err := ParseFile(dt, "docker-bake.json")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, ptrstr(nil), c.Targets[0].Args["foo"])
|
||||
|
||||
t.Setenv("FOO", "bar")
|
||||
c, err = ParseFile(dt, "docker-bake.json")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, ptrstr("bar"), c.Targets[0].Args["foo"])
|
||||
}
|
||||
|
||||
func TestHCLVariableCycle(t *testing.T) {
|
||||
dt := []byte(`
|
||||
variable "FOO" {
|
||||
@@ -431,16 +476,16 @@ func TestHCLAttrs(t *testing.T) {
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, "attr-abcdef", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("attr-abcdef"), c.Targets[0].Args["v1"])
|
||||
|
||||
// env does not apply if no variable
|
||||
os.Setenv("FOO", "bar")
|
||||
t.Setenv("FOO", "bar")
|
||||
c, err = ParseFile(dt, "docker-bake.hcl")
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, "attr-abcdef", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("attr-abcdef"), c.Targets[0].Args["v1"])
|
||||
// attr-multifile
|
||||
}
|
||||
|
||||
@@ -549,11 +594,10 @@ func TestHCLAttrsCustomType(t *testing.T) {
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, []string{"linux/arm64", "linux/amd64"}, c.Targets[0].Platforms)
|
||||
require.Equal(t, "linux/arm64", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("linux/arm64"), c.Targets[0].Args["v1"])
|
||||
}
|
||||
|
||||
func TestHCLMultiFileAttrs(t *testing.T) {
|
||||
os.Unsetenv("FOO")
|
||||
dt := []byte(`
|
||||
variable "FOO" {
|
||||
default = "abc"
|
||||
@@ -575,9 +619,9 @@ func TestHCLMultiFileAttrs(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, "pre-def", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("pre-def"), c.Targets[0].Args["v1"])
|
||||
|
||||
os.Setenv("FOO", "ghi")
|
||||
t.Setenv("FOO", "ghi")
|
||||
|
||||
c, err = ParseFiles([]File{
|
||||
{Data: dt, Name: "c1.hcl"},
|
||||
@@ -587,7 +631,7 @@ func TestHCLMultiFileAttrs(t *testing.T) {
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, "pre-ghi", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("pre-ghi"), c.Targets[0].Args["v1"])
|
||||
}
|
||||
|
||||
func TestJSONAttributes(t *testing.T) {
|
||||
@@ -598,7 +642,7 @@ func TestJSONAttributes(t *testing.T) {
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, "pre-abc-def", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("pre-abc-def"), c.Targets[0].Args["v1"])
|
||||
}
|
||||
|
||||
func TestJSONFunctions(t *testing.T) {
|
||||
@@ -623,7 +667,7 @@ func TestJSONFunctions(t *testing.T) {
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, "pre-<FOO-abc>", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("pre-<FOO-abc>"), c.Targets[0].Args["v1"])
|
||||
}
|
||||
|
||||
func TestHCLFunctionInAttr(t *testing.T) {
|
||||
@@ -651,7 +695,7 @@ func TestHCLFunctionInAttr(t *testing.T) {
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, "FOO <> [baz]", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("FOO <> [baz]"), c.Targets[0].Args["v1"])
|
||||
}
|
||||
|
||||
func TestHCLCombineCompose(t *testing.T) {
|
||||
@@ -682,8 +726,8 @@ services:
|
||||
|
||||
require.Equal(t, 1, len(c.Targets))
|
||||
require.Equal(t, c.Targets[0].Name, "app")
|
||||
require.Equal(t, "foo", c.Targets[0].Args["v1"])
|
||||
require.Equal(t, "bar", c.Targets[0].Args["v2"])
|
||||
require.Equal(t, ptrstr("foo"), c.Targets[0].Args["v1"])
|
||||
require.Equal(t, ptrstr("bar"), c.Targets[0].Args["v2"])
|
||||
require.Equal(t, "dir", *c.Targets[0].Context)
|
||||
require.Equal(t, "Dockerfile-alternate", *c.Targets[0].Dockerfile)
|
||||
}
|
||||
@@ -828,10 +872,10 @@ target "two" {
|
||||
require.Equal(t, 2, len(c.Targets))
|
||||
|
||||
require.Equal(t, c.Targets[0].Name, "one")
|
||||
require.Equal(t, map[string]string{"a": "pre-ghi-jkl"}, c.Targets[0].Args)
|
||||
require.Equal(t, map[string]*string{"a": ptrstr("pre-ghi-jkl")}, c.Targets[0].Args)
|
||||
|
||||
require.Equal(t, c.Targets[1].Name, "two")
|
||||
require.Equal(t, map[string]string{"b": "pre-jkl"}, c.Targets[1].Args)
|
||||
require.Equal(t, map[string]*string{"b": ptrstr("pre-jkl")}, c.Targets[1].Args)
|
||||
}
|
||||
|
||||
func TestEmptyVariableJSON(t *testing.T) {
|
||||
@@ -882,3 +926,12 @@ func TestVarUnsupportedType(t *testing.T) {
|
||||
_, err := ParseFile(dt, "docker-bake.hcl")
|
||||
require.Error(t, err)
|
||||
}
|
||||
|
||||
func ptrstr(s interface{}) *string {
|
||||
var n *string = nil
|
||||
if reflect.ValueOf(s).Kind() == reflect.String {
|
||||
ss := s.(string)
|
||||
n = &ss
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
@@ -83,7 +83,7 @@ func appendJSONFuncCalls(exp hcl.Expression, m map[string]struct{}) error {
|
||||
|
||||
// hcl/v2/json/ast#stringVal
|
||||
val := src.FieldByName("Value")
|
||||
if val.IsZero() {
|
||||
if !val.IsValid() || val.IsZero() {
|
||||
return nil
|
||||
}
|
||||
rng := src.FieldByName("SrcRange")
|
||||
|
@@ -281,19 +281,16 @@ func (p *parser) resolveValue(name string) (err error) {
|
||||
_, isVar := p.vars[name]
|
||||
|
||||
if envv, ok := p.opt.LookupVar(name); ok && isVar {
|
||||
if vv.Type().Equals(cty.Bool) {
|
||||
switch {
|
||||
case vv.Type().Equals(cty.Bool):
|
||||
b, err := strconv.ParseBool(envv)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "failed to parse %s as bool", name)
|
||||
}
|
||||
vv := cty.BoolVal(b)
|
||||
v = &vv
|
||||
return nil
|
||||
} else if vv.Type().Equals(cty.String) {
|
||||
vv := cty.StringVal(envv)
|
||||
v = &vv
|
||||
return nil
|
||||
} else if vv.Type().Equals(cty.Number) {
|
||||
vv = cty.BoolVal(b)
|
||||
case vv.Type().Equals(cty.String), vv.Type().Equals(cty.DynamicPseudoType):
|
||||
vv = cty.StringVal(envv)
|
||||
case vv.Type().Equals(cty.Number):
|
||||
n, err := strconv.ParseFloat(envv, 64)
|
||||
if err == nil && (math.IsNaN(n) || math.IsInf(n, 0)) {
|
||||
err = errors.Errorf("invalid number value")
|
||||
@@ -301,10 +298,8 @@ func (p *parser) resolveValue(name string) (err error) {
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "failed to parse %s as number", name)
|
||||
}
|
||||
vv := cty.NumberVal(big.NewFloat(n))
|
||||
v = &vv
|
||||
return nil
|
||||
} else {
|
||||
vv = cty.NumberVal(big.NewFloat(n))
|
||||
default:
|
||||
// TODO: support lists with csv values
|
||||
return errors.Errorf("unsupported type %s for variable %s", vv.Type().FriendlyName(), name)
|
||||
}
|
||||
|
476
build/build.go
476
build/build.go
@@ -6,6 +6,7 @@ import (
|
||||
"context"
|
||||
"crypto/rand"
|
||||
_ "crypto/sha256" // ensure digests can be computed
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
@@ -36,9 +37,10 @@ import (
|
||||
"github.com/docker/docker/pkg/jsonmessage"
|
||||
"github.com/moby/buildkit/client"
|
||||
"github.com/moby/buildkit/client/llb"
|
||||
"github.com/moby/buildkit/client/ociindex"
|
||||
"github.com/moby/buildkit/exporter/containerimage/exptypes"
|
||||
"github.com/moby/buildkit/frontend/attestations"
|
||||
gateway "github.com/moby/buildkit/frontend/gateway/client"
|
||||
"github.com/moby/buildkit/identity"
|
||||
"github.com/moby/buildkit/session"
|
||||
"github.com/moby/buildkit/session/upload/uploadprovider"
|
||||
"github.com/moby/buildkit/solver/errdefs"
|
||||
@@ -227,7 +229,9 @@ func resolveDrivers(ctx context.Context, nodes []builder.Node, opt map[string]Op
|
||||
|
||||
func(i int, c *client.Client) {
|
||||
eg.Go(func() error {
|
||||
clients[i].Build(ctx, client.SolveOpt{}, "buildx", func(ctx context.Context, c gateway.Client) (*gateway.Result, error) {
|
||||
clients[i].Build(ctx, client.SolveOpt{
|
||||
Internal: true,
|
||||
}, "buildx", func(ctx context.Context, c gateway.Client) (*gateway.Result, error) {
|
||||
bopts[i] = c.BuildOpts()
|
||||
return nil, nil
|
||||
}, nil)
|
||||
@@ -446,6 +450,25 @@ func toSolveOpt(ctx context.Context, node builder.Node, multiDriver bool, opt Op
|
||||
so.FrontendAttrs["multi-platform"] = "true"
|
||||
}
|
||||
|
||||
attests := make(map[string]string)
|
||||
for k, v := range opt.Attests {
|
||||
if v != nil {
|
||||
attests[k] = *v
|
||||
}
|
||||
}
|
||||
supportsAttestations := bopts.LLBCaps.Contains(apicaps.CapID("exporter.image.attestations"))
|
||||
if len(attests) > 0 {
|
||||
if !supportsAttestations {
|
||||
return nil, nil, errors.Errorf("attestations are not supported by the current buildkitd")
|
||||
}
|
||||
for k, v := range attests {
|
||||
so.FrontendAttrs[k] = v
|
||||
}
|
||||
}
|
||||
if _, ok := opt.Attests["attest:provenance"]; !ok && supportsAttestations {
|
||||
so.FrontendAttrs["attest:provenance"] = "mode=min,inline-only=true"
|
||||
}
|
||||
|
||||
switch len(opt.Exports) {
|
||||
case 1:
|
||||
// valid
|
||||
@@ -503,7 +526,7 @@ func toSolveOpt(ctx context.Context, node builder.Node, multiDriver bool, opt Op
|
||||
return nil, nil, notSupported(nodeDriver, driver.OCIExporter)
|
||||
}
|
||||
if e.Type == "docker" {
|
||||
if len(opt.Platforms) > 1 {
|
||||
if len(opt.Platforms) > 1 || len(attests) > 0 {
|
||||
return nil, nil, errors.Errorf("docker exporter does not currently support exporting manifest lists")
|
||||
}
|
||||
if e.Output == nil {
|
||||
@@ -580,23 +603,11 @@ func toSolveOpt(ctx context.Context, node builder.Node, multiDriver bool, opt Op
|
||||
}
|
||||
}
|
||||
|
||||
if len(opt.Attests) > 0 {
|
||||
if !bopts.LLBCaps.Contains(apicaps.CapID("exporter.image.attestations")) {
|
||||
return nil, nil, errors.Errorf("attestations are not supported by the current buildkitd")
|
||||
// Propagate SOURCE_DATE_EPOCH from the client env
|
||||
if v := os.Getenv("SOURCE_DATE_EPOCH"); v != "" {
|
||||
if _, ok := so.FrontendAttrs["build-arg:SOURCE_DATE_EPOCH"]; !ok {
|
||||
so.FrontendAttrs["build-arg:SOURCE_DATE_EPOCH"] = v
|
||||
}
|
||||
for k, v := range opt.Attests {
|
||||
if v == nil {
|
||||
continue
|
||||
}
|
||||
so.FrontendAttrs[k] = *v
|
||||
}
|
||||
}
|
||||
if _, ok := opt.Attests["attest:provenance"]; !ok {
|
||||
so.FrontendAttrs["attest:provenance"] = "mode=min,inline-only=true"
|
||||
}
|
||||
|
||||
for k, v := range getGitAttributes(ctx, opt.Inputs.ContextPath, opt.Inputs.DockerfilePath) {
|
||||
so.FrontendAttrs[k] = v
|
||||
}
|
||||
|
||||
// set platforms
|
||||
@@ -853,6 +864,10 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opt map[s
|
||||
for k, opt := range opt {
|
||||
multiDriver := len(m[k]) > 1
|
||||
hasMobyDriver := false
|
||||
gitattrs, err := getGitAttributes(ctx, opt.Inputs.ContextPath, opt.Inputs.DockerfilePath)
|
||||
if err != nil {
|
||||
logrus.Warn(err)
|
||||
}
|
||||
for i, np := range m[k] {
|
||||
node := nodes[np.driverIndex]
|
||||
if node.Driver.IsMobyDriver() {
|
||||
@@ -865,6 +880,9 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opt map[s
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for k, v := range gitattrs {
|
||||
so.FrontendAttrs[k] = v
|
||||
}
|
||||
defers = append(defers, release)
|
||||
m[k][i].so = so
|
||||
}
|
||||
@@ -935,26 +953,190 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opt map[s
|
||||
if multiTarget {
|
||||
span, ctx = tracing.StartSpan(ctx, k)
|
||||
}
|
||||
baseCtx := ctx
|
||||
|
||||
res := make([]*client.SolveResponse, len(dps))
|
||||
wg := &sync.WaitGroup{}
|
||||
wg.Add(len(dps))
|
||||
eg2, ctx := errgroup.WithContext(ctx)
|
||||
|
||||
var pushNames string
|
||||
var insecurePush bool
|
||||
|
||||
for i, dp := range dps {
|
||||
i, dp, so := i, dp, *dp.so
|
||||
if multiDriver {
|
||||
for i, e := range so.Exports {
|
||||
switch e.Type {
|
||||
case "oci", "tar":
|
||||
return errors.Errorf("%s for multi-node builds currently not supported", e.Type)
|
||||
case "image":
|
||||
if pushNames == "" && e.Attrs["push"] != "" {
|
||||
if ok, _ := strconv.ParseBool(e.Attrs["push"]); ok {
|
||||
pushNames = e.Attrs["name"]
|
||||
if pushNames == "" {
|
||||
return errors.Errorf("tag is needed when pushing to registry")
|
||||
}
|
||||
names, err := toRepoOnly(e.Attrs["name"])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if ok, _ := strconv.ParseBool(e.Attrs["registry.insecure"]); ok {
|
||||
insecurePush = true
|
||||
}
|
||||
e.Attrs["name"] = names
|
||||
e.Attrs["push-by-digest"] = "true"
|
||||
so.Exports[i].Attrs = e.Attrs
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pw := progress.WithPrefix(w, k, multiTarget)
|
||||
|
||||
c := clients[dp.driverIndex]
|
||||
eg2.Go(func() error {
|
||||
pw = progress.ResetTime(pw)
|
||||
|
||||
if err := waitContextDeps(ctx, dp.driverIndex, results, &so); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
frontendInputs := make(map[string]*pb.Definition)
|
||||
for key, st := range so.FrontendInputs {
|
||||
def, err := st.Marshal(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
frontendInputs[key] = def.ToPB()
|
||||
}
|
||||
|
||||
req := gateway.SolveRequest{
|
||||
Frontend: so.Frontend,
|
||||
FrontendInputs: frontendInputs,
|
||||
FrontendOpt: make(map[string]string),
|
||||
}
|
||||
for k, v := range so.FrontendAttrs {
|
||||
req.FrontendOpt[k] = v
|
||||
}
|
||||
so.Frontend = ""
|
||||
so.FrontendInputs = nil
|
||||
|
||||
ch, done := progress.NewChannel(pw)
|
||||
defer func() { <-done }()
|
||||
|
||||
cc := c
|
||||
var printRes map[string][]byte
|
||||
rr, err := c.Build(ctx, so, "buildx", func(ctx context.Context, c gateway.Client) (*gateway.Result, error) {
|
||||
var isFallback bool
|
||||
var origErr error
|
||||
for {
|
||||
if opt.PrintFunc != nil {
|
||||
if _, ok := req.FrontendOpt["frontend.caps"]; !ok {
|
||||
req.FrontendOpt["frontend.caps"] = "moby.buildkit.frontend.subrequests+forward"
|
||||
} else {
|
||||
req.FrontendOpt["frontend.caps"] += ",moby.buildkit.frontend.subrequests+forward"
|
||||
}
|
||||
req.FrontendOpt["requestid"] = "frontend." + opt.PrintFunc.Name
|
||||
if isFallback {
|
||||
req.FrontendOpt["build-arg:BUILDKIT_SYNTAX"] = printFallbackImage
|
||||
}
|
||||
}
|
||||
res, err := c.Solve(ctx, req)
|
||||
if err != nil {
|
||||
if origErr != nil {
|
||||
return nil, err
|
||||
}
|
||||
var reqErr *errdefs.UnsupportedSubrequestError
|
||||
if !isFallback {
|
||||
if errors.As(err, &reqErr) {
|
||||
switch reqErr.Name {
|
||||
case "frontend.outline", "frontend.targets":
|
||||
isFallback = true
|
||||
origErr = err
|
||||
continue
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
// buildkit v0.8 vendored in Docker 20.10 does not support typed errors
|
||||
if strings.Contains(err.Error(), "unsupported request frontend.outline") || strings.Contains(err.Error(), "unsupported request frontend.targets") {
|
||||
isFallback = true
|
||||
origErr = err
|
||||
continue
|
||||
}
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
if opt.PrintFunc != nil {
|
||||
printRes = res.Metadata
|
||||
}
|
||||
results.Set(resultKey(dp.driverIndex, k), res)
|
||||
if resultHandleFunc != nil {
|
||||
resultHandleFunc(dp.driverIndex, &ResultContext{cc, res})
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
}, ch)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
res[i] = rr
|
||||
|
||||
if rr.ExporterResponse == nil {
|
||||
rr.ExporterResponse = map[string]string{}
|
||||
}
|
||||
for k, v := range printRes {
|
||||
rr.ExporterResponse[k] = string(v)
|
||||
}
|
||||
|
||||
node := nodes[dp.driverIndex].Driver
|
||||
if node.IsMobyDriver() {
|
||||
for _, e := range so.Exports {
|
||||
if e.Type == "moby" && e.Attrs["push"] != "" {
|
||||
if ok, _ := strconv.ParseBool(e.Attrs["push"]); ok {
|
||||
pushNames = e.Attrs["name"]
|
||||
if pushNames == "" {
|
||||
return errors.Errorf("tag is needed when pushing to registry")
|
||||
}
|
||||
pw := progress.ResetTime(pw)
|
||||
pushList := strings.Split(pushNames, ",")
|
||||
for _, name := range pushList {
|
||||
if err := progress.Wrap(fmt.Sprintf("pushing %s with docker", name), pw.Write, func(l progress.SubLogger) error {
|
||||
return pushWithMoby(ctx, node, name, l)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
remoteDigest, err := remoteDigestWithMoby(ctx, node, pushList[0])
|
||||
if err == nil && remoteDigest != "" {
|
||||
// old daemons might not have containerimage.config.digest set
|
||||
// in response so use containerimage.digest value for it if available
|
||||
if _, ok := rr.ExporterResponse[exptypes.ExporterImageConfigDigestKey]; !ok {
|
||||
if v, ok := rr.ExporterResponse[exptypes.ExporterImageDigestKey]; ok {
|
||||
rr.ExporterResponse[exptypes.ExporterImageConfigDigestKey] = v
|
||||
}
|
||||
}
|
||||
rr.ExporterResponse[exptypes.ExporterImageDigestKey] = remoteDigest
|
||||
} else if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
eg.Go(func() (err error) {
|
||||
ctx := baseCtx
|
||||
defer func() {
|
||||
if span != nil {
|
||||
tracing.FinishWithError(span, err)
|
||||
}
|
||||
}()
|
||||
pw := progress.WithPrefix(w, "default", false)
|
||||
wg.Wait()
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return ctx.Err()
|
||||
default:
|
||||
if err := eg2.Wait(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
respMu.Lock()
|
||||
@@ -976,7 +1158,24 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opt map[s
|
||||
descs := make([]specs.Descriptor, 0, len(res))
|
||||
|
||||
for _, r := range res {
|
||||
s, ok := r.ExporterResponse[exptypes.ExporterImageDigestKey]
|
||||
s, ok := r.ExporterResponse[exptypes.ExporterImageDescriptorKey]
|
||||
if ok {
|
||||
dt, err := base64.StdEncoding.DecodeString(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var desc specs.Descriptor
|
||||
if err := json.Unmarshal(dt, &desc); err != nil {
|
||||
return errors.Wrapf(err, "failed to unmarshal descriptor %s", s)
|
||||
}
|
||||
descs = append(descs, desc)
|
||||
continue
|
||||
}
|
||||
// This is fallback for some very old buildkit versions.
|
||||
// Note that the mediatype isn't really correct as most of the time it is image manifest and
|
||||
// not manifest list but actually both are handled because for Docker mediatypes the
|
||||
// mediatype value in the Accept header does not seem to matter.
|
||||
s, ok = r.ExporterResponse[exptypes.ExporterImageDigestKey]
|
||||
if ok {
|
||||
descs = append(descs, specs.Descriptor{
|
||||
Digest: digest.Digest(s),
|
||||
@@ -1060,174 +1259,6 @@ func BuildWithResultHandler(ctx context.Context, nodes []builder.Node, opt map[s
|
||||
return nil
|
||||
})
|
||||
|
||||
for i, dp := range dps {
|
||||
so := *dp.so
|
||||
if multiDriver {
|
||||
for i, e := range so.Exports {
|
||||
switch e.Type {
|
||||
case "oci", "tar":
|
||||
return errors.Errorf("%s for multi-node builds currently not supported", e.Type)
|
||||
case "image":
|
||||
if pushNames == "" && e.Attrs["push"] != "" {
|
||||
if ok, _ := strconv.ParseBool(e.Attrs["push"]); ok {
|
||||
pushNames = e.Attrs["name"]
|
||||
if pushNames == "" {
|
||||
return errors.Errorf("tag is needed when pushing to registry")
|
||||
}
|
||||
names, err := toRepoOnly(e.Attrs["name"])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if ok, _ := strconv.ParseBool(e.Attrs["registry.insecure"]); ok {
|
||||
insecurePush = true
|
||||
}
|
||||
e.Attrs["name"] = names
|
||||
e.Attrs["push-by-digest"] = "true"
|
||||
so.Exports[i].Attrs = e.Attrs
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func(i int, dp driverPair, so client.SolveOpt) {
|
||||
pw := progress.WithPrefix(w, k, multiTarget)
|
||||
|
||||
c := clients[dp.driverIndex]
|
||||
eg.Go(func() error {
|
||||
pw = progress.ResetTime(pw)
|
||||
defer wg.Done()
|
||||
|
||||
if err := waitContextDeps(ctx, dp.driverIndex, results, &so); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
frontendInputs := make(map[string]*pb.Definition)
|
||||
for key, st := range so.FrontendInputs {
|
||||
def, err := st.Marshal(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
frontendInputs[key] = def.ToPB()
|
||||
}
|
||||
|
||||
req := gateway.SolveRequest{
|
||||
Frontend: so.Frontend,
|
||||
FrontendOpt: so.FrontendAttrs,
|
||||
FrontendInputs: frontendInputs,
|
||||
}
|
||||
so.Frontend = ""
|
||||
so.FrontendAttrs = attestations.Filter(so.FrontendAttrs)
|
||||
so.FrontendInputs = nil
|
||||
|
||||
ch, done := progress.NewChannel(pw)
|
||||
defer func() { <-done }()
|
||||
|
||||
cc := c
|
||||
var printRes map[string][]byte
|
||||
rr, err := c.Build(ctx, so, "buildx", func(ctx context.Context, c gateway.Client) (*gateway.Result, error) {
|
||||
var isFallback bool
|
||||
var origErr error
|
||||
for {
|
||||
if opt.PrintFunc != nil {
|
||||
if _, ok := req.FrontendOpt["frontend.caps"]; !ok {
|
||||
req.FrontendOpt["frontend.caps"] = "moby.buildkit.frontend.subrequests+forward"
|
||||
} else {
|
||||
req.FrontendOpt["frontend.caps"] += ",moby.buildkit.frontend.subrequests+forward"
|
||||
}
|
||||
req.FrontendOpt["requestid"] = "frontend." + opt.PrintFunc.Name
|
||||
if isFallback {
|
||||
req.FrontendOpt["build-arg:BUILDKIT_SYNTAX"] = printFallbackImage
|
||||
}
|
||||
}
|
||||
res, err := c.Solve(ctx, req)
|
||||
if err != nil {
|
||||
if origErr != nil {
|
||||
return nil, err
|
||||
}
|
||||
var reqErr *errdefs.UnsupportedSubrequestError
|
||||
if !isFallback {
|
||||
if errors.As(err, &reqErr) {
|
||||
switch reqErr.Name {
|
||||
case "frontend.outline", "frontend.targets":
|
||||
isFallback = true
|
||||
origErr = err
|
||||
continue
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
// buildkit v0.8 vendored in Docker 20.10 does not support typed errors
|
||||
if strings.Contains(err.Error(), "unsupported request frontend.outline") || strings.Contains(err.Error(), "unsupported request frontend.targets") {
|
||||
isFallback = true
|
||||
origErr = err
|
||||
continue
|
||||
}
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
if opt.PrintFunc != nil {
|
||||
printRes = res.Metadata
|
||||
}
|
||||
results.Set(resultKey(dp.driverIndex, k), res)
|
||||
if resultHandleFunc != nil {
|
||||
resultHandleFunc(dp.driverIndex, &ResultContext{cc, res})
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
}, ch)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
res[i] = rr
|
||||
|
||||
if rr.ExporterResponse == nil {
|
||||
rr.ExporterResponse = map[string]string{}
|
||||
}
|
||||
for k, v := range printRes {
|
||||
rr.ExporterResponse[k] = string(v)
|
||||
}
|
||||
|
||||
node := nodes[dp.driverIndex].Driver
|
||||
if node.IsMobyDriver() {
|
||||
for _, e := range so.Exports {
|
||||
if e.Type == "moby" && e.Attrs["push"] != "" {
|
||||
if ok, _ := strconv.ParseBool(e.Attrs["push"]); ok {
|
||||
pushNames = e.Attrs["name"]
|
||||
if pushNames == "" {
|
||||
return errors.Errorf("tag is needed when pushing to registry")
|
||||
}
|
||||
pw := progress.ResetTime(pw)
|
||||
pushList := strings.Split(pushNames, ",")
|
||||
for _, name := range pushList {
|
||||
if err := progress.Wrap(fmt.Sprintf("pushing %s with docker", name), pw.Write, func(l progress.SubLogger) error {
|
||||
return pushWithMoby(ctx, node, name, l)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
remoteDigest, err := remoteDigestWithMoby(ctx, node, pushList[0])
|
||||
if err == nil && remoteDigest != "" {
|
||||
// old daemons might not have containerimage.config.digest set
|
||||
// in response so use containerimage.digest value for it if available
|
||||
if _, ok := rr.ExporterResponse[exptypes.ExporterImageConfigDigestKey]; !ok {
|
||||
if v, ok := rr.ExporterResponse[exptypes.ExporterImageDigestKey]; ok {
|
||||
rr.ExporterResponse[exptypes.ExporterImageConfigDigestKey] = v
|
||||
}
|
||||
}
|
||||
rr.ExporterResponse[exptypes.ExporterImageDigestKey] = remoteDigest
|
||||
} else if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
}(i, dp, so)
|
||||
}
|
||||
|
||||
return nil
|
||||
}(k)
|
||||
if err != nil {
|
||||
@@ -1486,26 +1517,63 @@ func LoadInputs(ctx context.Context, d driver.Driver, inp Inputs, pw progress.Wr
// handle OCI layout
if strings.HasPrefix(v.Path, "oci-layout://") {
pathAlone := strings.TrimPrefix(v.Path, "oci-layout://")
parts := strings.SplitN(pathAlone, "@", 2)
if len(parts) != 2 {
return nil, errors.Errorf("invalid oci-layout context %s, must be oci-layout:///path/to/layout@sha256:hash", v.Path)
localPath := pathAlone
localPath, dig, hasDigest := strings.Cut(localPath, "@")
localPath, tag, hasTag := strings.Cut(localPath, ":")
if !hasTag {
tag = "latest"
hasTag = true
}
localPath := parts[0]
dgst, err := digest.Parse(parts[1])
idx := ociindex.NewStoreIndex(localPath)
if !hasDigest {
// lookup by name
desc, err := idx.Get(tag)
if err != nil {
return nil, err
}
if desc != nil {
dig = string(desc.Digest)
hasDigest = true
}
}
if !hasDigest {
// lookup single
desc, err := idx.GetSingle()
if err != nil {
return nil, err
}
if desc != nil {
dig = string(desc.Digest)
hasDigest = true
}
}
if !hasDigest {
return nil, errors.Errorf("oci-layout reference %q could not be resolved", v.Path)
}
_, err := digest.Parse(dig)
if err != nil {
return nil, errors.Wrapf(err, "invalid oci-layout context %s, does not have proper hash, must be oci-layout:///path/to/layout@sha256:hash", v.Path)
return nil, errors.Wrapf(err, "invalid oci-layout digest %s", dig)
}

store, err := local.NewStore(localPath)
if err != nil {
return nil, errors.Wrapf(err, "invalid store at %s", localPath)
}
// now we can add it
storeName := identity.NewID()
if target.OCIStores == nil {
target.OCIStores = map[string]content.Store{}
}
target.OCIStores[k] = store
target.OCIStores[storeName] = store

target.FrontendAttrs["context:"+k] = fmt.Sprintf("oci-layout:%s@%s", k, dgst.String())
layout := "oci-layout://" + storeName
if hasTag {
layout += ":" + tag
}
if hasDigest {
layout += "@" + dig
}

target.FrontendAttrs["context:"+k] = layout
continue
}
st, err := os.Stat(v.Path)
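The new lookup above accepts both `oci-layout:///path:tag` and `oci-layout:///path@sha256:digest` references. A minimal sketch of just the reference parsing follows; `parseOCILayoutRef` is an illustrative name rather than a buildx function, and the real code additionally resolves a missing digest through `ociindex` and registers the store under a generated name.

```go
package main

import (
	"fmt"
	"strings"
)

// parseOCILayoutRef splits an "oci-layout://<path>[:<tag>][@<digest>]" build
// context reference into its parts, defaulting the tag to "latest" when none
// is given, as the updated LoadInputs code does. This sketch stops at the
// parsing step and does not consult the layout's index.
func parseOCILayoutRef(ref string) (path, tag, dig string) {
	rest := strings.TrimPrefix(ref, "oci-layout://")
	rest, dig, _ = strings.Cut(rest, "@") // optional @sha256:... digest
	path, tag, hasTag := strings.Cut(rest, ":")
	if !hasTag {
		tag = "latest"
	}
	return path, tag, dig
}

func main() {
	fmt.Println(parseOCILayoutRef("oci-layout:///path/to/layout:v1"))
	fmt.Println(parseOCILayoutRef("oci-layout:///path/to/layout@sha256:abcd12345"))
}
```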
|
||||
|
72
build/git.go
@@ -3,18 +3,19 @@ package build
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/docker/buildx/util/gitutil"
|
||||
specs "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
"github.com/sirupsen/logrus"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
const DockerfileLabel = "com.docker.image.source.entrypoint"
|
||||
|
||||
func getGitAttributes(ctx context.Context, contextPath string, dockerfilePath string) (res map[string]string) {
|
||||
func getGitAttributes(ctx context.Context, contextPath string, dockerfilePath string) (res map[string]string, _ error) {
|
||||
res = make(map[string]string)
|
||||
if contextPath == "" {
|
||||
return
|
||||
@@ -24,13 +25,13 @@ func getGitAttributes(ctx context.Context, contextPath string, dockerfilePath st
if v, ok := os.LookupEnv("BUILDX_GIT_LABELS"); ok {
if v == "full" { // backward compatibility with old "full" mode
setGitLabels = true
} else if v, _ := strconv.ParseBool(v); v {
} else if v, err := strconv.ParseBool(v); err == nil {
setGitLabels = v
}
}
setGitInfo := true
if v, ok := os.LookupEnv("BUILDX_GIT_INFO"); ok {
if v, _ := strconv.ParseBool(v); v {
if v, err := strconv.ParseBool(v); err == nil {
setGitInfo = v
}
}
|
||||
@@ -48,27 +49,48 @@ func getGitAttributes(ctx context.Context, contextPath string, dockerfilePath st
|
||||
wd, _ = filepath.Abs(filepath.Join(cwd, contextPath))
|
||||
}
|
||||
|
||||
gitc := gitutil.New(gitutil.WithContext(ctx), gitutil.WithWorkingDir(wd))
|
||||
if !gitc.IsInsideWorkTree() {
|
||||
logrus.Warnf("Unable to determine Git information")
|
||||
gitc, err := gitutil.New(gitutil.WithContext(ctx), gitutil.WithWorkingDir(wd))
|
||||
if err != nil {
|
||||
if st, err := os.Stat(path.Join(wd, ".git")); err == nil && st.IsDir() {
|
||||
return res, errors.New("buildx: git was not found in the system. Current commit information was not captured by the build")
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
var resRevision, resSource, resDockerfilePath string
|
||||
if !gitc.IsInsideWorkTree() {
|
||||
if st, err := os.Stat(path.Join(wd, ".git")); err == nil && st.IsDir() {
|
||||
return res, errors.New("buildx: failed to read current commit information with git rev-parse --is-inside-work-tree")
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
if sha, err := gitc.FullCommit(); err == nil && sha != "" {
|
||||
resRevision = sha
|
||||
if sha, err := gitc.FullCommit(); err != nil {
|
||||
return res, errors.Wrapf(err, "buildx: failed to get git commit")
|
||||
} else if sha != "" {
|
||||
if gitc.IsDirty() {
|
||||
resRevision += "-dirty"
|
||||
sha += "-dirty"
|
||||
}
|
||||
if setGitLabels {
|
||||
res["label:"+specs.AnnotationRevision] = sha
|
||||
}
|
||||
if setGitInfo {
|
||||
res["vcs:revision"] = sha
|
||||
}
|
||||
}
|
||||
|
||||
if rurl, err := gitc.RemoteURL(); err == nil && rurl != "" {
|
||||
resSource = rurl
|
||||
if setGitLabels {
|
||||
res["label:"+specs.AnnotationSource] = rurl
|
||||
}
|
||||
if setGitInfo {
|
||||
res["vcs:source"] = rurl
|
||||
}
|
||||
}
|
||||
|
||||
if setGitLabels {
|
||||
if root, err := gitc.RootDir(); err == nil && root != "" {
|
||||
if root, err := gitc.RootDir(); err != nil {
|
||||
return res, errors.Wrapf(err, "buildx: failed to get git root dir")
|
||||
} else if root != "" {
|
||||
if dockerfilePath == "" {
|
||||
dockerfilePath = filepath.Join(wd, "Dockerfile")
|
||||
}
|
||||
@@ -78,32 +100,10 @@ func getGitAttributes(ctx context.Context, contextPath string, dockerfilePath st
|
||||
}
|
||||
dockerfilePath, _ = filepath.Rel(root, dockerfilePath)
|
||||
if !strings.HasPrefix(dockerfilePath, "..") {
|
||||
resDockerfilePath = dockerfilePath
|
||||
res["label:"+DockerfileLabel] = dockerfilePath
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if resSource != "" {
|
||||
if setGitLabels {
|
||||
res["label:"+specs.AnnotationSource] = resSource
|
||||
}
|
||||
if setGitInfo {
|
||||
res["vcs:source"] = resSource
|
||||
}
|
||||
}
|
||||
if resRevision != "" {
|
||||
if setGitLabels {
|
||||
res["label:"+specs.AnnotationRevision] = resRevision
|
||||
}
|
||||
if setGitInfo {
|
||||
res["vcs:revision"] = resRevision
|
||||
}
|
||||
}
|
||||
if resDockerfilePath != "" {
|
||||
if setGitLabels {
|
||||
res["label:"+DockerfileLabel] = resDockerfilePath
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
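For reference, a minimal sketch of how the `BUILDX_GIT_LABELS` / `BUILDX_GIT_INFO` toggles in the hunks above behave, rewritten as a standalone program. The helper name `gitAttributeToggles` is illustrative only; the real logic lives inside `getGitAttributes`, which also gathers the commit, remote URL and Dockerfile path.

```go
package main

import (
	"fmt"
	"os"
	"strconv"
)

// gitAttributeToggles mirrors the env handling shown above: labels are off by
// default (the legacy "full" value still enables them), VCS info is on by
// default, and values that do not parse as booleans are ignored.
func gitAttributeToggles() (setGitLabels, setGitInfo bool) {
	setGitInfo = true
	if v, ok := os.LookupEnv("BUILDX_GIT_LABELS"); ok {
		if v == "full" { // backward compatibility with the old "full" mode
			setGitLabels = true
		} else if b, err := strconv.ParseBool(v); err == nil {
			setGitLabels = b
		}
	}
	if v, ok := os.LookupEnv("BUILDX_GIT_INFO"); ok {
		if b, err := strconv.ParseBool(v); err == nil {
			setGitInfo = b
		}
	}
	return setGitLabels, setGitInfo
}

func main() {
	os.Setenv("BUILDX_GIT_LABELS", "true")
	labels, info := gitAttributeToggles()
	fmt.Println(labels, info) // true true
}
```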
|
||||
|
@@ -3,6 +3,7 @@ package build
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
@@ -10,21 +11,42 @@ import (
|
||||
"github.com/docker/buildx/util/gitutil"
|
||||
specs "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func setupTest(tb testing.TB) {
|
||||
gitutil.Mktmp(tb)
|
||||
gitutil.GitInit(tb)
|
||||
|
||||
c, err := gitutil.New()
|
||||
require.NoError(tb, err)
|
||||
gitutil.GitInit(c, tb)
|
||||
|
||||
df := []byte("FROM alpine:latest\n")
|
||||
assert.NoError(tb, os.WriteFile("Dockerfile", df, 0644))
|
||||
gitutil.GitAdd(tb, "Dockerfile")
|
||||
gitutil.GitCommit(tb, "initial commit")
|
||||
|
||||
gitutil.GitAdd(c, tb, "Dockerfile")
|
||||
gitutil.GitCommit(c, tb, "initial commit")
|
||||
gitutil.GitSetRemote(c, tb, "origin", "git@github.com:docker/buildx.git")
|
||||
}
|
||||
|
||||
func TestGetGitAttributesNotGitRepo(t *testing.T) {
|
||||
_, err := getGitAttributes(context.Background(), t.TempDir(), "Dockerfile")
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
func TestGetGitAttributesBadGitRepo(t *testing.T) {
|
||||
tmp := t.TempDir()
|
||||
require.NoError(t, os.MkdirAll(path.Join(tmp, ".git"), 0755))
|
||||
|
||||
_, err := getGitAttributes(context.Background(), tmp, "Dockerfile")
|
||||
assert.Error(t, err)
|
||||
}
|
||||
|
||||
func TestGetGitAttributesNoContext(t *testing.T) {
|
||||
setupTest(t)
|
||||
|
||||
gitattrs := getGitAttributes(context.Background(), "", "Dockerfile")
|
||||
gitattrs, err := getGitAttributes(context.Background(), "", "Dockerfile")
|
||||
assert.NoError(t, err)
|
||||
assert.Empty(t, gitattrs)
|
||||
}
|
||||
|
||||
@@ -41,14 +63,22 @@ func TestGetGitAttributes(t *testing.T) {
|
||||
envGitInfo: "",
|
||||
expected: []string{
|
||||
"vcs:revision",
|
||||
"vcs:source",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "none",
|
||||
envGitLabels: "false",
|
||||
envGitInfo: "false",
|
||||
expected: []string{},
|
||||
},
|
||||
{
|
||||
name: "gitinfo",
|
||||
envGitLabels: "false",
|
||||
envGitInfo: "true",
|
||||
expected: []string{
|
||||
"vcs:revision",
|
||||
"vcs:source",
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -58,6 +88,7 @@ func TestGetGitAttributes(t *testing.T) {
|
||||
expected: []string{
|
||||
"label:" + DockerfileLabel,
|
||||
"label:" + specs.AnnotationRevision,
|
||||
"label:" + specs.AnnotationSource,
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -67,7 +98,9 @@ func TestGetGitAttributes(t *testing.T) {
|
||||
expected: []string{
|
||||
"label:" + DockerfileLabel,
|
||||
"label:" + specs.AnnotationRevision,
|
||||
"label:" + specs.AnnotationSource,
|
||||
"vcs:revision",
|
||||
"vcs:source",
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -81,52 +114,42 @@ func TestGetGitAttributes(t *testing.T) {
|
||||
if tt.envGitInfo != "" {
|
||||
t.Setenv("BUILDX_GIT_INFO", tt.envGitInfo)
|
||||
}
|
||||
gitattrs := getGitAttributes(context.Background(), ".", "Dockerfile")
|
||||
gitattrs, err := getGitAttributes(context.Background(), ".", "Dockerfile")
|
||||
require.NoError(t, err)
|
||||
for _, e := range tt.expected {
|
||||
assert.Contains(t, gitattrs, e)
|
||||
assert.NotEmpty(t, gitattrs[e])
|
||||
if e == "label:"+DockerfileLabel {
|
||||
assert.Equal(t, "Dockerfile", gitattrs[e])
|
||||
} else if e == "label:"+specs.AnnotationSource || e == "vcs:source" {
|
||||
assert.Equal(t, "git@github.com:docker/buildx.git", gitattrs[e])
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetGitAttributesWithRemote(t *testing.T) {
|
||||
setupTest(t)
|
||||
gitutil.GitSetRemote(t, "git@github.com:docker/buildx.git")
|
||||
|
||||
t.Setenv("BUILDX_GIT_LABELS", "true")
|
||||
gitattrs := getGitAttributes(context.Background(), ".", "Dockerfile")
|
||||
assert.Equal(t, 5, len(gitattrs))
|
||||
assert.Contains(t, gitattrs, "label:"+DockerfileLabel)
|
||||
assert.Equal(t, "Dockerfile", gitattrs["label:"+DockerfileLabel])
|
||||
assert.Contains(t, gitattrs, "label:"+specs.AnnotationRevision)
|
||||
assert.NotEmpty(t, gitattrs["label:"+specs.AnnotationRevision])
|
||||
assert.Contains(t, gitattrs, "label:"+specs.AnnotationSource)
|
||||
assert.Equal(t, "git@github.com:docker/buildx.git", gitattrs["label:"+specs.AnnotationSource])
|
||||
assert.Contains(t, gitattrs, "vcs:revision")
|
||||
assert.NotEmpty(t, gitattrs["vcs:revision"])
|
||||
assert.Contains(t, gitattrs, "vcs:source")
|
||||
assert.Equal(t, "git@github.com:docker/buildx.git", gitattrs["vcs:source"])
|
||||
}
|
||||
|
||||
func TestGetGitAttributesDirty(t *testing.T) {
|
||||
setupTest(t)
|
||||
|
||||
// make a change to test dirty flag
|
||||
df := []byte("FROM alpine:edge\n")
|
||||
assert.NoError(t, os.Mkdir("dir", 0755))
|
||||
assert.NoError(t, os.WriteFile(filepath.Join("dir", "Dockerfile"), df, 0644))
|
||||
require.NoError(t, os.Mkdir("dir", 0755))
|
||||
require.NoError(t, os.WriteFile(filepath.Join("dir", "Dockerfile"), df, 0644))
|
||||
|
||||
t.Setenv("BUILDX_GIT_LABELS", "true")
|
||||
gitattrs := getGitAttributes(context.Background(), ".", "Dockerfile")
|
||||
assert.Equal(t, 3, len(gitattrs))
|
||||
gitattrs, _ := getGitAttributes(context.Background(), ".", "Dockerfile")
|
||||
assert.Equal(t, 5, len(gitattrs))
|
||||
|
||||
assert.Contains(t, gitattrs, "label:"+DockerfileLabel)
|
||||
assert.Equal(t, "Dockerfile", gitattrs["label:"+DockerfileLabel])
|
||||
assert.Contains(t, gitattrs, "label:"+specs.AnnotationSource)
|
||||
assert.Equal(t, "git@github.com:docker/buildx.git", gitattrs["label:"+specs.AnnotationSource])
|
||||
assert.Contains(t, gitattrs, "label:"+specs.AnnotationRevision)
|
||||
assert.True(t, strings.HasSuffix(gitattrs["label:"+specs.AnnotationRevision], "-dirty"))
|
||||
|
||||
assert.Contains(t, gitattrs, "vcs:source")
|
||||
assert.Equal(t, "git@github.com:docker/buildx.git", gitattrs["vcs:source"])
|
||||
assert.Contains(t, gitattrs, "vcs:revision")
|
||||
assert.True(t, strings.HasSuffix(gitattrs["vcs:revision"], "-dirty"))
|
||||
}
|
||||
|
@@ -219,9 +219,9 @@ func (b *Builder) Factory(ctx context.Context) (_ driver.Factory, err error) {
|
||||
// driver for a docker context and allows falling back to a
|
||||
// docker-container driver for older daemon that doesn't support
|
||||
// buildkit (< 18.06).
|
||||
ep := b.nodes[0].Endpoint
|
||||
ep := b.NodeGroup.Nodes[0].Endpoint
|
||||
var dockerapi *dockerutil.ClientAPI
|
||||
dockerapi, err = dockerutil.NewClientAPI(b.opts.dockerCli, b.nodes[0].Endpoint)
|
||||
dockerapi, err = dockerutil.NewClientAPI(b.opts.dockerCli, b.NodeGroup.Nodes[0].Endpoint)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
@@ -62,6 +62,7 @@ func (b *Builder) LoadNodes(ctx context.Context, withData bool) (_ []Node, err e
|
||||
node := Node{
|
||||
Node: n,
|
||||
ProxyConfig: storeutil.GetProxyConfig(b.opts.dockerCli),
|
||||
Platforms: n.Platforms,
|
||||
}
|
||||
defer func() {
|
||||
b.nodes[i] = node
|
||||
|
@@ -470,7 +470,7 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions) *cobra.Command {
|
||||
flags := cmd.Flags()
|
||||
|
||||
flags.StringSliceVar(&options.extraHosts, "add-host", []string{}, `Add a custom host-to-IP mapping (format: "host:ip")`)
|
||||
flags.SetAnnotation("add-host", annotation.ExternalURL, []string{"https://docs.docker.com/engine/reference/commandline/build/#add-entries-to-container-hosts-file---add-host"})
|
||||
flags.SetAnnotation("add-host", annotation.ExternalURL, []string{"https://docs.docker.com/engine/reference/commandline/build/#add-host"})
|
||||
|
||||
flags.StringSliceVar(&options.allow, "allow", []string{}, `Allow extra privileged entitlement (e.g., "network.host", "security.insecure")`)
|
||||
|
||||
@@ -481,12 +481,12 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions) *cobra.Command {
|
||||
flags.StringArrayVar(&options.cacheTo, "cache-to", []string{}, `Cache export destinations (e.g., "user/app:cache", "type=local,dest=path/to/dir")`)
|
||||
|
||||
flags.StringVar(&options.cgroupParent, "cgroup-parent", "", "Optional parent cgroup for the container")
|
||||
flags.SetAnnotation("cgroup-parent", annotation.ExternalURL, []string{"https://docs.docker.com/engine/reference/commandline/build/#use-a-custom-parent-cgroup---cgroup-parent"})
|
||||
flags.SetAnnotation("cgroup-parent", annotation.ExternalURL, []string{"https://docs.docker.com/engine/reference/commandline/build/#cgroup-parent"})
|
||||
|
||||
flags.StringArrayVar(&options.contexts, "build-context", []string{}, "Additional build contexts (e.g., name=path)")
|
||||
|
||||
flags.StringVarP(&options.dockerfileName, "file", "f", "", `Name of the Dockerfile (default: "PATH/Dockerfile")`)
|
||||
flags.SetAnnotation("file", annotation.ExternalURL, []string{"https://docs.docker.com/engine/reference/commandline/build/#specify-a-dockerfile--f"})
|
||||
flags.SetAnnotation("file", annotation.ExternalURL, []string{"https://docs.docker.com/engine/reference/commandline/build/#file"})
|
||||
|
||||
flags.StringVar(&options.imageIDFile, "iidfile", "", "Write the image ID to the file")
|
||||
|
||||
@@ -517,10 +517,10 @@ func buildCmd(dockerCli command.Cli, rootOpts *rootOptions) *cobra.Command {
|
||||
flags.StringArrayVar(&options.ssh, "ssh", []string{}, `SSH agent socket or keys to expose to the build (format: "default|<id>[=<socket>|<key>[,<key>]]")`)
|
||||
|
||||
flags.StringArrayVarP(&options.tags, "tag", "t", []string{}, `Name and optionally a tag (format: "name:tag")`)
|
||||
flags.SetAnnotation("tag", annotation.ExternalURL, []string{"https://docs.docker.com/engine/reference/commandline/build/#tag-an-image--t"})
|
||||
flags.SetAnnotation("tag", annotation.ExternalURL, []string{"https://docs.docker.com/engine/reference/commandline/build/#tag"})
|
||||
|
||||
flags.StringVar(&options.target, "target", "", "Set the target build stage to build")
|
||||
flags.SetAnnotation("target", annotation.ExternalURL, []string{"https://docs.docker.com/engine/reference/commandline/build/#specifying-target-build-stage---target"})
|
||||
flags.SetAnnotation("target", annotation.ExternalURL, []string{"https://docs.docker.com/engine/reference/commandline/build/#target"})
|
||||
|
||||
flags.Var(options.ulimits, "ulimit", "Ulimit options")
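The flag changes above only swap the documentation anchors attached to each flag. A hedged sketch of the underlying pattern follows, assuming the `annotation` package is `github.com/docker/cli-docs-tool/annotation` as in the buildx codebase; the flag name and URL are placeholders.

```go
package main

import (
	"fmt"

	"github.com/docker/cli-docs-tool/annotation"
	"github.com/spf13/pflag"
)

func main() {
	flags := pflag.NewFlagSet("build", pflag.ContinueOnError)

	var target string
	flags.StringVar(&target, "target", "", "Set the target build stage to build")

	// Attach the external docs URL as a flag annotation; the docs generator
	// reads it back to link the flag to its reference page.
	_ = flags.SetAnnotation("target", annotation.ExternalURL,
		[]string{"https://docs.docker.com/engine/reference/commandline/build/#target"})

	fmt.Println(flags.Lookup("target").Annotations[annotation.ExternalURL])
}
```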
|
||||
|
||||
|
@@ -141,9 +141,6 @@ func rmAllInactive(ctx context.Context, txn *store.Txn, dockerCli command.Cli, i
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "cannot load %s", b.Name)
|
||||
}
|
||||
if cb := b.ContextName(); cb != "" {
|
||||
return errors.Errorf("context builder cannot be removed, run `docker context rm %s` to remove this context", cb)
|
||||
}
|
||||
if b.Dynamic {
|
||||
return nil
|
||||
}
|
||||
|
@@ -17,7 +17,6 @@ target "_common" {
|
||||
args = {
|
||||
GO_VERSION = GO_VERSION
|
||||
BUILDKIT_CONTEXT_KEEP_GIT_DIR = 1
|
||||
BUILDX_EXPERIMENTAL = 1
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,6 +45,7 @@ target "validate-docs" {
|
||||
inherits = ["_common"]
|
||||
args = {
|
||||
FORMATS = DOCS_FORMATS
|
||||
BUILDX_EXPERIMENTAL = 1 // enables experimental cmds/flags for docs generation
|
||||
}
|
||||
dockerfile = "./hack/dockerfiles/docs.Dockerfile"
|
||||
target = "validate"
|
||||
@@ -70,6 +70,7 @@ target "update-docs" {
|
||||
inherits = ["_common"]
|
||||
args = {
|
||||
FORMATS = DOCS_FORMATS
|
||||
BUILDX_EXPERIMENTAL = 1 // enables experimental cmds/flags for docs generation
|
||||
}
|
||||
dockerfile = "./hack/dockerfiles/docs.Dockerfile"
|
||||
target = "update"
|
||||
|
@@ -1,3 +1,3 @@
# Defining additional build contexts and linking targets

Moved to [docs.docker.com](https://docs.docker.com/build/customize/bake/build-contexts)
Moved to [docs.docker.com](https://docs.docker.com/build/bake/build-contexts)

@@ -1,3 +1,3 @@
# Building from Compose file

Moved to [docs.docker.com](https://docs.docker.com/build/customize/bake/compose-file)
Moved to [docs.docker.com](https://docs.docker.com/build/bake/compose-file)

@@ -1,3 +1,3 @@
# Configuring builds

Moved to [docs.docker.com](https://docs.docker.com/build/customize/bake/configuring-build)
Moved to [docs.docker.com](https://docs.docker.com/build/bake/configuring-build)

@@ -1,3 +1,3 @@
# Bake file definition

Moved to [docs.docker.com](https://docs.docker.com/build/customize/bake/file-definition)
Moved to [docs.docker.com](https://docs.docker.com/build/bake/file-definition)

@@ -1,3 +1,3 @@
# User defined HCL functions

Moved to [docs.docker.com](https://docs.docker.com/build/customize/bake/hcl-funcs)
Moved to [docs.docker.com](https://docs.docker.com/build/bake/hcl-funcs)

@@ -1,3 +1,3 @@
# High-level build options with Bake

Moved to [docs.docker.com](https://docs.docker.com/build/customize/bake)
Moved to [docs.docker.com](https://docs.docker.com/build/bake)
|
||||
|
@@ -9,29 +9,29 @@ Extended build capabilities with BuildKit
|
||||
|
||||
### Subcommands
|
||||
|
||||
| Name | Description |
|
||||
| --- | --- |
|
||||
| [`bake`](buildx_bake.md) | Build from a file |
|
||||
| [`build`](buildx_build.md) | Start a build |
|
||||
| [`create`](buildx_create.md) | Create a new builder instance |
|
||||
| [`du`](buildx_du.md) | Disk usage |
|
||||
| [`imagetools`](buildx_imagetools.md) | Commands to work on images in registry |
|
||||
| [`inspect`](buildx_inspect.md) | Inspect current builder instance |
|
||||
| [`install`](buildx_install.md) | Install buildx as a 'docker builder' alias |
|
||||
| [`ls`](buildx_ls.md) | List builder instances |
|
||||
| [`prune`](buildx_prune.md) | Remove build cache |
|
||||
| [`rm`](buildx_rm.md) | Remove a builder instance |
|
||||
| [`stop`](buildx_stop.md) | Stop builder instance |
|
||||
| [`uninstall`](buildx_uninstall.md) | Uninstall the 'docker builder' alias |
|
||||
| [`use`](buildx_use.md) | Set the current builder instance |
|
||||
| [`version`](buildx_version.md) | Show buildx version information |
|
||||
| Name | Description |
|
||||
|:-------------------------------------|:-------------------------------------------|
|
||||
| [`bake`](buildx_bake.md) | Build from a file |
|
||||
| [`build`](buildx_build.md) | Start a build |
|
||||
| [`create`](buildx_create.md) | Create a new builder instance |
|
||||
| [`du`](buildx_du.md) | Disk usage |
|
||||
| [`imagetools`](buildx_imagetools.md) | Commands to work on images in registry |
|
||||
| [`inspect`](buildx_inspect.md) | Inspect current builder instance |
|
||||
| [`install`](buildx_install.md) | Install buildx as a 'docker builder' alias |
|
||||
| [`ls`](buildx_ls.md) | List builder instances |
|
||||
| [`prune`](buildx_prune.md) | Remove build cache |
|
||||
| [`rm`](buildx_rm.md) | Remove a builder instance |
|
||||
| [`stop`](buildx_stop.md) | Stop builder instance |
|
||||
| [`uninstall`](buildx_uninstall.md) | Uninstall the 'docker builder' alias |
|
||||
| [`use`](buildx_use.md) | Set the current builder instance |
|
||||
| [`version`](buildx_version.md) | Show buildx version information |
|
||||
|
||||
|
||||
### Options
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| Name | Type | Default | Description |
|
||||
|:------------------------|:---------|:--------|:-----------------------------------------|
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
|
@@ -13,20 +13,20 @@ Build from a file
|
||||
|
||||
### Options
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| [`-f`](#file), [`--file`](#file) | `stringArray` | | Build definition file |
|
||||
| `--load` | | | Shorthand for `--set=*.output=type=docker` |
|
||||
| `--metadata-file` | `string` | | Write build result metadata to the file |
|
||||
| [`--no-cache`](#no-cache) | | | Do not use cache when building the image |
|
||||
| [`--print`](#print) | | | Print the options without building |
|
||||
| [`--progress`](#progress) | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`). Use plain to show container output |
|
||||
| `--provenance` | `string` | | Shorthand for `--set=*.attest=type=provenance` |
|
||||
| [`--pull`](#pull) | | | Always attempt to pull all referenced images |
|
||||
| `--push` | | | Shorthand for `--set=*.output=type=registry` |
|
||||
| `--sbom` | `string` | | Shorthand for `--set=*.attest=type=sbom` |
|
||||
| [`--set`](#set) | `stringArray` | | Override target value (e.g., `targetpattern.key=value`) |
|
||||
| Name | Type | Default | Description |
|
||||
|:---------------------------------|:--------------|:--------|:-----------------------------------------------------------------------------------------|
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| [`-f`](#file), [`--file`](#file) | `stringArray` | | Build definition file |
|
||||
| `--load` | | | Shorthand for `--set=*.output=type=docker` |
|
||||
| `--metadata-file` | `string` | | Write build result metadata to the file |
|
||||
| [`--no-cache`](#no-cache) | | | Do not use cache when building the image |
|
||||
| [`--print`](#print) | | | Print the options without building |
|
||||
| [`--progress`](#progress) | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`). Use plain to show container output |
|
||||
| [`--provenance`](#provenance) | `string` | | Shorthand for `--set=*.attest=type=provenance` |
|
||||
| [`--pull`](#pull) | | | Always attempt to pull all referenced images |
|
||||
| `--push` | | | Shorthand for `--set=*.output=type=registry` |
|
||||
| [`--sbom`](#sbom) | `string` | | Shorthand for `--set=*.attest=type=sbom` |
|
||||
| [`--set`](#set) | `stringArray` | | Override target value (e.g., `targetpattern.key=value`) |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
@@ -36,7 +36,7 @@ Build from a file
|
||||
Bake is a high-level build command. Each specified target will run in parallel
|
||||
as part of the build.
|
||||
|
||||
Read [High-level build options with Bake](https://docs.docker.com/build/customize/bake/)
|
||||
Read [High-level build options with Bake](https://docs.docker.com/build/bake/)
|
||||
guide for introduction to writing bake files.
|
||||
|
||||
> **Note**
|
||||
@@ -87,7 +87,7 @@ target "db" {
|
||||
$ docker buildx bake -f docker-bake.dev.hcl db webapp-release
|
||||
```
|
||||
|
||||
See our [file definition](https://docs.docker.com/build/customize/bake/file-definition/)
|
||||
See our [file definition](https://docs.docker.com/build/bake/file-definition/)
|
||||
guide for more details.
|
||||
|
||||
### <a name="no-cache"></a> Do not use cache when building the image (--no-cache)
|
||||
@@ -125,10 +125,18 @@ $ docker buildx bake -f docker-bake.hcl --print db
|
||||
|
||||
Same as [`build --progress`](buildx_build.md#progress).
|
||||
|
||||
### <a name="provenance"></a> Create provenance attestations (--provenance)
|
||||
|
||||
Same as [`build --provenance`](buildx_build.md#provenance).
|
||||
|
||||
### <a name="pull"></a> Always attempt to pull a newer version of the image (--pull)
|
||||
|
||||
Same as `build --pull`.
|
||||
|
||||
### <a name="sbom"></a> Create SBOM attestations (--sbom)
|
||||
|
||||
Same as [`build --sbom`](buildx_build.md#sbom).
|
||||
|
||||
### <a name="set"></a> Override target configurations from command line (--set)
|
||||
|
||||
```
|
||||
|
@@ -13,41 +13,41 @@ Start a build
|
||||
|
||||
### Options
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [`--add-host`](https://docs.docker.com/engine/reference/commandline/build/#add-entries-to-container-hosts-file---add-host) | `stringSlice` | | Add a custom host-to-IP mapping (format: `host:ip`) |
|
||||
| [`--allow`](#allow) | `stringSlice` | | Allow extra privileged entitlement (e.g., `network.host`, `security.insecure`) |
|
||||
| `--attest` | `stringArray` | | Attestation parameters (format: `type=sbom,generator=image`) |
|
||||
| [`--build-arg`](#build-arg) | `stringArray` | | Set build-time variables |
|
||||
| [`--build-context`](#build-context) | `stringArray` | | Additional build contexts (e.g., name=path) |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| [`--cache-from`](#cache-from) | `stringArray` | | External cache sources (e.g., `user/app:cache`, `type=local,src=path/to/dir`) |
|
||||
| [`--cache-to`](#cache-to) | `stringArray` | | Cache export destinations (e.g., `user/app:cache`, `type=local,dest=path/to/dir`) |
|
||||
| [`--cgroup-parent`](https://docs.docker.com/engine/reference/commandline/build/#use-a-custom-parent-cgroup---cgroup-parent) | `string` | | Optional parent cgroup for the container |
|
||||
| [`-f`](https://docs.docker.com/engine/reference/commandline/build/#specify-a-dockerfile--f), [`--file`](https://docs.docker.com/engine/reference/commandline/build/#specify-a-dockerfile--f) | `string` | | Name of the Dockerfile (default: `PATH/Dockerfile`) |
|
||||
| `--iidfile` | `string` | | Write the image ID to the file |
|
||||
| `--invoke` | `string` | | Invoke a command after the build [experimental] |
|
||||
| `--label` | `stringArray` | | Set metadata for an image |
|
||||
| [`--load`](#load) | | | Shorthand for `--output=type=docker` |
|
||||
| [`--metadata-file`](#metadata-file) | `string` | | Write build result metadata to the file |
|
||||
| `--network` | `string` | `default` | Set the networking mode for the `RUN` instructions during build |
|
||||
| `--no-cache` | | | Do not use cache when building the image |
|
||||
| `--no-cache-filter` | `stringArray` | | Do not cache specified stages |
|
||||
| [`-o`](#output), [`--output`](#output) | `stringArray` | | Output destination (format: `type=local,dest=path`) |
|
||||
| [`--platform`](#platform) | `stringArray` | | Set target platform for build |
|
||||
| `--print` | `string` | | Print result of information request (e.g., outline, targets) [experimental] |
|
||||
| [`--progress`](#progress) | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`). Use plain to show container output |
|
||||
| `--provenance` | `string` | | Shorthand for `--set=*.attest=type=provenance` |
|
||||
| `--pull` | | | Always attempt to pull all referenced images |
|
||||
| [`--push`](#push) | | | Shorthand for `--output=type=registry` |
|
||||
| `-q`, `--quiet` | | | Suppress the build output and print image ID on success |
|
||||
| `--sbom` | `string` | | Shorthand for `--attest=type=sbom` |
|
||||
| [`--secret`](#secret) | `stringArray` | | Secret to expose to the build (format: `id=mysecret[,src=/local/secret]`) |
|
||||
| [`--shm-size`](#shm-size) | `bytes` | `0` | Size of `/dev/shm` |
|
||||
| [`--ssh`](#ssh) | `stringArray` | | SSH agent socket or keys to expose to the build (format: `default\|<id>[=<socket>\|<key>[,<key>]]`) |
|
||||
| [`-t`](https://docs.docker.com/engine/reference/commandline/build/#tag-an-image--t), [`--tag`](https://docs.docker.com/engine/reference/commandline/build/#tag-an-image--t) | `stringArray` | | Name and optionally a tag (format: `name:tag`) |
|
||||
| [`--target`](https://docs.docker.com/engine/reference/commandline/build/#specifying-target-build-stage---target) | `string` | | Set the target build stage to build |
|
||||
| [`--ulimit`](#ulimit) | `ulimit` | | Ulimit options |
|
||||
| Name | Type | Default | Description |
|
||||
|:-------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------|:----------|:----------------------------------------------------------------------------------------------------|
|
||||
| [`--add-host`](https://docs.docker.com/engine/reference/commandline/build/#add-host) | `stringSlice` | | Add a custom host-to-IP mapping (format: `host:ip`) |
|
||||
| [`--allow`](#allow) | `stringSlice` | | Allow extra privileged entitlement (e.g., `network.host`, `security.insecure`) |
|
||||
| [`--attest`](#attest) | `stringArray` | | Attestation parameters (format: `type=sbom,generator=image`) |
|
||||
| [`--build-arg`](#build-arg) | `stringArray` | | Set build-time variables |
|
||||
| [`--build-context`](#build-context) | `stringArray` | | Additional build contexts (e.g., name=path) |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| [`--cache-from`](#cache-from) | `stringArray` | | External cache sources (e.g., `user/app:cache`, `type=local,src=path/to/dir`) |
|
||||
| [`--cache-to`](#cache-to) | `stringArray` | | Cache export destinations (e.g., `user/app:cache`, `type=local,dest=path/to/dir`) |
|
||||
| [`--cgroup-parent`](https://docs.docker.com/engine/reference/commandline/build/#cgroup-parent) | `string` | | Optional parent cgroup for the container |
|
||||
| [`-f`](https://docs.docker.com/engine/reference/commandline/build/#file), [`--file`](https://docs.docker.com/engine/reference/commandline/build/#file) | `string` | | Name of the Dockerfile (default: `PATH/Dockerfile`) |
|
||||
| `--iidfile` | `string` | | Write the image ID to the file |
|
||||
| `--invoke` | `string` | | Invoke a command after the build [experimental] |
|
||||
| `--label` | `stringArray` | | Set metadata for an image |
|
||||
| [`--load`](#load) | | | Shorthand for `--output=type=docker` |
|
||||
| [`--metadata-file`](#metadata-file) | `string` | | Write build result metadata to the file |
|
||||
| `--network` | `string` | `default` | Set the networking mode for the `RUN` instructions during build |
|
||||
| `--no-cache` | | | Do not use cache when building the image |
|
||||
| `--no-cache-filter` | `stringArray` | | Do not cache specified stages |
|
||||
| [`-o`](#output), [`--output`](#output) | `stringArray` | | Output destination (format: `type=local,dest=path`) |
|
||||
| [`--platform`](#platform) | `stringArray` | | Set target platform for build |
|
||||
| `--print` | `string` | | Print result of information request (e.g., outline, targets) [experimental] |
|
||||
| [`--progress`](#progress) | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`). Use plain to show container output |
|
||||
| [`--provenance`](#provenance) | `string` | | Shorthand for `--attest=type=provenance` |
|
||||
| `--pull` | | | Always attempt to pull all referenced images |
|
||||
| [`--push`](#push) | | | Shorthand for `--output=type=registry` |
|
||||
| `-q`, `--quiet` | | | Suppress the build output and print image ID on success |
|
||||
| [`--sbom`](#sbom) | `string` | | Shorthand for `--attest=type=sbom` |
|
||||
| [`--secret`](#secret) | `stringArray` | | Secret to expose to the build (format: `id=mysecret[,src=/local/secret]`) |
|
||||
| [`--shm-size`](#shm-size) | `bytes` | `0` | Size of `/dev/shm` |
|
||||
| [`--ssh`](#ssh) | `stringArray` | | SSH agent socket or keys to expose to the build (format: `default\|<id>[=<socket>\|<key>[,<key>]]`) |
|
||||
| [`-t`](https://docs.docker.com/engine/reference/commandline/build/#tag), [`--tag`](https://docs.docker.com/engine/reference/commandline/build/#tag) | `stringArray` | | Name and optionally a tag (format: `name:tag`) |
|
||||
| [`--target`](https://docs.docker.com/engine/reference/commandline/build/#target) | `string` | | Set the target build stage to build |
|
||||
| [`--ulimit`](#ulimit) | `ulimit` | | Ulimit options |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
@@ -66,6 +66,30 @@ here we'll document a subset of the new flags.
|
||||
|
||||
## Examples
|
||||
|
||||
### <a name="attest"></a> Create attestations (--attest)
|
||||
|
||||
```
|
||||
--attest=type=sbom,...
|
||||
--attest=type=provenance,...
|
||||
```
|
||||
|
||||
Create [image attestations](https://docs.docker.com/build/attestations/).
|
||||
BuildKit currently supports:
|
||||
|
||||
- `sbom` - Software Bill of Materials.
|
||||
|
||||
Use `--attest=type=sbom` to generate an SBOM for an image at build-time.
|
||||
Alternatively, you can use the [`--sbom` shorthand](#sbom).
|
||||
|
||||
For more information, see [here](https://docs.docker.com/build/attestations/sbom/).
|
||||
|
||||
- `provenance` - SLSA Provenance
|
||||
|
||||
Use `--attest=type=provenance` to generate provenance for an image at
|
||||
build-time. Alternatively, you can use the [`--provenance` shorthand](#provenance).
|
||||
|
||||
For more information, see [here](https://docs.docker.com/build/attestations/slsa-provenance/).
|
||||
|
||||
### <a name="allow"></a> Allow extra privileged entitlement (--allow)
|
||||
|
||||
```
|
||||
@@ -90,7 +114,7 @@ $ docker buildx build --allow security.insecure .
|
||||
|
||||
### <a name="build-arg"></a> Set build-time variables (--build-arg)
|
||||
|
||||
Same as [`docker build` command](https://docs.docker.com/engine/reference/commandline/build/#set-build-time-variables---build-arg).
|
||||
Same as [`docker build` command](https://docs.docker.com/engine/reference/commandline/build/#build-arg).
|
||||
|
||||
There are also useful built-in build args like:
|
||||
|
||||
@@ -139,10 +163,12 @@ COPY --from=project myfile /
|
||||
|
||||
#### <a name="source-oci-layout"></a> Source image from OCI layout directory
|
||||
|
||||
Source an image from a local [OCI layout compliant directory](https://github.com/opencontainers/image-spec/blob/main/image-layout.md):
|
||||
Source an image from a local [OCI layout compliant directory](https://github.com/opencontainers/image-spec/blob/main/image-layout.md),
|
||||
either by tag, or by digest:
|
||||
|
||||
```console
|
||||
$ docker buildx build --build-context foo=oci-layout:///path/to/local/layout@sha256:abcd12345 .
|
||||
$ docker buildx build --build-context foo=oci-layout:///path/to/local/layout:<tag>
|
||||
$ docker buildx build --build-context foo=oci-layout:///path/to/local/layout@sha256:<digest>
|
||||
```
|
||||
|
||||
```dockerfile
|
||||
@@ -154,14 +180,8 @@ COPY --from=foo myfile /
|
||||
FROM foo
|
||||
```
|
||||
|
||||
The OCI layout directory must be compliant with the [OCI layout specification](https://github.com/opencontainers/image-spec/blob/main/image-layout.md). It looks _solely_ for hashes. It does not
|
||||
do any form of `image:tag` resolution to find the hash of the manifest; that is up to you.
|
||||
|
||||
The format of the `--build-context` must be: `<context>=oci-layout://<path-to-local-layout>@sha256:<hash-of-manifest>`, where:
|
||||
|
||||
* `context` is the name of the build context as used in the `Dockerfile`.
|
||||
* `path-to-local-layout` is the path on the local machine, where you are running `docker build`, to the spec-compliant OCI layout.
|
||||
* `hash-of-manifest` is the hash of the manifest for the image. It can be a single-architecture manifest or a multi-architecture index.
|
||||
The OCI layout directory must be compliant with the [OCI layout specification](https://github.com/opencontainers/image-spec/blob/main/image-layout.md).
|
||||
You can reference an image in the layout using either tags, or the exact digest.
|
||||
|
||||
### <a name="builder"></a> Override the configured builder instance (--builder)
|
||||
|
||||
@@ -455,11 +475,21 @@ $ docker buildx build --load --progress=plain .
|
||||
> Check also our [Color output controls guide](https://github.com/docker/buildx/blob/master/docs/guides/color-output.md)
|
||||
> for modifying the colors that are used to output information to the terminal.
|
||||
|
||||
### <a name="provenance"></a> Create provenance attestations (--provenance)
|
||||
|
||||
Shorthand for [`--attest=type=provenance`](#attest). Enables provenance
|
||||
attestations for the build result.
|
||||
|
||||
### <a name="push"></a> Push the build result to a registry (--push)
|
||||
|
||||
Shorthand for [`--output=type=registry`](#registry). Will automatically push the
|
||||
build result to registry.
|
||||
|
||||
### <a name="sbom"></a> Create SBOM attestations (--sbom)
|
||||
|
||||
Shorthand for [`--attest=type=sbom`](#attest). Enables SBOM attestations for
|
||||
the build result.
|
||||
|
||||
### <a name="secret"></a> Secret to expose to the build (--secret)
|
||||
|
||||
```
|
||||
|
@@ -9,19 +9,19 @@ Create a new builder instance
|
||||
|
||||
### Options
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [`--append`](#append) | | | Append a node to builder instead of changing it |
|
||||
| `--bootstrap` | | | Boot builder after creation |
|
||||
| [`--buildkitd-flags`](#buildkitd-flags) | `string` | | Flags for buildkitd daemon |
|
||||
| [`--config`](#config) | `string` | | BuildKit config file |
|
||||
| [`--driver`](#driver) | `string` | | Driver to use (available: `docker-container`, `kubernetes`, `remote`) |
|
||||
| [`--driver-opt`](#driver-opt) | `stringArray` | | Options for the driver |
|
||||
| [`--leave`](#leave) | | | Remove a node from builder instead of changing it |
|
||||
| [`--name`](#name) | `string` | | Builder instance name |
|
||||
| [`--node`](#node) | `string` | | Create/modify node with given name |
|
||||
| [`--platform`](#platform) | `stringArray` | | Fixed platforms for current node |
|
||||
| [`--use`](#use) | | | Set the current builder instance |
|
||||
| Name | Type | Default | Description |
|
||||
|:----------------------------------------|:--------------|:--------|:----------------------------------------------------------------------|
|
||||
| [`--append`](#append) | | | Append a node to builder instead of changing it |
|
||||
| `--bootstrap` | | | Boot builder after creation |
|
||||
| [`--buildkitd-flags`](#buildkitd-flags) | `string` | | Flags for buildkitd daemon |
|
||||
| [`--config`](#config) | `string` | | BuildKit config file |
|
||||
| [`--driver`](#driver) | `string` | | Driver to use (available: `docker-container`, `kubernetes`, `remote`) |
|
||||
| [`--driver-opt`](#driver-opt) | `stringArray` | | Options for the driver |
|
||||
| [`--leave`](#leave) | | | Remove a node from builder instead of changing it |
|
||||
| [`--name`](#name) | `string` | | Builder instance name |
|
||||
| [`--node`](#node) | `string` | | Create/modify node with given name |
|
||||
| [`--platform`](#platform) | `stringArray` | | Fixed platforms for current node |
|
||||
| [`--use`](#use) | | | Set the current builder instance |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
|
@@ -9,11 +9,11 @@ Disk usage
|
||||
|
||||
### Options
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| `--filter` | `filter` | | Provide filter values |
|
||||
| `--verbose` | | | Provide a more verbose output |
|
||||
| Name | Type | Default | Description |
|
||||
|:------------------------|:---------|:--------|:-----------------------------------------|
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| `--filter` | `filter` | | Provide filter values |
|
||||
| `--verbose` | | | Provide a more verbose output |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
|
@@ -9,17 +9,17 @@ Commands to work on images in registry
|
||||
|
||||
### Subcommands
|
||||
|
||||
| Name | Description |
|
||||
| --- | --- |
|
||||
| [`create`](buildx_imagetools_create.md) | Create a new image based on source images |
|
||||
| [`inspect`](buildx_imagetools_inspect.md) | Show details of an image in the registry |
|
||||
| Name | Description |
|
||||
|:------------------------------------------|:------------------------------------------|
|
||||
| [`create`](buildx_imagetools_create.md) | Create a new image based on source images |
|
||||
| [`inspect`](buildx_imagetools_inspect.md) | Show details of an image in the registry |
|
||||
|
||||
|
||||
### Options
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| Name | Type | Default | Description |
|
||||
|:------------------------|:---------|:--------|:-----------------------------------------|
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
|
@@ -9,14 +9,14 @@ Create a new image based on source images
|
||||
|
||||
### Options
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [`--append`](#append) | | | Append to existing manifest |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| [`--dry-run`](#dry-run) | | | Show final image instead of pushing |
|
||||
| [`-f`](#file), [`--file`](#file) | `stringArray` | | Read source descriptor from file |
|
||||
| `--progress` | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`). Use plain to show container output |
|
||||
| [`-t`](#tag), [`--tag`](#tag) | `stringArray` | | Set reference for new image |
|
||||
| Name | Type | Default | Description |
|
||||
|:---------------------------------|:--------------|:--------|:-----------------------------------------------------------------------------------------|
|
||||
| [`--append`](#append) | | | Append to existing manifest |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| [`--dry-run`](#dry-run) | | | Show final image instead of pushing |
|
||||
| [`-f`](#file), [`--file`](#file) | `stringArray` | | Read source descriptor from file |
|
||||
| `--progress` | `string` | `auto` | Set type of progress output (`auto`, `plain`, `tty`). Use plain to show container output |
|
||||
| [`-t`](#tag), [`--tag`](#tag) | `stringArray` | | Set reference for new image |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
|
@@ -9,11 +9,11 @@ Show details of an image in the registry
|
||||
|
||||
### Options
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| [`--format`](#format) | `string` | `{{.Manifest}}` | Format the output using the given Go template |
|
||||
| [`--raw`](#raw) | | | Show original, unformatted JSON manifest |
|
||||
| Name | Type | Default | Description |
|
||||
|:------------------------|:---------|:----------------|:----------------------------------------------|
|
||||
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
|
||||
| [`--format`](#format) | `string` | `{{.Manifest}}` | Format the output using the given Go template |
|
||||
| [`--raw`](#raw) | | | Show original, unformatted JSON manifest |
|
||||
|
||||
|
||||
<!---MARKER_GEN_END-->
|
||||
@@ -72,7 +72,6 @@ unset. Following fields are available:
|
||||
* `.Name`: provides the reference of the image
|
||||
* `.Manifest`: provides the manifest or manifest list
|
||||
* `.Image`: provides the image config
|
||||
* `.BuildInfo`: provides [build info from image config](https://github.com/moby/buildkit/blob/master/docs/build-repro.md#image-config)
|
||||
|
||||
#### `.Name`
|
||||
|
||||
@@ -122,39 +121,6 @@ Manifests:
  Platform:    linux/riscv64
```

#### `.BuildInfo`

```console
$ docker buildx imagetools inspect crazymax/buildx:buildinfo --format "{{.BuildInfo}}"
Name:      docker.io/crazymax/buildx:buildinfo
Frontend:  dockerfile.v0
Attrs:
  filename:      Dockerfile
  source:        docker/dockerfile-upstream:master-labs
  build-arg:bar: foo
  build-arg:foo: bar
Sources:
  Type:        docker-image
  Ref:         docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0
  Pin:         sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0

  Type:        docker-image
  Ref:         docker.io/library/alpine:3.13
  Pin:         sha256:026f721af4cf2843e07bba648e158fb35ecc876d822130633cc49f707f0fc88c

  Type:        docker-image
  Ref:         docker.io/moby/buildkit:v0.9.0
  Pin:         sha256:8dc668e7f66db1c044aadbed306020743516a94848793e0f81f94a087ee78cab

  Type:        docker-image
  Ref:         docker.io/tonistiigi/xx@sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04
  Pin:         sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04

  Type:        http
  Ref:         https://raw.githubusercontent.com/moby/moby/master/README.md
  Pin:         sha256:419455202b0ef97e480d7f8199b26a721a417818bc0e2d106975f74323f25e6c
```

#### JSON output

A `json` go template func is also available if you want to render fields as
@@ -166,7 +132,7 @@ $ docker buildx imagetools inspect crazymax/loop --format "{{json .Manifest}}"
|
||||
```json
|
||||
{
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"digest": "sha256:08602e7340970e92bde5e0a2e887c1fde4d9ae753d1e05efb4c8ef3b609f97f1",
|
||||
"digest": "sha256:a9ca35b798e0b198f9be7f3b8b53982e9a6cf96814cb10d78083f40ad8c127f1",
|
||||
"size": 949
|
||||
}
|
||||
```
|
||||
@@ -177,23 +143,23 @@ $ docker buildx imagetools inspect moby/buildkit:master --format "{{json .Manife
|
||||
```json
|
||||
{
|
||||
"schemaVersion": 2,
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.list.v2+json",
|
||||
"digest": "sha256:79d97f205e2799d99a3a8ae2a1ef17acb331e11784262c3faada847dc6972c52",
|
||||
"size": 2010,
|
||||
"mediaType": "application/vnd.oci.image.index.v1+json",
|
||||
"digest": "sha256:d895e8fdcf5e2bb39acb5966f97fc4cd87a2d13d27c939c320025eb4aca5440c",
|
||||
"size": 4654,
|
||||
"manifests": [
|
||||
{
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"digest": "sha256:bd1e78f06de26610fadf4eb9d04b1a45a545799d6342701726e952cc0c11c912",
|
||||
"size": 1158,
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"digest": "sha256:ac9dd4fbec9e36b562f910618975a2936533f8e411a3fea2858aacc0ac972e1c",
|
||||
"size": 1054,
|
||||
"platform": {
|
||||
"architecture": "amd64",
|
||||
"os": "linux"
|
||||
}
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"digest": "sha256:d37dcced63ec0965824fca644f0ac9efad8569434ec15b4c83adfcb3dcfc743b",
|
||||
"size": 1158,
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"digest": "sha256:0f4dc6797db467372cbf52c7236816203654a839f64a6542c9135d1973c9d744",
|
||||
"size": 1054,
|
||||
"platform": {
|
||||
"architecture": "arm",
|
||||
"os": "linux",
|
||||
@@ -201,260 +167,356 @@ $ docker buildx imagetools inspect moby/buildkit:master --format "{{json .Manife
|
||||
}
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"digest": "sha256:ce142eb2255e6af46f2809e159fd03081697c7605a3de03b9cbe9a52ddb244bf",
|
||||
"size": 1158,
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"digest": "sha256:d62bb533d95afe17c4a9caf1e7c57a3b0a7a67409ccfa7af947aeb0f670ffb87",
|
||||
"size": 1054,
|
||||
"platform": {
|
||||
"architecture": "arm64",
|
||||
"os": "linux"
|
||||
}
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"digest": "sha256:f59bfb5062fff76ce464bfa4e25ebaaaac887d6818238e119d68613c456d360c",
|
||||
"size": 1158,
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"digest": "sha256:b4944057e0c68203cdcc3dceff3b2df3c7d9e3dd801724fa977b01081da7771e",
|
||||
"size": 1054,
|
||||
"platform": {
|
||||
"architecture": "s390x",
|
||||
"os": "linux"
|
||||
}
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"digest": "sha256:cc96426e0c50a78105d5637d31356db5dd6ec594f21b24276e534a32da09645c",
|
||||
"size": 1159,
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"digest": "sha256:825702a51eb4234904fc9253d8b0bf0a584787ffd8fc3fd6fa374188233ce399",
|
||||
"size": 1054,
|
||||
"platform": {
|
||||
"architecture": "ppc64le",
|
||||
"os": "linux"
|
||||
}
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"digest": "sha256:39f9c1e2878e6c333acb23187d6b205ce82ed934c60da326cb2c698192631478",
|
||||
"size": 1158,
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"digest": "sha256:dfb27c6acc9b9f3a7c9d47366d137089565062f43c8063c9f5e408d34c87ee4a",
|
||||
"size": 1054,
|
||||
"platform": {
|
||||
"architecture": "riscv64",
|
||||
"os": "linux"
|
||||
}
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"digest": "sha256:f2fe69bccc878e658caf21dfc99eaf726fb20d28f17398c1d66a90e62cc019f9",
|
||||
"size": 1113,
|
||||
"annotations": {
|
||||
"vnd.docker.reference.digest": "sha256:ac9dd4fbec9e36b562f910618975a2936533f8e411a3fea2858aacc0ac972e1c",
|
||||
"vnd.docker.reference.type": "attestation-manifest"
|
||||
},
|
||||
"platform": {
|
||||
"architecture": "unknown",
|
||||
"os": "unknown"
|
||||
}
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"digest": "sha256:9e112f8d4e383186f36369fba7b454e246d2e9ca5def797f1b84ede265e9f3ca",
|
||||
"size": 1113,
|
||||
"annotations": {
|
||||
"vnd.docker.reference.digest": "sha256:0f4dc6797db467372cbf52c7236816203654a839f64a6542c9135d1973c9d744",
|
||||
"vnd.docker.reference.type": "attestation-manifest"
|
||||
},
|
||||
"platform": {
|
||||
"architecture": "unknown",
|
||||
"os": "unknown"
|
||||
}
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"digest": "sha256:09d593587f8665269ec6753eaed7fbdb09968f71587dd53e06519502cbc16775",
|
||||
"size": 1113,
|
||||
"annotations": {
|
||||
"vnd.docker.reference.digest": "sha256:d62bb533d95afe17c4a9caf1e7c57a3b0a7a67409ccfa7af947aeb0f670ffb87",
|
||||
"vnd.docker.reference.type": "attestation-manifest"
|
||||
},
|
||||
"platform": {
|
||||
"architecture": "unknown",
|
||||
"os": "unknown"
|
||||
}
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"digest": "sha256:985a3f4544dfb042db6a8703f5f76438667dd7958aba14cb04bebe3b4cbd9307",
|
||||
"size": 1113,
|
||||
"annotations": {
|
||||
"vnd.docker.reference.digest": "sha256:b4944057e0c68203cdcc3dceff3b2df3c7d9e3dd801724fa977b01081da7771e",
|
||||
"vnd.docker.reference.type": "attestation-manifest"
|
||||
},
|
||||
"platform": {
|
||||
"architecture": "unknown",
|
||||
"os": "unknown"
|
||||
}
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"digest": "sha256:cfccb6afeede7dc29bf8abef4815d56f2723fa482ea63c9cd519cd991c379294",
|
||||
"size": 1113,
|
||||
"annotations": {
|
||||
"vnd.docker.reference.digest": "sha256:825702a51eb4234904fc9253d8b0bf0a584787ffd8fc3fd6fa374188233ce399",
|
||||
"vnd.docker.reference.type": "attestation-manifest"
|
||||
},
|
||||
"platform": {
|
||||
"architecture": "unknown",
|
||||
"os": "unknown"
|
||||
}
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"digest": "sha256:2e93733432c6a14cb57db33928b3a17d7ca298b3babe24d9f56dca2754dbde3b",
|
||||
"size": 1113,
|
||||
"annotations": {
|
||||
"vnd.docker.reference.digest": "sha256:dfb27c6acc9b9f3a7c9d47366d137089565062f43c8063c9f5e408d34c87ee4a",
|
||||
"vnd.docker.reference.type": "attestation-manifest"
|
||||
},
|
||||
"platform": {
|
||||
"architecture": "unknown",
|
||||
"os": "unknown"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Following command provides [SLSA](https://github.com/moby/buildkit/blob/master/docs/attestations/slsa-provenance.md) JSON output:

```console
$ docker buildx imagetools inspect crazymax/buildx:buildinfo --format "{{json .BuildInfo}}"
$ docker buildx imagetools inspect crazymax/buildkit:attest --format "{{json .Provenance}}"
```
```json
{
  "frontend": "dockerfile.v0",
  "attrs": {
    "build-arg:bar": "foo",
    "build-arg:foo": "bar",
    "filename": "Dockerfile",
    "source": "crazymax/dockerfile:buildattrs"
  },
  "sources": [
    {
      "type": "docker-image",
      "ref": "docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0",
      "pin": "sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0"
  "SLSA": {
    "builder": {
      "id": ""
    },
    {
      "type": "docker-image",
      "ref": "docker.io/library/alpine:3.13@sha256:026f721af4cf2843e07bba648e158fb35ecc876d822130633cc49f707f0fc88c",
      "pin": "sha256:026f721af4cf2843e07bba648e158fb35ecc876d822130633cc49f707f0fc88c"
    "buildType": "https://mobyproject.org/buildkit@v1",
    "materials": [
      {
        "uri": "pkg:docker/docker/buildkit-syft-scanner@stable-1",
        "digest": {
          "sha256": "b45f1d207e16c3a3a5a10b254ad8ad358d01f7ea090d382b95c6b2ee2b3ef765"
        }
      },
      {
        "uri": "pkg:docker/alpine@latest?platform=linux%2Famd64",
        "digest": {
          "sha256": "8914eb54f968791faf6a8638949e480fef81e697984fba772b3976835194c6d4"
        }
      }
    ],
    "invocation": {
      "configSource": {},
      "parameters": {
        "frontend": "dockerfile.v0",
        "locals": [
          {
            "name": "context"
          },
          {
            "name": "dockerfile"
          }
        ]
      },
      "environment": {
        "platform": "linux/amd64"
      }
    },
    {
      "type": "docker-image",
      "ref": "docker.io/moby/buildkit:v0.9.0@sha256:8dc668e7f66db1c044aadbed306020743516a94848793e0f81f94a087ee78cab",
      "pin": "sha256:8dc668e7f66db1c044aadbed306020743516a94848793e0f81f94a087ee78cab"
    },
    {
      "type": "docker-image",
      "ref": "docker.io/tonistiigi/xx@sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04",
      "pin": "sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04"
    },
    {
      "type": "http",
      "ref": "https://raw.githubusercontent.com/moby/moby/master/README.md",
      "pin": "sha256:419455202b0ef97e480d7f8199b26a721a417818bc0e2d106975f74323f25e6c"
    "metadata": {
      "buildInvocationID": "02tdha2xkbxvin87mz9drhag4",
      "buildStartedOn": "2022-12-01T11:50:07.264704131Z",
      "buildFinishedOn": "2022-12-01T11:50:08.243788739Z",
      "reproducible": false,
      "completeness": {
        "parameters": true,
        "environment": true,
        "materials": false
      },
      "https://mobyproject.org/buildkit@v1#metadata": {}
    }
  ]
}
}
```

Following command provides [SBOM](https://github.com/moby/buildkit/blob/master/docs/attestations/sbom.md) JSON output:

```console
$ docker buildx imagetools inspect crazymax/buildkit:attest --format "{{json .SBOM}}"
```
```json
{
  "SPDX": {
    "SPDXID": "SPDXRef-DOCUMENT",
    "creationInfo": {
      "created": "2022-12-01T11:46:48.063400162Z",
      "creators": [
        "Tool: syft-v0.60.3",
        "Tool: buildkit-1ace2bb",
        "Organization: Anchore, Inc"
      ],
      "licenseListVersion": "3.18"
    },
    "dataLicense": "CC0-1.0",
    "documentNamespace": "https://anchore.com/syft/dir/run/src/core-0a4ccc6d-1a72-4c3a-a40e-3df1a2ffca94",
    "files": [...],
    "spdxVersion": "SPDX-2.2"
  }
}
```
|
||||
```console
|
||||
$ docker buildx imagetools inspect crazymax/buildx:buildinfo --format "{{json .}}"
|
||||
$ docker buildx imagetools inspect crazymax/buildkit:attest --format "{{json .}}"
|
||||
```
|
||||
```json
|
||||
{
|
||||
"name": "crazymax/buildx:buildinfo",
|
||||
"name": "crazymax/buildkit:attest",
|
||||
"manifest": {
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"digest": "sha256:899d2c7acbc124d406820857bb51d9089717bbe4e22b97eb4bc5789e99f09f83",
|
||||
"size": 2628
|
||||
"schemaVersion": 2,
|
||||
"mediaType": "application/vnd.oci.image.index.v1+json",
|
||||
"digest": "sha256:7007b387ccd52bd42a050f2e8020e56e64622c9269bf7bbe257b326fe99daf19",
|
||||
"size": 855,
|
||||
"manifests": [
|
||||
{
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"digest": "sha256:fbd10fe50b4b174bb9ea273e2eb9827fa8bf5c88edd8635a93dc83e0d1aecb55",
|
||||
"size": 673,
|
||||
"platform": {
|
||||
"architecture": "amd64",
|
||||
"os": "linux"
|
||||
}
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"digest": "sha256:a9de632c16998489fd63fbca42a03431df00639cfb2ecb8982bf9984b83c5b2b",
|
||||
"size": 839,
|
||||
"annotations": {
|
||||
"vnd.docker.reference.digest": "sha256:fbd10fe50b4b174bb9ea273e2eb9827fa8bf5c88edd8635a93dc83e0d1aecb55",
|
||||
"vnd.docker.reference.type": "attestation-manifest"
|
||||
},
|
||||
"platform": {
|
||||
"architecture": "unknown",
|
||||
"os": "unknown"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"image": {
|
||||
"created": "2022-02-24T12:27:43.627154558Z",
|
||||
"created": "2022-12-01T11:46:47.713777178Z",
|
||||
"architecture": "amd64",
|
||||
"os": "linux",
|
||||
"config": {
|
||||
"Env": [
|
||||
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
|
||||
"DOCKER_TLS_CERTDIR=/certs",
|
||||
"DOCKER_CLI_EXPERIMENTAL=enabled"
|
||||
],
|
||||
"Entrypoint": [
|
||||
"docker-entrypoint.sh"
|
||||
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
|
||||
],
|
||||
"Cmd": [
|
||||
"sh"
|
||||
"/bin/sh"
|
||||
]
|
||||
},
|
||||
"rootfs": {
|
||||
"type": "layers",
|
||||
"diff_ids": [
|
||||
"sha256:7fcb75871b2101082203959c83514ac8a9f4ecfee77a0fe9aa73bbe56afdf1b4",
|
||||
"sha256:d3c0b963ff5684160641f936d6a4aa14efc8ff27b6edac255c07f2d03ff92e82",
|
||||
"sha256:3f8d78f13fa9b1f35d3bc3f1351d03a027c38018c37baca73f93eecdea17f244",
|
||||
"sha256:8e6eb1137b182ae0c3f5d40ca46341fda2eaeeeb5fa516a9a2bf96171238e2e0",
|
||||
"sha256:fde4c869a56b54dd76d7352ddaa813fd96202bda30b9dceb2c2f2ad22fa2e6ce",
|
||||
"sha256:52025823edb284321af7846419899234b3c66219bf06061692b709875ed0760f",
|
||||
"sha256:50adb5982dbf6126c7cf279ac3181d1e39fc9116b610b947a3dadae6f7e7c5bc",
|
||||
"sha256:9801c319e1c66c5d295e78b2d3e80547e73c7e3c63a4b71e97c8ca357224af24",
|
||||
"sha256:dfbfac44d5d228c49b42194c8a2f470abd6916d072f612a6fb14318e94fde8ae",
|
||||
"sha256:3dfb74e19dedf61568b917c19b0fd3ee4580870027ca0b6054baf239855d1322",
|
||||
"sha256:b182e707c23e4f19be73f9022a99d2d1ca7bf1ca8f280d40e4d1c10a6f51550e"
|
||||
"sha256:ded7a220bb058e28ee3254fbba04ca90b679070424424761a53a043b93b612bf",
|
||||
"sha256:d85d09ab4b4e921666ccc2db8532e857bf3476b7588e52c9c17741d7af14204f"
|
||||
]
|
||||
},
|
||||
"history": [
|
||||
{
|
||||
"created": "2021-11-12T17:19:58.698676655Z",
|
||||
"created_by": "/bin/sh -c #(nop) ADD file:5a707b9d6cb5fff532e4c2141bc35707593f21da5528c9e71ae2ddb6ba4a4eb6 in / "
|
||||
"created": "2022-11-22T22:19:28.870801855Z",
|
||||
"created_by": "/bin/sh -c #(nop) ADD file:587cae71969871d3c6456d844a8795df9b64b12c710c275295a1182b46f630e7 in / "
|
||||
},
|
||||
{
|
||||
"created": "2021-11-12T17:19:58.948920855Z",
|
||||
"created": "2022-11-22T22:19:29.008562326Z",
|
||||
"created_by": "/bin/sh -c #(nop) CMD [\"/bin/sh\"]",
|
||||
"empty_layer": true
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T12:27:38.285594601Z",
|
||||
"created_by": "RUN /bin/sh -c apk --update --no-cache add bash ca-certificates openssh-client \u0026\u0026 rm -rf /tmp/* /var/cache/apk/* # buildkit",
|
||||
"created": "2022-12-01T11:46:47.713777178Z",
|
||||
"created_by": "RUN /bin/sh -c apk add curl # buildkit",
|
||||
"comment": "buildkit.dockerfile.v0"
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T12:27:41.061874167Z",
|
||||
"created_by": "COPY /opt/docker/ /usr/local/bin/ # buildkit",
|
||||
"comment": "buildkit.dockerfile.v0"
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T12:27:41.174098947Z",
|
||||
"created_by": "COPY /usr/bin/buildctl /usr/local/bin/buildctl # buildkit",
|
||||
"comment": "buildkit.dockerfile.v0"
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T12:27:41.320343683Z",
|
||||
"created_by": "COPY /usr/bin/buildkit* /usr/local/bin/ # buildkit",
|
||||
"comment": "buildkit.dockerfile.v0"
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T12:27:41.447149933Z",
|
||||
"created_by": "COPY /buildx /usr/libexec/docker/cli-plugins/docker-buildx # buildkit",
|
||||
"comment": "buildkit.dockerfile.v0"
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T12:27:43.057722191Z",
|
||||
"created_by": "COPY /opt/docker-compose /usr/libexec/docker/cli-plugins/docker-compose # buildkit",
|
||||
"comment": "buildkit.dockerfile.v0"
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T12:27:43.145224134Z",
|
||||
"created_by": "ADD https://raw.githubusercontent.com/moby/moby/master/README.md / # buildkit",
|
||||
"comment": "buildkit.dockerfile.v0"
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T12:27:43.422212427Z",
|
||||
"created_by": "ENV DOCKER_TLS_CERTDIR=/certs",
|
||||
"comment": "buildkit.dockerfile.v0",
|
||||
"empty_layer": true
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T12:27:43.422212427Z",
|
||||
"created_by": "ENV DOCKER_CLI_EXPERIMENTAL=enabled",
|
||||
"comment": "buildkit.dockerfile.v0",
|
||||
"empty_layer": true
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T12:27:43.422212427Z",
|
||||
"created_by": "RUN /bin/sh -c docker --version \u0026\u0026 buildkitd --version \u0026\u0026 buildctl --version \u0026\u0026 docker buildx version \u0026\u0026 docker compose version \u0026\u0026 mkdir /certs /certs/client \u0026\u0026 chmod 1777 /certs /certs/client # buildkit",
|
||||
"comment": "buildkit.dockerfile.v0"
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T12:27:43.514320155Z",
|
||||
"created_by": "COPY rootfs/modprobe.sh /usr/local/bin/modprobe # buildkit",
|
||||
"comment": "buildkit.dockerfile.v0"
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T12:27:43.627154558Z",
|
||||
"created_by": "COPY rootfs/docker-entrypoint.sh /usr/local/bin/ # buildkit",
|
||||
"comment": "buildkit.dockerfile.v0"
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T12:27:43.627154558Z",
|
||||
"created_by": "ENTRYPOINT [\"docker-entrypoint.sh\"]",
|
||||
"comment": "buildkit.dockerfile.v0",
|
||||
"empty_layer": true
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T12:27:43.627154558Z",
|
||||
"created_by": "CMD [\"sh\"]",
|
||||
"comment": "buildkit.dockerfile.v0",
|
||||
"empty_layer": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"buildinfo": {
|
||||
"frontend": "dockerfile.v0",
|
||||
"attrs": {
|
||||
"build-arg:bar": "foo",
|
||||
"build-arg:foo": "bar",
|
||||
"filename": "Dockerfile",
|
||||
"source": "docker/dockerfile-upstream:master-labs"
|
||||
},
|
||||
"sources": [
|
||||
{
|
||||
"type": "docker-image",
|
||||
"ref": "docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0",
|
||||
"pin": "sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0"
|
||||
"Provenance": {
|
||||
"SLSA": {
|
||||
"builder": {
|
||||
"id": ""
|
||||
},
|
||||
{
|
||||
"type": "docker-image",
|
||||
"ref": "docker.io/library/alpine:3.13",
|
||||
"pin": "sha256:026f721af4cf2843e07bba648e158fb35ecc876d822130633cc49f707f0fc88c"
|
||||
"buildType": "https://mobyproject.org/buildkit@v1",
|
||||
"materials": [
|
||||
{
|
||||
"uri": "pkg:docker/docker/buildkit-syft-scanner@stable-1",
|
||||
"digest": {
|
||||
"sha256": "b45f1d207e16c3a3a5a10b254ad8ad358d01f7ea090d382b95c6b2ee2b3ef765"
|
||||
}
|
||||
},
|
||||
{
|
||||
"uri": "pkg:docker/alpine@latest?platform=linux%2Famd64",
|
||||
"digest": {
|
||||
"sha256": "8914eb54f968791faf6a8638949e480fef81e697984fba772b3976835194c6d4"
|
||||
}
|
||||
}
|
||||
],
|
||||
"invocation": {
|
||||
"configSource": {},
|
||||
"parameters": {
|
||||
"frontend": "dockerfile.v0",
|
||||
"locals": [
|
||||
{
|
||||
"name": "context"
|
||||
},
|
||||
{
|
||||
"name": "dockerfile"
|
||||
}
|
||||
]
|
||||
},
|
||||
"environment": {
|
||||
"platform": "linux/amd64"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "docker-image",
|
||||
"ref": "docker.io/moby/buildkit:v0.9.0",
|
||||
"pin": "sha256:8dc668e7f66db1c044aadbed306020743516a94848793e0f81f94a087ee78cab"
|
||||
},
|
||||
{
|
||||
"type": "docker-image",
|
||||
"ref": "docker.io/tonistiigi/xx@sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04",
|
||||
"pin": "sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04"
|
||||
},
|
||||
{
|
||||
"type": "http",
|
||||
"ref": "https://raw.githubusercontent.com/moby/moby/master/README.md",
|
||||
"pin": "sha256:419455202b0ef97e480d7f8199b26a721a417818bc0e2d106975f74323f25e6c"
|
||||
"metadata": {
|
||||
"buildInvocationID": "02tdha2xkbxvin87mz9drhag4",
|
||||
"buildStartedOn": "2022-12-01T11:50:07.264704131Z",
|
||||
"buildFinishedOn": "2022-12-01T11:50:08.243788739Z",
|
||||
"reproducible": false,
|
||||
"completeness": {
|
||||
"parameters": true,
|
||||
"environment": true,
|
||||
"materials": false
|
||||
},
|
||||
"https://mobyproject.org/buildkit@v1#metadata": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"SBOM": {
|
||||
"SPDX": {
|
||||
"SPDXID": "SPDXRef-DOCUMENT",
|
||||
"creationInfo": {
|
||||
"created": "2022-12-01T11:46:48.063400162Z",
|
||||
"creators": [
|
||||
"Tool: syft-v0.60.3",
|
||||
"Tool: buildkit-1ace2bb",
|
||||
"Organization: Anchore, Inc"
|
||||
],
|
||||
"licenseListVersion": "3.18"
|
||||
},
|
||||
"dataLicense": "CC0-1.0",
|
||||
"documentNamespace": "https://anchore.com/syft/dir/run/src/core-0a4ccc6d-1a72-4c3a-a40e-3df1a2ffca94",
|
||||
"files": [...],
|
||||
"spdxVersion": "SPDX-2.2"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### Multi-platform

Multi-platform images are supported for `.Image` and `.BuildInfo` fields. If
you want to pick up a specific platform, you can specify it using the `index`
Multi-platform images are supported for `.Image`, `.SLSA` and `.SBOM` fields.
If you want to pick up a specific platform, you can specify it using the `index`
go template function:

```console
@@ -462,7 +524,7 @@ $ docker buildx imagetools inspect --format '{{json (index .Image "linux/s390x")
|
||||
```
|
||||
```json
|
||||
{
|
||||
"created": "2022-02-25T17:13:27.89891722Z",
|
||||
"created": "2022-11-30T17:42:26.414957336Z",
|
||||
"architecture": "s390x",
|
||||
"os": "linux",
|
||||
"config": {
|
||||
@@ -481,8 +543,8 @@ $ docker buildx imagetools inspect --format '{{json (index .Image "linux/s390x")
|
||||
"diff_ids": [
|
||||
"sha256:41048e32d0684349141cf05f629c5fc3c5915d1f3426b66dbb8953a540e01e1e",
|
||||
"sha256:2651209b9208fff6c053bc3c17353cb07874e50f1a9bc96d6afd03aef63de76a",
|
||||
"sha256:6741ed7e73039d853fa8902246a4c7e8bf9dd09652fd1b08251bc5f9e8876a7f",
|
||||
"sha256:92ac046adeeb65c86ae3f0b458dee04ad4a462e417661c04d77642c66494f69b"
|
||||
"sha256:88577322e65f094ce8ac27435880f1a8a9baadb569258026bb141770451bafcb",
|
||||
"sha256:de8f9a790e4ed10ff1f1f8ea923c9da4f97246a7e200add2dc6650eba3f10a20"
|
||||
]
|
||||
},
|
||||
"history": [
|
||||
@@ -501,23 +563,23 @@ $ docker buildx imagetools inspect --format '{{json (index .Image "linux/s390x")
|
||||
"comment": "buildkit.dockerfile.v0"
|
||||
},
|
||||
{
|
||||
"created": "2022-02-24T00:34:00.924540012Z",
|
||||
"created": "2022-08-25T00:39:25.652811078Z",
|
||||
"created_by": "COPY examples/buildctl-daemonless/buildctl-daemonless.sh /usr/bin/ # buildkit",
|
||||
"comment": "buildkit.dockerfile.v0"
|
||||
},
|
||||
{
|
||||
"created": "2022-02-25T17:13:27.89891722Z",
|
||||
"created": "2022-11-30T17:42:26.414957336Z",
|
||||
"created_by": "VOLUME [/var/lib/buildkit]",
|
||||
"comment": "buildkit.dockerfile.v0",
|
||||
"empty_layer": true
|
||||
},
|
||||
{
|
||||
"created": "2022-02-25T17:13:27.89891722Z",
|
||||
"created": "2022-11-30T17:42:26.414957336Z",
|
||||
"created_by": "COPY / /usr/bin/ # buildkit",
|
||||
"comment": "buildkit.dockerfile.v0"
|
||||
},
|
||||
{
|
||||
"created": "2022-02-25T17:13:27.89891722Z",
|
||||
"created": "2022-11-30T17:42:26.414957336Z",
|
||||
"created_by": "ENTRYPOINT [\"buildkitd\"]",
|
||||
"comment": "buildkit.dockerfile.v0",
|
||||
"empty_layer": true
|
||||
@@ -541,24 +603,24 @@ $ docker buildx imagetools inspect --raw crazymax/loop | jq
|
||||
"schemaVersion": 2,
|
||||
"config": {
|
||||
"mediaType": "application/vnd.docker.container.image.v1+json",
|
||||
"digest": "sha256:7ace7d324e79b360b2db8b820d83081863d96d22e734cdf297a8e7fd83f6ceb3",
|
||||
"size": 2298
|
||||
"digest": "sha256:a98999183d2c7a8845f6d56496e51099ce6e4359ee7255504174b05430c4b78b",
|
||||
"size": 2762
|
||||
},
|
||||
"layers": [
|
||||
{
|
||||
"mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
|
||||
"digest": "sha256:5843afab387455b37944e709ee8c78d7520df80f8d01cf7f861aae63beeddb6b",
|
||||
"size": 2811478
|
||||
"digest": "sha256:8663204ce13b2961da55026a2034abb9e5afaaccf6a9cfb44ad71406dcd07c7b",
|
||||
"size": 2818370
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
|
||||
"digest": "sha256:726d3732a87e1c430d67e8969de6b222a889d45e045ebae1a008a37ba38f3b1f",
|
||||
"size": 1776812
|
||||
"digest": "sha256:f0868a92f8e1e5018ed4e60eb845ed4ff0e2229897f4105e5a4735c1d6fd874f",
|
||||
"size": 1821402
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
|
||||
"digest": "sha256:5d7cf9b33148a8f220c84f27dd2cfae46aca019a3ea3fbf7274f6d6dbfae8f3b",
|
||||
"size": 382855
|
||||
"digest": "sha256:d010066dbdfcf7c12fca30cd4b567aa7218eb6762ab53169d043655b7a8d7f2e",
|
||||
"size": 404457
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -574,7 +636,7 @@ $ docker buildx imagetools inspect --raw moby/buildkit:master | jq
|
||||
"manifests": [
|
||||
{
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"digest": "sha256:667d28c9fb33820ce686887a717a148e89fa77f9097f9352996bbcce99d352b1",
|
||||
"digest": "sha256:f9f41c85124686c2afe330a985066748a91d7a5d505777fe274df804ab5e077e",
|
||||
"size": 1158,
|
||||
"platform": {
|
||||
"architecture": "amd64",
|
||||
@@ -583,7 +645,7 @@ $ docker buildx imagetools inspect --raw moby/buildkit:master | jq
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"digest": "sha256:71789527b64ab3d7b3de01d364b449cd7f7a3da758218fbf73b9c9aae05a6775",
|
||||
"digest": "sha256:82097c2be19c617aafb3c3e43c88548738d4b2bf3db5c36666283a918b390266",
|
||||
"size": 1158,
|
||||
"platform": {
|
||||
"architecture": "arm",
|
||||
@@ -593,7 +655,7 @@ $ docker buildx imagetools inspect --raw moby/buildkit:master | jq
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"digest": "sha256:fb64667e1ce6ab0d05478f3a8402af07b27737598dcf9a510fb1d792b13a66be",
|
||||
"digest": "sha256:b6b91e6c823d7220ded7d3b688e571ba800b13d91bbc904c1d8053593e3ee42c",
|
||||
"size": 1158,
|
||||
"platform": {
|
||||
"architecture": "arm64",
|
||||
@@ -602,7 +664,7 @@ $ docker buildx imagetools inspect --raw moby/buildkit:master | jq
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"digest": "sha256:1c3ddf95a0788e23f72f25800c05abc4458946685e2b66788c3d978cde6da92b",
|
||||
"digest": "sha256:797061bcc16778de048b96f769c018ec24da221088050bbe926ea3b8d51d77e8",
|
||||
"size": 1158,
|
||||
"platform": {
|
||||
"architecture": "s390x",
|
||||
@@ -611,7 +673,7 @@ $ docker buildx imagetools inspect --raw moby/buildkit:master | jq
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"digest": "sha256:05bcde6d460a284e5bc88026cd070277e8380355de3126cbc8fe8a452708c6b1",
|
||||
"digest": "sha256:b93d3a84d18c4d0b8c279e77343d854d9b5177df7ea55cf468d461aa2523364e",
|
||||
"size": 1159,
|
||||
"platform": {
|
||||
"architecture": "ppc64le",
|
||||
@@ -620,7 +682,7 @@ $ docker buildx imagetools inspect --raw moby/buildkit:master | jq
|
||||
},
|
||||
{
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"digest": "sha256:c04c57765304ab84f4f9807fff3e11605c3a60e16435c734b02c723680f6bd6e",
|
||||
"digest": "sha256:d5c950dd1b270d437c838187112a0cb44c9258248d7a3a8bcb42fae8f717dc01",
|
||||
"size": 1158,
|
||||
"platform": {
|
||||
"architecture": "riscv64",
|
||||
|
@@ -9,10 +9,10 @@ Inspect current builder instance

### Options

| Name | Type | Default | Description |
| --- | --- | --- | --- |
| [`--bootstrap`](#bootstrap) | | | Ensure builder has booted before inspecting |
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
| Name | Type | Default | Description |
|:----------------------------|:---------|:--------|:--------------------------------------------|
| [`--bootstrap`](#bootstrap) | | | Ensure builder has booted before inspecting |
| [`--builder`](#builder) | `string` | | Override the configured builder instance |

<!---MARKER_GEN_END-->
|
@@ -9,14 +9,14 @@ Remove build cache

### Options

| Name | Type | Default | Description |
| --- | --- | --- | --- |
| `-a`, `--all` | | | Include internal/frontend images |
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
| `--filter` | `filter` | | Provide filter values (e.g., `until=24h`) |
| `-f`, `--force` | | | Do not prompt for confirmation |
| `--keep-storage` | `bytes` | `0` | Amount of disk space to keep for cache |
| `--verbose` | | | Provide a more verbose output |
| Name | Type | Default | Description |
|:------------------------|:---------|:--------|:------------------------------------------|
| `-a`, `--all` | | | Include internal/frontend images |
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
| `--filter` | `filter` | | Provide filter values (e.g., `until=24h`) |
| `-f`, `--force` | | | Do not prompt for confirmation |
| `--keep-storage` | `bytes` | `0` | Amount of disk space to keep for cache |
| `--verbose` | | | Provide a more verbose output |

<!---MARKER_GEN_END-->
|
@@ -9,13 +9,13 @@ Remove a builder instance

### Options

| Name | Type | Default | Description |
| --- | --- | --- | --- |
| [`--all-inactive`](#all-inactive) | | | Remove all inactive builders |
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
| [`-f`](#force), [`--force`](#force) | | | Do not prompt for confirmation |
| [`--keep-daemon`](#keep-daemon) | | | Keep the buildkitd daemon running |
| [`--keep-state`](#keep-state) | | | Keep BuildKit state |
| Name | Type | Default | Description |
|:------------------------------------|:---------|:--------|:-----------------------------------------|
| [`--all-inactive`](#all-inactive) | | | Remove all inactive builders |
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
| [`-f`](#force), [`--force`](#force) | | | Do not prompt for confirmation |
| [`--keep-daemon`](#keep-daemon) | | | Keep the buildkitd daemon running |
| [`--keep-state`](#keep-state) | | | Keep BuildKit state |

<!---MARKER_GEN_END-->
|
@@ -9,9 +9,9 @@ Stop builder instance

### Options

| Name | Type | Default | Description |
| --- | --- | --- | --- |
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
| Name | Type | Default | Description |
|:------------------------|:---------|:--------|:-----------------------------------------|
| [`--builder`](#builder) | `string` | | Override the configured builder instance |

<!---MARKER_GEN_END-->
|
@@ -9,11 +9,11 @@ Set the current builder instance

### Options

| Name | Type | Default | Description |
| --- | --- | --- | --- |
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
| `--default` | | | Set builder as default for current context |
| `--global` | | | Builder persists context changes |
| Name | Type | Default | Description |
|:------------------------|:---------|:--------|:-------------------------------------------|
| [`--builder`](#builder) | `string` | | Override the configured builder instance |
| `--default` | | | Set builder as default for current context |
| `--global` | | | Builder persists context changes |

<!---MARKER_GEN_END-->
13 go.mod
@@ -6,17 +6,18 @@ require (
    github.com/aws/aws-sdk-go-v2/config v1.15.5
    github.com/compose-spec/compose-go v1.6.0
    github.com/containerd/console v1.0.3
    github.com/containerd/containerd v1.6.11
    github.com/docker/cli v23.0.0-beta.1+incompatible
    github.com/docker/cli-docs-tool v0.5.0
    github.com/containerd/containerd v1.6.16-0.20230124210447-1709cfe273d9
    github.com/docker/cli v23.0.0-rc.1+incompatible
    github.com/docker/cli-docs-tool v0.5.1
    github.com/docker/distribution v2.8.1+incompatible
    github.com/docker/docker v23.0.0-beta.1+incompatible
    github.com/docker/docker v23.0.0-rc.1+incompatible
    github.com/docker/go-units v0.5.0
    github.com/gofrs/flock v0.8.1
    github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510
    github.com/hashicorp/go-cty-funcs v0.0.0-20200930094925-2721b1e36840
    github.com/hashicorp/hcl/v2 v2.8.2
    github.com/moby/buildkit v0.11.0-rc1.0.20221213193744-862b22d7e7cf
    github.com/moby/buildkit v0.11.2
    github.com/moby/sys/mountinfo v0.6.2
    github.com/morikuni/aec v1.0.0
    github.com/opencontainers/go-digest v1.0.0
    github.com/opencontainers/image-spec v1.0.3-0.20220303224323-02efb9a75ee1
@@ -131,7 +132,7 @@ require (
    github.com/rogpeppe/go-internal v1.8.1 // indirect
    github.com/spf13/viper v1.14.0 // indirect
    github.com/theupdateframework/notary v0.6.1 // indirect
    github.com/tonistiigi/fsutil v0.0.0-20221114235510-0127568185cf // indirect
    github.com/tonistiigi/fsutil v0.0.0-20230105215944-fb433841cbfa // indirect
    github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea // indirect
    github.com/tonistiigi/vt100 v0.0.0-20210615222946-8066bb97264f // indirect
    github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect
30 go.sum
@@ -57,7 +57,7 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03
|
||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||
github.com/Microsoft/go-winio v0.5.2 h1:a9IhgEQBCUEk6QCdml9CiJGhAws+YwffDHEMp1VMrpA=
|
||||
github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
|
||||
github.com/Microsoft/hcsshim v0.9.5 h1:AbV+VPfTrIVffukazHcpxmz/sRiE6YaMDzHWR9BXZHo=
|
||||
github.com/Microsoft/hcsshim v0.9.6 h1:VwnDOgLeoi2du6dAznfmspNqTiwczvjv4K7NxuY9jsY=
|
||||
github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ=
|
||||
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
|
||||
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
|
||||
@@ -140,8 +140,8 @@ github.com/compose-spec/compose-go v1.6.0/go.mod h1:os+Ulh2jlZxY1XT1hbciERadjSUU
|
||||
github.com/containerd/cgroups v1.0.4 h1:jN/mbWBEaz+T1pi5OFtnkQ+8qnmEbAr1Oo1FRm5B0dA=
|
||||
github.com/containerd/console v1.0.3 h1:lIr7SlA5PxZyMV30bDW0MGbiOPXwc63yRuCP0ARubLw=
|
||||
github.com/containerd/console v1.0.3/go.mod h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkXar0TQ1gf3U=
|
||||
github.com/containerd/containerd v1.6.11 h1:uIn0uKrRhETIPyAb0lz6WY2xhYBObUOF1bBi5rqZ5x4=
|
||||
github.com/containerd/containerd v1.6.11/go.mod h1:K4Bw7gjgh4TnkmQY+py/PYQGp4e7xgnHAeg87VeWb3A=
|
||||
github.com/containerd/containerd v1.6.16-0.20230124210447-1709cfe273d9 h1:zdFesNKUzj0PDylWScwyU6zv3KAKwYeSE1ZLUmi01wk=
|
||||
github.com/containerd/containerd v1.6.16-0.20230124210447-1709cfe273d9/go.mod h1:1RdCUu95+gc2v9t3IL+zIlpClSmew7/0YS8O5eQZrOw=
|
||||
github.com/containerd/continuity v0.3.0 h1:nisirsYROK15TAMVukJOUyGJjz4BNQJBVsNvAXZJ/eg=
|
||||
github.com/containerd/continuity v0.3.0/go.mod h1:wJEAIwKOm/pBZuBd0JmeTvnLquTB1Ag8espWhkykbPM=
|
||||
github.com/containerd/fifo v1.0.0 h1:6PirWBr9/L7GDamKr+XM0IeUFXu5mf3M/BPpH9gaLBU=
|
||||
@@ -163,14 +163,14 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/distribution/distribution/v3 v3.0.0-20220725133111-4bf3547399eb h1:oCCuuU3kMO3sjZH/p7LamvQNW9SWoT4yQuMGcdSxGAE=
|
||||
github.com/distribution/distribution/v3 v3.0.0-20220725133111-4bf3547399eb/go.mod h1:28YO/VJk9/64+sTGNuYaBjWxrXTPrj0C0XmgTIOjxX4=
|
||||
github.com/docker/cli v23.0.0-beta.1+incompatible h1:K9CMaN5nHB1eu2f02PURnJhlPhWuFl0s9mL3kildAtE=
|
||||
github.com/docker/cli v23.0.0-beta.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
|
||||
github.com/docker/cli-docs-tool v0.5.0 h1:EjGwI6EyB7YemHCC7R8mwXszJTbuq0T0pFuDC5bMhcE=
|
||||
github.com/docker/cli-docs-tool v0.5.0/go.mod h1:zMjqTFCU361PRh8apiXzeAZ1Q/xupbIwTusYpzCXS/o=
|
||||
github.com/docker/cli v23.0.0-rc.1+incompatible h1:Vl3pcUK4/LFAD56Ys3BrqgAtuwpWd/IO3amuSL0ZbP0=
|
||||
github.com/docker/cli v23.0.0-rc.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
|
||||
github.com/docker/cli-docs-tool v0.5.1 h1:jIk/cCZurZERhALPVKhqlNxTQGxn2kcI+56gE57PQXg=
|
||||
github.com/docker/cli-docs-tool v0.5.1/go.mod h1:zMjqTFCU361PRh8apiXzeAZ1Q/xupbIwTusYpzCXS/o=
|
||||
github.com/docker/distribution v2.8.1+incompatible h1:Q50tZOPR6T/hjNsyc9g8/syEs6bk8XXApsHjKukMl68=
|
||||
github.com/docker/distribution v2.8.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
|
||||
github.com/docker/docker v23.0.0-beta.1+incompatible h1:0Xv+AFPWxTbmohdLK57pYRPmefCKthtfRF/qQwXHolg=
|
||||
github.com/docker/docker v23.0.0-beta.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/docker v23.0.0-rc.1+incompatible h1:Dmn88McWuHc7BSNN1s6RtfhMmt6ZPQAYUEf7FhqpiQI=
|
||||
github.com/docker/docker v23.0.0-rc.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/docker-credential-helpers v0.7.0 h1:xtCHsjxogADNZcdv1pKUHXryefjlVRqWqIhk/uXJp0A=
|
||||
github.com/docker/docker-credential-helpers v0.7.0/go.mod h1:rETQfLdHNT3foU5kuNkFR1R1V12OJRRO5lzt2D1b5X0=
|
||||
github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0=
|
||||
@@ -345,7 +345,7 @@ github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:
|
||||
github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
|
||||
github.com/imdario/mergo v0.3.13 h1:lFzP57bqS/wsqKssCGmtLAb8A0wKjLGrve2q3PPVcBk=
|
||||
github.com/imdario/mergo v0.3.13/go.mod h1:4lJ1jqUDcsbIECGy0RUJAXNIhg+6ocWgb1ALK2O4oXg=
|
||||
github.com/in-toto/in-toto-golang v0.3.4-0.20220709202702-fa494aaa0add h1:DAh7mHiRT7wc6kKepYdCpH16ElPciMPQWJaJ7H3l/ng=
|
||||
github.com/in-toto/in-toto-golang v0.5.0 h1:hb8bgwr0M2hGdDsLjkJ3ZqJ8JFLL/tgYdAxF/XEFBbY=
|
||||
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
|
||||
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jinzhu/gorm v1.9.2 h1:lCvgEaqe/HVE+tjAR2mt4HbbHAZsQOv3XAZiEZV37iw=
|
||||
@@ -401,8 +401,8 @@ github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZX
|
||||
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
|
||||
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
github.com/moby/buildkit v0.11.0-rc1.0.20221213193744-862b22d7e7cf h1:X4p2o1QeaKAJ8prPctFx98UrokFHsUgbiC0lDUObpOk=
|
||||
github.com/moby/buildkit v0.11.0-rc1.0.20221213193744-862b22d7e7cf/go.mod h1:f3jvilDvcG14z+gzPpA2lcWRwIRyFiNTo5bMwHiYDk0=
|
||||
github.com/moby/buildkit v0.11.2 h1:hNNsYuRssvFnp/qJ8FifStEUzROl5riPAEwk7cRzMjg=
|
||||
github.com/moby/buildkit v0.11.2/go.mod h1:b5hR8j3BZaOj5+gf6yielP9YLT9mU92zy3zZtdoUTrw=
|
||||
github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg=
|
||||
github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc=
|
||||
github.com/moby/patternmatcher v0.5.0 h1:YCZgJOeULcxLw1Q+sVR636pmS7sPEn1Qo2iAN6M7DBo=
|
||||
@@ -411,6 +411,7 @@ github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8
|
||||
github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c=
|
||||
github.com/moby/sys/mountinfo v0.5.0/go.mod h1:3bMD3Rg+zkqx8MRYPi7Pyb0Ie97QEBmdxbhnCLlSvSU=
|
||||
github.com/moby/sys/mountinfo v0.6.2 h1:BzJjoreD5BMFNmD9Rus6gdd1pLuecOFPt8wC+Vygl78=
|
||||
github.com/moby/sys/mountinfo v0.6.2/go.mod h1:IJb6JQeOklcdMU9F5xQ8ZALD+CUr5VlGpwtX+VE0rpI=
|
||||
github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc=
|
||||
github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo=
|
||||
github.com/moby/sys/signal v0.7.0 h1:25RW3d5TnQEoKvRbEKUGay6DCQ46IxAVTT9CUMgmsSI=
|
||||
@@ -546,8 +547,8 @@ github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs
|
||||
github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
|
||||
github.com/theupdateframework/notary v0.6.1 h1:7wshjstgS9x9F5LuB1L5mBI2xNMObWqjz+cjWoom6l0=
|
||||
github.com/theupdateframework/notary v0.6.1/go.mod h1:MOfgIfmox8s7/7fduvB2xyPPMJCrjRLRizA8OFwpnKY=
|
||||
github.com/tonistiigi/fsutil v0.0.0-20221114235510-0127568185cf h1:2n2v98sRhXEG0Kh7+EvctaNIyOim36Ekp4pGDzbuvO8=
|
||||
github.com/tonistiigi/fsutil v0.0.0-20221114235510-0127568185cf/go.mod h1:AvLEd1LEIl64G2Jpgwo7aVV5lGH0ePcKl0ygGIHNYl8=
|
||||
github.com/tonistiigi/fsutil v0.0.0-20230105215944-fb433841cbfa h1:XOFp/3aBXlqmOFAg3r6e0qQjPnK5I970LilqX+Is1W8=
|
||||
github.com/tonistiigi/fsutil v0.0.0-20230105215944-fb433841cbfa/go.mod h1:AvLEd1LEIl64G2Jpgwo7aVV5lGH0ePcKl0ygGIHNYl8=
|
||||
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/v/cCndK0AMpt1wiVFb/YYmqB3/QG0=
|
||||
github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk=
|
||||
github.com/tonistiigi/vt100 v0.0.0-20210615222946-8066bb97264f h1:DLpt6B5oaaS8jyXHa9VA4rrZloBVPVXeCtrOsrFauxc=
|
||||
@@ -780,6 +781,7 @@ golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20211116061358-0a5406a5449c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ=
|
||||
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
|
45 hack/release
@@ -2,27 +2,56 @@

set -eu -o pipefail

: "${GITHUB_ACTIONS=}"
: "${GITHUB_REPOSITORY=}"
: "${GITHUB_RUN_ID=}"

: "${BUILDX_CMD=docker buildx}"
: "${DESTDIR=./bin/release}"
: "${CACHE_FROM=}"
: "${CACHE_TO=}"
: "${PLATFORMS=}"

if [ -n "$CACHE_FROM" ]; then
  for cfrom in $CACHE_FROM; do
    cacheFlags+=(--set "*.cache-from=$cfrom")
    setFlags+=(--set "*.cache-from=$cfrom")
  done
fi
if [ -n "$CACHE_TO" ]; then
  for cto in $CACHE_TO; do
    cacheFlags+=(--set "*.cache-to=$cto")
    setFlags+=(--set "*.cache-to=$cto")
  done
fi
if [ -n "$PLATFORMS" ]; then
  setFlags+=(--set "*.platform=$PLATFORMS")
fi
if ${BUILDX_CMD} build --help 2>&1 | grep -- '--attest' >/dev/null; then
  prvattrs="mode=max"
  if [ "$GITHUB_ACTIONS" = "true" ]; then
    prvattrs="$prvattrs,builder-id=https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}"
  fi
  setFlags+=(--set "*.attest=type=sbom")
  setFlags+=(--set "*.attest=type=provenance,$prvattrs")
fi

# release
(set -x ; ${BUILDX_CMD} bake "${cacheFlags[@]}" --set "*.output=$DESTDIR" release)
output=$(mktemp -d -t buildx-output.XXXXXXXXXX)

# wrap binaries
mv -f ./${DESTDIR}/**/* ./${DESTDIR}/
find ./${DESTDIR} -type d -empty -delete
(
  set -x
  ${BUILDX_CMD} bake "${setFlags[@]}" --set "*.args.BUILDKIT_MULTI_PLATFORM=true" --set "*.output=$output" release
)

source ./hack/hash-files
for pdir in "${output}"/*/; do
  (
    cd "$pdir"
    binname=$(find . -name 'buildx-*')
    filename=$(basename "${binname%.exe}")
    mv "provenance.json" "${filename}.provenance.json"
    mv "sbom-binaries.spdx.json" "${filename}.sbom.json"
    find . -name 'sbom*.json' -exec rm {} \;
  )
done

mkdir -p "$DESTDIR"
mv "$output"/**/* "$DESTDIR/"
rm -rf "$output"
@@ -131,6 +131,14 @@ buildxCmd build ${buildPlatformFlag} \
  "${context}"
cat "${context}/metadata-build.json"

# load to docker store
if [ "$DRIVER" != "docker" ]; then
  buildxCmd build \
    --output="type=docker,name=buildx-test-load" \
    --builder="${builderName}" \
    "${context}"
fi

# create bake def
cat > "${bakedef}" <<EOL
group "default" {
@@ -29,6 +29,9 @@ func ParseAttests(in []string) (map[string]*string, error) {
        }

        k := "attest:" + attestType
        if _, ok := out[k]; ok {
            return nil, errors.Errorf("duplicate attestation field %s", attestType)
        }
        if enabled {
            out[k] = &in
        } else {
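For illustration only, a minimal standalone sketch of the duplicate check added above: each `type=<name>[,<attrs>]` entry maps to an `attest:<name>` key and a second entry for the same type is rejected. This mirrors the logic in the hunk rather than calling the buildx package itself; the helper and example inputs are hypothetical.

```go
package main

import (
	"fmt"
	"strings"
)

// parseAttests is a simplified sketch of the duplicate detection shown in the
// hunk above, not the buildx implementation.
func parseAttests(in []string) (map[string]string, error) {
	out := map[string]string{}
	for _, v := range in {
		attestType := strings.TrimPrefix(strings.SplitN(v, ",", 2)[0], "type=")
		k := "attest:" + attestType
		if _, ok := out[k]; ok {
			return nil, fmt.Errorf("duplicate attestation field %s", attestType)
		}
		out[k] = v
	}
	return out, nil
}

func main() {
	m, err := parseAttests([]string{"type=sbom", "type=provenance,mode=max"})
	fmt.Println(m, err)
	_, err = parseAttests([]string{"type=sbom", "type=sbom"})
	fmt.Println(err) // duplicate attestation field sbom
}
```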
42 util/gitutil/gitpath_unix.go (new file)
@@ -0,0 +1,42 @@
//go:build !windows
// +build !windows

package gitutil

import (
    "os"
    "os/exec"
    "path/filepath"

    "github.com/moby/sys/mountinfo"
)

func gitPath(wd string) (string, error) {
    // On WSL2 we need to check if the current working directory is mounted on
    // a Windows drive and if so, we need to use the Windows git executable.
    if os.Getenv("WSL_DISTRO_NAME") != "" && wd != "" {
        // ensure any symlinks are resolved
        wdPath, err := filepath.EvalSymlinks(wd)
        if err != nil {
            return "", err
        }
        mi, err := mountinfo.GetMounts(mountinfo.ParentsFilter(wdPath))
        if err != nil {
            return "", err
        }
        // find the longest mount point
        var idx, maxlen int
        for i := range mi {
            if len(mi[i].Mountpoint) > maxlen {
                maxlen = len(mi[i].Mountpoint)
                idx = i
            }
        }
        if mi[idx].FSType == "9p" {
            if p, err := exec.LookPath("git.exe"); err == nil {
                return p, nil
            }
        }
    }
    return exec.LookPath("git")
}
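The WSL2 check above keys off the filesystem type of the closest (longest) parent mount of the working directory. A standalone sketch of that lookup, not part of the change set, assuming a Linux host and a hypothetical path:

```go
package main

import (
	"fmt"

	"github.com/moby/sys/mountinfo"
)

// fsTypeOf reports the filesystem type of the longest (closest) parent mount
// of path, the same selection rule gitPath uses to detect Windows drvfs (9p)
// mounts under WSL2.
func fsTypeOf(path string) (string, error) {
	mounts, err := mountinfo.GetMounts(mountinfo.ParentsFilter(path))
	if err != nil {
		return "", err
	}
	if len(mounts) == 0 {
		return "", fmt.Errorf("no mounts found for %s", path)
	}
	best := mounts[0]
	for _, m := range mounts {
		if len(m.Mountpoint) > len(best.Mountpoint) {
			best = m
		}
	}
	return best.FSType, nil
}

func main() {
	fstype, err := fsTypeOf("/mnt/c/Users") // hypothetical WSL2 path
	fmt.Println(fstype, err)                // "9p" is what makes gitPath pick git.exe
}
```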
9 util/gitutil/gitpath_windows.go (new file)
@@ -0,0 +1,9 @@
package gitutil

import (
    "os/exec"
)

func gitPath(wd string) (string, error) {
    return exec.LookPath("git.exe")
}
@@ -11,8 +11,9 @@ import (

// Git represents an active git object
type Git struct {
    ctx context.Context
    wd  string
    ctx     context.Context
    wd      string
    gitpath string
}

// Option provides a variadic option for configuring the git client.
@@ -33,14 +34,22 @@ func WithWorkingDir(wd string) Option {
}

// New initializes a new git client
func New(opts ...Option) *Git {
func New(opts ...Option) (*Git, error) {
    var err error
    c := &Git{
        ctx: context.Background(),
    }

    for _, opt := range opts {
        opt(c)
    }
    return c

    c.gitpath, err = gitPath(c.wd)
    if err != nil {
        return nil, errors.New("git not found in PATH")
    }

    return c, nil
}

func (c *Git) IsInsideWorkTree() bool {
@@ -58,7 +67,15 @@ func (c *Git) RootDir() (string, error) {
}

func (c *Git) RemoteURL() (string, error) {
    return c.clean(c.run("ls-remote", "--get-url"))
    // Try to get the remote URL from the origin remote first
    if ru, err := c.clean(c.run("remote", "get-url", "origin")); err == nil && ru != "" {
        return ru, nil
    }
    // If that fails, try to get the remote URL from the upstream remote
    if ru, err := c.clean(c.run("remote", "get-url", "upstream")); err == nil && ru != "" {
        return ru, nil
    }
    return "", errors.New("no remote URL found for either origin or upstream")
}

func (c *Git) FullCommit() (string, error) {
@@ -89,16 +106,12 @@ func (c *Git) Tag() (string, error) {
}

func (c *Git) run(args ...string) (string, error) {
    if _, err := exec.LookPath("git"); err != nil {
        return "", errors.New("git not present in PATH")
    }

    var extraArgs = []string{
        "-c", "log.showSignature=false",
    }

    args = append(extraArgs, args...)
    cmd := exec.Command("git", args...)
    cmd := exec.CommandContext(c.ctx, c.gitpath, args...)
    if c.wd != "" {
        cmd.Dir = c.wd
    }
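As a usage sketch of the changed constructor (`New` now resolves the git binary up front and returns an error) and the new remote lookup order, assuming the buildx module path for the import:

```go
package main

import (
	"fmt"
	"log"

	"github.com/docker/buildx/util/gitutil"
)

func main() {
	// New now fails early when no git binary can be resolved
	// (git.exe is preferred on Windows and on WSL2 9p mounts).
	c, err := gitutil.New(gitutil.WithWorkingDir("."))
	if err != nil {
		log.Fatal(err) // "git not found in PATH"
	}

	// RemoteURL prefers the "origin" remote and falls back to "upstream".
	url, err := c.RemoteURL()
	if err != nil {
		log.Fatal(err) // "no remote URL found for either origin or upstream"
	}
	fmt.Println(url)
}
```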
@@ -7,7 +7,9 @@ import (
)

func TestGit(t *testing.T) {
    c := New()
    c, err := New()
    require.NoError(t, err)

    out, err := c.run("status")
    require.NoError(t, err)
    require.NotEmpty(t, out)
@@ -20,10 +22,12 @@ func TestGit(t *testing.T) {

func TestGitFullCommit(t *testing.T) {
    Mktmp(t)
    GitInit(t)
    GitCommit(t, "bar")
    c, err := New()
    require.NoError(t, err)

    GitInit(c, t)
    GitCommit(c, t, "bar")

    c := New()
    out, err := c.FullCommit()
    require.NoError(t, err)
    require.Equal(t, 40, len(out))
@@ -31,10 +35,12 @@

func TestGitShortCommit(t *testing.T) {
    Mktmp(t)
    GitInit(t)
    GitCommit(t, "bar")
    c, err := New()
    require.NoError(t, err)

    GitInit(c, t)
    GitCommit(c, t, "bar")

    c := New()
    out, err := c.ShortCommit()
    require.NoError(t, err)
    require.Equal(t, 7, len(out))
@@ -42,13 +48,15 @@

func TestGitTagsPointsAt(t *testing.T) {
    Mktmp(t)
    GitInit(t)
    GitCommit(t, "bar")
    GitTag(t, "v0.8.0")
    GitCommit(t, "foo")
    GitTag(t, "v0.9.0")
    c, err := New()
    require.NoError(t, err)

    GitInit(c, t)
    GitCommit(c, t, "bar")
    GitTag(c, t, "v0.8.0")
    GitCommit(c, t, "foo")
    GitTag(c, t, "v0.9.0")

    c := New()
    out, err := c.clean(c.run("tag", "--points-at", "HEAD", "--sort", "-version:creatordate"))
    require.NoError(t, err)
    require.Equal(t, "v0.9.0", out)
@@ -56,14 +64,102 @@

func TestGitDescribeTags(t *testing.T) {
    Mktmp(t)
    GitInit(t)
    GitCommit(t, "bar")
    GitTag(t, "v0.8.0")
    GitCommit(t, "foo")
    GitTag(t, "v0.9.0")
    c, err := New()
    require.NoError(t, err)

    GitInit(c, t)
    GitCommit(c, t, "bar")
    GitTag(c, t, "v0.8.0")
    GitCommit(c, t, "foo")
    GitTag(c, t, "v0.9.0")

    c := New()
    out, err := c.clean(c.run("describe", "--tags", "--abbrev=0"))
    require.NoError(t, err)
    require.Equal(t, "v0.9.0", out)
}

func TestGitRemoteURL(t *testing.T) {
    type remote struct {
        name string
        url  string
    }

    cases := []struct {
        name     string
        remotes  []remote
        expected string
        fail     bool
    }{
        {
            name:    "no remotes",
            remotes: []remote{},
            fail:    true,
        },
        {
            name: "origin",
            remotes: []remote{
                {
                    name: "origin",
                    url:  "git@github.com:crazy-max/buildx.git",
                },
            },
            expected: "git@github.com:crazy-max/buildx.git",
        },
        {
            name: "upstream",
            remotes: []remote{
                {
                    name: "upstream",
                    url:  "git@github.com:docker/buildx.git",
                },
            },
            expected: "git@github.com:docker/buildx.git",
        },
        {
            name: "origin and upstream",
            remotes: []remote{
                {
                    name: "upstream",
                    url:  "git@github.com:docker/buildx.git",
                },
                {
                    name: "origin",
                    url:  "git@github.com:crazy-max/buildx.git",
                },
            },
            expected: "git@github.com:crazy-max/buildx.git",
        },
        {
            name: "not found",
            remotes: []remote{
                {
                    name: "foo",
                    url:  "git@github.com:docker/buildx.git",
                },
            },
            fail: true,
        },
    }
    for _, tt := range cases {
        tt := tt
        t.Run(tt.name, func(t *testing.T) {
            Mktmp(t)
            c, err := New()
            require.NoError(t, err)

            GitInit(c, t)
            GitCommit(c, t, "initial commit")
            for _, r := range tt.remotes {
                GitSetRemote(c, t, r.name, r.url)
            }

            ru, err := c.RemoteURL()
            if tt.fail {
                require.Error(t, err)
                return
            }
            require.NoError(t, err)
            require.Equal(t, tt.expected, ru)
        })
    }
}
@@ -7,46 +7,46 @@ import (
    "github.com/stretchr/testify/require"
)

func GitInit(tb testing.TB) {
func GitInit(c *Git, tb testing.TB) {
    tb.Helper()
    out, err := fakeGit("init")
    out, err := fakeGit(c, "init")
    require.NoError(tb, err)
    require.Contains(tb, out, "Initialized empty Git repository")
    require.NoError(tb, err)
    GitCheckoutBranch(tb, "main")
    _, _ = fakeGit("branch", "-D", "master")
    GitCheckoutBranch(c, tb, "main")
    _, _ = fakeGit(c, "branch", "-D", "master")
}

func GitCommit(tb testing.TB, msg string) {
func GitCommit(c *Git, tb testing.TB, msg string) {
    tb.Helper()
    out, err := fakeGit("commit", "--allow-empty", "-m", msg)
    out, err := fakeGit(c, "commit", "--allow-empty", "-m", msg)
    require.NoError(tb, err)
    require.Contains(tb, out, "main", msg)
}

func GitTag(tb testing.TB, tag string) {
func GitTag(c *Git, tb testing.TB, tag string) {
    tb.Helper()
    out, err := fakeGit("tag", tag)
    out, err := fakeGit(c, "tag", tag)
    require.NoError(tb, err)
    require.Empty(tb, out)
}

func GitCheckoutBranch(tb testing.TB, name string) {
func GitCheckoutBranch(c *Git, tb testing.TB, name string) {
    tb.Helper()
    out, err := fakeGit("checkout", "-b", name)
    out, err := fakeGit(c, "checkout", "-b", name)
    require.NoError(tb, err)
    require.Empty(tb, out)
}

func GitAdd(tb testing.TB, file string) {
func GitAdd(c *Git, tb testing.TB, file string) {
    tb.Helper()
    _, err := fakeGit("add", file)
    _, err := fakeGit(c, "add", file)
    require.NoError(tb, err)
}

func GitSetRemote(tb testing.TB, url string) {
func GitSetRemote(c *Git, tb testing.TB, name string, url string) {
    tb.Helper()
    _, err := fakeGit("remote", "add", "origin", url)
    _, err := fakeGit(c, "remote", "add", name, url)
    require.NoError(tb, err)
}

@@ -62,7 +62,7 @@ func Mktmp(tb testing.TB) string {
    return folder
}

func fakeGit(args ...string) (string, error) {
func fakeGit(c *Git, args ...string) (string, error) {
    allArgs := []string{
        "-c", "user.name=buildx",
        "-c", "user.email=buildx@docker.com",
@@ -71,6 +71,5 @@ func fakeGit(args ...string) (string, error) {
        "-c", "log.showSignature=false",
    }
    allArgs = append(allArgs, args...)
    c := New()
    return c.clean(c.run(allArgs...))
}
@@ -11,6 +11,7 @@ import (
 	"github.com/containerd/containerd/errdefs"
 	"github.com/containerd/containerd/images"
 	"github.com/containerd/containerd/platforms"
+	"github.com/containerd/containerd/remotes"
 	"github.com/docker/distribution/reference"
 	"github.com/moby/buildkit/util/contentutil"
 	"github.com/opencontainers/go-digest"
@@ -151,8 +152,9 @@ func (r *Resolver) Combine(ctx context.Context, srcs []*Source) ([]byte, ocispec
 }
 
 func (r *Resolver) Push(ctx context.Context, ref reference.Named, desc ocispec.Descriptor, dt []byte) error {
-	ref = reference.TagNameOnly(ref)
+	ctx = remotes.WithMediaTypeKeyPrefix(ctx, "application/vnd.in-toto+json", "intoto")
+
+	ref = reference.TagNameOnly(ref)
 	p, err := r.resolver().Pusher(ctx, ref.String())
 	if err != nil {
 		return err
@@ -173,6 +175,8 @@ func (r *Resolver) Push(ctx context.Context, ref reference.Named, desc ocispec.D
 }
 
 func (r *Resolver) Copy(ctx context.Context, src *Source, dest reference.Named) error {
+	ctx = remotes.WithMediaTypeKeyPrefix(ctx, "application/vnd.in-toto+json", "intoto")
+
 	dest = reference.TagNameOnly(dest)
 	p, err := r.resolver().Pusher(ctx, dest.String())
 	if err != nil {
406 util/imagetools/loader.go Normal file
@@ -0,0 +1,406 @@
|
||||
package imagetools
|
||||
|
||||
// TODO: replace with go-imageinspect library when public
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/containerd/containerd/content"
|
||||
"github.com/containerd/containerd/images"
|
||||
"github.com/containerd/containerd/platforms"
|
||||
"github.com/containerd/containerd/remotes"
|
||||
"github.com/docker/distribution/reference"
|
||||
"github.com/moby/buildkit/util/contentutil"
|
||||
"github.com/opencontainers/go-digest"
|
||||
ocispec "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
"github.com/pkg/errors"
|
||||
"golang.org/x/sync/errgroup"
|
||||
)
|
||||
|
||||
const (
|
||||
annotationReference = "vnd.docker.reference.digest"
|
||||
)
|
||||
|
||||
type contentCache interface {
|
||||
content.Provider
|
||||
content.Ingester
|
||||
}
|
||||
|
||||
type loader struct {
|
||||
resolver remotes.Resolver
|
||||
cache contentCache
|
||||
}
|
||||
|
||||
type manifest struct {
|
||||
desc ocispec.Descriptor
|
||||
manifest ocispec.Manifest
|
||||
}
|
||||
|
||||
type index struct {
|
||||
desc ocispec.Descriptor
|
||||
index ocispec.Index
|
||||
}
|
||||
|
||||
type asset struct {
|
||||
config *ocispec.Image
|
||||
sbom *sbomStub
|
||||
provenance *provenanceStub
|
||||
|
||||
deferredSbom func() (*sbomStub, error)
|
||||
deferredProvenance func() (*provenanceStub, error)
|
||||
}
|
||||
|
||||
type result struct {
|
||||
mu sync.Mutex
|
||||
indexes map[digest.Digest]index
|
||||
manifests map[digest.Digest]manifest
|
||||
images map[string]digest.Digest
|
||||
refs map[digest.Digest][]digest.Digest
|
||||
|
||||
platforms []string
|
||||
assets map[string]asset
|
||||
}
|
||||
|
||||
func newLoader(resolver remotes.Resolver) *loader {
|
||||
return &loader{
|
||||
resolver: resolver,
|
||||
cache: contentutil.NewBuffer(),
|
||||
}
|
||||
}
|
||||
|
||||
func (l *loader) Load(ctx context.Context, ref string) (*result, error) {
|
||||
named, err := parseRef(ref)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
_, desc, err := l.resolver.Resolve(ctx, named.String())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
canonical, err := reference.WithDigest(named, desc.Digest)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
fetcher, err := l.resolver.Fetcher(ctx, canonical.String())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
r := &result{
|
||||
indexes: make(map[digest.Digest]index),
|
||||
manifests: make(map[digest.Digest]manifest),
|
||||
images: make(map[string]digest.Digest),
|
||||
refs: make(map[digest.Digest][]digest.Digest),
|
||||
assets: make(map[string]asset),
|
||||
}
|
||||
|
||||
if err := l.fetch(ctx, fetcher, desc, r); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for platform, dgst := range r.images {
|
||||
r.platforms = append(r.platforms, platform)
|
||||
|
||||
mfst, ok := r.manifests[dgst]
|
||||
if !ok {
|
||||
return nil, errors.Errorf("image %s not found", platform)
|
||||
}
|
||||
|
||||
var a asset
|
||||
annotations := make(map[string]string, len(mfst.manifest.Annotations)+len(mfst.desc.Annotations))
|
||||
for k, v := range mfst.desc.Annotations {
|
||||
annotations[k] = v
|
||||
}
|
||||
for k, v := range mfst.manifest.Annotations {
|
||||
annotations[k] = v
|
||||
}
|
||||
|
||||
if err := l.scanConfig(ctx, fetcher, mfst.manifest.Config, &a); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
refs, ok := r.refs[dgst]
|
||||
if ok {
|
||||
if err := l.scanSBOM(ctx, fetcher, r, refs, &a); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if err := l.scanProvenance(ctx, fetcher, r, refs, &a); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
r.assets[platform] = a
|
||||
}
|
||||
|
||||
sort.Strings(r.platforms)
|
||||
return r, nil
|
||||
}
|
||||
|
||||
func (l *loader) fetch(ctx context.Context, fetcher remotes.Fetcher, desc ocispec.Descriptor, r *result) error {
|
||||
_, err := remotes.FetchHandler(l.cache, fetcher)(ctx, desc)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
switch desc.MediaType {
|
||||
case images.MediaTypeDockerSchema2Manifest, ocispec.MediaTypeImageManifest:
|
||||
var mfst ocispec.Manifest
|
||||
dt, err := content.ReadBlob(ctx, l.cache, desc)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := json.Unmarshal(dt, &mfst); err != nil {
|
||||
return err
|
||||
}
|
||||
r.mu.Lock()
|
||||
r.manifests[desc.Digest] = manifest{
|
||||
desc: desc,
|
||||
manifest: mfst,
|
||||
}
|
||||
r.mu.Unlock()
|
||||
|
||||
ref, ok := desc.Annotations[annotationReference]
|
||||
if ok {
|
||||
refdgst, err := digest.Parse(ref)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
r.mu.Lock()
|
||||
r.refs[refdgst] = append(r.refs[refdgst], desc.Digest)
|
||||
r.mu.Unlock()
|
||||
} else {
|
||||
p := desc.Platform
|
||||
if p == nil {
|
||||
p, err = l.readPlatformFromConfig(ctx, fetcher, mfst.Config)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
r.mu.Lock()
|
||||
r.images[platforms.Format(platforms.Normalize(*p))] = desc.Digest
|
||||
r.mu.Unlock()
|
||||
}
|
||||
case images.MediaTypeDockerSchema2ManifestList, ocispec.MediaTypeImageIndex:
|
||||
var idx ocispec.Index
|
||||
dt, err := content.ReadBlob(ctx, l.cache, desc)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := json.Unmarshal(dt, &idx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r.mu.Lock()
|
||||
r.indexes[desc.Digest] = index{
|
||||
desc: desc,
|
||||
index: idx,
|
||||
}
|
||||
r.mu.Unlock()
|
||||
|
||||
eg, ctx := errgroup.WithContext(ctx)
|
||||
for _, d := range idx.Manifests {
|
||||
d := d
|
||||
eg.Go(func() error {
|
||||
return l.fetch(ctx, fetcher, d, r)
|
||||
})
|
||||
}
|
||||
|
||||
if err := eg.Wait(); err != nil {
|
||||
return err
|
||||
}
|
||||
default:
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (l *loader) readPlatformFromConfig(ctx context.Context, fetcher remotes.Fetcher, desc ocispec.Descriptor) (*ocispec.Platform, error) {
|
||||
_, err := remotes.FetchHandler(l.cache, fetcher)(ctx, desc)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
dt, err := content.ReadBlob(ctx, l.cache, desc)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var config ocispec.Image
|
||||
if err := json.Unmarshal(dt, &config); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &ocispec.Platform{
|
||||
OS: config.OS,
|
||||
Architecture: config.Architecture,
|
||||
Variant: config.Variant,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (l *loader) scanConfig(ctx context.Context, fetcher remotes.Fetcher, desc ocispec.Descriptor, as *asset) error {
|
||||
_, err := remotes.FetchHandler(l.cache, fetcher)(ctx, desc)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
dt, err := content.ReadBlob(ctx, l.cache, desc)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return json.Unmarshal(dt, &as.config)
|
||||
}
|
||||
|
||||
type sbomStub struct {
|
||||
SPDX interface{} `json:",omitempty"`
|
||||
AdditionalSPDXs []interface{} `json:",omitempty"`
|
||||
}
|
||||
|
||||
func (l *loader) scanSBOM(ctx context.Context, fetcher remotes.Fetcher, r *result, refs []digest.Digest, as *asset) error {
|
||||
ctx = remotes.WithMediaTypeKeyPrefix(ctx, "application/vnd.in-toto+json", "intoto")
|
||||
as.deferredSbom = func() (*sbomStub, error) {
|
||||
var sbom *sbomStub
|
||||
for _, dgst := range refs {
|
||||
mfst, ok := r.manifests[dgst]
|
||||
if !ok {
|
||||
return nil, errors.Errorf("referenced image %s not found", dgst)
|
||||
}
|
||||
for _, layer := range mfst.manifest.Layers {
|
||||
if layer.MediaType == "application/vnd.in-toto+json" && layer.Annotations["in-toto.io/predicate-type"] == "https://spdx.dev/Document" {
|
||||
_, err := remotes.FetchHandler(l.cache, fetcher)(ctx, layer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
dt, err := content.ReadBlob(ctx, l.cache, layer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var spdx struct {
|
||||
Predicate interface{} `json:"predicate"`
|
||||
}
|
||||
if err := json.Unmarshal(dt, &spdx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if sbom == nil {
|
||||
sbom = &sbomStub{}
|
||||
sbom.SPDX = spdx.Predicate
|
||||
} else {
|
||||
sbom.AdditionalSPDXs = append(sbom.AdditionalSPDXs, spdx.Predicate)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return sbom, nil
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type provenanceStub struct {
|
||||
SLSA interface{} `json:",omitempty"`
|
||||
}
|
||||
|
||||
func (l *loader) scanProvenance(ctx context.Context, fetcher remotes.Fetcher, r *result, refs []digest.Digest, as *asset) error {
|
||||
ctx = remotes.WithMediaTypeKeyPrefix(ctx, "application/vnd.in-toto+json", "intoto")
|
||||
as.deferredProvenance = func() (*provenanceStub, error) {
|
||||
var provenance *provenanceStub
|
||||
for _, dgst := range refs {
|
||||
mfst, ok := r.manifests[dgst]
|
||||
if !ok {
|
||||
return nil, errors.Errorf("referenced image %s not found", dgst)
|
||||
}
|
||||
for _, layer := range mfst.manifest.Layers {
|
||||
if layer.MediaType == "application/vnd.in-toto+json" && strings.HasPrefix(layer.Annotations["in-toto.io/predicate-type"], "https://slsa.dev/provenance/") {
|
||||
_, err := remotes.FetchHandler(l.cache, fetcher)(ctx, layer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
dt, err := content.ReadBlob(ctx, l.cache, layer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var slsa struct {
|
||||
Predicate interface{} `json:"predicate"`
|
||||
}
|
||||
if err := json.Unmarshal(dt, &slsa); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
provenance = &provenanceStub{
|
||||
SLSA: slsa.Predicate,
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return provenance, nil
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *result) Configs() map[string]*ocispec.Image {
|
||||
if len(r.assets) == 0 {
|
||||
return nil
|
||||
}
|
||||
res := make(map[string]*ocispec.Image)
|
||||
for p, a := range r.assets {
|
||||
if a.config == nil {
|
||||
continue
|
||||
}
|
||||
res[p] = a.config
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func (r *result) Provenance() (map[string]provenanceStub, error) {
|
||||
if len(r.assets) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
res := make(map[string]provenanceStub)
|
||||
for p, a := range r.assets {
|
||||
if a.deferredProvenance == nil {
|
||||
continue
|
||||
}
|
||||
if a.provenance == nil {
|
||||
provenance, err := a.deferredProvenance()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if provenance == nil {
|
||||
continue
|
||||
}
|
||||
a.provenance = provenance
|
||||
}
|
||||
res[p] = *a.provenance
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (r *result) SBOM() (map[string]sbomStub, error) {
|
||||
if len(r.assets) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
res := make(map[string]sbomStub)
|
||||
for p, a := range r.assets {
|
||||
if a.deferredSbom == nil {
|
||||
continue
|
||||
}
|
||||
if a.sbom == nil {
|
||||
sbom, err := a.deferredSbom()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if sbom == nil {
|
||||
continue
|
||||
}
|
||||
a.sbom = sbom
|
||||
}
|
||||
res[p] = *a.sbom
|
||||
}
|
||||
return res, nil
|
||||
}
|
@@ -6,20 +6,15 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"text/tabwriter"
|
||||
"text/template"
|
||||
|
||||
"github.com/containerd/containerd/images"
|
||||
"github.com/containerd/containerd/platforms"
|
||||
"github.com/docker/distribution/reference"
|
||||
binfotypes "github.com/moby/buildkit/util/buildinfo/types"
|
||||
"github.com/moby/buildkit/util/imageutil"
|
||||
"github.com/opencontainers/go-digest"
|
||||
ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
"golang.org/x/sync/errgroup"
|
||||
)
|
||||
|
||||
const defaultPfx = " "
|
||||
@@ -31,11 +26,10 @@ type Printer struct {
|
||||
name string
|
||||
format string
|
||||
|
||||
raw []byte
|
||||
ref reference.Named
|
||||
manifest ocispecs.Descriptor
|
||||
index ocispecs.Index
|
||||
platforms []ocispecs.Platform
|
||||
raw []byte
|
||||
ref reference.Named
|
||||
manifest ocispecs.Descriptor
|
||||
index ocispecs.Index
|
||||
}
|
||||
|
||||
func NewPrinter(ctx context.Context, opt Opt, name string, format string) (*Printer, error) {
|
||||
@@ -46,38 +40,25 @@ func NewPrinter(ctx context.Context, opt Opt, name string, format string) (*Prin
|
||||
return nil, err
|
||||
}
|
||||
|
||||
dt, manifest, err := resolver.Get(ctx, name)
|
||||
dt, mfst, err := resolver.Get(ctx, ref.String())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var index ocispecs.Index
|
||||
if err = json.Unmarshal(dt, &index); err != nil {
|
||||
var idx ocispecs.Index
|
||||
if err = json.Unmarshal(dt, &idx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var pforms []ocispecs.Platform
|
||||
switch manifest.MediaType {
|
||||
case images.MediaTypeDockerSchema2ManifestList, ocispecs.MediaTypeImageIndex:
|
||||
for _, m := range index.Manifests {
|
||||
if m.Platform != nil {
|
||||
pforms = append(pforms, *m.Platform)
|
||||
}
|
||||
}
|
||||
default:
|
||||
pforms = append(pforms, platforms.DefaultSpec())
|
||||
}
|
||||
|
||||
return &Printer{
|
||||
ctx: ctx,
|
||||
resolver: resolver,
|
||||
name: name,
|
||||
format: format,
|
||||
raw: dt,
|
||||
ref: ref,
|
||||
manifest: manifest,
|
||||
index: index,
|
||||
platforms: pforms,
|
||||
ctx: ctx,
|
||||
resolver: resolver,
|
||||
name: name,
|
||||
format: format,
|
||||
raw: dt,
|
||||
ref: ref,
|
||||
manifest: mfst,
|
||||
index: idx,
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -102,6 +83,11 @@ func (p *Printer) Print(raw bool, out io.Writer) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
res, err := newLoader(p.resolver.resolver()).Load(p.ctx, p.name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tpl, err := template.New("").Funcs(template.FuncMap{
|
||||
"json": func(v interface{}) string {
|
||||
b, _ := json.MarshalIndent(v, "", " ")
|
||||
@@ -112,46 +98,15 @@ func (p *Printer) Print(raw bool, out io.Writer) error {
|
||||
return err
|
||||
}
|
||||
|
||||
imageconfigs := make(map[string]*ocispecs.Image)
|
||||
imageconfigsMutex := sync.Mutex{}
|
||||
buildinfos := make(map[string]*binfotypes.BuildInfo)
|
||||
buildinfosMutex := sync.Mutex{}
|
||||
|
||||
eg, _ := errgroup.WithContext(p.ctx)
|
||||
for _, platform := range p.platforms {
|
||||
func(platform ocispecs.Platform) {
|
||||
eg.Go(func() error {
|
||||
img, dtic, err := p.getImageConfig(&platform)
|
||||
if err != nil {
|
||||
return err
|
||||
} else if img != nil {
|
||||
imageconfigsMutex.Lock()
|
||||
imageconfigs[platforms.Format(platform)] = img
|
||||
imageconfigsMutex.Unlock()
|
||||
}
|
||||
if bi, err := imageutil.BuildInfo(dtic); err != nil {
|
||||
return err
|
||||
} else if bi != nil {
|
||||
buildinfosMutex.Lock()
|
||||
buildinfos[platforms.Format(platform)] = bi
|
||||
buildinfosMutex.Unlock()
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}(platform)
|
||||
}
|
||||
if err := eg.Wait(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
imageconfigs := res.Configs()
|
||||
format := tpl.Root.String()
|
||||
|
||||
var manifest interface{}
|
||||
var mfst interface{}
|
||||
switch p.manifest.MediaType {
|
||||
case images.MediaTypeDockerSchema2Manifest, ocispecs.MediaTypeImageManifest:
|
||||
manifest = p.manifest
|
||||
mfst = p.manifest
|
||||
case images.MediaTypeDockerSchema2ManifestList, ocispecs.MediaTypeImageIndex:
|
||||
manifest = struct {
|
||||
mfst = struct {
|
||||
SchemaVersion int `json:"schemaVersion"`
|
||||
MediaType string `json:"mediaType,omitempty"`
|
||||
Digest digest.Digest `json:"digest"`
|
||||
@@ -170,10 +125,11 @@ func (p *Printer) Print(raw bool, out io.Writer) error {
|
||||
|
||||
switch {
|
||||
// TODO: print formatted config
|
||||
case strings.HasPrefix(format, "{{.Manifest"), strings.HasPrefix(format, "{{.BuildInfo"):
|
||||
case strings.HasPrefix(format, "{{.Manifest"):
|
||||
w := tabwriter.NewWriter(out, 0, 0, 1, ' ', 0)
|
||||
_, _ = fmt.Fprintf(w, "Name:\t%s\n", p.ref.String())
|
||||
if strings.HasPrefix(format, "{{.Manifest") {
|
||||
switch {
|
||||
case strings.HasPrefix(format, "{{.Manifest"):
|
||||
_, _ = fmt.Fprintf(w, "MediaType:\t%s\n", p.manifest.MediaType)
|
||||
_, _ = fmt.Fprintf(w, "Digest:\t%s\n", p.manifest.Digest)
|
||||
_ = w.Flush()
|
||||
@@ -181,42 +137,25 @@ func (p *Printer) Print(raw bool, out io.Writer) error {
|
||||
case images.MediaTypeDockerSchema2ManifestList, ocispecs.MediaTypeImageIndex:
|
||||
_ = p.printManifestList(out)
|
||||
}
|
||||
} else if strings.HasPrefix(format, "{{.BuildInfo") {
|
||||
_ = w.Flush()
|
||||
_ = p.printBuildInfos(buildinfos, out)
|
||||
}
|
||||
default:
|
||||
if len(p.platforms) > 1 {
|
||||
return tpl.Execute(out, struct {
|
||||
Name string `json:"name,omitempty"`
|
||||
Manifest interface{} `json:"manifest,omitempty"`
|
||||
Image map[string]*ocispecs.Image `json:"image,omitempty"`
|
||||
BuildInfo map[string]*binfotypes.BuildInfo `json:"buildinfo,omitempty"`
|
||||
}{
|
||||
Name: p.name,
|
||||
Manifest: manifest,
|
||||
Image: imageconfigs,
|
||||
BuildInfo: buildinfos,
|
||||
if len(res.platforms) > 1 {
|
||||
return tpl.Execute(out, tplInputs{
|
||||
Name: p.name,
|
||||
Manifest: mfst,
|
||||
Image: imageconfigs,
|
||||
result: res,
|
||||
})
|
||||
}
|
||||
var ic *ocispecs.Image
|
||||
for _, v := range imageconfigs {
|
||||
ic = v
|
||||
}
|
||||
var bi *binfotypes.BuildInfo
|
||||
for _, v := range buildinfos {
|
||||
bi = v
|
||||
}
|
||||
return tpl.Execute(out, struct {
|
||||
Name string `json:"name,omitempty"`
|
||||
Manifest interface{} `json:"manifest,omitempty"`
|
||||
Image *ocispecs.Image `json:"image,omitempty"`
|
||||
BuildInfo *binfotypes.BuildInfo `json:"buildinfo,omitempty"`
|
||||
}{
|
||||
Name: p.name,
|
||||
Manifest: manifest,
|
||||
Image: ic,
|
||||
BuildInfo: bi,
|
||||
return tpl.Execute(out, tplInput{
|
||||
Name: p.name,
|
||||
Manifest: mfst,
|
||||
Image: ic,
|
||||
result: res,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -252,6 +191,7 @@ func (p *Printer) printManifestList(out io.Writer) error {
|
||||
_, _ = fmt.Fprintf(w, "%sURLs:\t%s\n", defaultPfx, strings.Join(m.URLs, ", "))
|
||||
}
|
||||
if len(m.Annotations) > 0 {
|
||||
_, _ = fmt.Fprintf(w, "%sAnnotations:\t\n", defaultPfx)
|
||||
_ = w.Flush()
|
||||
w2 := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0)
|
||||
for k, v := range m.Annotations {
|
||||
@@ -264,82 +204,48 @@ func (p *Printer) printManifestList(out io.Writer) error {
|
||||
return w.Flush()
|
||||
}
|
||||
|
||||
func (p *Printer) printBuildInfos(bis map[string]*binfotypes.BuildInfo, out io.Writer) error {
|
||||
if len(bis) == 0 {
|
||||
return nil
|
||||
} else if len(bis) == 1 {
|
||||
for _, bi := range bis {
|
||||
return p.printBuildInfo(bi, "", out)
|
||||
}
|
||||
}
|
||||
var pkeys []string
|
||||
for _, pform := range p.platforms {
|
||||
pkeys = append(pkeys, platforms.Format(pform))
|
||||
}
|
||||
sort.Strings(pkeys)
|
||||
for _, platform := range pkeys {
|
||||
bi := bis[platform]
|
||||
w := tabwriter.NewWriter(out, 0, 0, 1, ' ', 0)
|
||||
_, _ = fmt.Fprintf(w, "\t\nPlatform:\t%s\t\n", platform)
|
||||
_ = w.Flush()
|
||||
if err := p.printBuildInfo(bi, "", out); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
type tplInput struct {
|
||||
Name string `json:"name,omitempty"`
|
||||
Manifest interface{} `json:"manifest,omitempty"`
|
||||
Image *ocispecs.Image `json:"image,omitempty"`
|
||||
|
||||
result *result
|
||||
}
|
||||
|
||||
func (p *Printer) printBuildInfo(bi *binfotypes.BuildInfo, pfx string, out io.Writer) error {
|
||||
w := tabwriter.NewWriter(out, 0, 0, 1, ' ', 0)
|
||||
_, _ = fmt.Fprintf(w, "%sFrontend:\t%s\n", pfx, bi.Frontend)
|
||||
|
||||
if len(bi.Attrs) > 0 {
|
||||
_, _ = fmt.Fprintf(w, "%sAttrs:\t\n", pfx)
|
||||
_ = w.Flush()
|
||||
for k, v := range bi.Attrs {
|
||||
_, _ = fmt.Fprintf(w, "%s%s:\t%s\n", pfx+defaultPfx, k, *v)
|
||||
}
|
||||
}
|
||||
|
||||
if len(bi.Sources) > 0 {
|
||||
_, _ = fmt.Fprintf(w, "%sSources:\t\n", pfx)
|
||||
_ = w.Flush()
|
||||
for i, v := range bi.Sources {
|
||||
if i != 0 {
|
||||
_, _ = fmt.Fprintf(w, "\t\n")
|
||||
}
|
||||
_, _ = fmt.Fprintf(w, "%sType:\t%s\n", pfx+defaultPfx, v.Type)
|
||||
_, _ = fmt.Fprintf(w, "%sRef:\t%s\n", pfx+defaultPfx, v.Ref)
|
||||
_, _ = fmt.Fprintf(w, "%sPin:\t%s\n", pfx+defaultPfx, v.Pin)
|
||||
}
|
||||
}
|
||||
|
||||
if len(bi.Deps) > 0 {
|
||||
_, _ = fmt.Fprintf(w, "%sDeps:\t\n", pfx)
|
||||
_ = w.Flush()
|
||||
firstPass := true
|
||||
for k, v := range bi.Deps {
|
||||
if !firstPass {
|
||||
_, _ = fmt.Fprintf(w, "\t\n")
|
||||
}
|
||||
_, _ = fmt.Fprintf(w, "%sName:\t%s\n", pfx+defaultPfx, k)
|
||||
_ = w.Flush()
|
||||
_ = p.printBuildInfo(&v, pfx+defaultPfx, out)
|
||||
firstPass = false
|
||||
}
|
||||
}
|
||||
|
||||
return w.Flush()
|
||||
}
|
||||
|
||||
func (p *Printer) getImageConfig(platform *ocispecs.Platform) (*ocispecs.Image, []byte, error) {
|
||||
_, dtic, err := p.resolver.ImageConfig(p.ctx, p.name, platform)
|
||||
func (inp tplInput) SBOM() (sbomStub, error) {
|
||||
sbom, err := inp.result.SBOM()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
return sbomStub{}, nil
|
||||
}
|
||||
var img *ocispecs.Image
|
||||
if err = json.Unmarshal(dtic, &img); err != nil {
|
||||
return nil, nil, err
|
||||
for _, v := range sbom {
|
||||
return v, nil
|
||||
}
|
||||
return img, dtic, nil
|
||||
return sbomStub{}, nil
|
||||
}
|
||||
|
||||
func (inp tplInput) Provenance() (provenanceStub, error) {
|
||||
provenance, err := inp.result.Provenance()
|
||||
if err != nil {
|
||||
return provenanceStub{}, nil
|
||||
}
|
||||
for _, v := range provenance {
|
||||
return v, nil
|
||||
}
|
||||
return provenanceStub{}, nil
|
||||
}
|
||||
|
||||
type tplInputs struct {
|
||||
Name string `json:"name,omitempty"`
|
||||
Manifest interface{} `json:"manifest,omitempty"`
|
||||
Image map[string]*ocispecs.Image `json:"image,omitempty"`
|
||||
|
||||
result *result
|
||||
}
|
||||
|
||||
func (inp tplInputs) SBOM() (map[string]sbomStub, error) {
|
||||
return inp.result.SBOM()
|
||||
}
|
||||
|
||||
func (inp tplInputs) Provenance() (map[string]provenanceStub, error) {
|
||||
return inp.result.Provenance()
|
||||
}
|
||||
|
40 vendor/github.com/containerd/containerd/remotes/docker/pusher.go generated vendored
@@ -377,17 +377,24 @@ func (pw *pushWriter) Write(p []byte) (n int, err error) {
|
||||
|
||||
// If content has already been written, the bytes
|
||||
// cannot be written and the caller must reset
|
||||
if status.Offset > 0 {
|
||||
status.Offset = 0
|
||||
status.UpdatedAt = time.Now()
|
||||
pw.tracker.SetStatus(pw.ref, status)
|
||||
return 0, content.ErrReset
|
||||
}
|
||||
status.Offset = 0
|
||||
status.UpdatedAt = time.Now()
|
||||
pw.tracker.SetStatus(pw.ref, status)
|
||||
return 0, content.ErrReset
|
||||
default:
|
||||
}
|
||||
}
|
||||
|
||||
n, err = pw.pipe.Write(p)
|
||||
if errors.Is(err, io.ErrClosedPipe) {
|
||||
// if the pipe is closed, we might have the original error on the error
|
||||
// channel - so we should try and get it
|
||||
select {
|
||||
case err2 := <-pw.errC:
|
||||
err = err2
|
||||
default:
|
||||
}
|
||||
}
|
||||
status.Offset += int64(n)
|
||||
status.UpdatedAt = time.Now()
|
||||
pw.tracker.SetStatus(pw.ref, status)
|
||||
@@ -428,7 +435,7 @@ func (pw *pushWriter) Digest() digest.Digest {
|
||||
|
||||
func (pw *pushWriter) Commit(ctx context.Context, size int64, expected digest.Digest, opts ...content.Opt) error {
|
||||
// Check whether read has already thrown an error
|
||||
if _, err := pw.pipe.Write([]byte{}); err != nil && err != io.ErrClosedPipe {
|
||||
if _, err := pw.pipe.Write([]byte{}); err != nil && !errors.Is(err, io.ErrClosedPipe) {
|
||||
return fmt.Errorf("pipe error before commit: %w", err)
|
||||
}
|
||||
|
||||
@@ -439,9 +446,7 @@ func (pw *pushWriter) Commit(ctx context.Context, size int64, expected digest.Di
|
||||
var resp *http.Response
|
||||
select {
|
||||
case err := <-pw.errC:
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return err
|
||||
case resp = <-pw.respC:
|
||||
defer resp.Body.Close()
|
||||
case p, ok := <-pw.pipeC:
|
||||
@@ -453,18 +458,17 @@ func (pw *pushWriter) Commit(ctx context.Context, size int64, expected digest.Di
|
||||
}
|
||||
pw.pipe.CloseWithError(content.ErrReset)
|
||||
pw.pipe = p
|
||||
|
||||
// If content has already been written, the bytes
|
||||
// cannot be written again and the caller must reset
|
||||
status, err := pw.tracker.GetStatus(pw.ref)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// If content has already been written, the bytes
|
||||
// cannot be written again and the caller must reset
|
||||
if status.Offset > 0 {
|
||||
status.Offset = 0
|
||||
status.UpdatedAt = time.Now()
|
||||
pw.tracker.SetStatus(pw.ref, status)
|
||||
return content.ErrReset
|
||||
}
|
||||
status.Offset = 0
|
||||
status.UpdatedAt = time.Now()
|
||||
pw.tracker.SetStatus(pw.ref, status)
|
||||
return content.ErrReset
|
||||
}
|
||||
|
||||
// 201 is specified return status, some registries return
|
||||
|
2 vendor/github.com/containerd/containerd/version/version.go generated vendored
@@ -23,7 +23,7 @@ var (
 	Package = "github.com/containerd/containerd"
 
 	// Version holds the complete version number. Filled in at linking time.
-	Version = "1.6.11+unknown"
+	Version = "1.6.15+unknown"
 
 	// Revision is filled with the VCS (e.g. git) revision being used to build
 	// the program at linking time.
2 vendor/github.com/docker/cli-docs-tool/README.md generated vendored
@@ -1,5 +1,5 @@
[](https://pkg.go.dev/github.com/docker/cli-docs-tool)
[](https://github.com/docker/cli-docs-tool/actions?query=workflow%3Atest)
[](https://github.com/docker/cli-docs-tool/actions?query=workflow%3Atest)
[](https://goreportcard.com/report/github.com/docker/cli-docs-tool)

## About
84 vendor/github.com/docker/cli-docs-tool/clidocstool_md.go generated vendored
@@ -20,7 +20,9 @@ import (
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
"text/tabwriter"
|
||||
"text/template"
|
||||
|
||||
"github.com/docker/cli-docs-tool/annotation"
|
||||
@@ -28,6 +30,11 @@ import (
|
||||
"github.com/spf13/pflag"
|
||||
)
|
||||
|
||||
var (
|
||||
nlRegexp = regexp.MustCompile(`\r?\n`)
|
||||
adjustSep = regexp.MustCompile(`\|:---(\s+)`)
|
||||
)
|
||||
|
||||
// GenMarkdownTree will generate a markdown page for this command and all
|
||||
// descendants in the directory given.
|
||||
func (c *Client) GenMarkdownTree(cmd *cobra.Command) error {
|
||||
@@ -144,6 +151,42 @@ func mdMakeLink(txt, link string, f *pflag.Flag, isAnchor bool) string {
|
||||
return "[" + txt + "](" + link + ")"
|
||||
}
|
||||
|
||||
type mdTable struct {
|
||||
out *strings.Builder
|
||||
tabWriter *tabwriter.Writer
|
||||
}
|
||||
|
||||
func newMdTable(headers ...string) *mdTable {
|
||||
w := &strings.Builder{}
|
||||
t := &mdTable{
|
||||
out: w,
|
||||
// Using tabwriter.Debug, which uses "|" as separator instead of tabs,
|
||||
// which is what we want. It's a bit of a hack, but does the job :)
|
||||
tabWriter: tabwriter.NewWriter(w, 5, 5, 1, ' ', tabwriter.Debug),
|
||||
}
|
||||
t.addHeader(headers...)
|
||||
return t
|
||||
}
|
||||
|
||||
func (t *mdTable) addHeader(cols ...string) {
|
||||
t.AddRow(cols...)
|
||||
_, _ = t.tabWriter.Write([]byte("|" + strings.Repeat(":---\t", len(cols)) + "\n"))
|
||||
}
|
||||
|
||||
func (t *mdTable) AddRow(cols ...string) {
|
||||
for i := range cols {
|
||||
cols[i] = mdEscapePipe(cols[i])
|
||||
}
|
||||
_, _ = t.tabWriter.Write([]byte("| " + strings.Join(cols, "\t ") + "\t\n"))
|
||||
}
|
||||
|
||||
func (t *mdTable) String() string {
|
||||
_ = t.tabWriter.Flush()
|
||||
return adjustSep.ReplaceAllStringFunc(t.out.String()+"\n", func(in string) string {
|
||||
return strings.ReplaceAll(in, " ", "-")
|
||||
})
|
||||
}
|
||||
|
||||
func mdCmdOutput(cmd *cobra.Command, old string) (string, error) {
|
||||
b := &strings.Builder{}
|
||||
|
||||
@@ -152,46 +195,41 @@ func mdCmdOutput(cmd *cobra.Command, old string) (string, error) {
|
||||
desc = cmd.Long
|
||||
}
|
||||
if desc != "" {
|
||||
fmt.Fprintf(b, "%s\n\n", desc)
|
||||
b.WriteString(desc + "\n\n")
|
||||
}
|
||||
|
||||
if aliases := getAliases(cmd); len(aliases) != 0 {
|
||||
fmt.Fprint(b, "### Aliases\n\n")
|
||||
fmt.Fprint(b, "`"+strings.Join(aliases, "`, `")+"`")
|
||||
fmt.Fprint(b, "\n\n")
|
||||
b.WriteString("### Aliases\n\n")
|
||||
b.WriteString("`" + strings.Join(aliases, "`, `") + "`")
|
||||
b.WriteString("\n\n")
|
||||
}
|
||||
|
||||
if len(cmd.Commands()) != 0 {
|
||||
fmt.Fprint(b, "### Subcommands\n\n")
|
||||
fmt.Fprint(b, "| Name | Description |\n")
|
||||
fmt.Fprint(b, "| --- | --- |\n")
|
||||
b.WriteString("### Subcommands\n\n")
|
||||
table := newMdTable("Name", "Description")
|
||||
for _, c := range cmd.Commands() {
|
||||
fmt.Fprintf(b, "| [`%s`](%s) | %s |\n", c.Name(), mdFilename(c), c.Short)
|
||||
table.AddRow(fmt.Sprintf("[`%s`](%s)", c.Name(), mdFilename(c)), c.Short)
|
||||
}
|
||||
fmt.Fprint(b, "\n\n")
|
||||
b.WriteString(table.String() + "\n")
|
||||
}
|
||||
|
||||
// add inherited flags before checking for flags availability
|
||||
cmd.Flags().AddFlagSet(cmd.InheritedFlags())
|
||||
|
||||
if cmd.Flags().HasAvailableFlags() {
|
||||
fmt.Fprint(b, "### Options\n\n")
|
||||
fmt.Fprint(b, "| Name | Type | Default | Description |\n")
|
||||
fmt.Fprint(b, "| --- | --- | --- | --- |\n")
|
||||
|
||||
b.WriteString("### Options\n\n")
|
||||
table := newMdTable("Name", "Type", "Default", "Description")
|
||||
cmd.Flags().VisitAll(func(f *pflag.Flag) {
|
||||
if f.Hidden {
|
||||
return
|
||||
}
|
||||
isLink := strings.Contains(old, "<a name=\""+f.Name+"\"></a>")
|
||||
fmt.Fprint(b, "| ")
|
||||
var name string
|
||||
if f.Shorthand != "" {
|
||||
name := "`-" + f.Shorthand + "`"
|
||||
name = mdMakeLink(name, f.Name, f, isLink)
|
||||
fmt.Fprintf(b, "%s, ", name)
|
||||
name = mdMakeLink("`-"+f.Shorthand+"`", f.Name, f, isLink)
|
||||
name += ", "
|
||||
}
|
||||
name := "`--" + f.Name + "`"
|
||||
name = mdMakeLink(name, f.Name, f, isLink)
|
||||
name += mdMakeLink("`--"+f.Name+"`", f.Name, f, isLink)
|
||||
|
||||
var ftype string
|
||||
if f.Value.Type() != "bool" {
|
||||
@@ -216,9 +254,9 @@ func mdCmdOutput(cmd *cobra.Command, old string) (string, error) {
|
||||
} else if cd, ok := cmd.Annotations[annotation.CodeDelimiter]; ok {
|
||||
usage = strings.ReplaceAll(usage, cd, "`")
|
||||
}
|
||||
fmt.Fprintf(b, "%s | %s | %s | %s |\n", mdEscapePipe(name), mdEscapePipe(ftype), mdEscapePipe(defval), mdEscapePipe(usage))
|
||||
table.AddRow(name, ftype, defval, mdReplaceNewline(usage))
|
||||
})
|
||||
fmt.Fprintln(b, "")
|
||||
b.WriteString(table.String())
|
||||
}
|
||||
|
||||
return b.String(), nil
|
||||
@@ -227,3 +265,7 @@ func mdCmdOutput(cmd *cobra.Command, old string) (string, error) {
|
||||
func mdEscapePipe(s string) string {
|
||||
return strings.ReplaceAll(s, `|`, `\|`)
|
||||
}
|
||||
|
||||
func mdReplaceNewline(s string) string {
|
||||
return nlRegexp.ReplaceAllString(s, "<br>")
|
||||
}
|
||||
|
2 vendor/github.com/docker/cli/cli/connhelper/commandconn/commandconn.go generated vendored
@@ -37,7 +37,7 @@ func New(ctx context.Context, cmd string, args ...string) (net.Conn, error) {
 		c   commandConn
 		err error
 	)
-	c.cmd = exec.CommandContext(ctx, cmd, args...)
+	c.cmd = exec.Command(cmd, args...)
 	// we assume that args never contains sensitive information
 	logrus.Debugf("commandconn: starting %s with %v", cmd, args)
 	c.cmd.Env = os.Environ()
2 vendor/github.com/docker/cli/cli/flags/options.go generated vendored
@@ -71,8 +71,6 @@ func (o *ClientOptions) InstallFlags(flags *pflag.FlagSet) {
 	flags.BoolVar(&o.TLS, "tls", dockerTLS, "Use TLS; implied by --tlsverify")
 	flags.BoolVar(&o.TLSVerify, FlagTLSVerify, dockerTLSVerify, "Use TLS and verify the remote")
 
-	// TODO use flag flags.String("identity"}, "i", "", "Path to libtrust key file")
-
 	o.TLSOptions = &tlsconfig.Options{
 		CAFile:   filepath.Join(dockerCertPath, DefaultCaFile),
 		CertFile: filepath.Join(dockerCertPath, DefaultCertFile),
9 vendor/github.com/docker/docker/errdefs/http_helpers.go generated vendored
@@ -2,14 +2,12 @@ package errdefs // import "github.com/docker/docker/errdefs"
 
 import (
 	"net/http"
-
-	"github.com/sirupsen/logrus"
 )
 
 // FromStatusCode creates an errdef error, based on the provided HTTP status-code
 func FromStatusCode(err error, statusCode int) error {
 	if err == nil {
-		return err
+		return nil
 	}
 	switch statusCode {
 	case http.StatusNotFound:
@@ -33,11 +31,6 @@ func FromStatusCode(err error, statusCode int) error {
 			err = System(err)
 		}
 	default:
-		logrus.WithError(err).WithFields(logrus.Fields{
-			"module":      "api",
-			"status_code": statusCode,
-		}).Debug("FIXME: Got an status-code for which error does not match any expected type!!!")
-
 		switch {
 		case statusCode >= 200 && statusCode < 400:
 			// it's a client error
536 vendor/github.com/moby/buildkit/api/services/control/control.pb.go generated vendored
@@ -13,6 +13,7 @@ import (
|
||||
_ "github.com/golang/protobuf/ptypes/timestamp"
|
||||
types "github.com/moby/buildkit/api/types"
|
||||
pb "github.com/moby/buildkit/solver/pb"
|
||||
pb1 "github.com/moby/buildkit/sourcepolicy/pb"
|
||||
github_com_moby_buildkit_util_entitlements "github.com/moby/buildkit/util/entitlements"
|
||||
github_com_opencontainers_go_digest "github.com/opencontainers/go-digest"
|
||||
grpc "google.golang.org/grpc"
|
||||
@@ -377,6 +378,7 @@ type SolveRequest struct {
|
||||
Entitlements []github_com_moby_buildkit_util_entitlements.Entitlement `protobuf:"bytes,9,rep,name=Entitlements,proto3,customtype=github.com/moby/buildkit/util/entitlements.Entitlement" json:"Entitlements,omitempty"`
|
||||
FrontendInputs map[string]*pb.Definition `protobuf:"bytes,10,rep,name=FrontendInputs,proto3" json:"FrontendInputs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
Internal bool `protobuf:"varint,11,opt,name=Internal,proto3" json:"Internal,omitempty"`
|
||||
SourcePolicy *pb1.Policy `protobuf:"bytes,12,opt,name=SourcePolicy,proto3" json:"SourcePolicy,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
@@ -485,6 +487,13 @@ func (m *SolveRequest) GetInternal() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (m *SolveRequest) GetSourcePolicy() *pb1.Policy {
|
||||
if m != nil {
|
||||
return m.SourcePolicy
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type CacheOptions struct {
|
||||
// ExportRefDeprecated is deprecated in favor or the new Exports since BuildKit v0.4.0.
|
||||
// When ExportRefDeprecated is set, the solver appends
|
||||
@@ -1496,6 +1505,9 @@ type BuildHistoryRecord struct {
|
||||
Generation int32 `protobuf:"varint,12,opt,name=Generation,proto3" json:"Generation,omitempty"`
|
||||
Trace *Descriptor `protobuf:"bytes,13,opt,name=trace,proto3" json:"trace,omitempty"`
|
||||
Pinned bool `protobuf:"varint,14,opt,name=pinned,proto3" json:"pinned,omitempty"`
|
||||
NumCachedSteps int32 `protobuf:"varint,15,opt,name=numCachedSteps,proto3" json:"numCachedSteps,omitempty"`
|
||||
NumTotalSteps int32 `protobuf:"varint,16,opt,name=numTotalSteps,proto3" json:"numTotalSteps,omitempty"`
|
||||
NumCompletedSteps int32 `protobuf:"varint,17,opt,name=numCompletedSteps,proto3" json:"numCompletedSteps,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
@@ -1632,6 +1644,27 @@ func (m *BuildHistoryRecord) GetPinned() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (m *BuildHistoryRecord) GetNumCachedSteps() int32 {
|
||||
if m != nil {
|
||||
return m.NumCachedSteps
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *BuildHistoryRecord) GetNumTotalSteps() int32 {
|
||||
if m != nil {
|
||||
return m.NumTotalSteps
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *BuildHistoryRecord) GetNumCompletedSteps() int32 {
|
||||
if m != nil {
|
||||
return m.NumCompletedSteps
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
type UpdateBuildHistoryRequest struct {
|
||||
Ref string `protobuf:"bytes,1,opt,name=Ref,proto3" json:"Ref,omitempty"`
|
||||
Pinned bool `protobuf:"varint,2,opt,name=Pinned,proto3" json:"Pinned,omitempty"`
|
||||
@@ -1953,143 +1986,149 @@ func init() {
|
||||
func init() { proto.RegisterFile("control.proto", fileDescriptor_0c5120591600887d) }
|
||||
|
||||
var fileDescriptor_0c5120591600887d = []byte{
	// 2173 bytes of a gzipped FileDescriptorProto (before this change),
	// regenerated as 2261 bytes of a gzipped FileDescriptorProto; the raw
	// hex dump of the generated descriptor bytes is not reproduced here.
|
||||
0x28, 0xb0, 0x75, 0x17, 0xe0, 0xed, 0x82, 0xd5, 0xfe, 0x9b, 0x01, 0x0b, 0xfa, 0x30, 0xeb, 0x9b,
|
||||
0xdc, 0x85, 0xa5, 0xf8, 0x08, 0xc5, 0x73, 0xfa, 0x4e, 0xbf, 0x3d, 0x35, 0x0f, 0x28, 0x36, 0x67,
|
||||
0x12, 0xa7, 0x74, 0x4c, 0x89, 0xb3, 0xb6, 0xe3, 0xb8, 0x9a, 0x60, 0x3d, 0x97, 0xe6, 0x57, 0x61,
|
||||
0x61, 0x5f, 0x96, 0x60, 0x53, 0x2f, 0x28, 0xfb, 0x3f, 0x06, 0x2c, 0xc6, 0x3c, 0xda, 0xba, 0x4f,
|
||||
0xa1, 0x74, 0x42, 0x28, 0x27, 0x2f, 0x09, 0xd3, 0x56, 0x99, 0x69, 0xab, 0xbe, 0x94, 0x1c, 0x78,
|
||||
0xc8, 0x89, 0x36, 0xa1, 0xa4, 0xca, 0x3d, 0x12, 0x6f, 0xd4, 0xca, 0x34, 0x94, 0xfe, 0xde, 0x90,
|
||||
0x1f, 0xd5, 0x60, 0xce, 0x0f, 0xbb, 0x4c, 0x9f, 0x99, 0xef, 0x4f, 0xc3, 0x3d, 0x0e, 0xbb, 0x58,
|
||||
0x32, 0xa2, 0x7b, 0x50, 0xfa, 0xda, 0xa5, 0x81, 0x17, 0x74, 0xe3, 0x53, 0xb0, 0x3a, 0x0d, 0xf4,
|
||||
0x4c, 0xf1, 0xe1, 0x21, 0x40, 0x14, 0x54, 0x05, 0xb5, 0x86, 0x1e, 0x41, 0xa1, 0xe3, 0x75, 0x09,
|
||||
0xe3, 0xca, 0x25, 0x8d, 0xba, 0xb8, 0x4b, 0xbe, 0x7b, 0xbd, 0x7a, 0x23, 0x71, 0x59, 0x84, 0x11,
|
||||
0x09, 0x44, 0xf9, 0xee, 0x7a, 0x01, 0xa1, 0xa2, 0xbc, 0xbd, 0xa5, 0x20, 0x4e, 0x53, 0xfe, 0x60,
|
||||
0x2d, 0x41, 0xc8, 0xf2, 0xd4, 0x95, 0x20, 0xf3, 0xc5, 0xdb, 0xc9, 0x52, 0x12, 0xc4, 0x31, 0x08,
|
||||
0xdc, 0x1e, 0xd1, 0x25, 0x80, 0x1c, 0x8b, 0xfa, 0xa4, 0x2d, 0xe2, 0xbc, 0x23, 0x2b, 0xb7, 0x12,
|
||||
0xd6, 0x14, 0xda, 0x84, 0x22, 0xe3, 0x2e, 0x15, 0x39, 0x67, 0x7e, 0xc6, 0xc2, 0x2a, 0x06, 0xa0,
|
||||
0xfb, 0x50, 0x6e, 0x87, 0xbd, 0xc8, 0x27, 0x02, 0x5d, 0x98, 0x11, 0x3d, 0x82, 0x88, 0xd0, 0x23,
|
||||
0x94, 0x86, 0x54, 0x96, 0x74, 0x65, 0xac, 0x08, 0x74, 0x07, 0x16, 0x22, 0x1a, 0x76, 0x29, 0x61,
|
||||
0xec, 0x73, 0x1a, 0xf6, 0x23, 0x7d, 0x91, 0x2f, 0x8b, 0xe4, 0xbd, 0x97, 0x5c, 0xc0, 0xe3, 0x7c,
|
||||
0xf6, 0xbf, 0x73, 0x50, 0x4d, 0x86, 0x48, 0xaa, 0xd6, 0x7d, 0x04, 0x05, 0x15, 0x70, 0x2a, 0xd6,
|
||||
0xdf, 0xce, 0xc7, 0x4a, 0x42, 0xa6, 0x8f, 0x4d, 0x28, 0xb6, 0xfb, 0x54, 0x16, 0xc2, 0xaa, 0x3c,
|
||||
0x8e, 0x49, 0x61, 0x29, 0x0f, 0xb9, 0xeb, 0x4b, 0x1f, 0xe7, 0xb1, 0x22, 0x44, 0x6d, 0x3c, 0xec,
|
||||
0xbc, 0xce, 0x57, 0x1b, 0x0f, 0x61, 0xc9, 0xfd, 0x2b, 0xbe, 0xd3, 0xfe, 0x95, 0xce, 0xbd, 0x7f,
|
||||
0xf6, 0xdf, 0x0d, 0x28, 0x0f, 0xcf, 0x56, 0xc2, 0xbb, 0xc6, 0x3b, 0x7b, 0x77, 0xcc, 0x33, 0xb9,
|
||||
0xb7, 0xf3, 0xcc, 0x65, 0x28, 0x30, 0x4e, 0x89, 0xdb, 0x53, 0x9d, 0x1b, 0xd6, 0x94, 0xc8, 0x62,
|
||||
0x3d, 0xd6, 0x95, 0x3b, 0x54, 0xc5, 0x62, 0x68, 0xff, 0xd7, 0x80, 0x85, 0xb1, 0xe3, 0xfe, 0x5e,
|
||||
0x6d, 0xb9, 0x04, 0xf3, 0x3e, 0x39, 0x21, 0xaa, 0xb7, 0xcc, 0x63, 0x45, 0x88, 0x59, 0xf6, 0x22,
|
||||
0xa4, 0x5c, 0x2a, 0x57, 0xc5, 0x8a, 0x10, 0x3a, 0x77, 0x08, 0x77, 0x3d, 0x5f, 0xe6, 0xa5, 0x2a,
|
||||
0xd6, 0x94, 0xd0, 0xb9, 0x4f, 0x7d, 0x5d, 0x5f, 0x8b, 0x21, 0xb2, 0x61, 0xce, 0x0b, 0x8e, 0x42,
|
||||
0x1d, 0x36, 0xb2, 0xb2, 0x51, 0x75, 0xda, 0x4e, 0x70, 0x14, 0x62, 0xb9, 0x86, 0xae, 0x42, 0x81,
|
||||
0xba, 0x41, 0x97, 0xc4, 0xc5, 0x75, 0x59, 0x70, 0x61, 0x31, 0x83, 0xf5, 0x82, 0x6d, 0x43, 0x55,
|
||||
0xf6, 0xa7, 0xbb, 0x84, 0x89, 0x6e, 0x48, 0x84, 0x75, 0xc7, 0xe5, 0xae, 0x34, 0xbb, 0x8a, 0xe5,
|
||||
0xd8, 0xbe, 0x09, 0xe8, 0xb1, 0xc7, 0xf8, 0x33, 0xd9, 0xc2, 0xb3, 0xb3, 0x9a, 0xd7, 0x7d, 0xb8,
|
||||
0x38, 0xc6, 0xad, 0xaf, 0x85, 0x9f, 0x4e, 0xb4, 0xaf, 0xd7, 0xd2, 0x19, 0x57, 0xbe, 0x14, 0x38,
|
||||
0x0a, 0x38, 0xd1, 0xc5, 0x2e, 0x40, 0x45, 0xda, 0xa5, 0xbe, 0x6d, 0xbb, 0x50, 0x55, 0xa4, 0x16,
|
||||
0xfe, 0x05, 0x5c, 0x88, 0x05, 0x7d, 0x49, 0xa8, 0x6c, 0x45, 0x0c, 0xe9, 0x97, 0x1f, 0x4e, 0xfb,
|
||||
0x4a, 0x63, 0x9c, 0x1d, 0x4f, 0xe2, 0x6d, 0x02, 0x17, 0x25, 0xcf, 0x43, 0x8f, 0xf1, 0x90, 0x0e,
|
||||
0x62, 0xab, 0x57, 0x00, 0xb6, 0xda, 0xdc, 0x3b, 0x21, 0x4f, 0x02, 0x5f, 0x5d, 0xa3, 0x25, 0x9c,
|
||||
0x98, 0x89, 0xaf, 0xc8, 0xdc, 0xa8, 0x87, 0xbb, 0x02, 0xe5, 0x96, 0x4b, 0xfd, 0x41, 0xeb, 0xa5,
|
||||
0xc7, 0x75, 0x2b, 0x3d, 0x9a, 0xb0, 0x7f, 0x6f, 0xc0, 0x72, 0xf2, 0x3b, 0xad, 0x13, 0x91, 0x2e,
|
||||
0xee, 0xc1, 0x1c, 0x8f, 0xeb, 0x98, 0xc5, 0x2c, 0x23, 0x52, 0x10, 0x51, 0xea, 0x60, 0x09, 0x4a,
|
||||
0x78, 0x5a, 0x1d, 0x9c, 0x6b, 0xa7, 0xc3, 0x27, 0x3c, 0xfd, 0xbf, 0x12, 0xa0, 0xf4, 0x72, 0x46,
|
||||
0x6f, 0x9a, 0x6c, 0xee, 0x72, 0x13, 0xcd, 0xdd, 0xf3, 0xc9, 0xe6, 0x4e, 0x5d, 0xcd, 0x77, 0x66,
|
||||
0xd1, 0x64, 0x86, 0x16, 0xef, 0x2e, 0x94, 0xe3, 0xea, 0x26, 0xbe, 0xc0, 0xad, 0xb4, 0xe8, 0x61,
|
||||
0x01, 0x34, 0x62, 0x46, 0xeb, 0xf1, 0x8d, 0xa3, 0xee, 0x3a, 0x14, 0xe7, 0x14, 0x1a, 0xb5, 0x1d,
|
||||
0x5d, 0x57, 0xe8, 0x5b, 0xe8, 0xfe, 0xf9, 0xde, 0x2d, 0xe6, 0x26, 0xdf, 0x2c, 0x1a, 0x50, 0xd9,
|
||||
0x8e, 0x13, 0xe5, 0x39, 0x1e, 0x2d, 0x92, 0x20, 0xb4, 0xa1, 0x0b, 0x1b, 0x95, 0x9a, 0xaf, 0xa4,
|
||||
0x4d, 0x8c, 0x1f, 0x28, 0x42, 0xaa, 0x2b, 0x9b, 0xa3, 0x8c, 0xd2, 0xb2, 0x2c, 0x1d, 0xb4, 0x39,
|
||||
0x93, 0xef, 0x67, 0xac, 0x2f, 0xd1, 0x67, 0x50, 0xc0, 0x84, 0xf5, 0x7d, 0x2e, 0x5f, 0x42, 0x2a,
|
||||
0xf5, 0xab, 0x53, 0xa4, 0x2b, 0x26, 0x79, 0x56, 0x35, 0x00, 0xfd, 0x12, 0x8a, 0x6a, 0xc4, 0xcc,
|
||||
0xca, 0xb4, 0x96, 0x3f, 0x43, 0x33, 0x8d, 0xd1, 0x0d, 0x85, 0xa6, 0xc4, 0x71, 0xfc, 0x9c, 0x04,
|
||||
0x44, 0xbf, 0xd0, 0x89, 0xb6, 0x76, 0x1e, 0x27, 0x66, 0x50, 0x1d, 0xe6, 0x39, 0x75, 0xdb, 0xc4,
|
||||
0x5c, 0x98, 0xc1, 0x85, 0x8a, 0x55, 0x24, 0xb6, 0xc8, 0x0b, 0x02, 0xd2, 0x31, 0x17, 0x55, 0xa5,
|
||||
0xa4, 0x28, 0xf4, 0x03, 0x58, 0x0c, 0xfa, 0x3d, 0xd9, 0x2c, 0x74, 0xf6, 0x39, 0x89, 0x98, 0x79,
|
||||
0x41, 0x7e, 0x6f, 0x62, 0x16, 0x5d, 0x83, 0x85, 0xa0, 0xdf, 0x3b, 0x10, 0x37, 0xbc, 0x62, 0x5b,
|
||||
0x92, 0x6c, 0xe3, 0x93, 0xe8, 0x26, 0x2c, 0x0b, 0x5c, 0xbc, 0xdb, 0x8a, 0x73, 0x59, 0x72, 0xa6,
|
||||
0x17, 0xde, 0x43, 0xcf, 0xfc, 0x3e, 0x3a, 0x02, 0xeb, 0x39, 0x54, 0x93, 0xfb, 0x90, 0x81, 0xbd,
|
||||
0x33, 0xde, 0x71, 0xcf, 0x10, 0x17, 0x89, 0x86, 0xe3, 0x39, 0x7c, 0xef, 0x69, 0xd4, 0x71, 0x39,
|
||||
0xc9, 0xca, 0xbc, 0xe9, 0x0c, 0x74, 0x19, 0x0a, 0x7b, 0x6a, 0xa3, 0xd4, 0xcb, 0xa5, 0xa6, 0xc4,
|
||||
0x7c, 0x93, 0x08, 0xe7, 0xe9, 0x74, 0xab, 0x29, 0xfb, 0x0a, 0x58, 0x59, 0xe2, 0x95, 0x33, 0xec,
|
||||
0x3f, 0xe7, 0x00, 0x46, 0xc1, 0x80, 0x3e, 0x02, 0xe8, 0x91, 0x8e, 0xe7, 0xfe, 0x9a, 0x8f, 0x1a,
|
||||
0xca, 0xb2, 0x9c, 0x91, 0x5d, 0xe5, 0xa8, 0xf4, 0xcf, 0xbd, 0x73, 0xe9, 0x8f, 0x60, 0x8e, 0x79,
|
||||
0xdf, 0x10, 0x5d, 0xa6, 0xc8, 0x31, 0x7a, 0x02, 0x15, 0x37, 0x08, 0x42, 0x2e, 0xc3, 0x38, 0x6e,
|
||||
0xb6, 0x6f, 0x9d, 0x16, 0xbe, 0xce, 0xd6, 0x88, 0x5f, 0x9d, 0x92, 0xa4, 0x04, 0xeb, 0x3e, 0x2c,
|
||||
0x4d, 0x32, 0x9c, 0xab, 0x19, 0xfc, 0xd6, 0x80, 0x0b, 0x13, 0x5b, 0x87, 0x3e, 0x1d, 0x66, 0x01,
|
||||
0x63, 0x86, 0xe3, 0x15, 0x27, 0x80, 0x07, 0x50, 0xdd, 0xe2, 0x5c, 0x64, 0x3d, 0x65, 0x9b, 0x6a,
|
||||
0xf7, 0x4e, 0xc7, 0x8e, 0x21, 0xec, 0x3f, 0x1a, 0xa3, 0x77, 0xce, 0xcc, 0x9e, 0xff, 0xde, 0x78,
|
||||
0xcf, 0x7f, 0x7d, 0xfa, 0xe5, 0xf0, 0x3e, 0x5b, 0xfd, 0x1b, 0x3f, 0x87, 0x0f, 0x33, 0x2f, 0x66,
|
||||
0x54, 0x81, 0xe2, 0xfe, 0xc1, 0x16, 0x3e, 0x68, 0x35, 0x97, 0x3e, 0x40, 0x55, 0x28, 0x6d, 0x3f,
|
||||
0xd9, 0xdd, 0x7b, 0xdc, 0x3a, 0x68, 0x2d, 0x19, 0x62, 0xa9, 0xd9, 0x12, 0xe3, 0xe6, 0x52, 0xae,
|
||||
0xfe, 0x6d, 0x01, 0x8a, 0xdb, 0xea, 0xbf, 0x1e, 0x74, 0x00, 0xe5, 0xe1, 0x9f, 0x00, 0xc8, 0xce,
|
||||
0xf0, 0xce, 0xc4, 0xbf, 0x09, 0xd6, 0xc7, 0xa7, 0xf2, 0xe8, 0xc4, 0xfd, 0x10, 0xe6, 0xe5, 0xdf,
|
||||
0x21, 0x28, 0xa3, 0xbd, 0x4e, 0xfe, 0x4f, 0x62, 0x9d, 0xfe, 0xf7, 0xc2, 0x86, 0x21, 0x24, 0xc9,
|
||||
0xb7, 0x89, 0x2c, 0x49, 0xc9, 0xc7, 0x4b, 0x6b, 0xf5, 0x8c, 0x47, 0x0d, 0xb4, 0x0b, 0x05, 0xdd,
|
||||
0xb0, 0x65, 0xb1, 0x26, 0x5f, 0x20, 0xac, 0xb5, 0xe9, 0x0c, 0x4a, 0xd8, 0x86, 0x81, 0x76, 0x87,
|
||||
0xef, 0xd1, 0x59, 0xaa, 0x25, 0xab, 0x5d, 0xeb, 0x8c, 0xf5, 0x75, 0x63, 0xc3, 0x40, 0x5f, 0x41,
|
||||
0x25, 0x51, 0xcf, 0xa2, 0x8c, 0x6a, 0x2a, 0x5d, 0x1c, 0x5b, 0xd7, 0xcf, 0xe0, 0xd2, 0x96, 0xb7,
|
||||
0x60, 0x4e, 0x1e, 0xa4, 0x0c, 0x67, 0x27, 0xca, 0xdd, 0x2c, 0x35, 0xc7, 0xca, 0xdf, 0x43, 0x55,
|
||||
0xa0, 0x93, 0x20, 0x19, 0x7d, 0xe8, 0xfa, 0x59, 0xf7, 0xea, 0xd4, 0xb0, 0x49, 0x05, 0xf1, 0x86,
|
||||
0x81, 0x42, 0x40, 0xe9, 0xe4, 0x89, 0x7e, 0x94, 0x11, 0x25, 0xd3, 0x32, 0xb8, 0x75, 0x73, 0x36,
|
||||
0x66, 0x65, 0x54, 0xa3, 0xfa, 0xea, 0xcd, 0x8a, 0xf1, 0x8f, 0x37, 0x2b, 0xc6, 0xbf, 0xde, 0xac,
|
||||
0x18, 0x87, 0x05, 0x59, 0x31, 0xfd, 0xf8, 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, 0x7c, 0xb8, 0xc3,
|
||||
0x68, 0x0b, 0x1d, 0x00, 0x00,
|
||||
}
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
@@ -2853,6 +2892,18 @@ func (m *SolveRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
i -= len(m.XXX_unrecognized)
|
||||
copy(dAtA[i:], m.XXX_unrecognized)
|
||||
}
|
||||
if m.SourcePolicy != nil {
|
||||
{
|
||||
size, err := m.SourcePolicy.MarshalToSizedBuffer(dAtA[:i])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
i -= size
|
||||
i = encodeVarintControl(dAtA, i, uint64(size))
|
||||
}
|
||||
i--
|
||||
dAtA[i] = 0x62
|
||||
}
|
||||
if m.Internal {
|
||||
i--
|
||||
if m.Internal {
|
||||
@@ -3339,23 +3390,23 @@ func (m *Vertex) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
dAtA[i] = 0x3a
|
||||
}
|
||||
if m.Completed != nil {
|
||||
n7, err7 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.Completed, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.Completed):])
|
||||
if err7 != nil {
|
||||
return 0, err7
|
||||
}
|
||||
i -= n7
|
||||
i = encodeVarintControl(dAtA, i, uint64(n7))
|
||||
i--
|
||||
dAtA[i] = 0x32
|
||||
}
|
||||
if m.Started != nil {
|
||||
n8, err8 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.Started, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.Started):])
|
||||
n8, err8 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.Completed, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.Completed):])
|
||||
if err8 != nil {
|
||||
return 0, err8
|
||||
}
|
||||
i -= n8
|
||||
i = encodeVarintControl(dAtA, i, uint64(n8))
|
||||
i--
|
||||
dAtA[i] = 0x32
|
||||
}
|
||||
if m.Started != nil {
|
||||
n9, err9 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.Started, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.Started):])
|
||||
if err9 != nil {
|
||||
return 0, err9
|
||||
}
|
||||
i -= n9
|
||||
i = encodeVarintControl(dAtA, i, uint64(n9))
|
||||
i--
|
||||
dAtA[i] = 0x2a
|
||||
}
|
||||
if m.Cached {
|
||||
@@ -3419,31 +3470,31 @@ func (m *VertexStatus) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
copy(dAtA[i:], m.XXX_unrecognized)
|
||||
}
|
||||
if m.Completed != nil {
|
||||
n9, err9 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.Completed, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.Completed):])
|
||||
if err9 != nil {
|
||||
return 0, err9
|
||||
}
|
||||
i -= n9
|
||||
i = encodeVarintControl(dAtA, i, uint64(n9))
|
||||
i--
|
||||
dAtA[i] = 0x42
|
||||
}
|
||||
if m.Started != nil {
|
||||
n10, err10 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.Started, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.Started):])
|
||||
n10, err10 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.Completed, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.Completed):])
|
||||
if err10 != nil {
|
||||
return 0, err10
|
||||
}
|
||||
i -= n10
|
||||
i = encodeVarintControl(dAtA, i, uint64(n10))
|
||||
i--
|
||||
dAtA[i] = 0x42
|
||||
}
|
||||
if m.Started != nil {
|
||||
n11, err11 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.Started, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.Started):])
|
||||
if err11 != nil {
|
||||
return 0, err11
|
||||
}
|
||||
i -= n11
|
||||
i = encodeVarintControl(dAtA, i, uint64(n11))
|
||||
i--
|
||||
dAtA[i] = 0x3a
|
||||
}
|
||||
n11, err11 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Timestamp, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.Timestamp):])
|
||||
if err11 != nil {
|
||||
return 0, err11
|
||||
n12, err12 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Timestamp, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.Timestamp):])
|
||||
if err12 != nil {
|
||||
return 0, err12
|
||||
}
|
||||
i -= n11
|
||||
i = encodeVarintControl(dAtA, i, uint64(n11))
|
||||
i -= n12
|
||||
i = encodeVarintControl(dAtA, i, uint64(n12))
|
||||
i--
|
||||
dAtA[i] = 0x32
|
||||
if m.Total != 0 {
|
||||
@@ -3516,12 +3567,12 @@ func (m *VertexLog) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
i--
|
||||
dAtA[i] = 0x18
|
||||
}
|
||||
n12, err12 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Timestamp, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.Timestamp):])
|
||||
if err12 != nil {
|
||||
return 0, err12
|
||||
n13, err13 := github_com_gogo_protobuf_types.StdTimeMarshalTo(m.Timestamp, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(m.Timestamp):])
|
||||
if err13 != nil {
|
||||
return 0, err13
|
||||
}
|
||||
i -= n12
|
||||
i = encodeVarintControl(dAtA, i, uint64(n12))
|
||||
i -= n13
|
||||
i = encodeVarintControl(dAtA, i, uint64(n13))
|
||||
i--
|
||||
dAtA[i] = 0x12
|
||||
if len(m.Vertex) > 0 {
|
||||
@@ -3921,6 +3972,25 @@ func (m *BuildHistoryRecord) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
i -= len(m.XXX_unrecognized)
|
||||
copy(dAtA[i:], m.XXX_unrecognized)
|
||||
}
|
||||
if m.NumCompletedSteps != 0 {
|
||||
i = encodeVarintControl(dAtA, i, uint64(m.NumCompletedSteps))
|
||||
i--
|
||||
dAtA[i] = 0x1
|
||||
i--
|
||||
dAtA[i] = 0x88
|
||||
}
|
||||
if m.NumTotalSteps != 0 {
|
||||
i = encodeVarintControl(dAtA, i, uint64(m.NumTotalSteps))
|
||||
i--
|
||||
dAtA[i] = 0x1
|
||||
i--
|
||||
dAtA[i] = 0x80
|
||||
}
|
||||
if m.NumCachedSteps != 0 {
|
||||
i = encodeVarintControl(dAtA, i, uint64(m.NumCachedSteps))
|
||||
i--
|
||||
dAtA[i] = 0x78
|
||||
}
|
||||
if m.Pinned {
|
||||
i--
|
||||
if m.Pinned {
|
||||
@@ -4018,23 +4088,23 @@ func (m *BuildHistoryRecord) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
dAtA[i] = 0x42
|
||||
}
|
||||
if m.CompletedAt != nil {
|
||||
n20, err20 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.CompletedAt, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.CompletedAt):])
|
||||
if err20 != nil {
|
||||
return 0, err20
|
||||
}
|
||||
i -= n20
|
||||
i = encodeVarintControl(dAtA, i, uint64(n20))
|
||||
i--
|
||||
dAtA[i] = 0x3a
|
||||
}
|
||||
if m.CreatedAt != nil {
|
||||
n21, err21 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.CreatedAt, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.CreatedAt):])
|
||||
n21, err21 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.CompletedAt, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.CompletedAt):])
|
||||
if err21 != nil {
|
||||
return 0, err21
|
||||
}
|
||||
i -= n21
|
||||
i = encodeVarintControl(dAtA, i, uint64(n21))
|
||||
i--
|
||||
dAtA[i] = 0x3a
|
||||
}
|
||||
if m.CreatedAt != nil {
|
||||
n22, err22 := github_com_gogo_protobuf_types.StdTimeMarshalTo(*m.CreatedAt, dAtA[i-github_com_gogo_protobuf_types.SizeOfStdTime(*m.CreatedAt):])
|
||||
if err22 != nil {
|
||||
return 0, err22
|
||||
}
|
||||
i -= n22
|
||||
i = encodeVarintControl(dAtA, i, uint64(n22))
|
||||
i--
|
||||
dAtA[i] = 0x32
|
||||
}
|
||||
if m.Error != nil {
|
||||
@@ -4546,6 +4616,10 @@ func (m *SolveRequest) Size() (n int) {
|
||||
if m.Internal {
|
||||
n += 2
|
||||
}
|
||||
if m.SourcePolicy != nil {
|
||||
l = m.SourcePolicy.Size()
|
||||
n += 1 + l + sovControl(uint64(l))
|
||||
}
|
||||
if m.XXX_unrecognized != nil {
|
||||
n += len(m.XXX_unrecognized)
|
||||
}
|
||||
@@ -5043,6 +5117,15 @@ func (m *BuildHistoryRecord) Size() (n int) {
|
||||
if m.Pinned {
|
||||
n += 2
|
||||
}
|
||||
if m.NumCachedSteps != 0 {
|
||||
n += 1 + sovControl(uint64(m.NumCachedSteps))
|
||||
}
|
||||
if m.NumTotalSteps != 0 {
|
||||
n += 2 + sovControl(uint64(m.NumTotalSteps))
|
||||
}
|
||||
if m.NumCompletedSteps != 0 {
|
||||
n += 2 + sovControl(uint64(m.NumCompletedSteps))
|
||||
}
|
||||
if m.XXX_unrecognized != nil {
|
||||
n += len(m.XXX_unrecognized)
|
||||
}
|
||||
@@ -6514,6 +6597,42 @@ func (m *SolveRequest) Unmarshal(dAtA []byte) error {
|
||||
}
|
||||
}
|
||||
m.Internal = bool(v != 0)
|
||||
case 12:
|
||||
if wireType != 2 {
|
||||
return fmt.Errorf("proto: wrong wireType = %d for field SourcePolicy", wireType)
|
||||
}
|
||||
var msglen int
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowControl
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
msglen |= int(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
if msglen < 0 {
|
||||
return ErrInvalidLengthControl
|
||||
}
|
||||
postIndex := iNdEx + msglen
|
||||
if postIndex < 0 {
|
||||
return ErrInvalidLengthControl
|
||||
}
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
if m.SourcePolicy == nil {
|
||||
m.SourcePolicy = &pb1.Policy{}
|
||||
}
|
||||
if err := m.SourcePolicy.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
iNdEx = postIndex
|
||||
default:
|
||||
iNdEx = preIndex
|
||||
skippy, err := skipControl(dAtA[iNdEx:])
|
||||
@@ -9925,6 +10044,63 @@ func (m *BuildHistoryRecord) Unmarshal(dAtA []byte) error {
|
||||
}
|
||||
}
|
||||
m.Pinned = bool(v != 0)
|
||||
case 15:
|
||||
if wireType != 0 {
|
||||
return fmt.Errorf("proto: wrong wireType = %d for field NumCachedSteps", wireType)
|
||||
}
|
||||
m.NumCachedSteps = 0
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowControl
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
m.NumCachedSteps |= int32(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
case 16:
|
||||
if wireType != 0 {
|
||||
return fmt.Errorf("proto: wrong wireType = %d for field NumTotalSteps", wireType)
|
||||
}
|
||||
m.NumTotalSteps = 0
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowControl
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
m.NumTotalSteps |= int32(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
case 17:
|
||||
if wireType != 0 {
|
||||
return fmt.Errorf("proto: wrong wireType = %d for field NumCompletedSteps", wireType)
|
||||
}
|
||||
m.NumCompletedSteps = 0
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowControl
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
m.NumCompletedSteps |= int32(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
default:
|
||||
iNdEx = preIndex
|
||||
skippy, err := skipControl(dAtA[iNdEx:])
|
||||
|
8 vendor/github.com/moby/buildkit/api/services/control/control.proto generated vendored
@@ -8,6 +8,7 @@ import "github.com/moby/buildkit/solver/pb/ops.proto";
import "github.com/moby/buildkit/api/types/worker.proto";
// import "github.com/containerd/containerd/api/types/descriptor.proto";
import "github.com/gogo/googleapis/google/rpc/status.proto";
import "github.com/moby/buildkit/sourcepolicy/pb/policy.proto";

option (gogoproto.sizer_all) = true;
option (gogoproto.marshaler_all) = true;
@@ -68,6 +69,7 @@ message SolveRequest {
repeated string Entitlements = 9 [(gogoproto.customtype) = "github.com/moby/buildkit/util/entitlements.Entitlement" ];
map<string, pb.Definition> FrontendInputs = 10;
bool Internal = 11; // Internal builds are not recorded in build history
moby.buildkit.v1.sourcepolicy.Policy SourcePolicy = 12;
}

message CacheOptions {
@@ -202,8 +204,10 @@ message BuildHistoryRecord {
int32 Generation = 12;
Descriptor trace = 13;
bool pinned = 14;
int32 numCachedSteps = 15;
int32 numTotalSteps = 16;
int32 numCompletedSteps = 17;
// TODO: tags
// TODO: steps/cache summary
// TODO: unclipped logs
}

@@ -230,4 +234,4 @@ message BuildResultInfo {
message Exporter {
string Type = 1;
map<string, string> Attrs = 2;
}
}
|
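The BuildHistoryRecord additions above surface per-build step counts on the generated Go type as well. A minimal sketch of reading them; the helper name and the output formatting are illustrative and not part of this change:

import (
	"fmt"

	controlapi "github.com/moby/buildkit/api/services/control"
)

// progressSummary is a hypothetical helper that formats the three
// counters added to BuildHistoryRecord in this change.
func progressSummary(rec *controlapi.BuildHistoryRecord) string {
	return fmt.Sprintf("%d/%d steps completed, %d cached",
		rec.NumCompletedSteps, rec.NumTotalSteps, rec.NumCachedSteps)
}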
17 vendor/github.com/moby/buildkit/client/llb/resolver.go generated vendored
@@ -42,9 +42,16 @@ const (
)

type ResolveImageConfigOpt struct {
Platform *ocispecs.Platform
ResolveMode string
LogName string
ResolverType // default is ResolverTypeRegistry
SessionID string
ResolverType

Platform *ocispecs.Platform
ResolveMode string
LogName string

Store ResolveImageConfigOptStore
}

type ResolveImageConfigOptStore struct {
SessionID string
StoreID string
}
|
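With the hunk above, the session and store identifiers for image-config resolution move into the nested Store field. A sketch of filling the reworked struct on the caller side; the resolver-type constant, resolve mode, platform values and IDs are assumptions for illustration, not taken from this diff:

import (
	"github.com/moby/buildkit/client/llb"
	ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
)

// newResolveOpt is a hypothetical helper showing the reshaped ResolveImageConfigOpt.
func newResolveOpt(sessionID, storeID string) llb.ResolveImageConfigOpt {
	return llb.ResolveImageConfigOpt{
		ResolverType: llb.ResolverTypeRegistry, // still the default, per the removed comment above
		Platform:     &ocispecs.Platform{OS: "linux", Architecture: "amd64"},
		ResolveMode:  "default",
		LogName:      "resolve image config",
		Store: llb.ResolveImageConfigOptStore{ // new in this change
			SessionID: sessionID,
			StoreID:   storeID,
		},
	}
}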
21 vendor/github.com/moby/buildkit/client/llb/source.go generated vendored
@@ -4,7 +4,6 @@ import (
|
||||
"context"
|
||||
_ "crypto/sha256" // for opencontainers/go-digest
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
@@ -455,7 +454,7 @@ func Differ(t DiffType, required bool) LocalOption {
|
||||
})
|
||||
}
|
||||
|
||||
func OCILayout(store string, digest digest.Digest, opts ...OCILayoutOption) State {
|
||||
func OCILayout(ref string, opts ...OCILayoutOption) State {
|
||||
gi := &OCILayoutInfo{}
|
||||
|
||||
for _, o := range opts {
|
||||
@@ -464,17 +463,17 @@ func OCILayout(store string, digest digest.Digest, opts ...OCILayoutOption) Stat
|
||||
attrs := map[string]string{}
|
||||
if gi.sessionID != "" {
|
||||
attrs[pb.AttrOCILayoutSessionID] = gi.sessionID
|
||||
addCap(&gi.Constraints, pb.CapSourceOCILayoutSessionID)
|
||||
}
|
||||
|
||||
if ll := gi.layerLimit; ll != nil {
|
||||
attrs[pb.AttrOCILayoutLayerLimit] = strconv.FormatInt(int64(*ll), 10)
|
||||
addCap(&gi.Constraints, pb.CapSourceOCILayoutLayerLimit)
|
||||
if gi.storeID != "" {
|
||||
attrs[pb.AttrOCILayoutStoreID] = gi.storeID
|
||||
}
|
||||
if gi.layerLimit != nil {
|
||||
attrs[pb.AttrOCILayoutLayerLimit] = strconv.FormatInt(int64(*gi.layerLimit), 10)
|
||||
}
|
||||
|
||||
addCap(&gi.Constraints, pb.CapSourceOCILayout)
|
||||
|
||||
source := NewSource(fmt.Sprintf("oci-layout://%s@%s", store, digest), attrs, gi.Constraints)
|
||||
source := NewSource("oci-layout://"+ref, attrs, gi.Constraints)
|
||||
return NewState(source.Output())
|
||||
}
|
||||
|
||||
@@ -488,9 +487,10 @@ func (fn ociLayoutOptionFunc) SetOCILayoutOption(li *OCILayoutInfo) {
|
||||
fn(li)
|
||||
}
|
||||
|
||||
func OCISessionID(id string) OCILayoutOption {
|
||||
func OCIStore(sessionID string, storeID string) OCILayoutOption {
|
||||
return ociLayoutOptionFunc(func(oi *OCILayoutInfo) {
|
||||
oi.sessionID = id
|
||||
oi.sessionID = sessionID
|
||||
oi.storeID = storeID
|
||||
})
|
||||
}
|
||||
|
||||
@@ -503,6 +503,7 @@ func OCILayerLimit(limit int) OCILayoutOption {
|
||||
type OCILayoutInfo struct {
|
||||
constraintsWrapper
|
||||
sessionID string
|
||||
storeID string
|
||||
layerLimit *int
|
||||
}
|
||||
|
||||
|
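OCILayout now takes a single ref plus options, and OCIStore replaces the old OCISessionID option. A sketch of the updated call, assuming a ref in the name@digest form that NewSource prefixes with oci-layout://; the IDs and digest are placeholders:

import (
	"context"

	"github.com/moby/buildkit/client/llb"
)

// ociLayoutState is a hypothetical example of the reworked OCILayout API.
func ociLayoutState(ctx context.Context, sessionID, storeID string) (*llb.Definition, error) {
	st := llb.OCILayout(
		"content-store@sha256:0000000000000000000000000000000000000000000000000000000000000000", // placeholder ref
		llb.OCIStore(sessionID, storeID), // replaces the old OCISessionID option
		llb.OCILayerLimit(10),            // optional layer cap, unchanged
	)
	return st.Marshal(ctx)
}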
201 vendor/github.com/moby/buildkit/client/ociindex/ociindex.go generated vendored
@@ -4,6 +4,7 @@ import (
|
||||
"encoding/json"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
|
||||
"github.com/gofrs/flock"
|
||||
ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
@@ -11,16 +12,132 @@ import (
|
||||
)
|
||||
|
||||
const (
|
||||
// IndexJSONLockFileSuffix is the suffix of the lock file
|
||||
IndexJSONLockFileSuffix = ".lock"
|
||||
// indexFile is the name of the index file
|
||||
indexFile = "index.json"
|
||||
|
||||
// lockFileSuffix is the suffix of the lock file
|
||||
lockFileSuffix = ".lock"
|
||||
)
|
||||
|
||||
// PutDescToIndex puts desc to index with tag.
|
||||
// Existing manifests with the same tag will be removed from the index.
|
||||
func PutDescToIndex(index *ocispecs.Index, desc ocispecs.Descriptor, tag string) error {
|
||||
if index == nil {
|
||||
index = &ocispecs.Index{}
|
||||
type StoreIndex struct {
|
||||
indexPath string
|
||||
lockPath string
|
||||
}
|
||||
|
||||
func NewStoreIndex(storePath string) StoreIndex {
|
||||
indexPath := path.Join(storePath, indexFile)
|
||||
return StoreIndex{
|
||||
indexPath: indexPath,
|
||||
lockPath: indexPath + lockFileSuffix,
|
||||
}
|
||||
}
|
||||
|
||||
func (s StoreIndex) Read() (*ocispecs.Index, error) {
|
||||
lock := flock.New(s.lockPath)
|
||||
locked, err := lock.TryRLock()
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "could not lock %s", s.lockPath)
|
||||
}
|
||||
if !locked {
|
||||
return nil, errors.Errorf("could not lock %s", s.lockPath)
|
||||
}
|
||||
defer func() {
|
||||
lock.Unlock()
|
||||
os.RemoveAll(s.lockPath)
|
||||
}()
|
||||
|
||||
b, err := os.ReadFile(s.indexPath)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "could not read %s", s.indexPath)
|
||||
}
|
||||
var idx ocispecs.Index
|
||||
if err := json.Unmarshal(b, &idx); err != nil {
|
||||
return nil, errors.Wrapf(err, "could not unmarshal %s (%q)", s.indexPath, string(b))
|
||||
}
|
||||
return &idx, nil
|
||||
}
|
||||
|
||||
func (s StoreIndex) Put(tag string, desc ocispecs.Descriptor) error {
|
||||
lock := flock.New(s.lockPath)
|
||||
locked, err := lock.TryLock()
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "could not lock %s", s.lockPath)
|
||||
}
|
||||
if !locked {
|
||||
return errors.Errorf("could not lock %s", s.lockPath)
|
||||
}
|
||||
defer func() {
|
||||
lock.Unlock()
|
||||
os.RemoveAll(s.lockPath)
|
||||
}()
|
||||
|
||||
f, err := os.OpenFile(s.indexPath, os.O_RDWR|os.O_CREATE, 0644)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "could not open %s", s.indexPath)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
var idx ocispecs.Index
|
||||
b, err := io.ReadAll(f)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "could not read %s", s.indexPath)
|
||||
}
|
||||
if len(b) > 0 {
|
||||
if err := json.Unmarshal(b, &idx); err != nil {
|
||||
return errors.Wrapf(err, "could not unmarshal %s (%q)", s.indexPath, string(b))
|
||||
}
|
||||
}
|
||||
|
||||
if err = insertDesc(&idx, desc, tag); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
b, err = json.Marshal(idx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err = f.WriteAt(b, 0); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = f.Truncate(int64(len(b))); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s StoreIndex) Get(tag string) (*ocispecs.Descriptor, error) {
|
||||
idx, err := s.Read()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, m := range idx.Manifests {
|
||||
if t, ok := m.Annotations[ocispecs.AnnotationRefName]; ok && t == tag {
|
||||
return &m, nil
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (s StoreIndex) GetSingle() (*ocispecs.Descriptor, error) {
|
||||
idx, err := s.Read()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(idx.Manifests) == 1 {
|
||||
return &idx.Manifests[0], nil
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// insertDesc puts desc to index with tag.
|
||||
// Existing manifests with the same tag will be removed from the index.
|
||||
func insertDesc(index *ocispecs.Index, desc ocispecs.Descriptor, tag string) error {
|
||||
if index == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if index.SchemaVersion == 0 {
|
||||
index.SchemaVersion = 2
|
||||
}
|
||||
@@ -41,73 +158,3 @@ func PutDescToIndex(index *ocispecs.Index, desc ocispecs.Descriptor, tag string)
|
||||
index.Manifests = append(index.Manifests, desc)
|
||||
return nil
|
||||
}
|
||||
|
||||
func PutDescToIndexJSONFileLocked(indexJSONPath string, desc ocispecs.Descriptor, tag string) error {
|
||||
lockPath := indexJSONPath + IndexJSONLockFileSuffix
|
||||
lock := flock.New(lockPath)
|
||||
locked, err := lock.TryLock()
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "could not lock %s", lockPath)
|
||||
}
|
||||
if !locked {
|
||||
return errors.Errorf("could not lock %s", lockPath)
|
||||
}
|
||||
defer func() {
|
||||
lock.Unlock()
|
||||
os.RemoveAll(lockPath)
|
||||
}()
|
||||
f, err := os.OpenFile(indexJSONPath, os.O_RDWR|os.O_CREATE, 0644)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "could not open %s", indexJSONPath)
|
||||
}
|
||||
defer f.Close()
|
||||
var idx ocispecs.Index
|
||||
b, err := io.ReadAll(f)
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "could not read %s", indexJSONPath)
|
||||
}
|
||||
if len(b) > 0 {
|
||||
if err := json.Unmarshal(b, &idx); err != nil {
|
||||
return errors.Wrapf(err, "could not unmarshal %s (%q)", indexJSONPath, string(b))
|
||||
}
|
||||
}
|
||||
if err = PutDescToIndex(&idx, desc, tag); err != nil {
|
||||
return err
|
||||
}
|
||||
b, err = json.Marshal(idx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err = f.WriteAt(b, 0); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = f.Truncate(int64(len(b))); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func ReadIndexJSONFileLocked(indexJSONPath string) (*ocispecs.Index, error) {
|
||||
lockPath := indexJSONPath + IndexJSONLockFileSuffix
|
||||
lock := flock.New(lockPath)
|
||||
locked, err := lock.TryRLock()
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "could not lock %s", lockPath)
|
||||
}
|
||||
if !locked {
|
||||
return nil, errors.Errorf("could not lock %s", lockPath)
|
||||
}
|
||||
defer func() {
|
||||
lock.Unlock()
|
||||
os.RemoveAll(lockPath)
|
||||
}()
|
||||
b, err := os.ReadFile(indexJSONPath)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "could not read %s", indexJSONPath)
|
||||
}
|
||||
var idx ocispecs.Index
|
||||
if err := json.Unmarshal(b, &idx); err != nil {
|
||||
return nil, errors.Wrapf(err, "could not unmarshal %s (%q)", indexJSONPath, string(b))
|
||||
}
|
||||
return &idx, nil
|
||||
}
|
||||
|
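The rewritten ociindex package above replaces the PutDescToIndexJSONFileLocked/ReadIndexJSONFileLocked helpers with a StoreIndex value. A small sketch of typical use of the new API; the store path, tag and descriptor are examples, not taken from this diff:

import (
	"github.com/moby/buildkit/client/ociindex"
	ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
)

// updateLocalCacheTag is a hypothetical helper built on the new StoreIndex API.
func updateLocalCacheTag(storePath, tag string, desc ocispecs.Descriptor) (*ocispecs.Descriptor, error) {
	// StoreIndex wraps <storePath>/index.json and its .lock file.
	idx := ociindex.NewStoreIndex(storePath)

	// Record the manifest under the tag, replacing any existing entry.
	if err := idx.Put(tag, desc); err != nil {
		return nil, err
	}

	// Look the tag back up; Get returns (nil, nil) when the tag is absent.
	return idx.Get(tag)
}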
67 vendor/github.com/moby/buildkit/client/solve.go generated vendored
@@ -22,6 +22,7 @@ import (
|
||||
"github.com/moby/buildkit/session/filesync"
|
||||
"github.com/moby/buildkit/session/grpchijack"
|
||||
"github.com/moby/buildkit/solver/pb"
|
||||
spb "github.com/moby/buildkit/sourcepolicy/pb"
|
||||
"github.com/moby/buildkit/util/bklog"
|
||||
"github.com/moby/buildkit/util/entitlements"
|
||||
ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
@@ -47,6 +48,8 @@ type SolveOpt struct {
|
||||
SharedSession *session.Session // TODO: refactor to better session syncing
|
||||
SessionPreInitialized bool // TODO: refactor to better session syncing
|
||||
Internal bool
|
||||
SourcePolicy *spb.Policy
|
||||
Ref string
|
||||
}
|
||||
|
||||
type ExportEntry struct {
|
||||
@@ -93,6 +96,9 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG
|
||||
}
|
||||
|
||||
ref := identity.NewID()
|
||||
if opt.Ref != "" {
|
||||
ref = opt.Ref
|
||||
}
|
||||
eg, ctx := errgroup.WithContext(ctx)
|
||||
|
||||
statusContext, cancelStatus := context.WithCancel(context.Background())
|
||||
@@ -127,7 +133,7 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG
|
||||
ex = opt.Exports[0]
|
||||
}
|
||||
|
||||
indicesToUpdate := []string{}
|
||||
storesToUpdate := []string{}
|
||||
|
||||
if !opt.SessionPreInitialized {
|
||||
if len(syncedDirs) > 0 {
|
||||
@@ -192,7 +198,7 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG
|
||||
return nil, err
|
||||
}
|
||||
contentStores["export"] = cs
|
||||
indicesToUpdate = append(indicesToUpdate, filepath.Join(ex.OutputDir, "index.json"))
|
||||
storesToUpdate = append(storesToUpdate, ex.OutputDir)
|
||||
default:
|
||||
s.Allow(filesync.NewFSSyncTargetDir(ex.OutputDir))
|
||||
}
|
||||
@@ -261,6 +267,7 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG
|
||||
Cache: cacheOpt.options,
|
||||
Entitlements: opt.AllowedEntitlements,
|
||||
Internal: opt.Internal,
|
||||
SourcePolicy: opt.SourcePolicy,
|
||||
})
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "failed to solve")
|
||||
@@ -324,8 +331,9 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG
|
||||
if err = json.Unmarshal([]byte(manifestDescJSON), &manifestDesc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for indexJSONPath, tag := range cacheOpt.indicesToUpdate {
|
||||
if err = ociindex.PutDescToIndexJSONFileLocked(indexJSONPath, manifestDesc, tag); err != nil {
|
||||
for storePath, tag := range cacheOpt.storesToUpdate {
|
||||
idx := ociindex.NewStoreIndex(storePath)
|
||||
if err := idx.Put(tag, manifestDesc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
@@ -339,12 +347,13 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG
|
||||
if err = json.Unmarshal([]byte(manifestDescDt), &manifestDesc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, indexJSONPath := range indicesToUpdate {
|
||||
for _, storePath := range storesToUpdate {
|
||||
tag := "latest"
|
||||
if t, ok := res.ExporterResponse["image.name"]; ok {
|
||||
tag = t
|
||||
}
|
||||
if err = ociindex.PutDescToIndexJSONFileLocked(indexJSONPath, manifestDesc, tag); err != nil {
|
||||
idx := ociindex.NewStoreIndex(storePath)
|
||||
if err := idx.Put(tag, manifestDesc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
@@ -403,10 +412,10 @@ func defaultSessionName() string {
|
||||
}
|
||||
|
||||
type cacheOptions struct {
|
||||
options controlapi.CacheOptions
|
||||
contentStores map[string]content.Store // key: ID of content store ("local:" + csDir)
|
||||
indicesToUpdate map[string]string // key: index.JSON file name, value: tag
|
||||
frontendAttrs map[string]string
|
||||
options controlapi.CacheOptions
|
||||
contentStores map[string]content.Store // key: ID of content store ("local:" + csDir)
|
||||
storesToUpdate map[string]string // key: path to content store, value: tag
|
||||
frontendAttrs map[string]string
|
||||
}
|
||||
|
||||
func parseCacheOptions(ctx context.Context, isGateway bool, opt SolveOpt) (*cacheOptions, error) {
|
||||
@@ -415,7 +424,7 @@ func parseCacheOptions(ctx context.Context, isGateway bool, opt SolveOpt) (*cach
|
||||
cacheImports []*controlapi.CacheOptionsEntry
|
||||
)
|
||||
contentStores := make(map[string]content.Store)
|
||||
indicesToUpdate := make(map[string]string) // key: index.JSON file name, value: tag
|
||||
storesToUpdate := make(map[string]string)
|
||||
frontendAttrs := make(map[string]string)
|
||||
for _, ex := range opt.CacheExports {
|
||||
if ex.Type == "local" {
|
||||
@@ -437,8 +446,7 @@ func parseCacheOptions(ctx context.Context, isGateway bool, opt SolveOpt) (*cach
|
||||
tag = t
|
||||
}
|
||||
// TODO(AkihiroSuda): support custom index JSON path and tag
|
||||
indexJSONPath := filepath.Join(csDir, "index.json")
|
||||
indicesToUpdate[indexJSONPath] = tag
|
||||
storesToUpdate[csDir] = tag
|
||||
}
|
||||
if ex.Type == "registry" {
|
||||
regRef := ex.Attrs["ref"]
|
||||
@@ -462,27 +470,26 @@ func parseCacheOptions(ctx context.Context, isGateway bool, opt SolveOpt) (*cach
|
||||
bklog.G(ctx).Warning("local cache import at " + csDir + " not found due to err: " + err.Error())
|
||||
continue
|
||||
}
|
||||
// if digest is not specified, load from "latest" tag
|
||||
// if digest is not specified, attempt to load from tag
|
||||
if im.Attrs["digest"] == "" {
|
||||
idx, err := ociindex.ReadIndexJSONFileLocked(filepath.Join(csDir, "index.json"))
|
||||
tag := "latest"
|
||||
if t, ok := im.Attrs["tag"]; ok {
|
||||
tag = t
|
||||
}
|
||||
|
||||
idx := ociindex.NewStoreIndex(csDir)
|
||||
desc, err := idx.Get(tag)
|
||||
if err != nil {
|
||||
bklog.G(ctx).Warning("local cache import at " + csDir + " not found due to err: " + err.Error())
|
||||
continue
|
||||
}
|
||||
for _, m := range idx.Manifests {
|
||||
tag := "latest"
|
||||
if t, ok := im.Attrs["tag"]; ok {
|
||||
tag = t
|
||||
}
|
||||
if m.Annotations[ocispecs.AnnotationRefName] == tag {
|
||||
im.Attrs["digest"] = string(m.Digest)
|
||||
break
|
||||
}
|
||||
}
|
||||
if im.Attrs["digest"] == "" {
|
||||
return nil, errors.New("local cache importer requires either explicit digest, \"latest\" tag or custom tag on index.json")
|
||||
if desc != nil {
|
||||
im.Attrs["digest"] = desc.Digest.String()
|
||||
}
|
||||
}
|
||||
if im.Attrs["digest"] == "" {
|
||||
return nil, errors.New("local cache importer requires either explicit digest, \"latest\" tag or custom tag on index.json")
|
||||
}
|
||||
contentStores["local:"+csDir] = cs
|
||||
}
|
||||
if im.Type == "registry" {
|
||||
@@ -510,9 +517,9 @@ func parseCacheOptions(ctx context.Context, isGateway bool, opt SolveOpt) (*cach
|
||||
Exports: cacheExports,
|
||||
Imports: cacheImports,
|
||||
},
|
||||
contentStores: contentStores,
|
||||
indicesToUpdate: indicesToUpdate,
|
||||
frontendAttrs: frontendAttrs,
|
||||
contentStores: contentStores,
|
||||
storesToUpdate: storesToUpdate,
|
||||
frontendAttrs: frontendAttrs,
|
||||
}
|
||||
return &res, nil
|
||||
}
|
||||
|
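SolveOpt gains Ref, Internal and SourcePolicy above, and solve() now forwards them to the controller. A hedged sketch of a direct client call using the new fields; the empty policy, the wrapper name and the discarded progress channel are placeholders, not taken from this diff:

import (
	"context"

	"github.com/moby/buildkit/client"
	"github.com/moby/buildkit/client/llb"
	"github.com/moby/buildkit/identity"
	spb "github.com/moby/buildkit/sourcepolicy/pb"
)

// solveWithPolicy is a hypothetical wrapper around Client.Solve using the options added above.
func solveWithPolicy(ctx context.Context, c *client.Client, def *llb.Definition) (*client.SolveResponse, error) {
	opt := client.SolveOpt{
		Ref:          identity.NewID(), // explicit build ref; solve() generates one when empty
		Internal:     true,             // internal builds are not recorded in build history
		SourcePolicy: &spb.Policy{},    // forwarded on the SolveRequest; empty placeholder here
	}
	ch := make(chan *client.SolveStatus)
	go func() {
		for range ch { // discard progress updates in this sketch
		}
	}()
	return c.Solve(ctx, def, opt, ch)
}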
81 vendor/github.com/moby/buildkit/frontend/attestations/parse.go generated vendored
@@ -1,81 +0,0 @@
|
||||
package attestations
|
||||
|
||||
import (
|
||||
"encoding/csv"
|
||||
"strings"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
const (
|
||||
KeyTypeSbom = "sbom"
|
||||
KeyTypeProvenance = "provenance"
|
||||
)
|
||||
|
||||
const (
|
||||
defaultSBOMGenerator = "docker/buildkit-syft-scanner:stable-1"
|
||||
)
|
||||
|
||||
func Filter(v map[string]string) map[string]string {
|
||||
attests := make(map[string]string)
|
||||
for k, v := range v {
|
||||
if strings.HasPrefix(k, "attest:") {
|
||||
attests[k] = v
|
||||
continue
|
||||
}
|
||||
if strings.HasPrefix(k, "build-arg:BUILDKIT_ATTEST_") {
|
||||
attests[k] = v
|
||||
continue
|
||||
}
|
||||
}
|
||||
return attests
|
||||
}
|
||||
|
||||
func Validate(values map[string]map[string]string) (map[string]map[string]string, error) {
|
||||
for k := range values {
|
||||
if k != KeyTypeSbom && k != KeyTypeProvenance {
|
||||
return nil, errors.Errorf("unknown attestation type %q", k)
|
||||
}
|
||||
}
|
||||
return values, nil
|
||||
}
|
||||
|
||||
func Parse(values map[string]string) (map[string]map[string]string, error) {
|
||||
attests := make(map[string]string)
|
||||
for k, v := range values {
|
||||
if strings.HasPrefix(k, "attest:") {
|
||||
attests[strings.ToLower(strings.TrimPrefix(k, "attest:"))] = v
|
||||
continue
|
||||
}
|
||||
if strings.HasPrefix(k, "build-arg:BUILDKIT_ATTEST_") {
|
||||
attests[strings.ToLower(strings.TrimPrefix(k, "build-arg:BUILDKIT_ATTEST_"))] = v
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
out := make(map[string]map[string]string)
|
||||
for k, v := range attests {
|
||||
attrs := make(map[string]string)
|
||||
out[k] = attrs
|
||||
if k == KeyTypeSbom {
|
||||
attrs["generator"] = defaultSBOMGenerator
|
||||
}
|
||||
if v == "" {
|
||||
continue
|
||||
}
|
||||
csvReader := csv.NewReader(strings.NewReader(v))
|
||||
fields, err := csvReader.Read()
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "failed to parse %s", k)
|
||||
}
|
||||
for _, field := range fields {
|
||||
parts := strings.SplitN(field, "=", 2)
|
||||
if len(parts) != 2 {
|
||||
parts = append(parts, "")
|
||||
}
|
||||
attrs[parts[0]] = parts[1]
|
||||
}
|
||||
}
|
||||
|
||||
return Validate(out)
|
||||
}
|
2 vendor/github.com/moby/buildkit/frontend/gateway/client/client.go generated vendored
@@ -8,6 +8,7 @@ import (
"github.com/moby/buildkit/client/llb"
"github.com/moby/buildkit/solver/pb"
"github.com/moby/buildkit/solver/result"
spb "github.com/moby/buildkit/sourcepolicy/pb"
"github.com/moby/buildkit/util/apicaps"
digest "github.com/opencontainers/go-digest"
ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
@@ -128,6 +129,7 @@ type SolveRequest struct {
FrontendOpt map[string]string
FrontendInputs map[string]*pb.Definition
CacheImports []CacheOptionsEntry
SourcePolicies []*spb.Policy
}

type CacheOptionsEntry struct {
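On the gateway side, SolveRequest now carries a SourcePolicies slice. A sketch of a frontend passing a policy along with its solve; the policy value and the helper name are placeholders:

import (
	"context"

	"github.com/moby/buildkit/client/llb"
	gwclient "github.com/moby/buildkit/frontend/gateway/client"
	spb "github.com/moby/buildkit/sourcepolicy/pb"
)

// gatewaySolve is a hypothetical frontend call using the new SourcePolicies field.
func gatewaySolve(ctx context.Context, c gwclient.Client, def *llb.Definition, pol *spb.Policy) (*gwclient.Result, error) {
	return c.Solve(ctx, gwclient.SolveRequest{
		Definition:     def.ToPB(),
		SourcePolicies: []*spb.Policy{pol}, // new field added in this hunk
	})
}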
11 vendor/github.com/moby/buildkit/frontend/gateway/grpcclient/client.go generated vendored
@@ -382,6 +382,7 @@ func (c *grpcClient) Solve(ctx context.Context, creq client.SolveRequest) (res *
|
||||
AllowResultReturn: true,
|
||||
AllowResultArrayRef: true,
|
||||
CacheImports: cacheImports,
|
||||
SourcePolicies: creq.SourcePolicies,
|
||||
}
|
||||
|
||||
// backwards compatibility with inline return
|
||||
@@ -488,7 +489,15 @@ func (c *grpcClient) ResolveImageConfig(ctx context.Context, ref string, opt llb
|
||||
OSFeatures: platform.OSFeatures,
|
||||
}
|
||||
}
|
||||
resp, err := c.client.ResolveImageConfig(ctx, &pb.ResolveImageConfigRequest{Ref: ref, Platform: p, ResolveMode: opt.ResolveMode, LogName: opt.LogName, ResolverType: int32(opt.ResolverType), SessionID: opt.SessionID})
|
||||
resp, err := c.client.ResolveImageConfig(ctx, &pb.ResolveImageConfigRequest{
|
||||
ResolverType: int32(opt.ResolverType),
|
||||
Ref: ref,
|
||||
Platform: p,
|
||||
ResolveMode: opt.ResolveMode,
|
||||
LogName: opt.LogName,
|
||||
SessionID: opt.Store.SessionID,
|
||||
StoreID: opt.Store.StoreID,
|
||||
})
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
|
432 vendor/github.com/moby/buildkit/frontend/gateway/pb/gateway.pb.go generated vendored
@@ -11,7 +11,8 @@ import (
|
||||
proto "github.com/gogo/protobuf/proto"
|
||||
types1 "github.com/moby/buildkit/api/types"
|
||||
pb "github.com/moby/buildkit/solver/pb"
|
||||
pb1 "github.com/moby/buildkit/util/apicaps/pb"
|
||||
pb1 "github.com/moby/buildkit/sourcepolicy/pb"
|
||||
pb2 "github.com/moby/buildkit/util/apicaps/pb"
|
||||
github_com_opencontainers_go_digest "github.com/opencontainers/go-digest"
|
||||
types "github.com/tonistiigi/fsutil/types"
|
||||
grpc "google.golang.org/grpc"
|
||||
@@ -741,6 +742,7 @@ type ResolveImageConfigRequest struct {
|
||||
LogName string `protobuf:"bytes,4,opt,name=LogName,proto3" json:"LogName,omitempty"`
|
||||
ResolverType int32 `protobuf:"varint,5,opt,name=ResolverType,proto3" json:"ResolverType,omitempty"`
|
||||
SessionID string `protobuf:"bytes,6,opt,name=SessionID,proto3" json:"SessionID,omitempty"`
|
||||
StoreID string `protobuf:"bytes,7,opt,name=StoreID,proto3" json:"StoreID,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
@@ -821,6 +823,13 @@ func (m *ResolveImageConfigRequest) GetSessionID() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *ResolveImageConfigRequest) GetStoreID() string {
|
||||
if m != nil {
|
||||
return m.StoreID
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
type ResolveImageConfigResponse struct {
|
||||
Digest github_com_opencontainers_go_digest.Digest `protobuf:"bytes,1,opt,name=Digest,proto3,customtype=github.com/opencontainers/go-digest.Digest" json:"Digest"`
|
||||
Config []byte `protobuf:"bytes,2,opt,name=Config,proto3" json:"Config,omitempty"`
|
||||
@@ -885,6 +894,7 @@ type SolveRequest struct {
|
||||
// apicaps:CapFrontendInputs
|
||||
FrontendInputs map[string]*pb.Definition `protobuf:"bytes,13,rep,name=FrontendInputs,proto3" json:"FrontendInputs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
Evaluate bool `protobuf:"varint,14,opt,name=Evaluate,proto3" json:"Evaluate,omitempty"`
|
||||
SourcePolicies []*pb1.Policy `protobuf:"bytes,15,rep,name=SourcePolicies,proto3" json:"SourcePolicies,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
@@ -993,6 +1003,13 @@ func (m *SolveRequest) GetEvaluate() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (m *SolveRequest) GetSourcePolicies() []*pb1.Policy {
|
||||
if m != nil {
|
||||
return m.SourcePolicies
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// CacheOptionsEntry corresponds to the control.CacheOptionsEntry
|
||||
type CacheOptionsEntry struct {
|
||||
Type string `protobuf:"bytes,1,opt,name=Type,proto3" json:"Type,omitempty"`
|
||||
@@ -1609,8 +1626,8 @@ func (m *PingRequest) XXX_DiscardUnknown() {
|
||||
var xxx_messageInfo_PingRequest proto.InternalMessageInfo
|
||||
|
||||
type PongResponse struct {
|
||||
FrontendAPICaps []pb1.APICap `protobuf:"bytes,1,rep,name=FrontendAPICaps,proto3" json:"FrontendAPICaps"`
|
||||
LLBCaps []pb1.APICap `protobuf:"bytes,2,rep,name=LLBCaps,proto3" json:"LLBCaps"`
|
||||
FrontendAPICaps []pb2.APICap `protobuf:"bytes,1,rep,name=FrontendAPICaps,proto3" json:"FrontendAPICaps"`
|
||||
LLBCaps []pb2.APICap `protobuf:"bytes,2,rep,name=LLBCaps,proto3" json:"LLBCaps"`
|
||||
Workers []*types1.WorkerRecord `protobuf:"bytes,3,rep,name=Workers,proto3" json:"Workers,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
@@ -1650,14 +1667,14 @@ func (m *PongResponse) XXX_DiscardUnknown() {
|
||||
|
||||
var xxx_messageInfo_PongResponse proto.InternalMessageInfo
|
||||
|
||||
func (m *PongResponse) GetFrontendAPICaps() []pb1.APICap {
|
||||
func (m *PongResponse) GetFrontendAPICaps() []pb2.APICap {
|
||||
if m != nil {
|
||||
return m.FrontendAPICaps
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *PongResponse) GetLLBCaps() []pb1.APICap {
|
||||
func (m *PongResponse) GetLLBCaps() []pb2.APICap {
|
||||
if m != nil {
|
||||
return m.LLBCaps
|
||||
}
|
||||
@@ -2610,157 +2627,161 @@ func init() {
|
||||
func init() { proto.RegisterFile("gateway.proto", fileDescriptor_f1a937782ebbded5) }
|
||||
|
||||
var fileDescriptor_f1a937782ebbded5 = []byte{
|
||||
// 2397 bytes of a gzipped FileDescriptorProto
|
||||
// 2452 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x59, 0xcf, 0x6f, 0x1b, 0xc7,
|
||||
0xf5, 0xd7, 0x8a, 0x94, 0x48, 0x3e, 0xfe, 0x10, 0x3d, 0x71, 0xf2, 0x65, 0x16, 0x81, 0x23, 0xaf,
|
||||
0x13, 0x45, 0x56, 0x1c, 0xd2, 0x5f, 0xda, 0x81, 0x5c, 0xbb, 0x75, 0x62, 0xfd, 0x82, 0x14, 0x4b,
|
||||
0x36, 0x3b, 0x72, 0xe1, 0x22, 0x48, 0x81, 0xae, 0xb8, 0x43, 0x7a, 0xeb, 0xd5, 0xee, 0x76, 0x77,
|
||||
0x68, 0x59, 0xc9, 0xa5, 0xbd, 0x15, 0x39, 0xf5, 0xd4, 0x5b, 0x50, 0xa0, 0x05, 0x7a, 0x6e, 0x2f,
|
||||
0xbd, 0xb5, 0xe7, 0x00, 0xbd, 0xf4, 0xd2, 0x4b, 0x0f, 0x41, 0xe1, 0x3f, 0xa2, 0x40, 0x6f, 0xc5,
|
||||
0x9b, 0x99, 0x25, 0x87, 0x3f, 0xb4, 0x24, 0xeb, 0x13, 0x67, 0xde, 0xbc, 0x1f, 0x33, 0xef, 0xcd,
|
||||
0x7b, 0xef, 0x33, 0x4b, 0x28, 0x77, 0x6d, 0xce, 0xce, 0xec, 0xf3, 0x7a, 0x18, 0x05, 0x3c, 0x20,
|
||||
0x6f, 0x9f, 0x06, 0x27, 0xe7, 0xf5, 0x93, 0x9e, 0xeb, 0x39, 0xcf, 0x5d, 0x5e, 0x7f, 0xf1, 0xff,
|
||||
0xf5, 0x4e, 0x14, 0xf8, 0x9c, 0xf9, 0x8e, 0xf9, 0x51, 0xd7, 0xe5, 0xcf, 0x7a, 0x27, 0xf5, 0x76,
|
||||
0x70, 0xda, 0xe8, 0x06, 0xdd, 0xa0, 0x21, 0x24, 0x4e, 0x7a, 0x1d, 0x31, 0x13, 0x13, 0x31, 0x92,
|
||||
0x9a, 0xcc, 0xe6, 0x28, 0x7b, 0x37, 0x08, 0xba, 0x1e, 0xb3, 0x43, 0x37, 0x56, 0xc3, 0x46, 0x14,
|
||||
0xb6, 0x1b, 0x31, 0xb7, 0x79, 0x2f, 0x56, 0x32, 0x37, 0x34, 0x19, 0xdc, 0x48, 0x23, 0xd9, 0x48,
|
||||
0x23, 0x0e, 0xbc, 0x17, 0x2c, 0x6a, 0x84, 0x27, 0x8d, 0x20, 0x4c, 0xb8, 0x1b, 0x17, 0x72, 0xdb,
|
||||
0xa1, 0xdb, 0xe0, 0xe7, 0x21, 0x8b, 0x1b, 0x67, 0x41, 0xf4, 0x9c, 0x45, 0x4a, 0xe0, 0xd6, 0x85,
|
||||
0x02, 0x3d, 0xee, 0x7a, 0x28, 0xd5, 0xb6, 0xc3, 0x18, 0x8d, 0xe0, 0xaf, 0x12, 0xd2, 0x8f, 0xcd,
|
||||
0x03, 0xdf, 0x8d, 0xb9, 0xeb, 0x76, 0xdd, 0x46, 0x27, 0x16, 0x32, 0xd2, 0x0a, 0x1e, 0x42, 0xb2,
|
||||
0x5b, 0x7f, 0xcb, 0xc2, 0x32, 0x65, 0x71, 0xcf, 0xe3, 0x64, 0x0d, 0xca, 0x11, 0xeb, 0xec, 0xb0,
|
||||
0x30, 0x62, 0x6d, 0x9b, 0x33, 0xa7, 0x66, 0xac, 0x1a, 0xeb, 0x85, 0xfd, 0x05, 0x3a, 0x4c, 0x26,
|
||||
0x3f, 0x82, 0x4a, 0xc4, 0x3a, 0xb1, 0xc6, 0xb8, 0xb8, 0x6a, 0xac, 0x17, 0x9b, 0x1f, 0xd6, 0x2f,
|
||||
0x0c, 0x46, 0x9d, 0xb2, 0xce, 0x91, 0x1d, 0x0e, 0x44, 0xf6, 0x17, 0xe8, 0x88, 0x12, 0xd2, 0x84,
|
||||
0x4c, 0xc4, 0x3a, 0xb5, 0x8c, 0xd0, 0x75, 0x25, 0x5d, 0xd7, 0xfe, 0x02, 0x45, 0x66, 0xb2, 0x09,
|
||||
0x59, 0xd4, 0x52, 0xcb, 0x0a, 0xa1, 0xab, 0x53, 0x37, 0xb0, 0xbf, 0x40, 0x85, 0x00, 0x79, 0x08,
|
||||
0xf9, 0x53, 0xc6, 0x6d, 0xc7, 0xe6, 0x76, 0x0d, 0x56, 0x33, 0xeb, 0xc5, 0x66, 0x23, 0x55, 0x18,
|
||||
0x1d, 0x54, 0x3f, 0x52, 0x12, 0xbb, 0x3e, 0x8f, 0xce, 0x69, 0x5f, 0x01, 0x79, 0x0a, 0x25, 0x9b,
|
||||
0x73, 0x86, 0x5e, 0x75, 0x03, 0x3f, 0xae, 0x95, 0x84, 0xc2, 0x5b, 0xd3, 0x15, 0x3e, 0xd0, 0xa4,
|
||||
0xa4, 0xd2, 0x21, 0x45, 0xe6, 0x3d, 0x28, 0x0f, 0xd9, 0x24, 0x55, 0xc8, 0x3c, 0x67, 0xe7, 0x32,
|
||||
0x30, 0x14, 0x87, 0xe4, 0x32, 0x2c, 0xbd, 0xb0, 0xbd, 0x1e, 0x13, 0x31, 0x28, 0x51, 0x39, 0xb9,
|
||||
0xbb, 0x78, 0xc7, 0x30, 0x9f, 0xc1, 0xa5, 0x31, 0xfd, 0x13, 0x14, 0xfc, 0x40, 0x57, 0x50, 0x6c,
|
||||
0x7e, 0x90, 0xb2, 0x6b, 0x5d, 0x9d, 0x66, 0x69, 0x2b, 0x0f, 0xcb, 0x91, 0x38, 0x90, 0xf5, 0x1b,
|
||||
// … fileDescriptorGateway bytes elided (regenerated compressed descriptor data) …
}

// Reference imports to suppress errors if they are not otherwise used.
|
||||
@@ -3993,6 +4014,13 @@ func (m *ResolveImageConfigRequest) MarshalToSizedBuffer(dAtA []byte) (int, erro
|
||||
i -= len(m.XXX_unrecognized)
|
||||
copy(dAtA[i:], m.XXX_unrecognized)
|
||||
}
|
||||
if len(m.StoreID) > 0 {
|
||||
i -= len(m.StoreID)
|
||||
copy(dAtA[i:], m.StoreID)
|
||||
i = encodeVarintGateway(dAtA, i, uint64(len(m.StoreID)))
|
||||
i--
|
||||
dAtA[i] = 0x3a
|
||||
}
|
||||
if len(m.SessionID) > 0 {
|
||||
i -= len(m.SessionID)
|
||||
copy(dAtA[i:], m.SessionID)
|
||||
@@ -4106,6 +4134,20 @@ func (m *SolveRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
i -= len(m.XXX_unrecognized)
|
||||
copy(dAtA[i:], m.XXX_unrecognized)
|
||||
}
|
||||
if len(m.SourcePolicies) > 0 {
|
||||
for iNdEx := len(m.SourcePolicies) - 1; iNdEx >= 0; iNdEx-- {
|
||||
{
|
||||
size, err := m.SourcePolicies[iNdEx].MarshalToSizedBuffer(dAtA[:i])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
i -= size
|
||||
i = encodeVarintGateway(dAtA, i, uint64(size))
|
||||
}
|
||||
i--
|
||||
dAtA[i] = 0x7a
|
||||
}
|
||||
}
|
||||
if m.Evaluate {
|
||||
i--
|
||||
if m.Evaluate {
|
||||
@@ -5912,6 +5954,10 @@ func (m *ResolveImageConfigRequest) Size() (n int) {
|
||||
if l > 0 {
|
||||
n += 1 + l + sovGateway(uint64(l))
|
||||
}
|
||||
l = len(m.StoreID)
|
||||
if l > 0 {
|
||||
n += 1 + l + sovGateway(uint64(l))
|
||||
}
|
||||
if m.XXX_unrecognized != nil {
|
||||
n += len(m.XXX_unrecognized)
|
||||
}
|
||||
@@ -5995,6 +6041,12 @@ func (m *SolveRequest) Size() (n int) {
|
||||
if m.Evaluate {
|
||||
n += 2
|
||||
}
|
||||
if len(m.SourcePolicies) > 0 {
|
||||
for _, e := range m.SourcePolicies {
|
||||
l = e.Size()
|
||||
n += 1 + l + sovGateway(uint64(l))
|
||||
}
|
||||
}
|
||||
if m.XXX_unrecognized != nil {
|
||||
n += len(m.XXX_unrecognized)
|
||||
}
|
||||
@@ -8716,6 +8768,38 @@ func (m *ResolveImageConfigRequest) Unmarshal(dAtA []byte) error {
|
||||
}
|
||||
m.SessionID = string(dAtA[iNdEx:postIndex])
|
||||
iNdEx = postIndex
|
||||
case 7:
|
||||
if wireType != 2 {
|
||||
return fmt.Errorf("proto: wrong wireType = %d for field StoreID", wireType)
|
||||
}
|
||||
var stringLen uint64
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowGateway
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
stringLen |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
intStringLen := int(stringLen)
|
||||
if intStringLen < 0 {
|
||||
return ErrInvalidLengthGateway
|
||||
}
|
||||
postIndex := iNdEx + intStringLen
|
||||
if postIndex < 0 {
|
||||
return ErrInvalidLengthGateway
|
||||
}
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.StoreID = string(dAtA[iNdEx:postIndex])
|
||||
iNdEx = postIndex
|
||||
default:
|
||||
iNdEx = preIndex
|
||||
skippy, err := skipGateway(dAtA[iNdEx:])
|
||||
@@ -9356,6 +9440,40 @@ func (m *SolveRequest) Unmarshal(dAtA []byte) error {
|
||||
}
|
||||
}
|
||||
m.Evaluate = bool(v != 0)
|
||||
case 15:
|
||||
if wireType != 2 {
|
||||
return fmt.Errorf("proto: wrong wireType = %d for field SourcePolicies", wireType)
|
||||
}
|
||||
var msglen int
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowGateway
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
msglen |= int(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
if msglen < 0 {
|
||||
return ErrInvalidLengthGateway
|
||||
}
|
||||
postIndex := iNdEx + msglen
|
||||
if postIndex < 0 {
|
||||
return ErrInvalidLengthGateway
|
||||
}
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.SourcePolicies = append(m.SourcePolicies, &pb1.Policy{})
|
||||
if err := m.SourcePolicies[len(m.SourcePolicies)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
iNdEx = postIndex
|
||||
default:
|
||||
iNdEx = preIndex
|
||||
skippy, err := skipGateway(dAtA[iNdEx:])
|
||||
@@ -10709,7 +10827,7 @@ func (m *PongResponse) Unmarshal(dAtA []byte) error {
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.FrontendAPICaps = append(m.FrontendAPICaps, pb1.APICap{})
|
||||
m.FrontendAPICaps = append(m.FrontendAPICaps, pb2.APICap{})
|
||||
if err := m.FrontendAPICaps[len(m.FrontendAPICaps)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -10743,7 +10861,7 @@ func (m *PongResponse) Unmarshal(dAtA []byte) error {
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.LLBCaps = append(m.LLBCaps, pb1.APICap{})
|
||||
m.LLBCaps = append(m.LLBCaps, pb2.APICap{})
|
||||
if err := m.LLBCaps[len(m.LLBCaps)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
4 vendor/github.com/moby/buildkit/frontend/gateway/pb/gateway.proto generated vendored
@@ -8,6 +8,7 @@ import "github.com/moby/buildkit/solver/pb/ops.proto";
|
||||
import "github.com/moby/buildkit/api/types/worker.proto";
|
||||
import "github.com/moby/buildkit/util/apicaps/pb/caps.proto";
|
||||
import "github.com/tonistiigi/fsutil/types/stat.proto";
|
||||
import "github.com/moby/buildkit/sourcepolicy/pb/policy.proto";
|
||||
|
||||
|
||||
option (gogoproto.sizer_all) = true;
|
||||
@@ -122,6 +123,7 @@ message ResolveImageConfigRequest {
|
||||
string LogName = 4;
|
||||
int32 ResolverType = 5;
|
||||
string SessionID = 6;
|
||||
string StoreID = 7;
|
||||
}
|
||||
|
||||
message ResolveImageConfigResponse {
|
||||
@@ -148,6 +150,8 @@ message SolveRequest {
|
||||
map<string, pb.Definition> FrontendInputs = 13;
|
||||
|
||||
bool Evaluate = 14;
|
||||
|
||||
repeated moby.buildkit.v1.sourcepolicy.Policy SourcePolicies = 15;
|
||||
}
|
||||
|
||||
// CacheOptionsEntry corresponds to the control.CacheOptionsEntry
10 vendor/github.com/moby/buildkit/session/sshforward/copy.go generated vendored
@@ -14,16 +14,24 @@ type Stream interface {
|
||||
}
|
||||
|
||||
func Copy(ctx context.Context, conn io.ReadWriteCloser, stream Stream, closeStream func() error) error {
|
||||
defer conn.Close()
|
||||
g, ctx := errgroup.WithContext(ctx)
|
||||
|
||||
g.Go(func() (retErr error) {
|
||||
p := &BytesMessage{}
|
||||
for {
|
||||
if err := stream.RecvMsg(p); err != nil {
|
||||
conn.Close()
|
||||
if err == io.EOF {
|
||||
// indicates client performed CloseSend, but they may still be
|
||||
// reading data
|
||||
if conn, ok := conn.(interface {
|
||||
CloseWrite() error
|
||||
}); ok {
|
||||
conn.CloseWrite()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
conn.Close()
|
||||
return errors.WithStack(err)
|
||||
}
|
||||
select {
1 vendor/github.com/moby/buildkit/solver/pb/attr.go generated vendored
@@ -29,6 +29,7 @@ const AttrImageRecordType = "image.recordtype"
|
||||
const AttrImageLayerLimit = "image.layerlimit"
|
||||
|
||||
const AttrOCILayoutSessionID = "oci.session"
|
||||
const AttrOCILayoutStoreID = "oci.store"
|
||||
const AttrOCILayoutLayerLimit = "oci.layerlimit"
|
||||
|
||||
const AttrLocalDiffer = "local.differ"
24 vendor/github.com/moby/buildkit/solver/pb/caps.go generated vendored
@@ -35,9 +35,7 @@ const (
|
||||
CapSourceHTTPPerm apicaps.CapID = "source.http.perm"
|
||||
CapSourceHTTPUIDGID apicaps.CapID = "soruce.http.uidgid"
|
||||
|
||||
CapSourceOCILayout apicaps.CapID = "source.ocilayout"
|
||||
CapSourceOCILayoutSessionID apicaps.CapID = "source.ocilayout.sessionid"
|
||||
CapSourceOCILayoutLayerLimit apicaps.CapID = "source.ocilayout.layerlimit"
|
||||
CapSourceOCILayout apicaps.CapID = "source.ocilayout"
|
||||
|
||||
CapBuildOpLLBFileName apicaps.CapID = "source.buildop.llbfilename"
|
||||
|
||||
@@ -86,6 +84,8 @@ const (
|
||||
|
||||
// CapSourceDateEpoch is the capability to automatically handle the date epoch
|
||||
CapSourceDateEpoch apicaps.CapID = "exporter.sourcedateepoch"
|
||||
|
||||
CapSourcePolicy apicaps.CapID = "source.policy"
|
||||
)
|
||||
|
||||
func init() {
|
||||
@@ -221,18 +221,6 @@ func init() {
|
||||
Status: apicaps.CapStatusExperimental,
|
||||
})
|
||||
|
||||
Caps.Init(apicaps.Cap{
|
||||
ID: CapSourceOCILayoutSessionID,
|
||||
Enabled: true,
|
||||
Status: apicaps.CapStatusExperimental,
|
||||
})
|
||||
|
||||
Caps.Init(apicaps.Cap{
|
||||
ID: CapSourceOCILayoutLayerLimit,
|
||||
Enabled: true,
|
||||
Status: apicaps.CapStatusExperimental,
|
||||
})
|
||||
|
||||
Caps.Init(apicaps.Cap{
|
||||
ID: CapSourceHTTPUIDGID,
|
||||
Enabled: true,
|
||||
@@ -465,4 +453,10 @@ func init() {
|
||||
Enabled: true,
|
||||
Status: apicaps.CapStatusExperimental,
|
||||
})
|
||||
|
||||
Caps.Init(apicaps.Cap{
|
||||
ID: CapSourcePolicy,
|
||||
Enabled: true,
|
||||
Status: apicaps.CapStatusExperimental,
|
||||
})
|
||||
}
|
7 vendor/github.com/moby/buildkit/solver/result/attestation.go generated vendored
|
||||
|
||||
const (
|
||||
AttestationReasonKey = "reason"
|
||||
AttestationSBOMCore = "sbom-core"
|
||||
AttestationInlineOnlyKey = "inline-only"
|
||||
)
|
||||
|
||||
var (
|
||||
AttestationReasonSBOM = []byte("sbom")
|
||||
AttestationReasonProvenance = []byte("provenance")
|
||||
const (
|
||||
AttestationReasonSBOM = "sbom"
|
||||
AttestationReasonProvenance = "provenance"
|
||||
)
|
||||
|
||||
type Attestation[T any] struct {
|
||||
|
3
vendor/github.com/moby/buildkit/sourcepolicy/pb/generate.go
generated
vendored
Normal file
3
vendor/github.com/moby/buildkit/sourcepolicy/pb/generate.go
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
package moby_buildkit_v1_sourcepolicy //nolint:revive
|
||||
|
||||
//go:generate protoc -I=. --gogofaster_out=plugins=grpc:. policy.proto
62 vendor/github.com/moby/buildkit/sourcepolicy/pb/json.go generated vendored Normal file
@@ -0,0 +1,62 @@
|
||||
package moby_buildkit_v1_sourcepolicy //nolint:revive
|
||||
|
||||
import (
|
||||
"github.com/gogo/protobuf/proto"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// MarshalJSON implements json.Marshaler with custom marshaling for PolicyAction.
|
||||
// It gives the string form of the enum value.
|
||||
func (a PolicyAction) MarshalJSON() ([]byte, error) {
|
||||
return proto.MarshalJSONEnum(PolicyAction_name, int32(a))
|
||||
}
|
||||
|
||||
func (a *PolicyAction) UnmarshalJSON(data []byte) error {
|
||||
val, err := proto.UnmarshalJSONEnum(PolicyAction_value, data, a.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, ok := PolicyAction_name[val]
|
||||
if !ok {
|
||||
return errors.Errorf("invalid PolicyAction value: %d", val)
|
||||
}
|
||||
*a = PolicyAction(val)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a AttrMatch) MarshalJSON() ([]byte, error) {
|
||||
return proto.MarshalJSONEnum(AttrMatch_name, int32(a))
|
||||
}
|
||||
|
||||
func (a *AttrMatch) UnmarshalJSON(data []byte) error {
|
||||
val, err := proto.UnmarshalJSONEnum(AttrMatch_value, data, a.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, ok := AttrMatch_name[val]
|
||||
if !ok {
|
||||
return errors.Errorf("invalid AttrMatch value: %d", val)
|
||||
}
|
||||
*a = AttrMatch(val)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a MatchType) MarshalJSON() ([]byte, error) {
|
||||
return proto.MarshalJSONEnum(MatchType_name, int32(a))
|
||||
}
|
||||
|
||||
func (a *MatchType) UnmarshalJSON(data []byte) error {
|
||||
val, err := proto.UnmarshalJSONEnum(MatchType_value, data, a.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, ok := AttrMatch_name[val]
|
||||
if !ok {
|
||||
return errors.Errorf("invalid MatchType value: %d", val)
|
||||
}
|
||||
*a = MatchType(val)
|
||||
return nil
|
||||
}
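
Because these helpers marshal the enums to their string names, a policy round-trips through JSON in human-readable form. A minimal sketch of that behavior; the spb import alias is assumed and not part of this diff:

package main

import (
	"encoding/json"
	"fmt"

	spb "github.com/moby/buildkit/sourcepolicy/pb"
)

func main() {
	// The enum marshals to its name rather than its numeric value.
	b, err := json.Marshal(spb.PolicyAction_DENY)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // "DENY"

	// Unmarshaling accepts the string form and rejects unknown values.
	var a spb.PolicyAction
	if err := json.Unmarshal([]byte(`"CONVERT"`), &a); err != nil {
		panic(err)
	}
	fmt.Println(a == spb.PolicyAction_CONVERT) // true
}
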
1615 vendor/github.com/moby/buildkit/sourcepolicy/pb/policy.pb.go generated vendored Normal file
File diff suppressed because it is too large
64 vendor/github.com/moby/buildkit/sourcepolicy/pb/policy.proto generated vendored Normal file
@@ -0,0 +1,64 @@
|
||||
syntax = "proto3";
|
||||
|
||||
package moby.buildkit.v1.sourcepolicy;
|
||||
|
||||
// Rule defines the action(s) to take when a source is matched
|
||||
message Rule {
|
||||
PolicyAction action = 1;
|
||||
Selector selector = 2;
|
||||
Update updates = 3;
|
||||
}
|
||||
|
||||
// Update contains updates to the matched build step after rule is applied
|
||||
message Update {
|
||||
string identifier = 1;
|
||||
map<string, string> attrs = 2;
|
||||
}
|
||||
|
||||
// Selector identifies a source to match a policy to
|
||||
message Selector {
|
||||
string identifier = 1;
|
||||
// MatchType is the type of match to perform on the source identifier
|
||||
MatchType match_type = 2;
|
||||
repeated AttrConstraint constraints = 3;
|
||||
}
|
||||
|
||||
// PolicyAction defines the action to take when a source is matched
|
||||
enum PolicyAction {
|
||||
ALLOW = 0;
|
||||
DENY = 1;
|
||||
CONVERT = 2;
|
||||
}
|
||||
|
||||
// AttrConstraint defines a constraint on a source attribute
|
||||
message AttrConstraint {
|
||||
string key = 1;
|
||||
string value = 2;
|
||||
AttrMatch condition = 3;
|
||||
}
|
||||
|
||||
// AttrMatch defines the condition to match a source attribute
|
||||
enum AttrMatch {
|
||||
EQUAL = 0;
|
||||
NOTEQUAL = 1;
|
||||
MATCHES = 2;
|
||||
}
|
||||
|
||||
// Policy is the list of rules the policy engine will perform
|
||||
message Policy {
|
||||
int64 version = 1; // Currently 1
|
||||
repeated Rule rules = 2;
|
||||
}
|
||||
|
||||
// Match type is used to determine how a rule source is matched
|
||||
enum MatchType {
|
||||
// WILDCARD is the default matching type.
|
||||
// It may first attempt to do an exact match but will follow up with a wildcard match
|
||||
// For something more powerful, use REGEX
|
||||
WILDCARD = 0;
|
||||
// EXACT treats the source identifier as a literal string match
|
||||
EXACT = 1;
|
||||
// REGEX treats the source identifier as a regular expression
|
||||
// With regex matching you can also use match groups to replace values in the destination identifier
|
||||
REGEX = 2;
|
||||
}
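
Putting the messages above together, a policy that pins a single image reference could be built from the generated Go types roughly as follows. Field and constant names assume the usual gogofaster mapping of this proto (Action, Selector, Updates, MatchType_EXACT, and so on), and the digest is supplied by the caller; treat it as a sketch rather than the canonical API:

package example

import (
	spb "github.com/moby/buildkit/sourcepolicy/pb"
)

// pinAlpine returns a policy with a single CONVERT rule that rewrites
// alpine:latest to a digested reference supplied by the caller.
func pinAlpine(digest string) *spb.Policy {
	return &spb.Policy{
		Version: 1,
		Rules: []*spb.Rule{
			{
				Action: spb.PolicyAction_CONVERT,
				Selector: &spb.Selector{
					MatchType:  spb.MatchType_EXACT,
					Identifier: "docker-image://docker.io/library/alpine:latest",
				},
				Updates: &spb.Update{
					Identifier: "docker-image://docker.io/library/alpine:latest@" + digest,
				},
			},
		},
	}
}
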
21 vendor/github.com/moby/buildkit/util/contentutil/multiprovider.go generated vendored
@@ -6,6 +6,7 @@ import (
|
||||
|
||||
"github.com/containerd/containerd/content"
|
||||
"github.com/containerd/containerd/errdefs"
|
||||
"github.com/moby/buildkit/session"
|
||||
digest "github.com/opencontainers/go-digest"
|
||||
ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
"github.com/pkg/errors"
|
||||
@@ -90,3 +91,23 @@ func (mp *MultiProvider) Add(dgst digest.Digest, p content.Provider) {
|
||||
defer mp.mu.Unlock()
|
||||
mp.sub[dgst] = p
|
||||
}
|
||||
|
||||
func (mp *MultiProvider) UnlazySession(desc ocispecs.Descriptor) session.Group {
|
||||
type unlazySession interface {
|
||||
UnlazySession(ocispecs.Descriptor) session.Group
|
||||
}
|
||||
|
||||
mp.mu.RLock()
|
||||
if p, ok := mp.sub[desc.Digest]; ok {
|
||||
mp.mu.RUnlock()
|
||||
if cd, ok := p.(unlazySession); ok {
|
||||
return cd.UnlazySession(desc)
|
||||
}
|
||||
} else {
|
||||
mp.mu.RUnlock()
|
||||
}
|
||||
if cd, ok := mp.base.(unlazySession); ok {
|
||||
return cd.UnlazySession(desc)
|
||||
}
|
||||
return nil
|
||||
}
22 vendor/github.com/moby/buildkit/util/tracing/detect/detect.go generated vendored
@@ -73,21 +73,31 @@ func detectExporter() (sdktrace.SpanExporter, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func detect() error {
|
||||
tp = trace.NewNoopTracerProvider()
|
||||
|
||||
func getExporter() (sdktrace.SpanExporter, error) {
|
||||
exp, err := detectExporter()
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if exp != nil {
|
||||
exp = &threadSafeExporterWrapper{
|
||||
exporter: exp,
|
||||
}
|
||||
}
|
||||
|
||||
if Recorder != nil {
|
||||
Recorder.SpanExporter = exp
|
||||
exp = Recorder
|
||||
}
|
||||
return exp, nil
|
||||
}
|
||||
|
||||
if exp == nil {
|
||||
return nil
|
||||
func detect() error {
|
||||
tp = trace.NewNoopTracerProvider()
|
||||
|
||||
exp, err := getExporter()
|
||||
if err != nil || exp == nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// enable log with traceID when valid exporter
2 vendor/github.com/moby/buildkit/util/tracing/detect/otlp.go generated vendored
@@ -16,7 +16,7 @@ func init() {
|
||||
}
|
||||
|
||||
func otlpExporter() (sdktrace.SpanExporter, error) {
|
||||
set := os.Getenv("OTEL_TRACES_EXPORTER") == "otpl" || os.Getenv("OTEL_EXPORTER_OTLP_ENDPOINT") != "" || os.Getenv("OTEL_EXPORTER_OTLP_TRACES_ENDPOINT") != ""
|
||||
set := os.Getenv("OTEL_TRACES_EXPORTER") == "otlp" || os.Getenv("OTEL_EXPORTER_OTLP_ENDPOINT") != "" || os.Getenv("OTEL_EXPORTER_OTLP_TRACES_ENDPOINT") != ""
|
||||
if !set {
|
||||
return nil, nil
|
||||
}
8 vendor/github.com/moby/buildkit/util/tracing/detect/recorder.go generated vendored
@@ -43,10 +43,6 @@ func NewTraceRecorder() *TraceRecorder {
|
||||
}
|
||||
|
||||
func (r *TraceRecorder) Record(traceID trace.TraceID) func() []tracetest.SpanStub {
|
||||
if r.flush != nil {
|
||||
r.flush(context.TODO())
|
||||
}
|
||||
|
||||
r.mu.Lock()
|
||||
defer r.mu.Unlock()
|
||||
|
||||
@@ -55,6 +51,10 @@ func (r *TraceRecorder) Record(traceID trace.TraceID) func() []tracetest.SpanStu
|
||||
var spans []tracetest.SpanStub
|
||||
return func() []tracetest.SpanStub {
|
||||
once.Do(func() {
|
||||
if r.flush != nil {
|
||||
r.flush(context.TODO())
|
||||
}
|
||||
|
||||
r.mu.Lock()
|
||||
defer r.mu.Unlock()
26 vendor/github.com/moby/buildkit/util/tracing/detect/threadsafe.go generated vendored Normal file
@@ -0,0 +1,26 @@
|
||||
package detect
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sync"
|
||||
|
||||
sdktrace "go.opentelemetry.io/otel/sdk/trace"
|
||||
)
|
||||
|
||||
// threadSafeExporterWrapper wraps an OpenTelemetry SpanExporter and makes it thread-safe.
|
||||
type threadSafeExporterWrapper struct {
|
||||
mu sync.Mutex
|
||||
exporter sdktrace.SpanExporter
|
||||
}
|
||||
|
||||
func (tse *threadSafeExporterWrapper) ExportSpans(ctx context.Context, spans []sdktrace.ReadOnlySpan) error {
|
||||
tse.mu.Lock()
|
||||
defer tse.mu.Unlock()
|
||||
return tse.exporter.ExportSpans(ctx, spans)
|
||||
}
|
||||
|
||||
func (tse *threadSafeExporterWrapper) Shutdown(ctx context.Context) error {
|
||||
tse.mu.Lock()
|
||||
defer tse.mu.Unlock()
|
||||
return tse.exporter.Shutdown(ctx)
|
||||
}
202 vendor/github.com/moby/sys/mountinfo/LICENSE generated vendored Normal file
@@ -0,0 +1,202 @@
[Apache License, Version 2.0 — full standard license text added verbatim, 202 lines]
44 vendor/github.com/moby/sys/mountinfo/doc.go generated vendored Normal file
@@ -0,0 +1,44 @@
|
||||
// Package mountinfo provides a set of functions to retrieve information about OS mounts.
|
||||
//
|
||||
// Currently it supports Linux. For historical reasons, there is also some support for FreeBSD and OpenBSD,
|
||||
// and a shallow implementation for Windows, but in general this is a Linux-only package, so
|
||||
// the rest of the document only applies to Linux, unless explicitly specified otherwise.
|
||||
//
|
||||
// In Linux, information about mounts seen by the current process is available from
|
||||
// /proc/self/mountinfo. Note that due to mount namespaces, different processes can
|
||||
// see different mounts. A per-process mountinfo table is available from /proc/<PID>/mountinfo,
|
||||
// where <PID> is a numerical process identifier.
|
||||
//
|
||||
// In general, /proc is not a very efficient interface, and mountinfo is not an exception.
|
||||
// For example, there is no way to get information about a specific mount point (i.e. it
|
||||
// is all-or-nothing). This package tries to hide the /proc ineffectiveness by using
|
||||
// parse filters while reading mountinfo. A filter can skip some entries, or stop
|
||||
// processing the rest of the file once the needed information is found.
|
||||
//
|
||||
// For mountinfo filters that accept path as an argument, the path must be absolute,
|
||||
// having all symlinks resolved, and being cleaned (i.e. no extra slashes or dots).
|
||||
// One way to achieve all of the above is to employ filepath.Abs followed by
|
||||
// filepath.EvalSymlinks (the latter calls filepath.Clean on the result so
|
||||
// there is no need to explicitly call filepath.Clean).
|
||||
//
|
||||
// NOTE that in many cases there is no need to consult mountinfo at all. Here are some
|
||||
// of the cases where mountinfo should not be parsed:
|
||||
//
|
||||
// 1. Before performing a mount. Usually, this is not needed, but if required (say to
|
||||
// prevent over-mounts), to check whether a directory is mounted, call os.Lstat
|
||||
// on it and its parent directory, and compare their st.Sys().(*syscall.Stat_t).Dev
|
||||
// fields -- if they differ, then the directory is the mount point. NOTE this does
|
||||
// not work for bind mounts. Optionally, the filesystem type can also be checked
|
||||
// by calling unix.Statfs and checking the Type field (i.e. filesystem type).
|
||||
//
|
||||
// 2. After performing a mount. If there is no error returned, the mount succeeded;
|
||||
// checking the mount table for a new mount is redundant and expensive.
|
||||
//
|
||||
// 3. Before performing an unmount. It is more efficient to do an unmount and ignore
|
||||
// a specific error (EINVAL) which tells the directory is not mounted.
|
||||
//
|
||||
// 4. After performing an unmount. If there is no error returned, the unmount succeeded.
|
||||
//
|
||||
// 5. To find the mount point root of a specific directory. You can perform os.Stat()
|
||||
// on the directory and traverse up until the Dev field of a parent directory differs.
|
||||
package mountinfo
|
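
For orientation (not part of the vendored diff), here is a minimal sketch of how a caller might use this package, assuming a hypothetical /mnt/data path and the import path github.com/moby/sys/mountinfo:

package main

import (
	"fmt"
	"log"

	"github.com/moby/sys/mountinfo"
)

func main() {
	// Is /mnt/data a mount point? (hypothetical path)
	mounted, err := mountinfo.Mounted("/mnt/data")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("mounted:", mounted)

	// List all tmpfs mounts visible to this process, using a parse filter
	// so non-matching entries are skipped while reading /proc/self/mountinfo.
	tmpfs, err := mountinfo.GetMounts(mountinfo.FSTypeFilter("tmpfs"))
	if err != nil {
		log.Fatal(err)
	}
	for _, m := range tmpfs {
		fmt.Println(m.Mountpoint, m.Source, m.Options)
	}
}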
101
vendor/github.com/moby/sys/mountinfo/mounted_linux.go
generated
vendored
Normal file
@@ -0,0 +1,101 @@
package mountinfo

import (
	"os"
	"path/filepath"

	"golang.org/x/sys/unix"
)

// MountedFast is a method of detecting a mount point without reading
// mountinfo from procfs. A caller can only trust the result if no error
// and sure == true are returned. Otherwise, other methods (e.g. parsing
// /proc/mounts) have to be used. If unsure, use Mounted instead (which
// uses MountedFast, but falls back to parsing mountinfo if needed).
//
// If a non-existent path is specified, an appropriate error is returned.
// In case the caller is not interested in this particular error, it should
// be handled separately using e.g. errors.Is(err, fs.ErrNotExist).
//
// This function is only available on Linux. When available (since kernel
// v5.6), openat2(2) syscall is used to reliably detect all mounts. Otherwise,
// the implementation falls back to using stat(2), which can reliably detect
// normal (but not bind) mounts.
func MountedFast(path string) (mounted, sure bool, err error) {
	// Root is always mounted.
	if path == string(os.PathSeparator) {
		return true, true, nil
	}

	path, err = normalizePath(path)
	if err != nil {
		return false, false, err
	}
	mounted, sure, err = mountedFast(path)
	return
}

// mountedByOpenat2 is a method of detecting a mount that works for all kinds
// of mounts (incl. bind mounts), but requires a recent (v5.6+) linux kernel.
func mountedByOpenat2(path string) (bool, error) {
	dir, last := filepath.Split(path)

	dirfd, err := unix.Openat2(unix.AT_FDCWD, dir, &unix.OpenHow{
		Flags: unix.O_PATH | unix.O_CLOEXEC,
	})
	if err != nil {
		return false, &os.PathError{Op: "openat2", Path: dir, Err: err}
	}
	fd, err := unix.Openat2(dirfd, last, &unix.OpenHow{
		Flags:   unix.O_PATH | unix.O_CLOEXEC | unix.O_NOFOLLOW,
		Resolve: unix.RESOLVE_NO_XDEV,
	})
	_ = unix.Close(dirfd)
	switch err { //nolint:errorlint // unix errors are bare
	case nil: // definitely not a mount
		_ = unix.Close(fd)
		return false, nil
	case unix.EXDEV: // definitely a mount
		return true, nil
	}
	// not sure
	return false, &os.PathError{Op: "openat2", Path: path, Err: err}
}

// mountedFast is similar to MountedFast, except it expects a normalized path.
func mountedFast(path string) (mounted, sure bool, err error) {
	// Root is always mounted.
	if path == string(os.PathSeparator) {
		return true, true, nil
	}

	// Try a fast path, using openat2() with RESOLVE_NO_XDEV.
	mounted, err = mountedByOpenat2(path)
	if err == nil {
		return mounted, true, nil
	}

	// Another fast path: compare st.st_dev fields.
	mounted, err = mountedByStat(path)
	// This does not work for bind mounts, so false negative
	// is possible, therefore only trust if return is true.
	if mounted && err == nil {
		return true, true, nil
	}

	return
}

func mounted(path string) (bool, error) {
	path, err := normalizePath(path)
	if err != nil {
		return false, err
	}
	mounted, sure, err := mountedFast(path)
	if sure && err == nil {
		return mounted, nil
	}

	// Fallback to parsing mountinfo.
	return mountedByMountinfo(path)
}
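
A hedged sketch (not part of the diff) of the MountedFast contract described above: trust the result only when sure is true, otherwise fall back to Mounted. The path is hypothetical.

package main

import (
	"fmt"
	"log"

	"github.com/moby/sys/mountinfo"
)

func main() {
	path := "/var/lib/docker" // hypothetical path
	mounted, sure, err := mountinfo.MountedFast(path)
	if err != nil {
		log.Fatal(err)
	}
	if !sure {
		// openat2 was unavailable and the stat comparison was negative
		// (possible bind mount); do the full mountinfo parse instead.
		if mounted, err = mountinfo.Mounted(path); err != nil {
			log.Fatal(err)
		}
	}
	fmt.Println("mounted:", mounted)
}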
53
vendor/github.com/moby/sys/mountinfo/mounted_unix.go
generated
vendored
Normal file
@@ -0,0 +1,53 @@
//go:build linux || freebsd || openbsd || darwin
// +build linux freebsd openbsd darwin

package mountinfo

import (
	"os"
	"path/filepath"

	"golang.org/x/sys/unix"
)

func mountedByStat(path string) (bool, error) {
	var st unix.Stat_t

	if err := unix.Lstat(path, &st); err != nil {
		return false, &os.PathError{Op: "stat", Path: path, Err: err}
	}
	dev := st.Dev
	parent := filepath.Dir(path)
	if err := unix.Lstat(parent, &st); err != nil {
		return false, &os.PathError{Op: "stat", Path: parent, Err: err}
	}
	if dev != st.Dev {
		// Device differs from that of parent,
		// so definitely a mount point.
		return true, nil
	}
	// NB: this does not detect bind mounts on Linux.
	return false, nil
}

func normalizePath(path string) (realPath string, err error) {
	if realPath, err = filepath.Abs(path); err != nil {
		return "", err
	}
	if realPath, err = filepath.EvalSymlinks(realPath); err != nil {
		return "", err
	}
	if _, err := os.Stat(realPath); err != nil {
		return "", err
	}
	return realPath, nil
}

func mountedByMountinfo(path string) (bool, error) {
	entries, err := GetMounts(SingleEntryFilter(path))
	if err != nil {
		return false, err
	}

	return len(entries) > 0, nil
}
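
normalizePath above is unexported, but it matches the preparation that doc.go asks callers to do before handing a path to the path-based filters (filepath.Abs followed by filepath.EvalSymlinks). A small sketch, not part of the diff, with a hypothetical relative path:

package main

import (
	"fmt"
	"log"
	"path/filepath"

	"github.com/moby/sys/mountinfo"
)

func main() {
	abs, err := filepath.Abs("./data") // hypothetical input
	if err != nil {
		log.Fatal(err)
	}
	resolved, err := filepath.EvalSymlinks(abs) // also cleans the result
	if err != nil {
		log.Fatal(err)
	}
	mounts, err := mountinfo.GetMounts(mountinfo.PrefixFilter(resolved))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(len(mounts), "mounts under", resolved)
}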
67
vendor/github.com/moby/sys/mountinfo/mountinfo.go
generated
vendored
Normal file
@@ -0,0 +1,67 @@
package mountinfo

import (
	"os"
)

// GetMounts retrieves a list of mounts for the current running process,
// with an optional filter applied (use nil for no filter).
func GetMounts(f FilterFunc) ([]*Info, error) {
	return parseMountTable(f)
}

// Mounted determines if a specified path is a mount point. In case of any
// error, false (and an error) is returned.
//
// If a non-existent path is specified, an appropriate error is returned.
// In case the caller is not interested in this particular error, it should
// be handled separately using e.g. errors.Is(err, fs.ErrNotExist).
func Mounted(path string) (bool, error) {
	// root is always mounted
	if path == string(os.PathSeparator) {
		return true, nil
	}
	return mounted(path)
}

// Info reveals information about a particular mounted filesystem. This
// struct is populated from the content in the /proc/<pid>/mountinfo file.
type Info struct {
	// ID is a unique identifier of the mount (may be reused after umount).
	ID int

	// Parent is the ID of the parent mount (or of self for the root
	// of this mount namespace's mount tree).
	Parent int

	// Major and Minor are the major and the minor components of the Dev
	// field of unix.Stat_t structure returned by unix.*Stat calls for
	// files on this filesystem.
	Major, Minor int

	// Root is the pathname of the directory in the filesystem which forms
	// the root of this mount.
	Root string

	// Mountpoint is the pathname of the mount point relative to the
	// process's root directory.
	Mountpoint string

	// Options is a comma-separated list of mount options.
	Options string

	// Optional are zero or more fields of the form "tag[:value]",
	// separated by a space. Currently, the possible optional fields are
	// "shared", "master", "propagate_from", and "unbindable". For more
	// information, see mount_namespaces(7) Linux man page.
	Optional string

	// FSType is the filesystem type in the form "type[.subtype]".
	FSType string

	// Source is filesystem-specific information, or "none".
	Source string

	// VFSOptions is a comma-separated list of superblock options.
	VFSOptions string
}
56
vendor/github.com/moby/sys/mountinfo/mountinfo_bsd.go
generated
vendored
Normal file
@@ -0,0 +1,56 @@
//go:build freebsd || openbsd || darwin
// +build freebsd openbsd darwin

package mountinfo

import "golang.org/x/sys/unix"

// parseMountTable returns information about mounted filesystems
func parseMountTable(filter FilterFunc) ([]*Info, error) {
	count, err := unix.Getfsstat(nil, unix.MNT_WAIT)
	if err != nil {
		return nil, err
	}

	entries := make([]unix.Statfs_t, count)
	_, err = unix.Getfsstat(entries, unix.MNT_WAIT)
	if err != nil {
		return nil, err
	}

	var out []*Info
	for _, entry := range entries {
		var skip, stop bool
		mountinfo := getMountinfo(&entry)

		if filter != nil {
			// filter out entries we're not interested in
			skip, stop = filter(mountinfo)
			if skip {
				continue
			}
		}

		out = append(out, mountinfo)
		if stop {
			break
		}
	}
	return out, nil
}

func mounted(path string) (bool, error) {
	path, err := normalizePath(path)
	if err != nil {
		return false, err
	}
	// Fast path: compare st.st_dev fields.
	// This should always work for FreeBSD and OpenBSD.
	mounted, err := mountedByStat(path)
	if err == nil {
		return mounted, nil
	}

	// Fallback to parsing mountinfo
	return mountedByMountinfo(path)
}
63
vendor/github.com/moby/sys/mountinfo/mountinfo_filters.go
generated
vendored
Normal file
@@ -0,0 +1,63 @@
package mountinfo

import "strings"

// FilterFunc is a type defining a callback function for GetMount(),
// used to filter out mountinfo entries we're not interested in,
// and/or stop further processing if we found what we wanted.
//
// It takes a pointer to the Info struct (fully populated with all available
// fields on the GOOS platform), and returns two booleans:
//
// skip: true if the entry should be skipped;
//
// stop: true if parsing should be stopped after the entry.
type FilterFunc func(*Info) (skip, stop bool)

// PrefixFilter discards all entries whose mount points do not start with, or
// are equal to the path specified in prefix. The prefix path must be absolute,
// have all symlinks resolved, and cleaned (i.e. no extra slashes or dots).
//
// PrefixFilter treats prefix as a path, not a partial prefix, which means that
// given "/foo", "/foo/bar" and "/foobar" entries, PrefixFilter("/foo") returns
// "/foo" and "/foo/bar", and discards "/foobar".
func PrefixFilter(prefix string) FilterFunc {
	return func(m *Info) (bool, bool) {
		skip := !strings.HasPrefix(m.Mountpoint+"/", prefix+"/")
		return skip, false
	}
}

// SingleEntryFilter looks for a specific entry.
func SingleEntryFilter(mp string) FilterFunc {
	return func(m *Info) (bool, bool) {
		if m.Mountpoint == mp {
			return false, true // don't skip, stop now
		}
		return true, false // skip, keep going
	}
}

// ParentsFilter returns all entries whose mount points
// can be parents of a path specified, discarding others.
//
// For example, given /var/lib/docker/something, entries
// like /var/lib/docker, /var and / are returned.
func ParentsFilter(path string) FilterFunc {
	return func(m *Info) (bool, bool) {
		skip := !strings.HasPrefix(path, m.Mountpoint)
		return skip, false
	}
}

// FSTypeFilter returns all entries that match provided fstype(s).
func FSTypeFilter(fstype ...string) FilterFunc {
	return func(m *Info) (bool, bool) {
		for _, t := range fstype {
			if m.FSType == t {
				return false, false // don't skip, keep going
			}
		}
		return true, false // skip, keep going
	}
}
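
The built-in filters cover the common cases; below is a brief sketch (not part of the vendored files) of a custom FilterFunc that keeps only overlay mounts under a hypothetical directory:

package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/moby/sys/mountinfo"
)

// overlayUnder keeps entries whose FSType is "overlay" and whose mount point
// lies at or under dir (dir must be absolute, resolved, and cleaned).
// It never asks the parser to stop early.
func overlayUnder(dir string) mountinfo.FilterFunc {
	return func(m *mountinfo.Info) (skip, stop bool) {
		under := strings.HasPrefix(m.Mountpoint+"/", dir+"/")
		return !(under && m.FSType == "overlay"), false
	}
}

func main() {
	mounts, err := mountinfo.GetMounts(overlayUnder("/var/lib/docker")) // hypothetical dir
	if err != nil {
		log.Fatal(err)
	}
	for _, m := range mounts {
		fmt.Println(m.Mountpoint)
	}
}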
14
vendor/github.com/moby/sys/mountinfo/mountinfo_freebsdlike.go
generated
vendored
Normal file
@@ -0,0 +1,14 @@
//go:build freebsd || darwin
// +build freebsd darwin

package mountinfo

import "golang.org/x/sys/unix"

func getMountinfo(entry *unix.Statfs_t) *Info {
	return &Info{
		Mountpoint: unix.ByteSliceToString(entry.Mntonname[:]),
		FSType:     unix.ByteSliceToString(entry.Fstypename[:]),
		Source:     unix.ByteSliceToString(entry.Mntfromname[:]),
	}
}
214
vendor/github.com/moby/sys/mountinfo/mountinfo_linux.go
generated
vendored
Normal file
@@ -0,0 +1,214 @@
package mountinfo

import (
	"bufio"
	"fmt"
	"io"
	"os"
	"strconv"
	"strings"
)

// GetMountsFromReader retrieves a list of mounts from the
// reader provided, with an optional filter applied (use nil
// for no filter). This can be useful in tests or benchmarks
// that provide fake mountinfo data, or when a source other
// than /proc/self/mountinfo needs to be read from.
//
// This function is Linux-specific.
func GetMountsFromReader(r io.Reader, filter FilterFunc) ([]*Info, error) {
	s := bufio.NewScanner(r)
	out := []*Info{}
	for s.Scan() {
		var err error

		/*
		   See http://man7.org/linux/man-pages/man5/proc.5.html

		   36 35 98:0 /mnt1 /mnt2 rw,noatime master:1 - ext3 /dev/root rw,errors=continue
		   (1)(2)(3)   (4)   (5)      (6)      (7)   (8) (9)   (10)         (11)

		   (1) mount ID: unique identifier of the mount (may be reused after umount)
		   (2) parent ID: ID of parent (or of self for the top of the mount tree)
		   (3) major:minor: value of st_dev for files on filesystem
		   (4) root: root of the mount within the filesystem
		   (5) mount point: mount point relative to the process's root
		   (6) mount options: per mount options
		   (7) optional fields: zero or more fields of the form "tag[:value]"
		   (8) separator: marks the end of the optional fields
		   (9) filesystem type: name of filesystem of the form "type[.subtype]"
		   (10) mount source: filesystem specific information or "none"
		   (11) super options: per super block options

		   In other words, we have:
		    * 6 mandatory fields (1)..(6)
		    * 0 or more optional fields (7)
		    * a separator field (8)
		    * 3 mandatory fields (9)..(11)
		*/

		text := s.Text()
		fields := strings.Split(text, " ")
		numFields := len(fields)
		if numFields < 10 {
			// should be at least 10 fields
			return nil, fmt.Errorf("parsing '%s' failed: not enough fields (%d)", text, numFields)
		}

		// separator field
		sepIdx := numFields - 4
		// In Linux <= 3.9 mounting a cifs with spaces in a share
		// name (like "//srv/My Docs") _may_ end up having a space
		// in the last field of mountinfo (like "unc=//serv/My Docs").
		// Since kernel 3.10-rc1, cifs option "unc=" is ignored,
		// so spaces should not appear.
		//
		// Check for a separator, and work around the spaces bug
		for fields[sepIdx] != "-" {
			sepIdx--
			if sepIdx == 5 {
				return nil, fmt.Errorf("parsing '%s' failed: missing - separator", text)
			}
		}

		p := &Info{}

		p.Mountpoint, err = unescape(fields[4])
		if err != nil {
			return nil, fmt.Errorf("parsing '%s' failed: mount point: %w", fields[4], err)
		}
		p.FSType, err = unescape(fields[sepIdx+1])
		if err != nil {
			return nil, fmt.Errorf("parsing '%s' failed: fstype: %w", fields[sepIdx+1], err)
		}
		p.Source, err = unescape(fields[sepIdx+2])
		if err != nil {
			return nil, fmt.Errorf("parsing '%s' failed: source: %w", fields[sepIdx+2], err)
		}
		p.VFSOptions = fields[sepIdx+3]

		// ignore any numbers parsing errors, as there should not be any
		p.ID, _ = strconv.Atoi(fields[0])
		p.Parent, _ = strconv.Atoi(fields[1])
		mm := strings.SplitN(fields[2], ":", 3)
		if len(mm) != 2 {
			return nil, fmt.Errorf("parsing '%s' failed: unexpected major:minor pair %s", text, mm)
		}
		p.Major, _ = strconv.Atoi(mm[0])
		p.Minor, _ = strconv.Atoi(mm[1])

		p.Root, err = unescape(fields[3])
		if err != nil {
			return nil, fmt.Errorf("parsing '%s' failed: root: %w", fields[3], err)
		}

		p.Options = fields[5]

		// zero or more optional fields
		p.Optional = strings.Join(fields[6:sepIdx], " ")

		// Run the filter after parsing all fields.
		var skip, stop bool
		if filter != nil {
			skip, stop = filter(p)
			if skip {
				continue
			}
		}

		out = append(out, p)
		if stop {
			break
		}
	}
	if err := s.Err(); err != nil {
		return nil, err
	}
	return out, nil
}

func parseMountTable(filter FilterFunc) ([]*Info, error) {
	f, err := os.Open("/proc/self/mountinfo")
	if err != nil {
		return nil, err
	}
	defer f.Close()

	return GetMountsFromReader(f, filter)
}

// PidMountInfo retrieves the list of mounts from a given process' mount
// namespace. Unless there is a need to get mounts from a mount namespace
// different from that of a calling process, use GetMounts.
//
// This function is Linux-specific.
//
// Deprecated: this will be removed before v1; use GetMountsFromReader with
// opened /proc/<pid>/mountinfo as an argument instead.
func PidMountInfo(pid int) ([]*Info, error) {
	f, err := os.Open(fmt.Sprintf("/proc/%d/mountinfo", pid))
	if err != nil {
		return nil, err
	}
	defer f.Close()

	return GetMountsFromReader(f, nil)
}

// A few specific characters in mountinfo path entries (root and mountpoint)
// are escaped using a backslash followed by a character's ascii code in octal.
//
// space -- as \040
// tab (aka \t) -- as \011
// newline (aka \n) -- as \012
// backslash (aka \\) -- as \134
//
// This function converts path from mountinfo back, i.e. it unescapes the above sequences.
func unescape(path string) (string, error) {
	// try to avoid copying
	if strings.IndexByte(path, '\\') == -1 {
		return path, nil
	}

	// The following code is UTF-8 transparent as it only looks for some
	// specific characters (backslash and 0..7) with values < utf8.RuneSelf,
	// and everything else is passed through as is.
	buf := make([]byte, len(path))
	bufLen := 0
	for i := 0; i < len(path); i++ {
		if path[i] != '\\' {
			buf[bufLen] = path[i]
			bufLen++
			continue
		}
		s := path[i:]
		if len(s) < 4 {
			// too short
			return "", fmt.Errorf("bad escape sequence %q: too short", s)
		}
		c := s[1]
		switch c {
		case '0', '1', '2', '3', '4', '5', '6', '7':
			v := c - '0'
			for j := 2; j < 4; j++ { // one digit already; two more
				if s[j] < '0' || s[j] > '7' {
					return "", fmt.Errorf("bad escape sequence %q: not a digit", s[:3])
				}
				x := s[j] - '0'
				v = (v << 3) | x
			}
			if v > 255 {
				return "", fmt.Errorf("bad escape sequence %q: out of range" + s[:3])
			}
			buf[bufLen] = v
			bufLen++
			i += 3
			continue
		default:
			return "", fmt.Errorf("bad escape sequence %q: not a digit" + s[:3])

		}
	}

	return string(buf[:bufLen]), nil
}
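
The doc comment on GetMountsFromReader mentions tests that feed fake data; a small sketch (not part of the diff), reusing the sample mountinfo line from the comment above:

package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/moby/sys/mountinfo"
)

func main() {
	fake := "36 35 98:0 /mnt1 /mnt2 rw,noatime master:1 - ext3 /dev/root rw,errors=continue\n"
	mounts, err := mountinfo.GetMountsFromReader(strings.NewReader(fake), nil)
	if err != nil {
		log.Fatal(err)
	}
	m := mounts[0]
	fmt.Println(m.ID, m.Parent, m.Major, m.Minor) // 36 35 98 0
	fmt.Println(m.Root, m.Mountpoint, m.FSType)   // /mnt1 /mnt2 ext3
}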
11
vendor/github.com/moby/sys/mountinfo/mountinfo_openbsd.go
generated
vendored
Normal file
@@ -0,0 +1,11 @@
package mountinfo

import "golang.org/x/sys/unix"

func getMountinfo(entry *unix.Statfs_t) *Info {
	return &Info{
		Mountpoint: unix.ByteSliceToString(entry.F_mntonname[:]),
		FSType:     unix.ByteSliceToString(entry.F_fstypename[:]),
		Source:     unix.ByteSliceToString(entry.F_mntfromname[:]),
	}
}
19
vendor/github.com/moby/sys/mountinfo/mountinfo_unsupported.go
generated
vendored
Normal file
@@ -0,0 +1,19 @@
//go:build !windows && !linux && !freebsd && !openbsd && !darwin
// +build !windows,!linux,!freebsd,!openbsd,!darwin

package mountinfo

import (
	"fmt"
	"runtime"
)

var errNotImplemented = fmt.Errorf("not implemented on %s/%s", runtime.GOOS, runtime.GOARCH)

func parseMountTable(_ FilterFunc) ([]*Info, error) {
	return nil, errNotImplemented
}

func mounted(path string) (bool, error) {
	return false, errNotImplemented
}
10
vendor/github.com/moby/sys/mountinfo/mountinfo_windows.go
generated
vendored
Normal file
@@ -0,0 +1,10 @@
package mountinfo

func parseMountTable(_ FilterFunc) ([]*Info, error) {
	// Do NOT return an error!
	return nil, nil
}

func mounted(_ string) (bool, error) {
	return false, nil
}
2
vendor/github.com/tonistiigi/fsutil/send.go
generated
vendored
@@ -135,7 +135,7 @@ func (s *sender) sendFile(h *sendHandle) error {
		defer f.Close()
		buf := bufPool.Get().(*[]byte)
		defer bufPool.Put(buf)
		if _, err := io.CopyBuffer(&fileSender{sender: s, id: h.id}, f, *buf); err != nil {
		if _, err := io.CopyBuffer(&fileSender{sender: s, id: h.id}, struct{ io.Reader }{f}, *buf); err != nil {
			return err
		}
	}
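
As a reading aid (an assumption about intent, not stated in the diff): wrapping the source in struct{ io.Reader } hides any WriterTo method it may expose, and io.CopyBuffer is documented to skip the supplied buffer only when src implements WriterTo or dst implements ReaderFrom, so the wrapped copy goes through the pooled buffer. A standalone sketch of the same idiom, with a hypothetical source file:

package main

import (
	"bytes"
	"io"
	"log"
	"os"
)

func main() {
	f, err := os.Open("/etc/hostname") // hypothetical source file
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	var dst bytes.Buffer
	buf := make([]byte, 32*1024)
	// Wrapping both sides in anonymous structs exposes only Read/Write,
	// hiding WriterTo/ReaderFrom, so io.CopyBuffer must copy through buf.
	if _, err := io.CopyBuffer(struct{ io.Writer }{&dst}, struct{ io.Reader }{f}, buf); err != nil {
		log.Fatal(err)
	}
	log.Printf("copied %d bytes", dst.Len())
}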
17
vendor/modules.txt
vendored
@@ -150,7 +150,7 @@ github.com/compose-spec/compose-go/types
# github.com/containerd/console v1.0.3
## explicit; go 1.13
github.com/containerd/console
# github.com/containerd/containerd v1.6.11
# github.com/containerd/containerd v1.6.16-0.20230124210447-1709cfe273d9
## explicit; go 1.17
github.com/containerd/containerd/api/services/content/v1
github.com/containerd/containerd/archive/compression
@@ -194,7 +194,7 @@ github.com/davecgh/go-spew/spew
## explicit; go 1.18
github.com/distribution/distribution/v3/digestset
github.com/distribution/distribution/v3/reference
# github.com/docker/cli v23.0.0-beta.1+incompatible
# github.com/docker/cli v23.0.0-rc.1+incompatible
## explicit
github.com/docker/cli/cli
github.com/docker/cli/cli-plugins/manager
@@ -219,7 +219,7 @@ github.com/docker/cli/cli/streams
github.com/docker/cli/cli/trust
github.com/docker/cli/cli/version
github.com/docker/cli/opts
# github.com/docker/cli-docs-tool v0.5.0
# github.com/docker/cli-docs-tool v0.5.1
## explicit; go 1.18
github.com/docker/cli-docs-tool
github.com/docker/cli-docs-tool/annotation
@@ -241,7 +241,7 @@ github.com/docker/distribution/registry/client/transport
github.com/docker/distribution/registry/storage/cache
github.com/docker/distribution/registry/storage/cache/memory
github.com/docker/distribution/uuid
# github.com/docker/docker v23.0.0-beta.1+incompatible
# github.com/docker/docker v23.0.0-rc.1+incompatible
## explicit
github.com/docker/docker/api
github.com/docker/docker/api/types
@@ -433,7 +433,7 @@ github.com/mitchellh/go-wordwrap
# github.com/mitchellh/mapstructure v1.5.0
## explicit; go 1.14
github.com/mitchellh/mapstructure
# github.com/moby/buildkit v0.11.0-rc1.0.20221213193744-862b22d7e7cf
# github.com/moby/buildkit v0.11.2
## explicit; go 1.18
github.com/moby/buildkit/api/services/control
github.com/moby/buildkit/api/types
@@ -448,7 +448,6 @@ github.com/moby/buildkit/client/ociindex
github.com/moby/buildkit/cmd/buildkitd/config
github.com/moby/buildkit/exporter/containerimage/exptypes
github.com/moby/buildkit/exporter/containerimage/image
github.com/moby/buildkit/frontend/attestations
github.com/moby/buildkit/frontend/gateway/client
github.com/moby/buildkit/frontend/gateway/grpcclient
github.com/moby/buildkit/frontend/gateway/pb
@@ -472,6 +471,7 @@ github.com/moby/buildkit/solver/errdefs
github.com/moby/buildkit/solver/pb
github.com/moby/buildkit/solver/result
github.com/moby/buildkit/source/types
github.com/moby/buildkit/sourcepolicy/pb
github.com/moby/buildkit/util/apicaps
github.com/moby/buildkit/util/apicaps/pb
github.com/moby/buildkit/util/appcontext
@@ -511,6 +511,9 @@ github.com/moby/patternmatcher
## explicit; go 1.13
github.com/moby/spdystream
github.com/moby/spdystream/spdy
# github.com/moby/sys/mountinfo v0.6.2
## explicit; go 1.16
github.com/moby/sys/mountinfo
# github.com/moby/sys/sequential v0.5.0
## explicit; go 1.17
github.com/moby/sys/sequential
@@ -603,7 +606,7 @@ github.com/theupdateframework/notary/tuf/data
github.com/theupdateframework/notary/tuf/signed
github.com/theupdateframework/notary/tuf/utils
github.com/theupdateframework/notary/tuf/validation
# github.com/tonistiigi/fsutil v0.0.0-20221114235510-0127568185cf
# github.com/tonistiigi/fsutil v0.0.0-20230105215944-fb433841cbfa
## explicit; go 1.18
github.com/tonistiigi/fsutil
github.com/tonistiigi/fsutil/types