vendor: update buildkit v0.14-dev version 549891b

Brings in formatter for lint requests.

Signed-off-by: Tonis Tiigi <tonistiigi@gmail.com>
Tonis Tiigi
2024-04-11 07:49:31 -07:00
parent 3e90cc4b84
commit da3435ed3a
107 changed files with 6830 additions and 2446 deletions

View File

@ -471,6 +471,7 @@ func ociIndexRecord(manifests []ocispec.Descriptor) tarRecord {
Versioned: ocispecs.Versioned{
SchemaVersion: 2,
},
MediaType: ocispec.MediaTypeImageIndex,
Manifests: manifests,
}

View File

@ -23,7 +23,7 @@ var (
Package = "github.com/containerd/containerd"
// Version holds the complete version number. Filled in at linking time.
Version = "1.7.14+unknown"
Version = "1.7.15+unknown"
// Revision is filled with the VCS (e.g. git) revision being used to build
// the program at linking time.

View File

@ -56,6 +56,7 @@ type Unmarshaler struct {
// implement JSONPBMarshaler so that the custom format can be produced.
//
// The JSON unmarshaling must follow the JSON to proto specification:
//
// https://developers.google.com/protocol-buffers/docs/proto3#json
//
// Deprecated: Custom types should implement protobuf reflection instead.

View File

@ -55,6 +55,7 @@ type Marshaler struct {
// implement JSONPBUnmarshaler so that the custom format can be parsed.
//
// The JSON marshaling must follow the proto to JSON specification:
//
// https://developers.google.com/protocol-buffers/docs/proto3#json
//
// Deprecated: Custom types should implement protobuf reflection instead.

View File

@ -5,17 +5,21 @@
// protoc-gen-go is a plugin for the Google protocol buffer compiler to generate
// Go code. Install it by building this program and making it accessible within
// your PATH with the name:
//
// protoc-gen-go
//
// The 'go' suffix becomes part of the argument for the protocol compiler,
// such that it can be invoked as:
//
// protoc --go_out=paths=source_relative:. path/to/file.proto
//
// This generates Go bindings for the protocol buffer defined by file.proto.
// With that input, the output will be written to:
//
// path/to/file.pb.go
//
// See the README and documentation for protocol buffers to learn more:
//
// https://developers.google.com/protocol-buffers/
package main

View File

@ -127,9 +127,10 @@ func Is(any *anypb.Any, m proto.Message) bool {
// The allocated message is stored in the embedded proto.Message.
//
// Example:
// var x ptypes.DynamicAny
// if err := ptypes.UnmarshalAny(a, &x); err != nil { ... }
// fmt.Printf("unmarshaled message: %v", x.Message)
//
// var x ptypes.DynamicAny
// if err := ptypes.UnmarshalAny(a, &x); err != nil { ... }
// fmt.Printf("unmarshaled message: %v", x.Message)
//
// Deprecated: Use the any.UnmarshalNew method instead to unmarshal
// the any message contents into a new instance of the underlying message.

View File

@ -1,204 +0,0 @@
# Created by .ignore support plugin (hsz.mobi)
### Go template
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
*.exe
*.test
*.prof
### Windows template
# Windows image file caches
Thumbs.db
ehthumbs.db
# Folder config file
Desktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msm
*.msp
# Windows shortcuts
*.lnk
### Kate template
# Swap Files #
.*.kate-swp
.swp.*
### SublimeText template
# cache files for sublime text
*.tmlanguage.cache
*.tmPreferences.cache
*.stTheme.cache
# workspace files are user-specific
*.sublime-workspace
# project files should be checked into the repository, unless a significant
# proportion of contributors will probably not be using SublimeText
# *.sublime-project
# sftp configuration file
sftp-config.json
### Linux template
*~
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
### JetBrains template
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff:
.idea
.idea/tasks.xml
.idea/dictionaries
.idea/vcs.xml
.idea/jsLibraryMappings.xml
# Sensitive or high-churn files:
.idea/dataSources.ids
.idea/dataSources.xml
.idea/dataSources.local.xml
.idea/sqlDataSources.xml
.idea/dynamic.xml
.idea/uiDesigner.xml
# Gradle:
.idea/gradle.xml
.idea/libraries
# Mongo Explorer plugin:
.idea/mongoSettings.xml
## File-based project format:
*.iws
## Plugin-specific files:
# IntelliJ
/out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
### Xcode template
# Xcode
#
# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
## Build generated
build/
DerivedData/
## Various settings
*.pbxuser
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3
xcuserdata/
## Other
*.moved-aside
*.xccheckout
*.xcscmblueprint
### Eclipse template
.metadata
bin/
tmp/
*.tmp
*.bak
*.swp
*~.nib
local.properties
.settings/
.loadpath
.recommenders
# Eclipse Core
.project
# External tool builders
.externalToolBuilders/
# Locally stored "Eclipse launch configurations"
*.launch
# PyDev specific (Python IDE for Eclipse)
*.pydevproject
# CDT-specific (C/C++ Development Tooling)
.cproject
# JDT-specific (Eclipse Java Development Tools)
.classpath
# Java annotation processor (APT)
.factorypath
# PDT-specific (PHP Development Tools)
.buildpath
# sbteclipse plugin
.target
# Tern plugin
.tern-project
# TeXlipse plugin
.texlipse
# STS (Spring Tool Suite)
.springBeans
# Code Recommenders
.recommenders/
coverage.txt
#vendor
vendor/
.envrc

View File

@ -1,16 +0,0 @@
sudo: false
language: go
go:
- 1.13.x
- 1.14.x
- 1.15.x
env:
global:
- GO111MODULE=on
script:
- make test
after_success:
- bash <(curl -s https://codecov.io/bash)

View File

@ -1,51 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
Types of changes:
- `Added` for new features.
- `Changed` for changes in existing functionality.
- `Deprecated` for soon-to-be removed features.
- `Removed` for now removed features.
- `Fixed` for any bug fixes.
- `Security` in case of vulnerabilities.
## [Unreleased]
### Added
- [#223](https://github.com/grpc-ecosystem/go-grpc-middleware/pull/223) Add go-kit logging middleware - [adrien-f](https://github.com/adrien-f)
## [v1.1.0] - 2019-09-12
### Added
- [#226](https://github.com/grpc-ecosystem/go-grpc-middleware/pull/226) Support for go modules.
- [#221](https://github.com/grpc-ecosystem/go-grpc-middleware/pull/221) logging/zap add support for gRPC LoggerV2 - [kush-patel-hs](https://github.com/kush-patel-hs)
- [#181](https://github.com/grpc-ecosystem/go-grpc-middleware/pull/181) Rate Limit support - [ceshihao](https://github.com/ceshihao)
- [#161](https://github.com/grpc-ecosystem/go-grpc-middleware/pull/161) Retry on server stream call - [lonnblad](https://github.com/lonnblad)
- [#152](https://github.com/grpc-ecosystem/go-grpc-middleware/pull/152) Exponential backoff functions - [polyfloyd](https://github.com/polyfloyd)
- [#147](https://github.com/grpc-ecosystem/go-grpc-middleware/pull/147) Jaeger support for ctxtags extraction - [vporoshok](https://github.com/vporoshok)
- [#184](https://github.com/grpc-ecosystem/go-grpc-middleware/pull/184) ctxTags identifies if the call was sampled
### Deprecated
- [#201](https://github.com/grpc-ecosystem/go-grpc-middleware/pull/201) `golang.org/x/net/context` - [houz42](https://github.com/houz42)
- [#183](https://github.com/grpc-ecosystem/go-grpc-middleware/pull/183) Documentation Generation in favour of <godoc.org>.
### Fixed
- [172](https://github.com/grpc-ecosystem/go-grpc-middleware/pull/172) Passing ctx into retry and recover - [johanbrandhorst](https://github.com/johanbrandhorst)
- Numerous documentation fixes.
## v1.0.0 - 2018-05-08
### Added
- grpc_auth
- grpc_ctxtags
- grpc_zap
- grpc_logrus
- grpc_opentracing
- grpc_retry
- grpc_validator
- grpc_recovery
[Unreleased]: https://github.com/grpc-ecosystem/go-grpc-middleware/compare/v1.1.0...HEAD
[v1.1.0]: https://github.com/grpc-ecosystem/go-grpc-middleware/compare/v1.0.0...v1.1.0

View File

@ -1,20 +0,0 @@
# Contributing
We would love to have people submit pull requests and help make `grpc-ecosystem/go-grpc-middleware` even better 👍.
Fork, then clone the repo:
```bash
git clone git@github.com:your-username/go-grpc-middleware.git
```
Before checking in please run the following:
```bash
make all
```
This will `vet`, `fmt`, regenerate documentation and run all tests.
Push to your fork and open a pull request.

View File

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,86 +0,0 @@
# Go gRPC Middleware
[![Travis Build](https://travis-ci.org/grpc-ecosystem/go-grpc-middleware.svg?branch=master)](https://travis-ci.org/grpc-ecosystem/go-grpc-middleware)
[![Go Report Card](https://goreportcard.com/badge/github.com/grpc-ecosystem/go-grpc-middleware)](https://goreportcard.com/report/github.com/grpc-ecosystem/go-grpc-middleware)
[![GoDoc](http://img.shields.io/badge/GoDoc-Reference-blue.svg)](https://godoc.org/github.com/grpc-ecosystem/go-grpc-middleware)
[![SourceGraph](https://sourcegraph.com/github.com/grpc-ecosystem/go-grpc-middleware/-/badge.svg)](https://sourcegraph.com/github.com/grpc-ecosystem/go-grpc-middleware/?badge)
[![codecov](https://codecov.io/gh/grpc-ecosystem/go-grpc-middleware/branch/master/graph/badge.svg)](https://codecov.io/gh/grpc-ecosystem/go-grpc-middleware)
[![Apache 2.0 License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](LICENSE)
[![quality: production](https://img.shields.io/badge/quality-production-orange.svg)](#status)
[![Slack](https://img.shields.io/badge/slack-%23grpc--middleware-brightgreen)](https://slack.com/share/IRUQCFC23/9Tm7hxRFVKKNoajQfMOcUiIk/enQtODc4ODI4NTIyMDcxLWM5NDA0ZTE4Njg5YjRjYWZkMTI5MzQwNDY3YzBjMzE1YzdjOGM5ZjI1NDNiM2JmNzI2YjM5ODE5OTRiNTEyOWE)
[gRPC Go](https://github.com/grpc/grpc-go) Middleware: interceptors, helpers, utilities.
## Middleware
[gRPC Go](https://github.com/grpc/grpc-go) recently acquired support for
Interceptors, i.e. [middleware](https://medium.com/@matryer/writing-middleware-in-golang-and-how-go-makes-it-so-much-fun-4375c1246e81#.gv7tdlghs)
that is executed either on the gRPC Server before the request is passed onto the user's application logic, or on the gRPC client around the user call. It is a perfect way to implement
common patterns: auth, logging, message, validation, retries or monitoring.
These are generic building blocks that make it easy to build multiple microservices easily.
The purpose of this repository is to act as a go-to point for such reusable functionality. It contains
some of them itself, but also will link to useful external repos.
`grpc_middleware` itself provides support for chaining interceptors, here's an example:
```go
import "github.com/grpc-ecosystem/go-grpc-middleware"
myServer := grpc.NewServer(
grpc.StreamInterceptor(grpc_middleware.ChainStreamServer(
grpc_recovery.StreamServerInterceptor(),
grpc_ctxtags.StreamServerInterceptor(),
grpc_opentracing.StreamServerInterceptor(),
grpc_prometheus.StreamServerInterceptor,
grpc_zap.StreamServerInterceptor(zapLogger),
grpc_auth.StreamServerInterceptor(myAuthFunction),
)),
grpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(
grpc_recovery.UnaryServerInterceptor(),
grpc_ctxtags.UnaryServerInterceptor(),
grpc_opentracing.UnaryServerInterceptor(),
grpc_prometheus.UnaryServerInterceptor,
grpc_zap.UnaryServerInterceptor(zapLogger),
grpc_auth.UnaryServerInterceptor(myAuthFunction),
)),
)
```
## Interceptors
*Please send a PR to add new interceptors or middleware to this list*
#### Auth
* [`grpc_auth`](auth) - a customizable (via `AuthFunc`) piece of auth middleware
#### Logging
* [`grpc_ctxtags`](tags/) - a library that adds a `Tag` map to context, with data populated from request body
* [`grpc_zap`](logging/zap/) - integration of [zap](https://github.com/uber-go/zap) logging library into gRPC handlers.
* [`grpc_logrus`](logging/logrus/) - integration of [logrus](https://github.com/sirupsen/logrus) logging library into gRPC handlers.
* [`grpc_kit`](logging/kit/) - integration of [go-kit](https://github.com/go-kit/kit/tree/master/log) logging library into gRPC handlers.
* [`grpc_grpc_logsettable`](logging/settable/) - a wrapper around `grpclog.LoggerV2` that allows to replace loggers in runtime (thread-safe).
#### Monitoring
* [`grpc_prometheus`⚡](https://github.com/grpc-ecosystem/go-grpc-prometheus) - Prometheus client-side and server-side monitoring middleware
* [`otgrpc`⚡](https://github.com/grpc-ecosystem/grpc-opentracing/tree/master/go/otgrpc) - [OpenTracing](http://opentracing.io/) client-side and server-side interceptors
* [`grpc_opentracing`](tracing/opentracing) - [OpenTracing](http://opentracing.io/) client-side and server-side interceptors with support for streaming and handler-returned tags
#### Client
* [`grpc_retry`](retry/) - a generic gRPC response code retry mechanism, client-side middleware
#### Server
* [`grpc_validator`](validator/) - codegen inbound message validation from `.proto` options
* [`grpc_recovery`](recovery/) - turn panics into gRPC errors
* [`ratelimit`](ratelimit/) - grpc rate limiting by your own limiter
## Status
This code has been running in *production* since May 2016 as the basis of the gRPC micro services stack at [Improbable](https://improbable.io).
Additional tooling will be added, and contributions are welcome.
## License
`go-grpc-middleware` is released under the Apache 2.0 license. See the [LICENSE](LICENSE) file for details.

View File

@ -1,120 +0,0 @@
// Copyright 2016 Michal Witkowski. All Rights Reserved.
// See LICENSE for licensing terms.
// gRPC Server Interceptor chaining middleware.
package grpc_middleware
import (
"context"
"google.golang.org/grpc"
)
// ChainUnaryServer creates a single interceptor out of a chain of many interceptors.
//
// Execution is done in left-to-right order, including passing of context.
// For example ChainUnaryServer(one, two, three) will execute one before two before three, and three
// will see context changes of one and two.
func ChainUnaryServer(interceptors ...grpc.UnaryServerInterceptor) grpc.UnaryServerInterceptor {
n := len(interceptors)
return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
chainer := func(currentInter grpc.UnaryServerInterceptor, currentHandler grpc.UnaryHandler) grpc.UnaryHandler {
return func(currentCtx context.Context, currentReq interface{}) (interface{}, error) {
return currentInter(currentCtx, currentReq, info, currentHandler)
}
}
chainedHandler := handler
for i := n - 1; i >= 0; i-- {
chainedHandler = chainer(interceptors[i], chainedHandler)
}
return chainedHandler(ctx, req)
}
}
// ChainStreamServer creates a single interceptor out of a chain of many interceptors.
//
// Execution is done in left-to-right order, including passing of context.
// For example ChainUnaryServer(one, two, three) will execute one before two before three.
// If you want to pass context between interceptors, use WrapServerStream.
func ChainStreamServer(interceptors ...grpc.StreamServerInterceptor) grpc.StreamServerInterceptor {
n := len(interceptors)
return func(srv interface{}, ss grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error {
chainer := func(currentInter grpc.StreamServerInterceptor, currentHandler grpc.StreamHandler) grpc.StreamHandler {
return func(currentSrv interface{}, currentStream grpc.ServerStream) error {
return currentInter(currentSrv, currentStream, info, currentHandler)
}
}
chainedHandler := handler
for i := n - 1; i >= 0; i-- {
chainedHandler = chainer(interceptors[i], chainedHandler)
}
return chainedHandler(srv, ss)
}
}
// ChainUnaryClient creates a single interceptor out of a chain of many interceptors.
//
// Execution is done in left-to-right order, including passing of context.
// For example ChainUnaryClient(one, two, three) will execute one before two before three.
func ChainUnaryClient(interceptors ...grpc.UnaryClientInterceptor) grpc.UnaryClientInterceptor {
n := len(interceptors)
return func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
chainer := func(currentInter grpc.UnaryClientInterceptor, currentInvoker grpc.UnaryInvoker) grpc.UnaryInvoker {
return func(currentCtx context.Context, currentMethod string, currentReq, currentRepl interface{}, currentConn *grpc.ClientConn, currentOpts ...grpc.CallOption) error {
return currentInter(currentCtx, currentMethod, currentReq, currentRepl, currentConn, currentInvoker, currentOpts...)
}
}
chainedInvoker := invoker
for i := n - 1; i >= 0; i-- {
chainedInvoker = chainer(interceptors[i], chainedInvoker)
}
return chainedInvoker(ctx, method, req, reply, cc, opts...)
}
}
// ChainStreamClient creates a single interceptor out of a chain of many interceptors.
//
// Execution is done in left-to-right order, including passing of context.
// For example ChainStreamClient(one, two, three) will execute one before two before three.
func ChainStreamClient(interceptors ...grpc.StreamClientInterceptor) grpc.StreamClientInterceptor {
n := len(interceptors)
return func(ctx context.Context, desc *grpc.StreamDesc, cc *grpc.ClientConn, method string, streamer grpc.Streamer, opts ...grpc.CallOption) (grpc.ClientStream, error) {
chainer := func(currentInter grpc.StreamClientInterceptor, currentStreamer grpc.Streamer) grpc.Streamer {
return func(currentCtx context.Context, currentDesc *grpc.StreamDesc, currentConn *grpc.ClientConn, currentMethod string, currentOpts ...grpc.CallOption) (grpc.ClientStream, error) {
return currentInter(currentCtx, currentDesc, currentConn, currentMethod, currentStreamer, currentOpts...)
}
}
chainedStreamer := streamer
for i := n - 1; i >= 0; i-- {
chainedStreamer = chainer(interceptors[i], chainedStreamer)
}
return chainedStreamer(ctx, desc, cc, method, opts...)
}
}
// Chain creates a single interceptor out of a chain of many interceptors.
//
// WithUnaryServerChain is a grpc.Server config option that accepts multiple unary interceptors.
// Basically syntactic sugar.
func WithUnaryServerChain(interceptors ...grpc.UnaryServerInterceptor) grpc.ServerOption {
return grpc.UnaryInterceptor(ChainUnaryServer(interceptors...))
}
// WithStreamServerChain is a grpc.Server config option that accepts multiple stream interceptors.
// Basically syntactic sugar.
func WithStreamServerChain(interceptors ...grpc.StreamServerInterceptor) grpc.ServerOption {
return grpc.StreamInterceptor(ChainStreamServer(interceptors...))
}

View File

@ -1,69 +0,0 @@
// Copyright 2016 Michal Witkowski. All Rights Reserved.
// See LICENSE for licensing terms.
/*
`grpc_middleware` is a collection of gRPC middleware packages: interceptors, helpers and tools.
Middleware
gRPC is a fantastic RPC middleware, which sees a lot of adoption in the Golang world. However, the
upstream gRPC codebase is relatively bare bones.
This package, and most of its child packages provides commonly needed middleware for gRPC:
client-side interceptors for retries, server-side interceptors for input validation and auth,
functions for chaining said interceptors, metadata convenience methods and more.
Chaining
By default, gRPC doesn't allow one to have more than one interceptor either on the client nor on
the server side. `grpc_middleware` provides convenient chaining methods
Simple way of turning a multiple interceptors into a single interceptor. Here's an example for
server chaining:
myServer := grpc.NewServer(
grpc.StreamInterceptor(grpc_middleware.ChainStreamServer(loggingStream, monitoringStream, authStream)),
grpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(loggingUnary, monitoringUnary, authUnary)),
)
These interceptors will be executed from left to right: logging, monitoring and auth.
Here's an example for client side chaining:
clientConn, err = grpc.Dial(
address,
grpc.WithUnaryInterceptor(grpc_middleware.ChainUnaryClient(monitoringClientUnary, retryUnary)),
grpc.WithStreamInterceptor(grpc_middleware.ChainStreamClient(monitoringClientStream, retryStream)),
)
client = pb_testproto.NewTestServiceClient(clientConn)
resp, err := client.PingEmpty(s.ctx, &myservice.Request{Msg: "hello"})
These interceptors will be executed from left to right: monitoring and then retry logic.
The retry interceptor will call every interceptor that follows it whenever when a retry happens.
Writing Your Own
Implementing your own interceptor is pretty trivial: there are interfaces for that. But the interesting
bit exposing common data to handlers (and other middleware), similarly to HTTP Middleware design.
For example, you may want to pass the identity of the caller from the auth interceptor all the way
to the handling function.
For example, a client side interceptor example for auth looks like:
func FakeAuthUnaryInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
newCtx := context.WithValue(ctx, "user_id", "john@example.com")
return handler(newCtx, req)
}
Unfortunately, it's not as easy for streaming RPCs. These have the `context.Context` embedded within
the `grpc.ServerStream` object. To pass values through context, a wrapper (`WrappedServerStream`) is
needed. For example:
func FakeAuthStreamingInterceptor(srv interface{}, stream grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error {
newStream := grpc_middleware.WrapServerStream(stream)
newStream.WrappedContext = context.WithValue(ctx, "user_id", "john@example.com")
return handler(srv, newStream)
}
*/
package grpc_middleware

View File

@ -1,17 +0,0 @@
SHELL=/bin/bash
GOFILES_NOVENDOR = $(shell go list ./... | grep -v /vendor/)
all: vet fmt test
fmt:
go fmt $(GOFILES_NOVENDOR)
vet:
# do not check lostcancel, they are intentional.
go vet -lostcancel=false $(GOFILES_NOVENDOR)
test: vet
./scripts/test_all.sh
.PHONY: all test

Binary file not shown (removed image, 5.0 KiB).

View File

@ -1,30 +0,0 @@
// Copyright 2016 Michal Witkowski. All Rights Reserved.
// See LICENSE for licensing terms.
package grpc_middleware
import (
"context"
"google.golang.org/grpc"
)
// WrappedServerStream is a thin wrapper around grpc.ServerStream that allows modifying context.
type WrappedServerStream struct {
grpc.ServerStream
// WrappedContext is the wrapper's own Context. You can assign it.
WrappedContext context.Context
}
// Context returns the wrapper's WrappedContext, overwriting the nested grpc.ServerStream.Context()
func (w *WrappedServerStream) Context() context.Context {
return w.WrappedContext
}
// WrapServerStream returns a ServerStream that has the ability to overwrite context.
func WrapServerStream(stream grpc.ServerStream) *WrappedServerStream {
if existing, ok := stream.(*WrappedServerStream); ok {
return existing
}
return &WrappedServerStream{ServerStream: stream, WrappedContext: stream.Context()}
}

View File

@ -7,7 +7,6 @@ import (
"net"
"net/url"
"os"
"strings"
"time"
contentapi "github.com/containerd/containerd/api/services/content/v1"
@ -18,6 +17,7 @@ import (
"github.com/moby/buildkit/session/grpchijack"
"github.com/moby/buildkit/util/appdefaults"
"github.com/moby/buildkit/util/grpcerrors"
"github.com/moby/buildkit/util/tracing"
"github.com/moby/buildkit/util/tracing/otlptracegrpc"
"github.com/pkg/errors"
"go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc"
@ -48,9 +48,6 @@ func New(ctx context.Context, address string, opts ...ClientOpt) (*Client, error
}
needDialer := true
var unary []grpc.UnaryClientInterceptor
var stream []grpc.StreamClientInterceptor
var customTracer bool // allows manually setting disabling tracing even if tracer in context
var tracerProvider trace.TracerProvider
var tracerDelegate TracerDelegate
@ -101,9 +98,14 @@ func New(ctx context.Context, address string, opts ...ClientOpt) (*Client, error
}
if tracerProvider != nil {
propagators := propagation.NewCompositeTextMapPropagator(propagation.TraceContext{}, propagation.Baggage{})
unary = append(unary, filterInterceptor(otelgrpc.UnaryClientInterceptor(otelgrpc.WithTracerProvider(tracerProvider), otelgrpc.WithPropagators(propagators)))) //nolint:staticcheck // TODO(thaJeztah): ignore SA1019 for deprecated options: see https://github.com/moby/buildkit/issues/4681
stream = append(stream, otelgrpc.StreamClientInterceptor(otelgrpc.WithTracerProvider(tracerProvider), otelgrpc.WithPropagators(propagators))) //nolint:staticcheck // TODO(thaJeztah): ignore SA1019 for deprecated options: see https://github.com/moby/buildkit/issues/4681
gopts = append(gopts, grpc.WithStatsHandler(
tracing.ClientStatsHandler(
otelgrpc.WithTracerProvider(tracerProvider),
otelgrpc.WithPropagators(
propagation.NewCompositeTextMapPropagator(propagation.TraceContext{}, propagation.Baggage{}),
),
),
))
}
if needDialer {
@ -145,12 +147,8 @@ func New(ctx context.Context, address string, opts ...ClientOpt) (*Client, error
}
gopts = append(gopts, grpc.WithAuthority(authority))
unary = append(unary, grpcerrors.UnaryClientInterceptor)
stream = append(stream, grpcerrors.StreamClientInterceptor)
gopts = append(gopts, grpc.WithChainUnaryInterceptor(unary...))
gopts = append(gopts, grpc.WithChainStreamInterceptor(stream...))
gopts = append(gopts, grpc.WithUnaryInterceptor(grpcerrors.UnaryClientInterceptor))
gopts = append(gopts, grpc.WithStreamInterceptor(grpcerrors.StreamClientInterceptor))
gopts = append(gopts, customDialOptions...)
conn, err := grpc.DialContext(ctx, address, gopts...)
@ -386,15 +384,6 @@ func resolveDialer(address string) (func(context.Context, string) (net.Conn, err
return nil, nil
}
func filterInterceptor(intercept grpc.UnaryClientInterceptor) grpc.UnaryClientInterceptor {
return func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
if strings.HasSuffix(method, "opentelemetry.proto.collector.trace.v1.TraceService/Export") {
return invoker(ctx, method, req, reply, cc, opts...)
}
return intercept(ctx, method, req, reply, cc, invoker, opts...)
}
}
type withGRPCDialOption struct {
opt grpc.DialOption
}

View File

@ -488,17 +488,18 @@ type CopyOption interface {
}
type CopyInfo struct {
Mode *os.FileMode
FollowSymlinks bool
CopyDirContentsOnly bool
IncludePatterns []string
ExcludePatterns []string
AttemptUnpack bool
CreateDestPath bool
AllowWildcard bool
AllowEmptyWildcard bool
ChownOpt *ChownOpt
CreatedTime *time.Time
Mode *os.FileMode
FollowSymlinks bool
CopyDirContentsOnly bool
IncludePatterns []string
ExcludePatterns []string
AttemptUnpack bool
CreateDestPath bool
AllowWildcard bool
AllowEmptyWildcard bool
ChownOpt *ChownOpt
CreatedTime *time.Time
AlwaysReplaceExistingDestPaths bool
}
func (mi *CopyInfo) SetCopyOption(mi2 *CopyInfo) {
@ -533,6 +534,7 @@ func (a *fileActionCopy) toProtoAction(ctx context.Context, parent string, base
AttemptUnpackDockerCompatibility: a.info.AttemptUnpack,
CreateDestPath: a.info.CreateDestPath,
Timestamp: marshalTime(a.info.CreatedTime),
AlwaysReplaceExistingDestPaths: a.info.AlwaysReplaceExistingDestPaths,
}
if a.info.Mode != nil {
c.Mode = int32(*a.info.Mode)
@ -565,6 +567,9 @@ func (a *fileActionCopy) addCaps(f *FileOp) {
if len(a.info.IncludePatterns) != 0 || len(a.info.ExcludePatterns) != 0 {
addCap(&f.constraints, pb.CapFileCopyIncludeExcludePatterns)
}
if a.info.AlwaysReplaceExistingDestPaths {
addCap(&f.constraints, pb.CapFileCopyAlwaysReplaceExistingDestPaths)
}
}
type CreatedTime time.Time
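The hunk above only threads the new `AlwaysReplaceExistingDestPaths` field and its `CapFileCopyAlwaysReplaceExistingDestPaths` capability through the LLB client. A minimal sketch of how a client might request the new behavior; the image reference and paths are illustrative, not from this commit:

```go
package main

import (
	"fmt"

	"github.com/moby/buildkit/client/llb"
)

func main() {
	src := llb.Image("docker.io/library/alpine:latest") // illustrative source
	// *CopyInfo satisfies CopyOption via SetCopyOption, so it can be passed
	// directly to llb.Copy; AlwaysReplaceExistingDestPaths is the field added here.
	st := llb.Scratch().File(llb.Copy(src, "/etc/os-release", "/info/os-release", &llb.CopyInfo{
		CreateDestPath:                 true,
		AlwaysReplaceExistingDestPaths: true,
	}))
	fmt.Printf("%T\n", st)
}
```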

View File

@ -0,0 +1,46 @@
// Package command contains the set of Dockerfile commands.
package command
// Define constants for the command strings
const (
Add = "add"
Arg = "arg"
Cmd = "cmd"
Copy = "copy"
Entrypoint = "entrypoint"
Env = "env"
Expose = "expose"
From = "from"
Healthcheck = "healthcheck"
Label = "label"
Maintainer = "maintainer"
Onbuild = "onbuild"
Run = "run"
Shell = "shell"
StopSignal = "stopsignal"
User = "user"
Volume = "volume"
Workdir = "workdir"
)
// Commands is a list of all Dockerfile commands
var Commands = map[string]struct{}{
Add: {},
Arg: {},
Cmd: {},
Copy: {},
Entrypoint: {},
Env: {},
Expose: {},
From: {},
Healthcheck: {},
Label: {},
Maintainer: {},
Onbuild: {},
Run: {},
Shell: {},
StopSignal: {},
User: {},
Volume: {},
Workdir: {},
}
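Since the keys of `Commands` are the lower-case instruction names, callers can test arbitrary tokens after normalizing case. A small usage sketch, not part of this commit:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/moby/buildkit/frontend/dockerfile/command"
)

func main() {
	for _, tok := range []string{"HEALTHCHECK", "RUN", "INCLUDE"} {
		// Keys in command.Commands are lower-case, so normalize before the lookup.
		_, ok := command.Commands[strings.ToLower(tok)]
		fmt.Printf("%-12s known=%v\n", tok, ok)
	}
}
```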

View File

@ -0,0 +1,171 @@
package parser
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"regexp"
"strings"
"github.com/pkg/errors"
)
const (
keySyntax = "syntax"
keyEscape = "escape"
)
var validDirectives = map[string]struct{}{
keySyntax: {},
keyEscape: {},
}
type Directive struct {
Name string
Value string
Location []Range
}
// DirectiveParser is a parser for Dockerfile directives that enforces the
// quirks of the directive parser.
type DirectiveParser struct {
line int
regexp *regexp.Regexp
seen map[string]struct{}
done bool
}
func (d *DirectiveParser) setComment(comment string) {
d.regexp = regexp.MustCompile(fmt.Sprintf(`^%s\s*([a-zA-Z][a-zA-Z0-9]*)\s*=\s*(.+?)\s*$`, comment))
}
func (d *DirectiveParser) ParseLine(line []byte) (*Directive, error) {
d.line++
if d.done {
return nil, nil
}
if d.regexp == nil {
d.setComment("#")
}
match := d.regexp.FindSubmatch(line)
if len(match) == 0 {
d.done = true
return nil, nil
}
k := strings.ToLower(string(match[1]))
if _, ok := validDirectives[k]; !ok {
d.done = true
return nil, nil
}
if d.seen == nil {
d.seen = map[string]struct{}{}
}
if _, ok := d.seen[k]; ok {
return nil, errors.Errorf("only one %s parser directive can be used", k)
}
d.seen[k] = struct{}{}
v := string(match[2])
directive := Directive{
Name: k,
Value: v,
Location: []Range{{
Start: Position{Line: d.line},
End: Position{Line: d.line},
}},
}
return &directive, nil
}
func (d *DirectiveParser) ParseAll(data []byte) ([]*Directive, error) {
scanner := bufio.NewScanner(bytes.NewReader(data))
var directives []*Directive
for scanner.Scan() {
if d.done {
break
}
d, err := d.ParseLine(scanner.Bytes())
if err != nil {
return directives, err
}
if d != nil {
directives = append(directives, d)
}
}
return directives, nil
}
// DetectSyntax returns the syntax of provided input.
//
// The traditional dockerfile directives '# syntax = ...' are used by default,
// however, the function will also fallback to c-style directives '// syntax = ...'
// and json-encoded directives '{ "syntax": "..." }'. Finally, starting lines
// with '#!' are treated as shebangs and ignored.
//
// This allows for a flexible range of input formats, and appropriate syntax
// selection.
func DetectSyntax(dt []byte) (string, string, []Range, bool) {
dt, hadShebang, err := discardShebang(dt)
if err != nil {
return "", "", nil, false
}
line := 0
if hadShebang {
line++
}
// use default directive parser, and search for #syntax=
directiveParser := DirectiveParser{line: line}
if syntax, cmdline, loc, ok := detectSyntaxFromParser(dt, directiveParser); ok {
return syntax, cmdline, loc, true
}
// use directive with different comment prefix, and search for //syntax=
directiveParser = DirectiveParser{line: line}
directiveParser.setComment("//")
if syntax, cmdline, loc, ok := detectSyntaxFromParser(dt, directiveParser); ok {
return syntax, cmdline, loc, true
}
// search for possible json directives
var directive struct {
Syntax string `json:"syntax"`
}
if err := json.Unmarshal(dt, &directive); err == nil {
if directive.Syntax != "" {
loc := []Range{{
Start: Position{Line: line},
End: Position{Line: line},
}}
return directive.Syntax, directive.Syntax, loc, true
}
}
return "", "", nil, false
}
func detectSyntaxFromParser(dt []byte, parser DirectiveParser) (string, string, []Range, bool) {
directives, _ := parser.ParseAll(dt)
for _, d := range directives {
// check for syntax directive before erroring out, since the error
// might have occurred *after* the syntax directive
if d.Name == keySyntax {
p, _, _ := strings.Cut(d.Value, " ")
return p, d.Value, d.Location, true
}
}
return "", "", nil, false
}
func discardShebang(dt []byte) ([]byte, bool, error) {
line, rest, _ := bytes.Cut(dt, []byte("\n"))
if bytes.HasPrefix(line, []byte("#!")) {
return rest, true, nil
}
return dt, false, nil
}
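A short sketch of calling `DetectSyntax` on input that starts with a shebang, following the doc comment above; the syntax reference is illustrative:

```go
package main

import (
	"fmt"

	"github.com/moby/buildkit/frontend/dockerfile/parser"
)

func main() {
	dt := []byte("#!/usr/bin/env anything\n# syntax=docker/dockerfile:1\nFROM alpine\n")
	// The shebang line is discarded, then '# syntax=', '// syntax=' and a
	// JSON-encoded {"syntax": "..."} document are tried in that order.
	ref, cmdline, loc, ok := parser.DetectSyntax(dt)
	if ok {
		fmt.Println(ref, cmdline, loc[0].Start.Line)
	}
}
```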

View File

@ -0,0 +1,59 @@
package parser
import (
"github.com/moby/buildkit/util/stack"
"github.com/pkg/errors"
)
// ErrorLocation gives a location in source code that caused the error
type ErrorLocation struct {
Locations [][]Range
error
}
// Unwrap unwraps to the next error
func (e *ErrorLocation) Unwrap() error {
return e.error
}
// Range is a code section between two positions
type Range struct {
Start Position
End Position
}
// Position is a point in source code
type Position struct {
Line int
Character int
}
func withLocation(err error, start, end int) error {
return WithLocation(err, toRanges(start, end))
}
// WithLocation extends an error with a source code location
func WithLocation(err error, location []Range) error {
if err == nil {
return nil
}
var el *ErrorLocation
if errors.As(err, &el) {
el.Locations = append(el.Locations, location)
return err
}
return stack.Enable(&ErrorLocation{
error: err,
Locations: [][]Range{location},
})
}
func toRanges(start, end int) (r []Range) {
if end <= start {
end = start
}
for i := start; i <= end; i++ {
r = append(r, Range{Start: Position{Line: i}, End: Position{Line: i}})
}
return
}
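`WithLocation` appends to an existing `*ErrorLocation` rather than wrapping twice, so callers can accumulate ranges and read them back with `errors.As`. A hedged sketch; the error text is made up, and it assumes the wrapper returned by `stack.Enable` stays unwrappable, as elsewhere in buildkit:

```go
package main

import (
	"errors"
	"fmt"

	"github.com/moby/buildkit/frontend/dockerfile/parser"
)

func main() {
	err := errors.New("unknown instruction") // illustrative error
	err = parser.WithLocation(err, []parser.Range{{
		Start: parser.Position{Line: 3},
		End:   parser.Position{Line: 3},
	}})

	var el *parser.ErrorLocation
	if errors.As(err, &el) {
		fmt.Println(el.Locations) // one []Range per WithLocation call
	}
}
```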

View File

@ -0,0 +1,367 @@
package parser
// line parsers are dispatch calls that parse a single unit of text into a
// Node object which contains the whole statement. Dockerfiles have varied
// (but not usually unique, see ONBUILD for a unique example) parsing rules
// per-command, and these unify the processing in a way that makes it
// manageable.
import (
"encoding/json"
"strings"
"unicode"
"unicode/utf8"
"github.com/pkg/errors"
)
var (
errDockerfileNotStringArray = errors.New("when using JSON array syntax, arrays must be comprised of strings only")
)
const (
commandLabel = "LABEL"
)
// ignore the current argument. This will still leave a command parsed, but
// will not incorporate the arguments into the ast.
func parseIgnore(rest string, d *directives) (*Node, map[string]bool, error) {
return &Node{}, nil, nil
}
// used for onbuild. Could potentially be used for anything that represents a
// statement with sub-statements.
//
// ONBUILD RUN foo bar -> (onbuild (run foo bar))
func parseSubCommand(rest string, d *directives) (*Node, map[string]bool, error) {
if rest == "" {
return nil, nil, nil
}
child, err := newNodeFromLine(rest, d, nil)
if err != nil {
return nil, nil, err
}
return &Node{Children: []*Node{child}}, nil, nil
}
// helper to parse words (i.e space delimited or quoted strings) in a statement.
// The quotes are preserved as part of this function and they are stripped later
// as part of processWords().
func parseWords(rest string, d *directives) []string {
const (
inSpaces = iota // looking for start of a word
inWord
inQuote
)
words := []string{}
phase := inSpaces
word := ""
quote := '\000'
blankOK := false
var ch rune
var chWidth int
for pos := 0; pos <= len(rest); pos += chWidth {
if pos != len(rest) {
ch, chWidth = utf8.DecodeRuneInString(rest[pos:])
}
if phase == inSpaces { // Looking for start of word
if pos == len(rest) { // end of input
break
}
if unicode.IsSpace(ch) { // skip spaces
continue
}
phase = inWord // found it, fall through
}
if (phase == inWord || phase == inQuote) && (pos == len(rest)) {
if blankOK || len(word) > 0 {
words = append(words, word)
}
break
}
if phase == inWord {
if unicode.IsSpace(ch) {
phase = inSpaces
if blankOK || len(word) > 0 {
words = append(words, word)
}
word = ""
blankOK = false
continue
}
if ch == '\'' || ch == '"' {
quote = ch
blankOK = true
phase = inQuote
}
if ch == d.escapeToken {
if pos+chWidth == len(rest) {
continue // just skip an escape token at end of line
}
// If we're not quoted and we see an escape token, then always just
// add the escape token plus the char to the word, even if the char
// is a quote.
word += string(ch)
pos += chWidth
ch, chWidth = utf8.DecodeRuneInString(rest[pos:])
}
word += string(ch)
continue
}
if phase == inQuote {
if ch == quote {
phase = inWord
}
// The escape token is special except for ' quotes - can't escape anything for '
if ch == d.escapeToken && quote != '\'' {
if pos+chWidth == len(rest) {
phase = inWord
continue // just skip the escape token at end
}
pos += chWidth
word += string(ch)
ch, chWidth = utf8.DecodeRuneInString(rest[pos:])
}
word += string(ch)
}
}
return words
}
// parse environment like statements. Note that this does *not* handle
// variable interpolation, which will be handled in the evaluator.
func parseNameVal(rest string, key string, d *directives) (*Node, error) {
// This is kind of tricky because we need to support the old
// variant: KEY name value
// as well as the new one: KEY name=value ...
// The trigger to know which one is being used will be whether we hit
// a space or = first. space ==> old, "=" ==> new
words := parseWords(rest, d)
if len(words) == 0 {
return nil, nil
}
// Old format (KEY name value)
if !strings.Contains(words[0], "=") {
parts := reWhitespace.Split(rest, 2)
if len(parts) < 2 {
return nil, errors.Errorf("%s must have two arguments", key)
}
return newKeyValueNode(parts[0], parts[1]), nil
}
var rootNode *Node
var prevNode *Node
for _, word := range words {
if !strings.Contains(word, "=") {
return nil, errors.Errorf("Syntax error - can't find = in %q. Must be of the form: name=value", word)
}
parts := strings.SplitN(word, "=", 2)
node := newKeyValueNode(parts[0], parts[1])
rootNode, prevNode = appendKeyValueNode(node, rootNode, prevNode)
}
return rootNode, nil
}
func newKeyValueNode(key, value string) *Node {
return &Node{
Value: key,
Next: &Node{Value: value},
}
}
func appendKeyValueNode(node, rootNode, prevNode *Node) (*Node, *Node) {
if rootNode == nil {
rootNode = node
}
if prevNode != nil {
prevNode.Next = node
}
prevNode = node.Next
return rootNode, prevNode
}
func parseEnv(rest string, d *directives) (*Node, map[string]bool, error) {
node, err := parseNameVal(rest, "ENV", d)
return node, nil, err
}
func parseLabel(rest string, d *directives) (*Node, map[string]bool, error) {
node, err := parseNameVal(rest, commandLabel, d)
return node, nil, err
}
// parses a statement containing one or more keyword definition(s) and/or
// value assignments, like `name1 name2= name3="" name4=value`.
// Note that this is a stricter format than the old format of assignment,
// allowed by parseNameVal(), in a way that this only allows assignment of the
// form `keyword=[<value>]` like `name2=`, `name3=""`, and `name4=value` above.
// In addition, a keyword definition alone is of the form `keyword` like `name1`
// above. And the assignments `name2=` and `name3=""` are equivalent and
// assign an empty value to the respective keywords.
func parseNameOrNameVal(rest string, d *directives) (*Node, map[string]bool, error) {
words := parseWords(rest, d)
if len(words) == 0 {
return nil, nil, nil
}
var (
rootnode *Node
prevNode *Node
)
for i, word := range words {
node := &Node{}
node.Value = word
if i == 0 {
rootnode = node
} else {
prevNode.Next = node
}
prevNode = node
}
return rootnode, nil, nil
}
// parses a whitespace-delimited set of arguments. The result is effectively a
// linked list of string arguments.
func parseStringsWhitespaceDelimited(rest string, d *directives) (*Node, map[string]bool, error) {
if rest == "" {
return nil, nil, nil
}
node := &Node{}
rootnode := node
prevnode := node
for _, str := range reWhitespace.Split(rest, -1) { // use regexp
prevnode = node
node.Value = str
node.Next = &Node{}
node = node.Next
}
// XXX to get around regexp.Split *always* providing an empty string at the
// end due to how our loop is constructed, nil out the last node in the
// chain.
prevnode.Next = nil
return rootnode, nil, nil
}
// parseString just wraps the string in quotes and returns a working node.
func parseString(rest string, d *directives) (*Node, map[string]bool, error) {
if rest == "" {
return nil, nil, nil
}
n := &Node{}
n.Value = rest
return n, nil, nil
}
// parseJSON converts JSON arrays to an AST.
func parseJSON(rest string, d *directives) (*Node, map[string]bool, error) {
rest = strings.TrimLeftFunc(rest, unicode.IsSpace)
if !strings.HasPrefix(rest, "[") {
return nil, nil, errors.Errorf("Error parsing %q as a JSON array", rest)
}
var myJSON []interface{}
if err := json.NewDecoder(strings.NewReader(rest)).Decode(&myJSON); err != nil {
return nil, nil, err
}
var top, prev *Node
for _, str := range myJSON {
s, ok := str.(string)
if !ok {
return nil, nil, errDockerfileNotStringArray
}
node := &Node{Value: s}
if prev == nil {
top = node
} else {
prev.Next = node
}
prev = node
}
return top, map[string]bool{"json": true}, nil
}
// parseMaybeJSON determines if the argument appears to be a JSON array. If
// so, passes to parseJSON; if not, quotes the result and returns a single
// node.
func parseMaybeJSON(rest string, d *directives) (*Node, map[string]bool, error) {
if rest == "" {
return nil, nil, nil
}
node, attrs, err := parseJSON(rest, d)
if err == nil {
return node, attrs, nil
}
if err == errDockerfileNotStringArray {
return nil, nil, err
}
node = &Node{}
node.Value = rest
return node, nil, nil
}
// parseMaybeJSONToList determines if the argument appears to be a JSON array. If
// so, passes to parseJSON; if not, attempts to parse it as a whitespace
// delimited string.
func parseMaybeJSONToList(rest string, d *directives) (*Node, map[string]bool, error) {
node, attrs, err := parseJSON(rest, d)
if err == nil {
return node, attrs, nil
}
if err == errDockerfileNotStringArray {
return nil, nil, err
}
return parseStringsWhitespaceDelimited(rest, d)
}
// The HEALTHCHECK command is like parseMaybeJSON, but has an extra type argument.
func parseHealthConfig(rest string, d *directives) (*Node, map[string]bool, error) {
// Find end of first argument
var sep int
for ; sep < len(rest); sep++ {
if unicode.IsSpace(rune(rest[sep])) {
break
}
}
next := sep
for ; next < len(rest); next++ {
if !unicode.IsSpace(rune(rest[next])) {
break
}
}
if sep == 0 {
return nil, nil, nil
}
typ := rest[:sep]
cmd, attrs, err := parseMaybeJSON(rest[next:], d)
if err != nil {
return nil, nil, err
}
return &Node{Value: typ, Next: cmd}, attrs, err
}
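The two `ENV` forms handled by `parseNameVal` (`KEY name value` and `KEY name=value ...`) are easiest to see through the package's exported entry point. A sketch assuming `parser.Parse` from parser.go, which this page only shows in part:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/moby/buildkit/frontend/dockerfile/parser"
)

func main() {
	// Old form "ENV name value" and new form "ENV name=value ..." both end up
	// as key/value node chains under the ENV node.
	df := "ENV PATH /usr/local/bin\nENV FOO=bar BAZ=qux\n"
	res, err := parser.Parse(strings.NewReader(df))
	if err != nil {
		panic(err)
	}
	for _, n := range res.AST.Children {
		fmt.Println(n.Dump())
	}
}
```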

View File

@ -0,0 +1,552 @@
// The parser package implements a parser that transforms a raw byte-stream
// into a low-level Abstract Syntax Tree.
package parser
import (
"bufio"
"bytes"
"fmt"
"io"
"regexp"
"strconv"
"strings"
"unicode"
"github.com/moby/buildkit/frontend/dockerfile/command"
"github.com/moby/buildkit/frontend/dockerfile/shell"
"github.com/pkg/errors"
)
// Node is a structure used to represent a parse tree.
//
// In the node there are three fields, Value, Next, and Children. Value is the
// current token's string value. Next is always the next non-child token, and
// children contains all the children. Here's an example:
//
// (value next (child child-next child-next-next) next-next)
//
// This data structure is frankly pretty lousy for handling complex languages,
// but lucky for us the Dockerfile isn't very complicated. This structure
// works a little more effectively than a "proper" parse tree for our needs.
type Node struct {
Value string // actual content
Next *Node // the next item in the current sexp
Children []*Node // the children of this sexp
Heredocs []Heredoc // extra heredoc content attachments
Attributes map[string]bool // special attributes for this node
Original string // original line used before parsing
Flags []string // only top Node should have this set
StartLine int // the line in the original dockerfile where the node begins
EndLine int // the line in the original dockerfile where the node ends
PrevComment []string
}
// Location return the location of node in source code
func (node *Node) Location() []Range {
return toRanges(node.StartLine, node.EndLine)
}
// Dump dumps the AST defined by `node` as a list of sexps.
// Returns a string suitable for printing.
func (node *Node) Dump() string {
str := strings.ToLower(node.Value)
if len(node.Flags) > 0 {
str += fmt.Sprintf(" %q", node.Flags)
}
for _, n := range node.Children {
str += "(" + n.Dump() + ")\n"
}
for n := node.Next; n != nil; n = n.Next {
if len(n.Children) > 0 {
str += " " + n.Dump()
} else {
str += " " + strconv.Quote(n.Value)
}
}
return strings.TrimSpace(str)
}
func (node *Node) lines(start, end int) {
node.StartLine = start
node.EndLine = end
}
func (node *Node) canContainHeredoc() bool {
// check for compound commands, like ONBUILD
if ok := heredocCompoundDirectives[strings.ToLower(node.Value)]; ok {
if node.Next != nil && len(node.Next.Children) > 0 {
node = node.Next.Children[0]
}
}
if ok := heredocDirectives[strings.ToLower(node.Value)]; !ok {
return false
}
if isJSON := node.Attributes["json"]; isJSON {
return false
}
return true
}
// AddChild adds a new child node, and updates line information
func (node *Node) AddChild(child *Node, startLine, endLine int) {
child.lines(startLine, endLine)
if node.StartLine < 0 {
node.StartLine = startLine
}
node.EndLine = endLine
node.Children = append(node.Children, child)
}
type Heredoc struct {
Name string
FileDescriptor uint
Expand bool
Chomp bool
Content string
}
var (
dispatch map[string]func(string, *directives) (*Node, map[string]bool, error)
reWhitespace = regexp.MustCompile(`[\t\v\f\r ]+`)
reComment = regexp.MustCompile(`^#.*$`)
reHeredoc = regexp.MustCompile(`^(\d*)<<(-?)([^<]*)$`)
reLeadingTabs = regexp.MustCompile(`(?m)^\t+`)
)
// DefaultEscapeToken is the default escape token
const DefaultEscapeToken = '\\'
var (
// Directives allowed to contain heredocs
heredocDirectives = map[string]bool{
command.Add: true,
command.Copy: true,
command.Run: true,
}
// Directives allowed to contain directives containing heredocs
heredocCompoundDirectives = map[string]bool{
command.Onbuild: true,
}
)
// directives is the structure used during a build run to hold the state of
// parsing directives.
type directives struct {
parser DirectiveParser
escapeToken rune // Current escape token
lineContinuationRegex *regexp.Regexp // Current line continuation regex
}
// setEscapeToken sets the default token for escaping characters in a line and
// as the line-continuation token in a Dockerfile. Only ` (backtick) and \
// (backslash) are allowed as tokens.
func (d *directives) setEscapeToken(s string) error {
if s != "`" && s != `\` {
return errors.Errorf("invalid escape token '%s' does not match ` or \\", s)
}
d.escapeToken = rune(s[0])
// The escape token is used both to escape characters in a line and as line
// continuation token. If it's the last non-whitespace token, it is used as
// line-continuation token, *unless* preceded by an escape-token.
//
// The second branch in the regular expression handles line-continuation
// tokens on their own line, which don't have any character preceding them.
//
// Due to Go lacking negative look-ahead matching, this regular expression
// does not currently handle a line-continuation token preceded by an *escaped*
// escape-token ("foo \\\").
d.lineContinuationRegex = regexp.MustCompile(`([^\` + s + `])\` + s + `[ \t]*$|^\` + s + `[ \t]*$`)
return nil
}
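// Illustrative sketch (not part of the vendored source): with the default
// escape token `\`, a trailing backslash marks a line continuation and is
// stripped by trimContinuationCharacter below.
//
//	d := newDefaultDirectives()
//	line, done := trimContinuationCharacter(`RUN echo hello \`, d)
//	// line == "RUN echo hello ", done == false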
// possibleParserDirective looks for parser directives, e.g. '# escape=<char>'.
// Parser directives must precede any builder instruction or other comments,
// and cannot be repeated.
func (d *directives) possibleParserDirective(line string) error {
directive, err := d.parser.ParseLine([]byte(line))
if err != nil {
return err
}
if directive != nil && directive.Name == keyEscape {
return d.setEscapeToken(directive.Value)
}
return nil
}
// newDefaultDirectives returns a new directives structure with the default escape token.
func newDefaultDirectives() *directives {
d := &directives{}
d.setEscapeToken(string(DefaultEscapeToken))
return d
}
func init() {
// Dispatch Table. See line_parsers.go for the parse functions.
// The command is parsed and mapped to the line parser. The line parser
// receives the arguments but not the command, and returns an AST after
// reformulating the arguments according to the rules in the parser
// functions. Errors are propagated up by Parse() and the resulting AST can
// be incorporated directly into the existing AST as a Next node.
dispatch = map[string]func(string, *directives) (*Node, map[string]bool, error){
command.Add: parseMaybeJSONToList,
command.Arg: parseNameOrNameVal,
command.Cmd: parseMaybeJSON,
command.Copy: parseMaybeJSONToList,
command.Entrypoint: parseMaybeJSON,
command.Env: parseEnv,
command.Expose: parseStringsWhitespaceDelimited,
command.From: parseStringsWhitespaceDelimited,
command.Healthcheck: parseHealthConfig,
command.Label: parseLabel,
command.Maintainer: parseString,
command.Onbuild: parseSubCommand,
command.Run: parseMaybeJSON,
command.Shell: parseMaybeJSON,
command.StopSignal: parseString,
command.User: parseString,
command.Volume: parseMaybeJSONToList,
command.Workdir: parseString,
}
}
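// Illustrative sketch (not part of the vendored source): JSON-form arguments
// are split into one Node per element and flagged with the "json" attribute,
// while shell-form arguments stay as a single Node.
//
//	n, _ := newNodeFromLine(`CMD ["echo", "hi"]`, newDefaultDirectives(), nil)
//	// n.Next.Value == "echo", n.Next.Next.Value == "hi", n.Attributes["json"] == true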
// newNodeFromLine splits the line into parts, and dispatches to a function
// based on the command and command arguments. A Node is created from the
// result of the dispatch.
func newNodeFromLine(line string, d *directives, comments []string) (*Node, error) {
cmd, flags, args, err := splitCommand(line)
if err != nil {
return nil, err
}
fn := dispatch[strings.ToLower(cmd)]
// Ignore invalid Dockerfile instructions
if fn == nil {
fn = parseIgnore
}
next, attrs, err := fn(args, d)
if err != nil {
return nil, err
}
return &Node{
Value: cmd,
Original: line,
Flags: flags,
Next: next,
Attributes: attrs,
PrevComment: comments,
}, nil
}
// Result contains the bundled outputs from parsing a Dockerfile.
type Result struct {
AST *Node
EscapeToken rune
Warnings []Warning
}
// Warning contains information to identify and locate a warning generated
// during parsing.
type Warning struct {
Short string
Detail [][]byte
URL string
Location *Range
}
// PrintWarnings prints the parser warnings to the writer.
func (r *Result) PrintWarnings(out io.Writer) {
if len(r.Warnings) == 0 {
return
}
for _, w := range r.Warnings {
fmt.Fprintf(out, "[WARNING]: %s\n", w.Short)
}
if len(r.Warnings) > 0 {
fmt.Fprintf(out, "[WARNING]: Empty continuation lines will become errors in a future release.\n")
}
}
// Parse consumes lines from a provided Reader, parses each line into an AST
// and returns the results of doing so.
func Parse(rwc io.Reader) (*Result, error) {
d := newDefaultDirectives()
currentLine := 0
root := &Node{StartLine: -1}
scanner := bufio.NewScanner(rwc)
scanner.Split(scanLines)
warnings := []Warning{}
var comments []string
var err error
for scanner.Scan() {
bytesRead := scanner.Bytes()
if currentLine == 0 {
// First line, strip the byte-order-marker if present
bytesRead = bytes.TrimPrefix(bytesRead, utf8bom)
}
if isComment(bytesRead) {
comment := strings.TrimSpace(string(bytesRead[1:]))
if comment == "" {
comments = nil
} else {
comments = append(comments, comment)
}
}
bytesRead, err = processLine(d, bytesRead, true)
if err != nil {
return nil, withLocation(err, currentLine, 0)
}
currentLine++
startLine := currentLine
line, isEndOfLine := trimContinuationCharacter(string(bytesRead), d)
if isEndOfLine && line == "" {
continue
}
var hasEmptyContinuationLine bool
for !isEndOfLine && scanner.Scan() {
bytesRead, err := processLine(d, scanner.Bytes(), false)
if err != nil {
return nil, withLocation(err, currentLine, 0)
}
currentLine++
if isComment(scanner.Bytes()) {
// original line was a comment (processLine strips comments)
continue
}
if isEmptyContinuationLine(bytesRead) {
hasEmptyContinuationLine = true
continue
}
continuationLine := string(bytesRead)
continuationLine, isEndOfLine = trimContinuationCharacter(continuationLine, d)
line += continuationLine
}
if hasEmptyContinuationLine {
warnings = append(warnings, Warning{
Short: "Empty continuation line found in: " + line,
Detail: [][]byte{[]byte("Empty continuation lines will become errors in a future release")},
URL: "https://github.com/moby/moby/pull/33719",
Location: &Range{Start: Position{Line: currentLine}, End: Position{Line: currentLine}},
})
}
child, err := newNodeFromLine(line, d, comments)
if err != nil {
return nil, withLocation(err, startLine, currentLine)
}
if child.canContainHeredoc() {
heredocs, err := heredocsFromLine(line)
if err != nil {
return nil, withLocation(err, startLine, currentLine)
}
for _, heredoc := range heredocs {
terminator := []byte(heredoc.Name)
terminated := false
for scanner.Scan() {
bytesRead := scanner.Bytes()
currentLine++
possibleTerminator := trimNewline(bytesRead)
if heredoc.Chomp {
possibleTerminator = trimLeadingTabs(possibleTerminator)
}
if bytes.Equal(possibleTerminator, terminator) {
terminated = true
break
}
heredoc.Content += string(bytesRead)
}
if !terminated {
return nil, withLocation(errors.New("unterminated heredoc"), startLine, currentLine)
}
child.Heredocs = append(child.Heredocs, heredoc)
}
}
root.AddChild(child, startLine, currentLine)
comments = nil
}
if root.StartLine < 0 {
return nil, withLocation(errors.New("file with no instructions"), currentLine, 0)
}
return &Result{
AST: root,
Warnings: warnings,
EscapeToken: d.escapeToken,
}, withLocation(handleScannerError(scanner.Err()), currentLine, 0)
}
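// Illustrative sketch (not part of the vendored source): parsing a small
// Dockerfile and dumping the resulting AST.
//
//	res, err := Parse(strings.NewReader("FROM alpine\nRUN echo hi\n"))
//	if err != nil {
//		return err
//	}
//	fmt.Println(res.AST.Dump())
//	// (from "alpine")
//	// (run "echo hi")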
// heredocFromMatch extracts a heredoc from a possible heredoc regex match.
func heredocFromMatch(match []string) (*Heredoc, error) {
if len(match) == 0 {
return nil, nil
}
fd, _ := strconv.ParseUint(match[1], 10, 0)
chomp := match[2] == "-"
rest := match[3]
if len(rest) == 0 {
return nil, nil
}
shlex := shell.NewLex('\\')
shlex.SkipUnsetEnv = true
// Attempt to parse the heredoc word both with *and* without quotes.
// If there are quotes in one but not the other, then we know that some
// part of the heredoc word is quoted, so we shouldn't expand the content.
shlex.RawQuotes = false
words, err := shlex.ProcessWords(rest, []string{})
if err != nil {
return nil, err
}
// quick sanity check that rest is a single word
if len(words) != 1 {
return nil, nil
}
shlex.RawQuotes = true
wordsRaw, err := shlex.ProcessWords(rest, []string{})
if err != nil {
return nil, err
}
if len(wordsRaw) != len(words) {
return nil, errors.Errorf("internal lexing of heredoc produced inconsistent results: %s", rest)
}
word := words[0]
wordQuoteCount := strings.Count(word, `'`) + strings.Count(word, `"`)
wordRaw := wordsRaw[0]
wordRawQuoteCount := strings.Count(wordRaw, `'`) + strings.Count(wordRaw, `"`)
expand := wordQuoteCount == wordRawQuoteCount
return &Heredoc{
Name: word,
Expand: expand,
Chomp: chomp,
FileDescriptor: uint(fd),
}, nil
}
// ParseHeredoc parses a heredoc word from a target string, returning the
// components from the doc.
func ParseHeredoc(src string) (*Heredoc, error) {
return heredocFromMatch(reHeredoc.FindStringSubmatch(src))
}
// MustParseHeredoc is a variant of ParseHeredoc that discards the error, if
// there was one present.
func MustParseHeredoc(src string) *Heredoc {
heredoc, _ := ParseHeredoc(src)
return heredoc
}
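// Illustrative sketch (not part of the vendored source): a quoted heredoc word
// disables expansion of its content, and a leading '-' enables tab chomping.
//
//	h := MustParseHeredoc("<<-'EOF'")
//	// h.Name == "EOF", h.Chomp == true, h.Expand == false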
func heredocsFromLine(line string) ([]Heredoc, error) {
shlex := shell.NewLex('\\')
shlex.RawQuotes = true
shlex.RawEscapes = true
shlex.SkipUnsetEnv = true
words, _ := shlex.ProcessWords(line, []string{})
var docs []Heredoc
for _, word := range words {
heredoc, err := ParseHeredoc(word)
if err != nil {
return nil, err
}
if heredoc != nil {
docs = append(docs, *heredoc)
}
}
return docs, nil
}
// ChompHeredocContent chomps leading tabs from the heredoc.
func ChompHeredocContent(src string) string {
return reLeadingTabs.ReplaceAllString(src, "")
}
func trimComments(src []byte) []byte {
return reComment.ReplaceAll(src, []byte{})
}
func trimLeadingWhitespace(src []byte) []byte {
return bytes.TrimLeftFunc(src, unicode.IsSpace)
}
func trimLeadingTabs(src []byte) []byte {
return bytes.TrimLeft(src, "\t")
}
func trimNewline(src []byte) []byte {
return bytes.TrimRight(src, "\r\n")
}
func isComment(line []byte) bool {
return reComment.Match(trimLeadingWhitespace(trimNewline(line)))
}
func isEmptyContinuationLine(line []byte) bool {
return len(trimLeadingWhitespace(trimNewline(line))) == 0
}
var utf8bom = []byte{0xEF, 0xBB, 0xBF}
func trimContinuationCharacter(line string, d *directives) (string, bool) {
if d.lineContinuationRegex.MatchString(line) {
line = d.lineContinuationRegex.ReplaceAllString(line, "$1")
return line, false
}
return line, true
}
// TODO: remove stripLeftWhitespace after deprecation period. It seems silly
// to preserve whitespace on continuation lines. Why is that done?
func processLine(d *directives, token []byte, stripLeftWhitespace bool) ([]byte, error) {
token = trimNewline(token)
if stripLeftWhitespace {
token = trimLeadingWhitespace(token)
}
return trimComments(token), d.possibleParserDirective(string(token))
}
// Variation of bufio.ScanLines that preserves the line endings
func scanLines(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := bytes.IndexByte(data, '\n'); i >= 0 {
return i + 1, data[0 : i+1], nil
}
if atEOF {
return len(data), data, nil
}
return 0, nil, nil
}
func handleScannerError(err error) error {
switch err {
case bufio.ErrTooLong:
return errors.Errorf("dockerfile line greater than max allowed size of %d", bufio.MaxScanTokenSize-1)
default:
return err
}
}

View File

@ -0,0 +1,117 @@
package parser
import (
"strings"
"unicode"
)
// splitCommand takes a single line of text and parses out the cmd and args,
// which are used for dispatching to more exact parsing functions.
func splitCommand(line string) (string, []string, string, error) {
var args string
var flags []string
// Make sure we get the same results irrespective of leading/trailing spaces
cmdline := reWhitespace.Split(strings.TrimSpace(line), 2)
if len(cmdline) == 2 {
var err error
args, flags, err = extractBuilderFlags(cmdline[1])
if err != nil {
return "", nil, "", err
}
}
return cmdline[0], flags, strings.TrimSpace(args), nil
}
// extractBuilderFlags parses the builder flags and returns the remaining part of the line.
func extractBuilderFlags(line string) (string, []string, error) {
const (
inSpaces = iota // looking for start of a word
inWord
inQuote
)
words := []string{}
phase := inSpaces
word := ""
quote := '\000'
blankOK := false
var ch rune
for pos := 0; pos <= len(line); pos++ {
if pos != len(line) {
ch = rune(line[pos])
}
if phase == inSpaces { // Looking for start of word
if pos == len(line) { // end of input
break
}
if unicode.IsSpace(ch) { // skip spaces
continue
}
// Only keep going if the next word starts with --
if ch != '-' || pos+1 == len(line) || rune(line[pos+1]) != '-' {
return line[pos:], words, nil
}
phase = inWord // found something with "--", fall through
}
if (phase == inWord || phase == inQuote) && (pos == len(line)) {
if word != "--" && (blankOK || len(word) > 0) {
words = append(words, word)
}
break
}
if phase == inWord {
if unicode.IsSpace(ch) {
phase = inSpaces
if word == "--" {
return line[pos:], words, nil
}
if blankOK || len(word) > 0 {
words = append(words, word)
}
word = ""
blankOK = false
continue
}
if ch == '\'' || ch == '"' {
quote = ch
blankOK = true
phase = inQuote
continue
}
if ch == '\\' {
if pos+1 == len(line) {
continue // just skip \ at end
}
pos++
ch = rune(line[pos])
}
word += string(ch)
continue
}
if phase == inQuote {
if ch == quote {
phase = inWord
continue
}
if ch == '\\' {
if pos+1 == len(line) {
phase = inWord
continue // just skip \ at end
}
pos++
ch = rune(line[pos])
}
word += string(ch)
}
}
return "", words, nil
}
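// Illustrative sketch (not part of the vendored source): leading --flags are
// peeled off before the remaining arguments are handed to the line parsers.
//
//	cmd, flags, args, _ := splitCommand("COPY --from=builder /src /dst")
//	// cmd == "COPY", flags == []string{"--from=builder"}, args == "/src /dst"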

View File

@ -0,0 +1,238 @@
A|hello | hello
A|he'll'o | hello
A|he'llo | error
A|he\'llo | he'llo
A|he\\'llo | error
A|abc\tdef | abctdef
A|"abc\tdef" | abc\tdef
A|"abc\\tdef" | abc\tdef
A|'abc\tdef' | abc\tdef
A|hello\ | hello
A|hello\\ | hello\
A|"hello | error
A|"hello\" | error
A|"hel'lo" | hel'lo
A|'hello | error
A|'hello\' | hello\
A|'hello\there' | hello\there
A|'hello\\there' | hello\\there
A|"''" | ''
A|$. | $.
A|he$1x | hex
A|he$.x | he$.x
# Next one is different on Windows as $pwd==$PWD
U|he$pwd. | he.
W|he$pwd. | he/home.
A|he$PWD | he/home
A|he\$PWD | he$PWD
A|he\\$PWD | he\/home
A|"he\$PWD" | he$PWD
A|"he\\$PWD" | he\/home
A|\${} | ${}
A|\${}aaa | ${}aaa
A|he\${} | he${}
A|he\${}xx | he${}xx
A|${} | error
A|${}aaa | error
A|he${} | error
A|he${}xx | error
A|he${hi} | he
A|he${hi}xx | hexx
A|he${PWD} | he/home
A|he${.} | error
A|he${XXX:-000}xx | he000xx
A|he${PWD:-000}xx | he/homexx
A|he${XXX:-$PWD}xx | he/homexx
A|he${XXX:-${PWD:-yyy}}xx | he/homexx
A|he${XXX:-${YYY:-yyy}}xx | heyyyxx
A|he${XXX:YYY} | error
A|he${XXX?} | error
A|he${XXX:?} | error
A|he${PWD?} | he/home
A|he${PWD:?} | he/home
A|he${NULL?} | he
A|he${NULL:?} | error
A|he${XXX:+${PWD}}xx | hexx
A|he${PWD:+${XXX}}xx | hexx
A|he${PWD:+${SHELL}}xx | hebashxx
A|he${XXX:+000}xx | hexx
A|he${PWD:+000}xx | he000xx
A|'he${XX}' | he${XX}
A|"he${PWD}" | he/home
A|"he'$PWD'" | he'/home'
A|"$PWD" | /home
A|'$PWD' | $PWD
A|'\$PWD' | \$PWD
A|'"hello"' | "hello"
A|he\$PWD | he$PWD
A|"he\$PWD" | he$PWD
A|'he\$PWD' | he\$PWD
A|he${PWD | error
A|he${PWD:=000}xx | error
A|he${PWD:+${PWD}:}xx | he/home:xx
A|he${XXX:-\$PWD:}xx | he$PWD:xx
A|he${XXX:-\${PWD}z}xx | he${PWDz}xx
A|안녕하세요 | 안녕하세요
A|안'녕'하세요 | 안녕하세요
A|안'녕하세요 | error
A|안녕\'하세요 | 안녕'하세요
A|안\\'녕하세요 | error
A|안녕\t하세요 | 안녕t하세요
A|"안녕\t하세요" | 안녕\t하세요
A|'안녕\t하세요 | error
A|안녕하세요\ | 안녕하세요
A|안녕하세요\\ | 안녕하세요\
A|"안녕하세요 | error
A|"안녕하세요\" | error
A|"안녕'하세요" | 안녕'하세요
A|'안녕하세요 | error
A|'안녕하세요\' | 안녕하세요\
A|안녕$1x | 안녕x
A|안녕$.x | 안녕$.x
# Next one is different on Windows as $pwd==$PWD
U|안녕$pwd. | 안녕.
W|안녕$pwd. | 안녕/home.
A|안녕$PWD | 안녕/home
A|안녕\$PWD | 안녕$PWD
A|안녕\\$PWD | 안녕\/home
A|안녕\${} | 안녕${}
A|안녕\${}xx | 안녕${}xx
A|안녕${} | error
A|안녕${}xx | error
A|안녕${hi} | 안녕
A|안녕${hi}xx | 안녕xx
A|안녕${PWD} | 안녕/home
A|안녕${.} | error
A|안녕${XXX:-000}xx | 안녕000xx
A|안녕${PWD:-000}xx | 안녕/homexx
A|안녕${XXX:-$PWD}xx | 안녕/homexx
A|안녕${XXX:-${PWD:-yyy}}xx | 안녕/homexx
A|안녕${XXX:-${YYY:-yyy}}xx | 안녕yyyxx
A|안녕${XXX:YYY} | error
A|안녕${XXX:+${PWD}}xx | 안녕xx
A|안녕${PWD:+${XXX}}xx | 안녕xx
A|안녕${PWD:+${SHELL}}xx | 안녕bashxx
A|안녕${XXX:+000}xx | 안녕xx
A|안녕${PWD:+000}xx | 안녕000xx
A|'안녕${XX}' | 안녕${XX}
A|"안녕${PWD}" | 안녕/home
A|"안녕'$PWD'" | 안녕'/home'
A|'"안녕"' | "안녕"
A|안녕\$PWD | 안녕$PWD
A|"안녕\$PWD" | 안녕$PWD
A|'안녕\$PWD' | 안녕\$PWD
A|안녕${PWD | error
A|안녕${PWD:=000}xx | error
A|안녕${PWD:+${PWD}:}xx | 안녕/home:xx
A|안녕${XXX:-\$PWD:}xx | 안녕$PWD:xx
A|안녕${XXX:-\${PWD}z}xx | 안녕${PWDz}xx
A|$KOREAN | 한국어
A|안녕$KOREAN | 안녕한국어
A|${{aaa} | error
A|${aaa}} | }
A|${aaa | error
A|${{aaa:-bbb} | error
A|${aaa:-bbb}} | bbb}
A|${aaa:-bbb | error
A|${aaa:-bbb} | bbb
A|${aaa:-${bbb:-ccc}} | ccc
A|${aaa:-bbb ${foo} | error
A|${aaa:-bbb {foo} | bbb {foo
A|${:} | error
A|${:-bbb} | error
A|${:+bbb} | error
# Positional parameters won't be set:
# http://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_05_01
A|$1 |
A|${1} |
A|${1:+bbb} |
A|${1:-bbb} | bbb
A|$2 |
A|${2} |
A|${2:+bbb} |
A|${2:-bbb} | bbb
A|$3 |
A|${3} |
A|${3:+bbb} |
A|${3:-bbb} | bbb
A|$4 |
A|${4} |
A|${4:+bbb} |
A|${4:-bbb} | bbb
A|$5 |
A|${5} |
A|${5:+bbb} |
A|${5:-bbb} | bbb
A|$6 |
A|${6} |
A|${6:+bbb} |
A|${6:-bbb} | bbb
A|$7 |
A|${7} |
A|${7:+bbb} |
A|${7:-bbb} | bbb
A|$8 |
A|${8} |
A|${8:+bbb} |
A|${8:-bbb} | bbb
A|$9 |
A|${9} |
A|${9:+bbb} |
A|${9:-bbb} | bbb
A|$999 |
A|${999} |
A|${999:+bbb} |
A|${999:-bbb} | bbb
A|$999aaa | aaa
A|${999}aaa | aaa
A|${999:+bbb}aaa | aaa
A|${999:-bbb}aaa | bbbaaa
A|$001 |
A|${001} |
A|${001:+bbb} |
A|${001:-bbb} | bbb
A|$001aaa | aaa
A|${001}aaa | aaa
A|${001:+bbb}aaa | aaa
A|${001:-bbb}aaa | bbbaaa
# Special parameters won't be set in the Dockerfile:
# http://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_05_02
A|$@ |
A|${@} |
A|${@:+bbb} |
A|${@:-bbb} | bbb
A|$@@@ | @@
A|$@aaa | aaa
A|${@}aaa | aaa
A|${@:+bbb}aaa | aaa
A|${@:-bbb}aaa | bbbaaa
A|$* |
A|${*} |
A|${*:+bbb} |
A|${*:-bbb} | bbb
A|$# |
A|${#} |
A|${#:+bbb} |
A|${#:-bbb} | bbb
A|$? |
A|${?} |
A|${?:+bbb} |
A|${?:-bbb} | bbb
A|$- |
A|${-} |
A|${-:+bbb} |
A|${-:-bbb} | bbb
A|$$ |
A|${$} |
A|${$:+bbb} |
A|${$:-bbb} | bbb
A|$! |
A|${!} |
A|${!:+bbb} |
A|${!:-bbb} | bbb
A|$0 |
A|${0} |
A|${0:+bbb} |
A|${0:-bbb} | bbb

View File

@ -0,0 +1,11 @@
//go:build !windows
// +build !windows
package shell
// EqualEnvKeys compares two strings and returns true if they are equal.
// On Unix this comparison is case-sensitive.
// On Windows this comparison is case-insensitive.
func EqualEnvKeys(from, to string) bool {
return from == to
}

View File

@ -0,0 +1,10 @@
package shell
import "strings"
// EqualEnvKeys compares two strings and returns true if they are equal.
// On Unix this comparison is case-sensitive.
// On Windows this comparison is case-insensitive.
func EqualEnvKeys(from, to string) bool {
return strings.EqualFold(from, to)
}

View File

@ -0,0 +1,645 @@
package shell
import (
"bytes"
"fmt"
"regexp"
"strings"
"text/scanner"
"unicode"
"github.com/pkg/errors"
)
// Lex performs shell word splitting and variable expansion.
//
// Lex takes a string and an array of env variables and
// processes all quotes (" and ') as well as $xxx and ${xxx} env variable
// tokens. It tries to mimic bash shell processing.
// It doesn't support all flavors of ${xx:...} formats, but new ones can
// be added by adding code to the "special ${} format processing" section.
type Lex struct {
escapeToken rune
RawQuotes bool
RawEscapes bool
SkipProcessQuotes bool
SkipUnsetEnv bool
}
// NewLex creates a new Lex which uses escapeToken to escape quotes.
func NewLex(escapeToken rune) *Lex {
return &Lex{escapeToken: escapeToken}
}
// ProcessWord will use the 'env' list of environment variables,
// and replace any env var references in 'word'.
func (s *Lex) ProcessWord(word string, env []string) (string, error) {
word, _, err := s.process(word, BuildEnvs(env))
return word, err
}
// ProcessWords will use the 'env' list of environment variables,
// and replace any env var references in 'word'. It also returns a
// slice of strings which represents 'word' split up based on spaces,
// taking quotes into account. Note that this splitting is done **after**
// the env var substitutions are done. Each returned word is trimmed of
// leading and trailing spaces (unless quoted), but ProcessWord retains
// spaces between words.
func (s *Lex) ProcessWords(word string, env []string) ([]string, error) {
_, words, err := s.process(word, BuildEnvs(env))
return words, err
}
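// Illustrative sketch (not part of the vendored source): variable expansion
// followed by word splitting.
//
//	shlex := NewLex('\\')
//	word, _ := shlex.ProcessWord("$GREETING world", []string{"GREETING=hello"})
//	// word == "hello world"
//	words, _ := shlex.ProcessWords("$GREETING world", []string{"GREETING=hello"})
//	// words == []string{"hello", "world"}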
// ProcessWordWithMap will use the 'env' list of environment variables,
// and replace any env var references in 'word'.
func (s *Lex) ProcessWordWithMap(word string, env map[string]string) (string, error) {
word, _, err := s.process(word, env)
return word, err
}
// ProcessWordWithMatches will use the 'env' list of environment variables,
// replace any env var references in 'word', and return the env vars that were used.
func (s *Lex) ProcessWordWithMatches(word string, env map[string]string) (string, map[string]struct{}, error) {
sw := s.init(word, env)
word, _, err := sw.process(word)
return word, sw.matches, err
}
// ProcessWordsWithMap is the map-based variant of ProcessWords.
func (s *Lex) ProcessWordsWithMap(word string, env map[string]string) ([]string, error) {
_, words, err := s.process(word, env)
return words, err
}
func (s *Lex) init(word string, env map[string]string) *shellWord {
sw := &shellWord{
envs: env,
escapeToken: s.escapeToken,
skipUnsetEnv: s.SkipUnsetEnv,
skipProcessQuotes: s.SkipProcessQuotes,
rawQuotes: s.RawQuotes,
rawEscapes: s.RawEscapes,
matches: make(map[string]struct{}),
}
sw.scanner.Init(strings.NewReader(word))
return sw
}
func (s *Lex) process(word string, env map[string]string) (string, []string, error) {
sw := s.init(word, env)
return sw.process(word)
}
type shellWord struct {
scanner scanner.Scanner
envs map[string]string
escapeToken rune
rawQuotes bool
rawEscapes bool
skipUnsetEnv bool
skipProcessQuotes bool
matches map[string]struct{}
}
func (sw *shellWord) process(source string) (string, []string, error) {
word, words, err := sw.processStopOn(scanner.EOF, sw.rawEscapes)
if err != nil {
err = errors.Wrapf(err, "failed to process %q", source)
}
return word, words, err
}
type wordsStruct struct {
word string
words []string
inWord bool
}
func (w *wordsStruct) addChar(ch rune) {
if unicode.IsSpace(ch) && w.inWord {
if len(w.word) != 0 {
w.words = append(w.words, w.word)
w.word = ""
w.inWord = false
}
} else if !unicode.IsSpace(ch) {
w.addRawChar(ch)
}
}
func (w *wordsStruct) addRawChar(ch rune) {
w.word += string(ch)
w.inWord = true
}
func (w *wordsStruct) addString(str string) {
for _, ch := range str {
w.addChar(ch)
}
}
func (w *wordsStruct) addRawString(str string) {
w.word += str
w.inWord = true
}
func (w *wordsStruct) getWords() []string {
if len(w.word) > 0 {
w.words = append(w.words, w.word)
// Just in case we're called again by mistake
w.word = ""
w.inWord = false
}
return w.words
}
// processStopOn processes the word and stops when it reaches the end of the
// word or the 'stopChar' character.
func (sw *shellWord) processStopOn(stopChar rune, rawEscapes bool) (string, []string, error) {
var result bytes.Buffer
var words wordsStruct
var charFuncMapping = map[rune]func() (string, error){
'$': sw.processDollar,
}
if !sw.skipProcessQuotes {
charFuncMapping['\''] = sw.processSingleQuote
charFuncMapping['"'] = sw.processDoubleQuote
}
// temporarily set sw.rawEscapes if needed
if rawEscapes != sw.rawEscapes {
sw.rawEscapes = rawEscapes
defer func() {
sw.rawEscapes = !rawEscapes
}()
}
for sw.scanner.Peek() != scanner.EOF {
ch := sw.scanner.Peek()
if stopChar != scanner.EOF && ch == stopChar {
sw.scanner.Next()
return result.String(), words.getWords(), nil
}
if fn, ok := charFuncMapping[ch]; ok {
// Call special processing func for certain chars
tmp, err := fn()
if err != nil {
return "", []string{}, err
}
result.WriteString(tmp)
if ch == rune('$') {
words.addString(tmp)
} else {
words.addRawString(tmp)
}
} else {
// Not special, just add it to the result
ch = sw.scanner.Next()
if ch == sw.escapeToken {
if sw.rawEscapes {
words.addRawChar(ch)
result.WriteRune(ch)
}
// '\' (the default escape token; ` is also allowed) escapes the next char, except at end of line
ch = sw.scanner.Next()
if ch == scanner.EOF {
break
}
words.addRawChar(ch)
} else {
words.addChar(ch)
}
result.WriteRune(ch)
}
}
if stopChar != scanner.EOF {
return "", []string{}, errors.Errorf("unexpected end of statement while looking for matching %s", string(stopChar))
}
return result.String(), words.getWords(), nil
}
func (sw *shellWord) processSingleQuote() (string, error) {
// All chars between single quotes are taken as-is
// Note, you can't escape '
//
// From the "sh" man page:
// Single Quotes
// Enclosing characters in single quotes preserves the literal meaning of
// all the characters (except single quotes, making it impossible to put
// single-quotes in a single-quoted string).
var result bytes.Buffer
ch := sw.scanner.Next()
if sw.rawQuotes {
result.WriteRune(ch)
}
for {
ch = sw.scanner.Next()
switch ch {
case scanner.EOF:
return "", errors.New("unexpected end of statement while looking for matching single-quote")
case '\'':
if sw.rawQuotes {
result.WriteRune(ch)
}
return result.String(), nil
}
result.WriteRune(ch)
}
}
func (sw *shellWord) processDoubleQuote() (string, error) {
// All chars up to the next " are taken as-is, even ', except any $ chars.
// But you can escape " with a \ (or with ` if the escape token is set accordingly).
//
// From the "sh" man page:
// Double Quotes
// Enclosing characters within double quotes preserves the literal meaning
// of all characters except dollarsign ($), backquote (`), and backslash
// (\). The backslash inside double quotes is historically weird, and
// serves to quote only the following characters:
// $ ` " \ <newline>.
// Otherwise it remains literal.
var result bytes.Buffer
ch := sw.scanner.Next()
if sw.rawQuotes {
result.WriteRune(ch)
}
for {
switch sw.scanner.Peek() {
case scanner.EOF:
return "", errors.New("unexpected end of statement while looking for matching double-quote")
case '"':
ch := sw.scanner.Next()
if sw.rawQuotes {
result.WriteRune(ch)
}
return result.String(), nil
case '$':
value, err := sw.processDollar()
if err != nil {
return "", err
}
result.WriteString(value)
default:
ch := sw.scanner.Next()
if ch == sw.escapeToken {
if sw.rawEscapes {
result.WriteRune(ch)
}
switch sw.scanner.Peek() {
case scanner.EOF:
// Ignore \ at end of word
continue
case '"', '$', sw.escapeToken:
// These chars can be escaped, all other \'s are left as-is
// Note: for now don't do anything special with ` chars.
// Not sure what to do with them anyway since we're not going
// to execute the text in there (not now anyway).
ch = sw.scanner.Next()
}
}
result.WriteRune(ch)
}
}
}
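// Illustrative sketch (not part of the vendored source): double quotes keep
// literal spaces but still expand variables, while single quotes suppress
// expansion entirely.
//
//	shlex := NewLex('\\')
//	w, _ := shlex.ProcessWord(`"$HOME dir"`, []string{"HOME=/root"})
//	// w == "/root dir"
//	w, _ = shlex.ProcessWord(`'$HOME dir'`, []string{"HOME=/root"})
//	// w == "$HOME dir"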
func (sw *shellWord) processDollar() (string, error) {
sw.scanner.Next()
// $xxx case
if sw.scanner.Peek() != '{' {
name := sw.processName()
if name == "" {
return "$", nil
}
value, found := sw.getEnv(name)
if !found && sw.skipUnsetEnv {
return "$" + name, nil
}
return value, nil
}
sw.scanner.Next()
switch sw.scanner.Peek() {
case scanner.EOF:
return "", errors.New("syntax error: missing '}'")
case '{', '}', ':':
// Invalid ${{xx}, ${:xx}, ${:}, ${} cases
return "", errors.New("syntax error: bad substitution")
}
name := sw.processName()
ch := sw.scanner.Next()
chs := string(ch)
nullIsUnset := false
switch ch {
case '}':
// Normal ${xx} case
value, set := sw.getEnv(name)
if !set && sw.skipUnsetEnv {
return fmt.Sprintf("${%s}", name), nil
}
return value, nil
case ':':
nullIsUnset = true
ch = sw.scanner.Next()
chs += string(ch)
fallthrough
case '+', '-', '?', '#', '%':
rawEscapes := ch == '#' || ch == '%'
word, _, err := sw.processStopOn('}', rawEscapes)
if err != nil {
if sw.scanner.Peek() == scanner.EOF {
return "", errors.New("syntax error: missing '}'")
}
return "", err
}
// Grab the current value of the variable in question so we
// can use it to determine what to do based on the modifier
value, set := sw.getEnv(name)
if sw.skipUnsetEnv && !set {
return fmt.Sprintf("${%s%s%s}", name, chs, word), nil
}
switch ch {
case '-':
if !set || (nullIsUnset && value == "") {
return word, nil
}
return value, nil
case '+':
if !set || (nullIsUnset && value == "") {
return "", nil
}
return word, nil
case '?':
if !set {
message := "is not allowed to be unset"
if word != "" {
message = word
}
return "", errors.Errorf("%s: %s", name, message)
}
if nullIsUnset && value == "" {
message := "is not allowed to be empty"
if word != "" {
message = word
}
return "", errors.Errorf("%s: %s", name, message)
}
return value, nil
case '%', '#':
// %/# matches the shortest pattern expansion, %%/## the longest
greedy := false
if word[0] == byte(ch) {
greedy = true
word = word[1:]
}
if ch == '%' {
return trimSuffix(word, value, greedy)
}
return trimPrefix(word, value, greedy)
default:
return "", errors.Errorf("unsupported modifier (%s) in substitution", chs)
}
case '/':
replaceAll := sw.scanner.Peek() == '/'
if replaceAll {
sw.scanner.Next()
}
pattern, _, err := sw.processStopOn('/', true)
if err != nil {
if sw.scanner.Peek() == scanner.EOF {
return "", errors.New("syntax error: missing '/' in ${}")
}
return "", err
}
replacement, _, err := sw.processStopOn('}', true)
if err != nil {
if sw.scanner.Peek() == scanner.EOF {
return "", errors.New("syntax error: missing '}'")
}
return "", err
}
value, set := sw.getEnv(name)
if sw.skipUnsetEnv && !set {
return fmt.Sprintf("${%s/%s/%s}", name, pattern, replacement), nil
}
re, err := convertShellPatternToRegex(pattern, true, false)
if err != nil {
return "", errors.Errorf("invalid pattern (%s) in substitution: %s", pattern, err)
}
if replaceAll {
value = re.ReplaceAllString(value, replacement)
} else {
if idx := re.FindStringIndex(value); idx != nil {
value = value[0:idx[0]] + replacement + value[idx[1]:]
}
}
return value, nil
default:
return "", errors.Errorf("unsupported modifier (%s) in substitution", chs)
}
}
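// Illustrative sketch (not part of the vendored source): a few of the ${}
// modifier forms handled above.
//
//	shlex := NewLex('\\')
//	w, _ := shlex.ProcessWord("${FOO:-fallback}", nil)             // "fallback"
//	w, _ = shlex.ProcessWord("${FOO:+set}", []string{"FOO=x"})     // "set"
//	w, _ = shlex.ProcessWord("${FOO%.txt}", []string{"FOO=a.txt"}) // "a"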
// processName reads in a name (alphanumeric or _).
// If it starts with a digit, it reads the full run of digits (a positional
// parameter such as $1) and returns it.
func (sw *shellWord) processName() string {
var name bytes.Buffer
for sw.scanner.Peek() != scanner.EOF {
ch := sw.scanner.Peek()
if name.Len() == 0 && unicode.IsDigit(ch) {
for sw.scanner.Peek() != scanner.EOF && unicode.IsDigit(sw.scanner.Peek()) {
// Keep reading until the first non-digit character, or EOF
ch = sw.scanner.Next()
name.WriteRune(ch)
}
return name.String()
}
if name.Len() == 0 && isSpecialParam(ch) {
ch = sw.scanner.Next()
return string(ch)
}
if !unicode.IsLetter(ch) && !unicode.IsDigit(ch) && ch != '_' {
break
}
ch = sw.scanner.Next()
name.WriteRune(ch)
}
return name.String()
}
// isSpecialParam checks if the provided character is a special parameter,
// as defined in http://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_05_02
func isSpecialParam(char rune) bool {
switch char {
case '@', '*', '#', '?', '-', '$', '!', '0':
// Special parameters
// http://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_05_02
return true
}
return false
}
func (sw *shellWord) getEnv(name string) (string, bool) {
for key, value := range sw.envs {
if EqualEnvKeys(name, key) {
sw.matches[name] = struct{}{}
return value, true
}
}
return "", false
}
// BuildEnvs converts a slice of "KEY=VALUE" strings into a map. Entries
// without '=' are stored with an empty value, and later entries overwrite
// earlier ones with the same key.
func BuildEnvs(env []string) map[string]string {
envs := map[string]string{}
for _, e := range env {
i := strings.Index(e, "=")
if i < 0 {
envs[e] = ""
} else {
k := e[:i]
v := e[i+1:]
// overwrite value if key already exists
envs[k] = v
}
}
return envs
}
// convertShellPatternToRegex converts a shell-like wildcard pattern
// (? matches a single char; * matches either the shortest or, when greedy,
// the longest string) to an equivalent regular expression.
//
// Based on
// https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_13
// but without the bracket expressions (`[]`).
func convertShellPatternToRegex(pattern string, greedy bool, anchored bool) (*regexp.Regexp, error) {
var s scanner.Scanner
s.Init(strings.NewReader(pattern))
var out strings.Builder
out.Grow(len(pattern) + 4)
// match only at the beginning of the string
if anchored {
out.WriteByte('^')
}
// default: non-greedy wildcards
starPattern := ".*?"
if greedy {
starPattern = ".*"
}
for tok := s.Next(); tok != scanner.EOF; tok = s.Next() {
switch tok {
case '*':
out.WriteString(starPattern)
continue
case '?':
out.WriteByte('.')
continue
case '\\':
// } and / as part of ${} need to be escaped, but the escape isn't part
// of the pattern
if s.Peek() == '}' || s.Peek() == '/' {
continue
}
out.WriteRune('\\')
tok = s.Next()
if tok != '*' && tok != '?' && tok != '\\' {
return nil, errors.Errorf("invalid escape '\\%c'", tok)
}
// regex characters that need to be escaped;
// escaping the closing brackets is optional, but done for consistency
case '[', ']', '{', '}', '.', '+', '(', ')', '|', '^', '$':
out.WriteByte('\\')
}
out.WriteRune(tok)
}
return regexp.Compile(out.String())
}
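// Illustrative sketch (not part of the vendored source): the pattern used by
// ${var#pattern} / ${var%pattern} expansion is translated into a regular
// expression before matching.
//
//	re, _ := convertShellPatternToRegex("*.txt", true, true)
//	// re is equivalent to the regexp `^.*\.txt` (greedy, anchored at the start)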
func trimPrefix(word, value string, greedy bool) (string, error) {
re, err := convertShellPatternToRegex(word, greedy, true)
if err != nil {
return "", errors.Errorf("invalid pattern (%s) in substitution: %s", word, err)
}
if idx := re.FindStringIndex(value); idx != nil {
value = value[idx[1]:]
}
return value, nil
}
// reversePattern reverses a pattern without reversing its escape sequences, i.e. a\*c -> c\*a
func reversePattern(pattern string) string {
patternRunes := []rune(pattern)
out := make([]rune, len(patternRunes))
lastIdx := len(patternRunes) - 1
for i := 0; i <= lastIdx; {
tok := patternRunes[i]
outIdx := lastIdx - i
if tok == '\\' && i != lastIdx {
out[outIdx-1] = tok
// the pattern is taken from a ${var#pattern}, so the last
// character can't be an escape character
out[outIdx] = patternRunes[i+1]
i += 2
} else {
out[outIdx] = tok
i++
}
}
return string(out)
}
func reverseString(str string) string {
out := []rune(str)
outIdx := len(out) - 1
for i := 0; i < outIdx; i++ {
out[i], out[outIdx] = out[outIdx], out[i]
outIdx--
}
return string(out)
}
func trimSuffix(pattern, word string, greedy bool) (string, error) {
// regular expressions can't handle finding the shortest rightmost
// string so we reverse both search space and pattern to convert it
// to a leftmost search in both cases
pattern = reversePattern(pattern)
word = reverseString(word)
str, err := trimPrefix(pattern, word, greedy)
if err != nil {
return "", err
}
return reverseString(str), nil
}
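// Illustrative sketch (not part of the vendored source): how ${v#pattern} and
// ${v%%pattern} map onto trimPrefix and trimSuffix.
//
//	s, _ := trimPrefix("*.", "archive.tar.gz", false) // "tar.gz"  (${v#*.})
//	s, _ = trimSuffix(".*", "archive.tar.gz", true)   // "archive" (${v%%.*})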

View File

@ -0,0 +1,30 @@
hello | hello
hello${hi}bye | hellobye
ENV hi=hi
hello${hi}bye | hellohibye
ENV space=abc def
hello${space}bye | helloabc,defbye
hello"${space}"bye | helloabc defbye
hello "${space}"bye | hello,abc defbye
ENV leading= ab c
hello${leading}def | hello,ab,cdef
hello"${leading}" def | hello ab c,def
hello"${leading}" | hello ab c
hello${leading} | hello,ab,c
# next line MUST have 3 trailing spaces, don't erase them!
ENV trailing=ab c
hello${trailing} | helloab,c
hello${trailing}d | helloab,c,d
hello"${trailing}"d | helloab c d
# next line MUST have 3 trailing spaces, don't erase them!
hel"lo${trailing}" | helloab c
hello" there " | hello there
hello there | hello,there
hello\ there | hello there
hello" there | error
hello\" there | hello",there
hello"\\there" | hello\there
hello"\there" | hello\there
hello'\\there' | hello\\there
hello'\there' | hello\there
hello'$there' | hello$there

View File

@ -7,6 +7,7 @@ import (
"github.com/moby/buildkit/frontend/gateway/client"
"github.com/moby/buildkit/frontend/subrequests"
"github.com/moby/buildkit/frontend/subrequests/lint"
"github.com/moby/buildkit/frontend/subrequests/outline"
"github.com/moby/buildkit/frontend/subrequests/targets"
"github.com/moby/buildkit/solver/errdefs"
@ -19,6 +20,7 @@ const (
type RequestHandler struct {
Outline func(context.Context) (*outline.Outline, error)
ListTargets func(context.Context) (*targets.List, error)
Lint func(context.Context) (*lint.LintResults, error)
AllowOther bool
}
@ -55,6 +57,18 @@ func (bc *Client) HandleSubrequest(ctx context.Context, h RequestHandler) (*clie
res, err := targets.ToResult()
return res, true, err
}
case lint.SubrequestLintDefinition.Name:
if f := h.Lint; f != nil {
warnings, err := f(ctx)
if err != nil {
return nil, false, err
}
if warnings == nil {
return nil, true, nil
}
res, err := warnings.ToResult()
return res, true, err
}
}
if h.AllowOther {
return nil, false, nil

View File

@ -0,0 +1,163 @@
package lint
import (
"bytes"
"encoding/json"
"fmt"
"io"
"sort"
"text/tabwriter"
"github.com/moby/buildkit/client/llb"
"github.com/moby/buildkit/frontend/dockerfile/parser"
"github.com/moby/buildkit/frontend/gateway/client"
"github.com/moby/buildkit/frontend/subrequests"
"github.com/moby/buildkit/solver/pb"
)
const RequestLint = "frontend.lint"
var SubrequestLintDefinition = subrequests.Request{
Name: RequestLint,
Version: "1.0.0",
Type: subrequests.TypeRPC,
Description: "Lint a Dockerfile",
Opts: []subrequests.Named{},
Metadata: []subrequests.Named{
{Name: "result.json"},
{Name: "result.txt"},
},
}
type Source struct {
Filename string `json:"fileName"`
Language string `json:"language"`
Definition *pb.Definition `json:"definition"`
Data []byte `json:"data"`
}
type Warning struct {
RuleName string `json:"ruleName"`
Description string `json:"description,omitempty"`
URL string `json:"url,omitempty"`
Detail string `json:"detail,omitempty"`
Location pb.Location `json:"location,omitempty"`
}
type LintResults struct {
Warnings []Warning `json:"warnings"`
Sources []Source `json:"sources"`
}
func (results *LintResults) AddSource(sourceMap *llb.SourceMap) int {
newSource := Source{
Filename: sourceMap.Filename,
Language: sourceMap.Language,
Definition: sourceMap.Definition.ToPB(),
Data: sourceMap.Data,
}
for i, source := range results.Sources {
if sourceEqual(source, newSource) {
return i
}
}
results.Sources = append(results.Sources, newSource)
return len(results.Sources) - 1
}
func (results *LintResults) AddWarning(rulename, description, url, fmtmsg string, sourceIndex int, location []parser.Range) {
sourceLocation := []*pb.Range{}
for _, loc := range location {
sourceLocation = append(sourceLocation, &pb.Range{
Start: pb.Position{
Line: int32(loc.Start.Line),
Character: int32(loc.Start.Character),
},
End: pb.Position{
Line: int32(loc.End.Line),
Character: int32(loc.End.Character),
},
})
}
pbLocation := pb.Location{
SourceIndex: int32(sourceIndex),
Ranges: sourceLocation,
}
results.Warnings = append(results.Warnings, Warning{
RuleName: rulename,
Description: description,
URL: url,
Detail: fmtmsg,
Location: pbLocation,
})
}
func sourceEqual(a, b Source) bool {
if a.Filename != b.Filename || a.Language != b.Language {
return false
}
return bytes.Equal(a.Data, b.Data)
}
func (results *LintResults) ToResult() (*client.Result, error) {
res := client.NewResult()
dt, err := json.MarshalIndent(results, "", " ")
if err != nil {
return nil, err
}
res.AddMeta("result.json", dt)
b := bytes.NewBuffer(nil)
if err := PrintLintViolations(dt, b); err != nil {
return nil, err
}
res.AddMeta("result.txt", b.Bytes())
res.AddMeta("version", []byte(SubrequestLintDefinition.Version))
return res, nil
}
func PrintLintViolations(dt []byte, w io.Writer) error {
var warnings LintResults
if err := json.Unmarshal(dt, &warnings); err != nil {
return err
}
// Here, we're grouping the warnings by rule name
lintWarnings := make(map[string][]Warning)
lintWarningRules := []string{}
for _, warning := range warnings.Warnings {
if _, ok := lintWarnings[warning.RuleName]; !ok {
lintWarningRules = append(lintWarningRules, warning.RuleName)
lintWarnings[warning.RuleName] = []Warning{}
}
lintWarnings[warning.RuleName] = append(lintWarnings[warning.RuleName], warning)
}
sort.Strings(lintWarningRules)
tw := tabwriter.NewWriter(w, 0, 0, 2, ' ', 0)
for _, rule := range lintWarningRules {
fmt.Fprintf(tw, "Lint Rule %s\n", rule)
for _, warning := range lintWarnings[rule] {
source := warnings.Sources[warning.Location.SourceIndex]
sourceData := bytes.Split(source.Data, []byte("\n"))
firstRange := warning.Location.Ranges[0]
if firstRange.Start.Line != firstRange.End.Line {
fmt.Fprintf(tw, "\t%s:%d-%d\n", source.Filename, firstRange.Start.Line, firstRange.End.Line)
} else {
fmt.Fprintf(tw, "\t%s:%d\n", source.Filename, firstRange.Start.Line)
}
fmt.Fprintf(tw, "\t%s\n", warning.Detail)
for _, r := range warning.Location.Ranges {
for i := r.Start.Line; i <= r.End.Line; i++ {
fmt.Fprintf(tw, "\t%d\t|\t%s\n", i, sourceData[i-1])
}
}
fmt.Fprintln(tw)
}
fmt.Fprintln(tw)
}
return tw.Flush()
}
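// Illustrative sketch (not part of the vendored source; the rule name and
// detail below are hypothetical example values): building a LintResults by
// hand and rendering it to any io.Writer.
//
//	results := LintResults{
//		Sources: []Source{{Filename: "Dockerfile", Data: []byte("FROM alpine AS Builder\n")}},
//	}
//	results.AddWarning("StageNameCasing", "Stage names should be lowercase", "", "stage name 'Builder' should be lowercase", 0,
//		[]parser.Range{{Start: parser.Position{Line: 1}, End: parser.Position{Line: 1}}})
//	dt, _ := json.MarshalIndent(results, "", "  ")
//	_ = PrintLintViolations(dt, os.Stdout)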

View File

@ -8,9 +8,9 @@ import (
"time"
"github.com/containerd/containerd/defaults"
grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware"
"github.com/moby/buildkit/util/bklog"
"github.com/moby/buildkit/util/grpcerrors"
"github.com/moby/buildkit/util/tracing"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc"
@ -31,9 +31,6 @@ func serve(ctx context.Context, grpcServer *grpc.Server, conn net.Conn) {
}
func grpcClientConn(ctx context.Context, conn net.Conn) (context.Context, *grpc.ClientConn, error) {
var unary []grpc.UnaryClientInterceptor
var stream []grpc.StreamClientInterceptor
var dialCount int64
dialer := grpc.WithContextDialer(func(ctx context.Context, addr string) (net.Conn, error) {
if c := atomic.AddInt64(&dialCount, 1); c > 1 {
@ -47,26 +44,16 @@ func grpcClientConn(ctx context.Context, conn net.Conn) (context.Context, *grpc.
grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(defaults.DefaultMaxRecvMsgSize)),
grpc.WithDefaultCallOptions(grpc.MaxCallSendMsgSize(defaults.DefaultMaxSendMsgSize)),
grpc.WithUnaryInterceptor(grpcerrors.UnaryClientInterceptor),
grpc.WithStreamInterceptor(grpcerrors.StreamClientInterceptor),
}
if span := trace.SpanFromContext(ctx); span.SpanContext().IsValid() {
unary = append(unary, filterClient(otelgrpc.UnaryClientInterceptor(otelgrpc.WithTracerProvider(span.TracerProvider()), otelgrpc.WithPropagators(propagators)))) //nolint:staticcheck // TODO(thaJeztah): ignore SA1019 for deprecated options: see https://github.com/moby/buildkit/issues/4681
stream = append(stream, otelgrpc.StreamClientInterceptor(otelgrpc.WithTracerProvider(span.TracerProvider()), otelgrpc.WithPropagators(propagators))) //nolint:staticcheck // TODO(thaJeztah): ignore SA1019 for deprecated options: see https://github.com/moby/buildkit/issues/4681
}
unary = append(unary, grpcerrors.UnaryClientInterceptor)
stream = append(stream, grpcerrors.StreamClientInterceptor)
if len(unary) == 1 {
dialOpts = append(dialOpts, grpc.WithUnaryInterceptor(unary[0]))
} else if len(unary) > 1 {
dialOpts = append(dialOpts, grpc.WithUnaryInterceptor(grpc_middleware.ChainUnaryClient(unary...)))
}
if len(stream) == 1 {
dialOpts = append(dialOpts, grpc.WithStreamInterceptor(stream[0]))
} else if len(stream) > 1 {
dialOpts = append(dialOpts, grpc.WithStreamInterceptor(grpc_middleware.ChainStreamClient(stream...)))
statsHandler := tracing.ClientStatsHandler(
otelgrpc.WithTracerProvider(span.TracerProvider()),
otelgrpc.WithPropagators(propagators),
)
dialOpts = append(dialOpts, grpc.WithStatsHandler(statsHandler))
}
cc, err := grpc.DialContext(ctx, "localhost", dialOpts...)

View File

@ -3,12 +3,11 @@ package session
import (
"context"
"net"
"strings"
"sync"
grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware"
"github.com/moby/buildkit/identity"
"github.com/moby/buildkit/util/grpcerrors"
"github.com/moby/buildkit/util/tracing"
"github.com/pkg/errors"
"go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc"
"go.opentelemetry.io/otel/propagation"
@ -53,29 +52,17 @@ type Session struct {
func NewSession(ctx context.Context, name, sharedKey string) (*Session, error) {
id := identity.NewID()
var unary []grpc.UnaryServerInterceptor
var stream []grpc.StreamServerInterceptor
serverOpts := []grpc.ServerOption{}
serverOpts := []grpc.ServerOption{
grpc.UnaryInterceptor(grpcerrors.UnaryServerInterceptor),
grpc.StreamInterceptor(grpcerrors.StreamServerInterceptor),
}
if span := trace.SpanFromContext(ctx); span.SpanContext().IsValid() {
unary = append(unary, filterServer(otelgrpc.UnaryServerInterceptor(otelgrpc.WithTracerProvider(span.TracerProvider()), otelgrpc.WithPropagators(propagators)))) //nolint:staticcheck // TODO(thaJeztah): ignore SA1019 for deprecated options: see https://github.com/moby/buildkit/issues/4681
stream = append(stream, otelgrpc.StreamServerInterceptor(otelgrpc.WithTracerProvider(span.TracerProvider()), otelgrpc.WithPropagators(propagators))) //nolint:staticcheck // TODO(thaJeztah): ignore SA1019 for deprecated options: see https://github.com/moby/buildkit/issues/4681
}
unary = append(unary, grpcerrors.UnaryServerInterceptor)
stream = append(stream, grpcerrors.StreamServerInterceptor)
if len(unary) == 1 {
serverOpts = append(serverOpts, grpc.UnaryInterceptor(unary[0]))
} else if len(unary) > 1 {
serverOpts = append(serverOpts, grpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(unary...)))
}
if len(stream) == 1 {
serverOpts = append(serverOpts, grpc.StreamInterceptor(stream[0]))
} else if len(stream) > 1 {
serverOpts = append(serverOpts, grpc.StreamInterceptor(grpc_middleware.ChainStreamServer(stream...)))
statsHandler := tracing.ServerStatsHandler(
otelgrpc.WithTracerProvider(span.TracerProvider()),
otelgrpc.WithPropagators(propagators),
)
serverOpts = append(serverOpts, grpc.StatsHandler(statsHandler))
}
s := &Session{
@ -167,22 +154,3 @@ func (s *Session) closed() bool {
func MethodURL(s, m string) string {
return "/" + s + "/" + m
}
// updates needed in opentelemetry-contrib to avoid this
func filterServer(intercept grpc.UnaryServerInterceptor) grpc.UnaryServerInterceptor {
return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp interface{}, err error) {
if strings.HasSuffix(info.FullMethod, "Health/Check") {
return handler(ctx, req)
}
return intercept(ctx, req, info, handler)
}
}
func filterClient(intercept grpc.UnaryClientInterceptor) grpc.UnaryClientInterceptor {
return func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
if strings.HasSuffix(method, "Health/Check") {
return invoker(ctx, method, req, reply, cc, opts...)
}
return intercept(ctx, method, req, reply, cc, invoker, opts...)
}
}

View File

@ -61,10 +61,11 @@ const (
CapExecCgroupsMounted apicaps.CapID = "exec.cgroup"
CapExecSecretEnv apicaps.CapID = "exec.secretenv"
CapFileBase apicaps.CapID = "file.base"
CapFileRmWildcard apicaps.CapID = "file.rm.wildcard"
CapFileCopyIncludeExcludePatterns apicaps.CapID = "file.copy.includeexcludepatterns"
CapFileRmNoFollowSymlink apicaps.CapID = "file.rm.nofollowsymlink"
CapFileBase apicaps.CapID = "file.base"
CapFileRmWildcard apicaps.CapID = "file.rm.wildcard"
CapFileCopyIncludeExcludePatterns apicaps.CapID = "file.copy.includeexcludepatterns"
CapFileRmNoFollowSymlink apicaps.CapID = "file.rm.nofollowsymlink"
CapFileCopyAlwaysReplaceExistingDestPaths apicaps.CapID = "file.copy.alwaysreplaceexistingdestpaths"
CapConstraints apicaps.CapID = "constraints"
CapPlatform apicaps.CapID = "platform"
@ -384,6 +385,12 @@ func init() {
Status: apicaps.CapStatusExperimental,
})
Caps.Init(apicaps.Cap{
ID: CapFileCopyAlwaysReplaceExistingDestPaths,
Enabled: true,
Status: apicaps.CapStatusExperimental,
})
Caps.Init(apicaps.Cap{
ID: CapConstraints,
Enabled: true,

View File

@ -2137,6 +2137,8 @@ type FileActionCopy struct {
IncludePatterns []string `protobuf:"bytes,12,rep,name=include_patterns,json=includePatterns,proto3" json:"include_patterns,omitempty"`
// exclude files/dir matching any of these patterns (even if they match an include pattern)
ExcludePatterns []string `protobuf:"bytes,13,rep,name=exclude_patterns,json=excludePatterns,proto3" json:"exclude_patterns,omitempty"`
// alwaysReplaceExistingDestPaths causes an existing dest path that differs in type from the src path to be replaced, rather than returning an error (the default).
AlwaysReplaceExistingDestPaths bool `protobuf:"varint,14,opt,name=alwaysReplaceExistingDestPaths,proto3" json:"alwaysReplaceExistingDestPaths,omitempty"`
}
func (m *FileActionCopy) Reset() { *m = FileActionCopy{} }
@ -2259,6 +2261,13 @@ func (m *FileActionCopy) GetExcludePatterns() []string {
return nil
}
func (m *FileActionCopy) GetAlwaysReplaceExistingDestPaths() bool {
if m != nil {
return m.AlwaysReplaceExistingDestPaths
}
return false
}
type FileActionMkFile struct {
// path for the new file
Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
@ -2892,172 +2901,173 @@ func init() {
func init() { proto.RegisterFile("ops.proto", fileDescriptor_8de16154b2733812) }
var fileDescriptor_8de16154b2733812 = []byte{
// 2627 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x59, 0xcf, 0x6f, 0x5b, 0xc7,
0xf1, 0x17, 0x7f, 0x93, 0x43, 0x8a, 0x66, 0xd6, 0x4e, 0xc2, 0xe8, 0xeb, 0xaf, 0xac, 0xbc, 0xe4,
// 2656 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x59, 0xcf, 0x6f, 0x1b, 0xc7,
0xf5, 0x17, 0x7f, 0x93, 0x8f, 0x14, 0xcd, 0x8c, 0x9d, 0x84, 0xd1, 0xd7, 0x5f, 0x59, 0xd9, 0xe4,
0x1b, 0xc8, 0xb2, 0x2d, 0x21, 0x0a, 0x10, 0xe7, 0x6b, 0x04, 0x45, 0x25, 0x91, 0x8a, 0x18, 0xdb,
0xa2, 0xb0, 0xb4, 0x9c, 0x1e, 0x0a, 0x18, 0x4f, 0x8f, 0x4b, 0xea, 0x41, 0x8f, 0xef, 0x3d, 0xec,
0x5b, 0x5a, 0x62, 0x0f, 0x3d, 0xf4, 0xd4, 0x63, 0x80, 0x02, 0xbd, 0x15, 0xfd, 0x27, 0x7a, 0x6c,
0xef, 0x01, 0x72, 0x09, 0xd0, 0x1e, 0x82, 0x1e, 0xd2, 0xc2, 0xb9, 0xf4, 0x8f, 0x68, 0x81, 0x62,
0x66, 0xf7, 0xfd, 0x20, 0x25, 0xc3, 0x71, 0x5b, 0xf4, 0xc4, 0xd9, 0x99, 0xcf, 0xce, 0xce, 0xcc,
0xce, 0xec, 0xce, 0x5b, 0x42, 0x2d, 0x08, 0xa3, 0xcd, 0x50, 0x06, 0x2a, 0x60, 0xf9, 0xf0, 0x64,
0xe5, 0xde, 0xd8, 0x55, 0xa7, 0xd3, 0x93, 0x4d, 0x27, 0x98, 0x6c, 0x8d, 0x83, 0x71, 0xb0, 0x45,
0xa2, 0x93, 0xe9, 0x88, 0x46, 0x34, 0x20, 0x4a, 0x4f, 0xb1, 0xfe, 0x96, 0x87, 0x7c, 0x3f, 0x64,
0xef, 0x42, 0xd9, 0xf5, 0xc3, 0xa9, 0x8a, 0xda, 0xb9, 0xb5, 0xc2, 0x7a, 0x7d, 0xbb, 0xb6, 0x19,
0x9e, 0x6c, 0xf6, 0x90, 0xc3, 0x8d, 0x80, 0xad, 0x41, 0x51, 0x5c, 0x08, 0xa7, 0x9d, 0x5f, 0xcb,
0xad, 0xd7, 0xb7, 0x01, 0x01, 0xdd, 0x0b, 0xe1, 0xf4, 0xc3, 0x83, 0x25, 0x4e, 0x12, 0xf6, 0x01,
0x94, 0xa3, 0x60, 0x2a, 0x1d, 0xd1, 0x2e, 0x10, 0xa6, 0x81, 0x98, 0x01, 0x71, 0x08, 0x65, 0xa4,
0xa8, 0x69, 0xe4, 0x7a, 0xa2, 0x5d, 0x4c, 0x35, 0xed, 0xbb, 0x9e, 0xc6, 0x90, 0x84, 0xbd, 0x07,
0xa5, 0x93, 0xa9, 0xeb, 0x0d, 0xdb, 0x25, 0x82, 0xd4, 0x11, 0xb2, 0x8b, 0x0c, 0xc2, 0x68, 0x19,
0x82, 0x26, 0x42, 0x8e, 0x45, 0xbb, 0x9c, 0x82, 0x1e, 0x23, 0x43, 0x83, 0x48, 0x86, 0x6b, 0x0d,
0xdd, 0xd1, 0xa8, 0x5d, 0x49, 0xd7, 0xea, 0xb8, 0xa3, 0x91, 0x5e, 0x0b, 0x25, 0x6c, 0x1d, 0xaa,
0xa1, 0x67, 0xab, 0x51, 0x20, 0x27, 0x6d, 0x48, 0xed, 0x3e, 0x32, 0x3c, 0x9e, 0x48, 0xd9, 0x7d,
0xa8, 0x3b, 0x81, 0x1f, 0x29, 0x69, 0xbb, 0xbe, 0x8a, 0xda, 0x75, 0x02, 0xbf, 0x89, 0xe0, 0x2f,
0x02, 0x79, 0x26, 0xe4, 0x5e, 0x2a, 0xe4, 0x59, 0xe4, 0x6e, 0x11, 0xf2, 0x41, 0x68, 0xfd, 0x3a,
0x07, 0xd5, 0x58, 0x2b, 0xb3, 0xa0, 0xb1, 0x23, 0x9d, 0x53, 0x57, 0x09, 0x47, 0x4d, 0xa5, 0x68,
0xe7, 0xd6, 0x72, 0xeb, 0x35, 0x3e, 0xc7, 0x63, 0x4d, 0xc8, 0xf7, 0x07, 0x14, 0xef, 0x1a, 0xcf,
0xf7, 0x07, 0xac, 0x0d, 0x95, 0xa7, 0xb6, 0x74, 0x6d, 0x5f, 0x51, 0x80, 0x6b, 0x3c, 0x1e, 0xb2,
0x9b, 0x50, 0xeb, 0x0f, 0x9e, 0x0a, 0x19, 0xb9, 0x81, 0x4f, 0x61, 0xad, 0xf1, 0x94, 0xc1, 0x56,
0x01, 0xfa, 0x83, 0x7d, 0x61, 0xa3, 0xd2, 0xa8, 0x5d, 0x5a, 0x2b, 0xac, 0xd7, 0x78, 0x86, 0x63,
0xfd, 0x1c, 0x4a, 0xb4, 0xd5, 0xec, 0x73, 0x28, 0x0f, 0xdd, 0xb1, 0x88, 0x94, 0x36, 0x67, 0x77,
0xfb, 0xab, 0xef, 0x6e, 0x2d, 0xfd, 0xf9, 0xbb, 0x5b, 0x1b, 0x99, 0x9c, 0x0a, 0x42, 0xe1, 0x3b,
0x81, 0xaf, 0x6c, 0xd7, 0x17, 0x32, 0xda, 0x1a, 0x07, 0xf7, 0xf4, 0x94, 0xcd, 0x0e, 0xfd, 0x70,
0xa3, 0x81, 0xdd, 0x86, 0x92, 0xeb, 0x0f, 0xc5, 0x05, 0xd9, 0x5f, 0xd8, 0xbd, 0x6e, 0x54, 0xd5,
0xfb, 0x53, 0x15, 0x4e, 0x55, 0x0f, 0x45, 0x5c, 0x23, 0xac, 0xaf, 0x73, 0x50, 0xd6, 0xa9, 0xc4,
0x6e, 0x42, 0x71, 0x22, 0x94, 0x4d, 0xeb, 0xd7, 0xb7, 0xab, 0x7a, 0x4b, 0x95, 0xcd, 0x89, 0x8b,
0x59, 0x3a, 0x09, 0xa6, 0x18, 0xfb, 0x7c, 0x9a, 0xa5, 0x8f, 0x91, 0xc3, 0x8d, 0x80, 0xfd, 0x1f,
0x54, 0x7c, 0xa1, 0xce, 0x03, 0x79, 0x46, 0x31, 0x6a, 0xea, 0xb4, 0x38, 0x14, 0xea, 0x71, 0x30,
0x14, 0x3c, 0x96, 0xb1, 0xbb, 0x50, 0x8d, 0x84, 0x33, 0x95, 0xae, 0x9a, 0x51, 0xbc, 0x9a, 0xdb,
0x2d, 0x4a, 0x56, 0xc3, 0x23, 0x70, 0x82, 0x60, 0x77, 0xa0, 0x16, 0x09, 0x47, 0x0a, 0x25, 0xfc,
0xe7, 0x14, 0xbf, 0xfa, 0xf6, 0xb2, 0x81, 0x4b, 0xa1, 0xba, 0xfe, 0x73, 0x9e, 0xca, 0xad, 0xaf,
0xf3, 0x50, 0x44, 0x9b, 0x19, 0x83, 0xa2, 0x2d, 0xc7, 0xba, 0xa2, 0x6a, 0x9c, 0x68, 0xd6, 0x82,
0x02, 0xea, 0xc8, 0x13, 0x0b, 0x49, 0xe4, 0x38, 0xe7, 0x43, 0xb3, 0xa1, 0x48, 0xe2, 0xbc, 0x69,
0x24, 0xa4, 0xd9, 0x47, 0xa2, 0xd9, 0x6d, 0xa8, 0x85, 0x32, 0xb8, 0x98, 0x3d, 0xd3, 0x16, 0xa4,
0x59, 0x8a, 0x4c, 0x34, 0xa0, 0x1a, 0x1a, 0x8a, 0x6d, 0x00, 0x88, 0x0b, 0x25, 0xed, 0x83, 0x20,
0x52, 0x51, 0xbb, 0x4c, 0xd6, 0x52, 0xde, 0x23, 0xa3, 0x77, 0xc4, 0x33, 0x52, 0xb6, 0x02, 0xd5,
0xd3, 0x20, 0x52, 0xbe, 0x3d, 0x11, 0x54, 0x21, 0x35, 0x9e, 0x8c, 0x99, 0x05, 0xe5, 0xa9, 0xe7,
0x4e, 0x5c, 0xd5, 0xae, 0xa5, 0x3a, 0x8e, 0x89, 0xc3, 0x8d, 0x04, 0xb3, 0xd8, 0x19, 0xcb, 0x60,
0x1a, 0x1e, 0xd9, 0x52, 0xf8, 0x8a, 0xea, 0xa7, 0xc6, 0xe7, 0x78, 0xec, 0x53, 0x78, 0x47, 0x8a,
0x49, 0xf0, 0x5c, 0xd0, 0x46, 0x0d, 0xd4, 0xf4, 0x24, 0xe2, 0x18, 0xd8, 0xc8, 0x7d, 0x2e, 0xa8,
0x86, 0xaa, 0xfc, 0xe5, 0x00, 0xeb, 0x2e, 0x94, 0xb5, 0xdd, 0x18, 0x16, 0xa4, 0x4c, 0xa5, 0x10,
0x8d, 0x15, 0xd2, 0x3b, 0x8a, 0x2b, 0xa4, 0x77, 0x64, 0x75, 0xa0, 0xac, 0x2d, 0x44, 0xf4, 0x21,
0x7a, 0x65, 0xd0, 0x48, 0x23, 0x6f, 0x10, 0x8c, 0x94, 0xce, 0x48, 0x4e, 0x34, 0x69, 0xb5, 0xa5,
0x8e, 0x7f, 0x81, 0x13, 0x6d, 0x3d, 0x84, 0x5a, 0xb2, 0xb3, 0xb4, 0x44, 0xc7, 0xa8, 0xc9, 0xf7,
0x3a, 0x38, 0x81, 0xc2, 0xa5, 0x17, 0x25, 0x1a, 0xc3, 0x18, 0x84, 0xca, 0x0d, 0x7c, 0xdb, 0x23,
0x45, 0x55, 0x9e, 0x8c, 0xad, 0x3f, 0x16, 0xa0, 0x44, 0x8e, 0xb1, 0x75, 0xac, 0x88, 0x70, 0xaa,
0x3d, 0x28, 0xec, 0x32, 0x53, 0x11, 0x40, 0xb5, 0x97, 0x14, 0x04, 0xd6, 0xe1, 0x0a, 0x66, 0xa7,
0x27, 0x1c, 0x15, 0x48, 0xb3, 0x4e, 0x32, 0xc6, 0xf5, 0x87, 0x58, 0xa1, 0x3a, 0x61, 0x88, 0x66,
0x77, 0xa0, 0x1c, 0x50, 0x59, 0x51, 0xce, 0xbc, 0xa4, 0xd8, 0x0c, 0x04, 0x95, 0x4b, 0x61, 0x0f,
0x03, 0xdf, 0x9b, 0x51, 0x26, 0x55, 0x79, 0x32, 0xc6, 0x44, 0xa7, 0x3a, 0x7a, 0x32, 0x0b, 0xf5,
0xb1, 0xda, 0xd4, 0x89, 0xfe, 0x38, 0x66, 0xf2, 0x54, 0x8e, 0x07, 0xe7, 0x93, 0x49, 0x38, 0x8a,
0xfa, 0xa1, 0x6a, 0x5f, 0x4f, 0x53, 0x32, 0xe6, 0xf1, 0x44, 0x8a, 0x48, 0xc7, 0x76, 0x4e, 0x05,
0x22, 0x6f, 0xa4, 0xc8, 0x3d, 0xc3, 0xe3, 0x89, 0x34, 0xad, 0x34, 0x84, 0xbe, 0x49, 0xd0, 0x4c,
0xa5, 0x21, 0x36, 0x95, 0x63, 0x86, 0x0e, 0x06, 0x07, 0x88, 0x7c, 0x2b, 0x3d, 0xdd, 0x35, 0x87,
0x1b, 0x89, 0xf6, 0x36, 0x9a, 0x7a, 0xaa, 0xd7, 0x69, 0xbf, 0xad, 0x43, 0x19, 0x8f, 0xd9, 0xff,
0x43, 0x03, 0x4f, 0x32, 0xe1, 0x2b, 0xb2, 0xa4, 0xdd, 0x26, 0x87, 0xdf, 0x4c, 0x1c, 0xde, 0xcb,
0x08, 0xf9, 0x1c, 0xd4, 0x5a, 0x4d, 0x7d, 0xc7, 0x1d, 0x89, 0xdc, 0x9f, 0xe9, 0x54, 0x2b, 0x70,
0xa2, 0xad, 0x1e, 0x54, 0x63, 0xef, 0x2e, 0x65, 0xd0, 0x3d, 0xa8, 0x44, 0xa7, 0xb6, 0x74, 0xfd,
0x31, 0x6d, 0x6e, 0x73, 0xfb, 0x7a, 0x12, 0x8c, 0x81, 0xe6, 0xa3, 0x03, 0x31, 0xc6, 0x0a, 0xe2,
0x6c, 0xbc, 0x4a, 0x57, 0x0b, 0x0a, 0x53, 0x77, 0x48, 0x7a, 0x96, 0x39, 0x92, 0xc8, 0x19, 0xbb,
0x3a, 0x9f, 0x97, 0x39, 0x92, 0x68, 0xdf, 0x24, 0x18, 0xea, 0xeb, 0x76, 0x99, 0x13, 0x3d, 0x97,
0xb1, 0xa5, 0x85, 0x8c, 0xf5, 0xe2, 0xb0, 0xfe, 0x57, 0x56, 0xfb, 0x55, 0x0e, 0xaa, 0x71, 0x8f,
0x80, 0x37, 0x95, 0x3b, 0x14, 0xbe, 0x72, 0x47, 0xae, 0x90, 0x66, 0xe1, 0x0c, 0x87, 0xdd, 0x83,
0x92, 0xad, 0x94, 0x8c, 0xcf, 0xff, 0xb7, 0xb3, 0x0d, 0xc6, 0xe6, 0x0e, 0x4a, 0xba, 0xbe, 0x92,
0x33, 0xae, 0x51, 0x2b, 0x9f, 0x00, 0xa4, 0x4c, 0xb4, 0xf5, 0x4c, 0xcc, 0x8c, 0x56, 0x24, 0xd9,
0x0d, 0x28, 0x3d, 0xb7, 0xbd, 0x69, 0x5c, 0xcc, 0x7a, 0xf0, 0x20, 0xff, 0x49, 0xce, 0xfa, 0x43,
0x1e, 0x2a, 0xa6, 0xe1, 0x60, 0x77, 0xa1, 0x42, 0x0d, 0x87, 0xb1, 0xe8, 0xea, 0xca, 0x8d, 0x21,
0x6c, 0x2b, 0xe9, 0xa4, 0x32, 0x36, 0x1a, 0x55, 0xba, 0xa3, 0x32, 0x36, 0xa6, 0x7d, 0x55, 0x61,
0x28, 0x46, 0xa6, 0x65, 0x6a, 0x52, 0x83, 0x22, 0x46, 0xae, 0xef, 0x62, 0x7c, 0x38, 0x8a, 0xd8,
0xdd, 0xd8, 0xeb, 0x22, 0x69, 0x7c, 0x2b, 0xab, 0xf1, 0xb2, 0xd3, 0x3d, 0xa8, 0x67, 0x96, 0xb9,
0xc2, 0xeb, 0xf7, 0xb3, 0x5e, 0x9b, 0x25, 0x49, 0x9d, 0xee, 0xf7, 0xd2, 0x28, 0xfc, 0x1b, 0xf1,
0xfb, 0x18, 0x20, 0x55, 0xf9, 0xc3, 0x4f, 0x3e, 0xeb, 0xf7, 0x05, 0x80, 0x7e, 0x88, 0xd7, 0xe7,
0xd0, 0xa6, 0x0b, 0xbf, 0xe1, 0x8e, 0xfd, 0x40, 0x8a, 0x67, 0x74, 0x42, 0xd0, 0xfc, 0x2a, 0xaf,
0x6b, 0x1e, 0x55, 0x0c, 0xdb, 0x81, 0xfa, 0x50, 0x44, 0x8e, 0x74, 0x29, 0xa1, 0x4c, 0xd0, 0x6f,
0xa1, 0x4f, 0xa9, 0x9e, 0xcd, 0x4e, 0x8a, 0xd0, 0xb1, 0xca, 0xce, 0x61, 0xdb, 0xd0, 0x10, 0x17,
0x61, 0x20, 0x95, 0x59, 0x45, 0xf7, 0xa5, 0xd7, 0x74, 0x87, 0x8b, 0x7c, 0x7d, 0x02, 0xd4, 0x45,
0x3a, 0x60, 0x36, 0x14, 0x1d, 0x3b, 0x8c, 0x4c, 0x37, 0xd0, 0x5e, 0x58, 0x6f, 0xcf, 0x0e, 0x75,
0xd0, 0x76, 0x3f, 0x42, 0x5f, 0x7f, 0xf1, 0x97, 0x5b, 0x77, 0x32, 0x2d, 0xd4, 0x24, 0x38, 0x99,
0x6d, 0x51, 0xbe, 0x9c, 0xb9, 0x6a, 0x6b, 0xaa, 0x5c, 0x6f, 0xcb, 0x0e, 0x5d, 0x54, 0x87, 0x13,
0x7b, 0x1d, 0x4e, 0xaa, 0xd9, 0x27, 0xd0, 0x0c, 0x65, 0x30, 0x96, 0x22, 0x8a, 0x9e, 0xd1, 0x85,
0x6a, 0x1a, 0xdd, 0x37, 0xcc, 0xc5, 0x4f, 0x92, 0xcf, 0x50, 0xc0, 0x97, 0xc3, 0xec, 0x70, 0xe5,
0x47, 0xd0, 0x5a, 0xf4, 0xf8, 0x75, 0x76, 0x6f, 0xe5, 0x3e, 0xd4, 0x12, 0x0f, 0x5e, 0x35, 0xb1,
0x9a, 0xdd, 0xf6, 0xdf, 0xe5, 0xa0, 0xac, 0xeb, 0x91, 0xdd, 0x87, 0x9a, 0x17, 0x38, 0x36, 0x1a,
0x10, 0x7f, 0x54, 0xbc, 0x93, 0x96, 0xeb, 0xe6, 0xa3, 0x58, 0xa6, 0xf7, 0x23, 0xc5, 0x62, 0x7a,
0xba, 0xfe, 0x28, 0x88, 0xeb, 0xa7, 0x99, 0x4e, 0xea, 0xf9, 0xa3, 0x80, 0x6b, 0xe1, 0xca, 0x43,
0x68, 0xce, 0xab, 0xb8, 0xc2, 0xce, 0xf7, 0xe6, 0x13, 0x9d, 0x2e, 0x92, 0x64, 0x52, 0xd6, 0xec,
0xfb, 0x50, 0x4b, 0xf8, 0x6c, 0xe3, 0xb2, 0xe1, 0x8d, 0xec, 0xcc, 0x8c, 0xad, 0xd6, 0x2f, 0x73,
0x00, 0xa9, 0x6d, 0x78, 0xce, 0xe1, 0xe7, 0x8b, 0x9f, 0x36, 0x1e, 0xc9, 0x98, 0xee, 0x6d, 0x5b,
0xd9, 0x64, 0x4b, 0x83, 0x13, 0xcd, 0x36, 0x01, 0x86, 0x49, 0xad, 0xbf, 0xe4, 0x04, 0xc8, 0x20,
0x50, 0xbf, 0x67, 0xfb, 0xe3, 0xa9, 0x3d, 0x16, 0xa6, 0x3b, 0x4c, 0xc6, 0x56, 0x1f, 0xaa, 0xb1,
0x85, 0x6c, 0x0d, 0xea, 0x91, 0xb1, 0x0a, 0x3b, 0x70, 0x34, 0xa5, 0xc4, 0xb3, 0x2c, 0xec, 0xa4,
0xa5, 0xed, 0x8f, 0xc5, 0x5c, 0x27, 0xcd, 0x91, 0xc3, 0x8d, 0xc0, 0xfa, 0x02, 0x4a, 0xc4, 0xc0,
0xea, 0x8d, 0x94, 0x2d, 0x95, 0x69, 0xca, 0x75, 0xdf, 0x19, 0x44, 0x64, 0xd2, 0x6e, 0x11, 0xf3,
0x9b, 0x6b, 0x00, 0x7b, 0x1f, 0xbb, 0xdb, 0xa1, 0x09, 0xf7, 0x55, 0x38, 0x14, 0x5b, 0x9f, 0x42,
0x35, 0x66, 0x63, 0x54, 0x3c, 0xd7, 0x17, 0xc6, 0x44, 0xa2, 0xf1, 0x63, 0xc6, 0x39, 0xb5, 0xa5,
0xed, 0x28, 0xa1, 0xdb, 0x9f, 0x12, 0x4f, 0x19, 0xd6, 0x7b, 0x50, 0xcf, 0x14, 0x25, 0xe6, 0xe2,
0x53, 0xda, 0x63, 0x7d, 0x34, 0xe8, 0x81, 0xf5, 0x19, 0x2c, 0xcf, 0x15, 0x08, 0xde, 0x64, 0xee,
0x30, 0xbe, 0xc9, 0xf4, 0x2d, 0x75, 0xa9, 0x8b, 0x63, 0x50, 0x3c, 0x17, 0xf6, 0x99, 0xe9, 0xe0,
0x88, 0xb6, 0x7e, 0x8b, 0xdf, 0x6c, 0x71, 0x67, 0xfd, 0xbf, 0x00, 0xa7, 0x4a, 0x85, 0xcf, 0xa8,
0xd5, 0x36, 0xca, 0x6a, 0xc8, 0x21, 0x04, 0xbb, 0x05, 0x75, 0x1c, 0x44, 0x46, 0xae, 0x55, 0xd3,
0x8c, 0x48, 0x03, 0xfe, 0x07, 0x6a, 0xa3, 0x64, 0x7a, 0xc1, 0xe4, 0x47, 0x3c, 0xfb, 0x1d, 0xa8,
0xfa, 0x81, 0x91, 0xe9, 0xbd, 0xad, 0xf8, 0x41, 0x32, 0xcf, 0xf6, 0x3c, 0x23, 0x2b, 0xe9, 0x79,
0xb6, 0xe7, 0x91, 0xd0, 0xba, 0x03, 0x6f, 0x5c, 0xfa, 0xfa, 0x64, 0x6f, 0x41, 0x79, 0xe4, 0x7a,
0x8a, 0x6e, 0x2c, 0xfc, 0xd2, 0x30, 0x23, 0xeb, 0x1f, 0x39, 0x80, 0x34, 0xb7, 0xb0, 0x64, 0xf0,
0xea, 0x41, 0x4c, 0x43, 0x5f, 0x35, 0x1e, 0x54, 0x27, 0xe6, 0x10, 0x33, 0x99, 0x71, 0x73, 0x3e,
0x1f, 0x37, 0xe3, 0x33, 0x4e, 0x1f, 0x6f, 0xdb, 0xe6, 0x78, 0x7b, 0x9d, 0x2f, 0xc4, 0x64, 0x05,
0x6a, 0xe0, 0xb2, 0x0f, 0x06, 0x90, 0xd6, 0x3a, 0x37, 0x92, 0x95, 0x87, 0xb0, 0x3c, 0xb7, 0xe4,
0x0f, 0xbc, 0xd0, 0xd2, 0xc3, 0x38, 0x5b, 0xe8, 0xdb, 0x50, 0xd6, 0x2f, 0x0d, 0x6c, 0x1d, 0x2a,
0xb6, 0xa3, 0x6b, 0x3c, 0x73, 0xce, 0xa0, 0x70, 0x87, 0xd8, 0x3c, 0x16, 0x5b, 0x7f, 0xca, 0x03,
0xa4, 0xfc, 0xd7, 0xe8, 0xe2, 0x1f, 0x40, 0x33, 0x12, 0x4e, 0xe0, 0x0f, 0x6d, 0x39, 0x23, 0xa9,
0xf9, 0x14, 0xbe, 0x6a, 0xca, 0x02, 0x32, 0xd3, 0xd1, 0x17, 0x5e, 0xdd, 0xd1, 0xaf, 0x43, 0xd1,
0x09, 0xc2, 0x99, 0xb9, 0xb7, 0xd8, 0xbc, 0x23, 0x7b, 0x41, 0x38, 0x3b, 0x58, 0xe2, 0x84, 0x60,
0x9b, 0x50, 0x9e, 0x9c, 0xd1, 0xdb, 0x8b, 0xfe, 0x86, 0xbc, 0x31, 0x8f, 0x7d, 0x7c, 0x86, 0xf4,
0xc1, 0x12, 0x37, 0x28, 0x76, 0x07, 0x4a, 0x93, 0xb3, 0xa1, 0x2b, 0xcd, 0xcd, 0x73, 0x7d, 0x11,
0xde, 0x71, 0x25, 0x3d, 0xb5, 0x20, 0x86, 0x59, 0x90, 0x97, 0x13, 0xf3, 0xd0, 0xd2, 0x5a, 0x88,
0xe6, 0xe4, 0x60, 0x89, 0xe7, 0xe5, 0x64, 0xb7, 0x0a, 0x65, 0x1d, 0x57, 0xeb, 0xef, 0x05, 0x68,
0xce, 0x5b, 0x89, 0x3b, 0x1b, 0x49, 0x27, 0xde, 0xd9, 0x48, 0x3a, 0xc9, 0xc7, 0x4e, 0x3e, 0xf3,
0xb1, 0x63, 0x41, 0x29, 0x38, 0xf7, 0x85, 0xcc, 0x3e, 0x32, 0xed, 0x9d, 0x06, 0xe7, 0x3e, 0x76,
0xcd, 0x5a, 0x34, 0xd7, 0x84, 0x96, 0x4c, 0x13, 0xfa, 0x3e, 0x2c, 0x8f, 0x02, 0xcf, 0x0b, 0xce,
0x07, 0xb3, 0x89, 0xe7, 0xfa, 0x67, 0xa6, 0x13, 0x9d, 0x67, 0xb2, 0x75, 0xb8, 0x36, 0x74, 0x25,
0x9a, 0x63, 0xba, 0xff, 0x88, 0x7c, 0xaf, 0xf2, 0x45, 0x36, 0xfb, 0x1c, 0xd6, 0x6c, 0xa5, 0xc4,
0x24, 0x54, 0xc7, 0x7e, 0x68, 0x3b, 0x67, 0x9d, 0xc0, 0xa1, 0x2a, 0x9c, 0x84, 0xb6, 0x72, 0x4f,
0x5c, 0xcf, 0x55, 0x33, 0x0a, 0x46, 0x95, 0xbf, 0x12, 0xc7, 0x3e, 0x80, 0xa6, 0x23, 0x85, 0xad,
0x44, 0x47, 0x44, 0xea, 0xc8, 0x56, 0xa7, 0xed, 0x2a, 0xcd, 0x5c, 0xe0, 0xa2, 0x0f, 0x36, 0x5a,
0xfb, 0x85, 0xeb, 0x0d, 0x1d, 0xfc, 0x6c, 0xad, 0x69, 0x1f, 0xe6, 0x98, 0x6c, 0x13, 0x18, 0x31,
0xba, 0x93, 0x50, 0xcd, 0x12, 0x28, 0x10, 0xf4, 0x0a, 0x09, 0x1e, 0xb8, 0xca, 0x9d, 0x88, 0x48,
0xd9, 0x93, 0x90, 0xbe, 0xc8, 0x0b, 0x3c, 0x65, 0xb0, 0xdb, 0xd0, 0x72, 0x7d, 0xc7, 0x9b, 0x0e,
0xc5, 0xb3, 0x10, 0x1d, 0x91, 0x7e, 0xd4, 0x6e, 0xd0, 0xa9, 0x72, 0xcd, 0xf0, 0x8f, 0x0c, 0x1b,
0xa1, 0xe2, 0x62, 0x01, 0xba, 0xac, 0xa1, 0x86, 0x1f, 0x43, 0xad, 0x2f, 0x73, 0xd0, 0x5a, 0x4c,
0x3c, 0xdc, 0xb6, 0x10, 0x9d, 0x37, 0x1f, 0xed, 0x48, 0x27, 0x5b, 0x99, 0xcf, 0x6c, 0x65, 0x7c,
0x97, 0x16, 0x32, 0x77, 0x69, 0x92, 0x16, 0xc5, 0x97, 0xa7, 0xc5, 0x9c, 0xa3, 0xa5, 0x05, 0x47,
0xad, 0xdf, 0xe4, 0xe0, 0xda, 0x42, 0x72, 0xff, 0x60, 0x8b, 0xd6, 0xa0, 0x3e, 0xb1, 0xcf, 0x84,
0x7e, 0xf2, 0x88, 0xcc, 0x15, 0x92, 0x65, 0xfd, 0x07, 0xec, 0xf3, 0xa1, 0x91, 0xad, 0xa8, 0x2b,
0x6d, 0x8b, 0x13, 0xe4, 0x30, 0x50, 0xfb, 0xc1, 0xd4, 0xdc, 0xc5, 0x71, 0x82, 0xc4, 0xcc, 0xcb,
0x69, 0x54, 0xb8, 0x22, 0x8d, 0xac, 0x43, 0xa8, 0xc6, 0x06, 0xb2, 0x5b, 0xe6, 0x4d, 0x2a, 0x97,
0x3e, 0xb5, 0x1e, 0x47, 0x42, 0xa2, 0xed, 0xfa, 0x81, 0xea, 0x5d, 0x28, 0xe9, 0x1e, 0x35, 0x7f,
0x19, 0xa1, 0x25, 0xd6, 0x00, 0x2a, 0x86, 0xc3, 0x36, 0xa0, 0x7c, 0x32, 0x4b, 0xde, 0x67, 0xcc,
0x71, 0x81, 0xe3, 0xa1, 0x41, 0xe0, 0x19, 0xa4, 0x11, 0xec, 0x06, 0x14, 0x4f, 0x66, 0xbd, 0x8e,
0xfe, 0xea, 0xc4, 0x93, 0x0c, 0x47, 0xbb, 0x65, 0x6d, 0x90, 0xf5, 0x08, 0x1a, 0xd9, 0x79, 0xc9,
0xc5, 0x9e, 0xcb, 0x5c, 0xec, 0xc9, 0x91, 0x9d, 0x7f, 0xd5, 0xe7, 0xc7, 0xc7, 0x00, 0xf4, 0x82,
0xfc, 0xba, 0x9f, 0x2d, 0x1f, 0x42, 0xc5, 0xbc, 0x3c, 0xb3, 0x0f, 0x16, 0x5e, 0xd2, 0x9b, 0xc9,
0xb3, 0xf4, 0xdc, 0x73, 0xba, 0xf5, 0x00, 0x1b, 0xd8, 0x73, 0x21, 0x3b, 0xee, 0x68, 0xf4, 0xba,
0xcb, 0x3d, 0x80, 0xe6, 0x71, 0x18, 0xfe, 0x6b, 0x73, 0x7f, 0x0a, 0x65, 0xfd, 0x00, 0x8e, 0x73,
0x3c, 0xb4, 0xc0, 0xec, 0x01, 0xd3, 0x4d, 0x6e, 0xd6, 0x24, 0xae, 0x01, 0x88, 0x9c, 0xe2, 0x7a,
0x66, 0x73, 0x09, 0x39, 0x6f, 0x00, 0xd7, 0x80, 0x8d, 0x75, 0xa8, 0x98, 0xb7, 0x56, 0x56, 0x83,
0xd2, 0xf1, 0xe1, 0xa0, 0xfb, 0xa4, 0xb5, 0xc4, 0xaa, 0x50, 0x3c, 0xe8, 0x0f, 0x9e, 0xb4, 0x72,
0x48, 0x1d, 0xf6, 0x0f, 0xbb, 0xad, 0xfc, 0xc6, 0x6d, 0x68, 0x64, 0x5f, 0x5b, 0x59, 0x1d, 0x2a,
0x83, 0x9d, 0xc3, 0xce, 0x6e, 0xff, 0x27, 0xad, 0x25, 0xd6, 0x80, 0x6a, 0xef, 0x70, 0xd0, 0xdd,
0x3b, 0xe6, 0xdd, 0x56, 0x6e, 0xe3, 0xc7, 0x50, 0x4b, 0x1e, 0xa0, 0x50, 0xc3, 0x6e, 0xef, 0xb0,
0xd3, 0x5a, 0x62, 0x00, 0xe5, 0x41, 0x77, 0x8f, 0x77, 0x51, 0x6f, 0x05, 0x0a, 0x83, 0xc1, 0x41,
0x2b, 0x8f, 0xab, 0xee, 0xed, 0xec, 0x1d, 0x74, 0x5b, 0x05, 0x24, 0x9f, 0x3c, 0x3e, 0xda, 0x1f,
0xb4, 0x8a, 0x1b, 0x1f, 0xc2, 0x1b, 0x97, 0x5e, 0x74, 0x70, 0xc5, 0x4e, 0x77, 0x7f, 0xe7, 0xf8,
0x11, 0x9a, 0x58, 0x86, 0x7c, 0xff, 0x50, 0x2b, 0xea, 0xef, 0xef, 0xb7, 0xf2, 0x1b, 0x1f, 0xc3,
0xb5, 0x85, 0x27, 0x19, 0x5a, 0xf0, 0x60, 0x87, 0x77, 0x71, 0xf1, 0x3a, 0x54, 0x8e, 0x78, 0xef,
0xe9, 0xce, 0x93, 0x6e, 0x2b, 0x87, 0x82, 0x47, 0xfd, 0xbd, 0x87, 0xdd, 0x4e, 0x2b, 0xbf, 0x7b,
0xf3, 0xab, 0x17, 0xab, 0xb9, 0x6f, 0x5e, 0xac, 0xe6, 0xbe, 0x7d, 0xb1, 0x9a, 0xfb, 0xeb, 0x8b,
0xd5, 0xdc, 0x97, 0xdf, 0xaf, 0x2e, 0x7d, 0xf3, 0xfd, 0xea, 0xd2, 0xb7, 0xdf, 0xaf, 0x2e, 0x9d,
0x94, 0xe9, 0x5f, 0x97, 0x8f, 0xfe, 0x19, 0x00, 0x00, 0xff, 0xff, 0xe8, 0xd7, 0x00, 0x96, 0xb5,
0x19, 0x00, 0x00,
0xa2, 0x30, 0xb4, 0x9c, 0x1e, 0x0a, 0x18, 0xab, 0xe5, 0x90, 0x5a, 0x68, 0xb9, 0xbb, 0x98, 0x1d,
0x5a, 0x62, 0x0f, 0x3d, 0xf4, 0xd4, 0x63, 0x80, 0x02, 0xbd, 0x15, 0xfd, 0x27, 0x7a, 0x6c, 0x6f,
0x3d, 0x04, 0xc8, 0x25, 0x40, 0x7b, 0x08, 0x7a, 0x48, 0x0b, 0xe7, 0xd2, 0x7f, 0xa2, 0x40, 0xf1,
0xde, 0xcc, 0xfe, 0x20, 0x25, 0xc3, 0x71, 0x5b, 0xf4, 0xc4, 0x99, 0xcf, 0xfb, 0xcc, 0x9b, 0x37,
0x6f, 0xde, 0x9b, 0x79, 0x3b, 0x84, 0x5a, 0x10, 0x46, 0x9b, 0xa1, 0x0c, 0x54, 0xc0, 0xf2, 0xe1,
0xc9, 0xca, 0xbd, 0xb1, 0xab, 0x4e, 0xa7, 0x27, 0x9b, 0x4e, 0x30, 0xd9, 0x1a, 0x07, 0xe3, 0x60,
0x8b, 0x44, 0x27, 0xd3, 0x11, 0xf5, 0xa8, 0x43, 0x2d, 0x3d, 0xc4, 0xfa, 0x7b, 0x1e, 0xf2, 0xfd,
0x90, 0xbd, 0x0b, 0x65, 0xd7, 0x0f, 0xa7, 0x2a, 0x6a, 0xe7, 0xd6, 0x0a, 0xeb, 0xf5, 0xed, 0xda,
0x66, 0x78, 0xb2, 0xd9, 0x43, 0x84, 0x1b, 0x01, 0x5b, 0x83, 0xa2, 0xb8, 0x10, 0x4e, 0x3b, 0xbf,
0x96, 0x5b, 0xaf, 0x6f, 0x03, 0x12, 0xba, 0x17, 0xc2, 0xe9, 0x87, 0x07, 0x4b, 0x9c, 0x24, 0xec,
0x03, 0x28, 0x47, 0xc1, 0x54, 0x3a, 0xa2, 0x5d, 0x20, 0x4e, 0x03, 0x39, 0x03, 0x42, 0x88, 0x65,
0xa4, 0xa8, 0x69, 0xe4, 0x7a, 0xa2, 0x5d, 0x4c, 0x35, 0xed, 0xbb, 0x9e, 0xe6, 0x90, 0x84, 0xbd,
0x07, 0xa5, 0x93, 0xa9, 0xeb, 0x0d, 0xdb, 0x25, 0xa2, 0xd4, 0x91, 0xb2, 0x8b, 0x00, 0x71, 0xb4,
0x0c, 0x49, 0x13, 0x21, 0xc7, 0xa2, 0x5d, 0x4e, 0x49, 0x8f, 0x11, 0xd0, 0x24, 0x92, 0xe1, 0x5c,
0x43, 0x77, 0x34, 0x6a, 0x57, 0xd2, 0xb9, 0x3a, 0xee, 0x68, 0xa4, 0xe7, 0x42, 0x09, 0x5b, 0x87,
0x6a, 0xe8, 0xd9, 0x6a, 0x14, 0xc8, 0x49, 0x1b, 0x52, 0xbb, 0x8f, 0x0c, 0xc6, 0x13, 0x29, 0xbb,
0x0f, 0x75, 0x27, 0xf0, 0x23, 0x25, 0x6d, 0xd7, 0x57, 0x51, 0xbb, 0x4e, 0xe4, 0x37, 0x91, 0xfc,
0x45, 0x20, 0xcf, 0x84, 0xdc, 0x4b, 0x85, 0x3c, 0xcb, 0xdc, 0x2d, 0x42, 0x3e, 0x08, 0xad, 0x5f,
0xe7, 0xa0, 0x1a, 0x6b, 0x65, 0x16, 0x34, 0x76, 0xa4, 0x73, 0xea, 0x2a, 0xe1, 0xa8, 0xa9, 0x14,
0xed, 0xdc, 0x5a, 0x6e, 0xbd, 0xc6, 0xe7, 0x30, 0xd6, 0x84, 0x7c, 0x7f, 0x40, 0xfe, 0xae, 0xf1,
0x7c, 0x7f, 0xc0, 0xda, 0x50, 0x79, 0x6a, 0x4b, 0xd7, 0xf6, 0x15, 0x39, 0xb8, 0xc6, 0xe3, 0x2e,
0xbb, 0x09, 0xb5, 0xfe, 0xe0, 0xa9, 0x90, 0x91, 0x1b, 0xf8, 0xe4, 0xd6, 0x1a, 0x4f, 0x01, 0xb6,
0x0a, 0xd0, 0x1f, 0xec, 0x0b, 0x1b, 0x95, 0x46, 0xed, 0xd2, 0x5a, 0x61, 0xbd, 0xc6, 0x33, 0x88,
0xf5, 0x73, 0x28, 0xd1, 0x56, 0xb3, 0xcf, 0xa1, 0x3c, 0x74, 0xc7, 0x22, 0x52, 0xda, 0x9c, 0xdd,
0xed, 0xaf, 0xbe, 0xbb, 0xb5, 0xf4, 0x97, 0xef, 0x6e, 0x6d, 0x64, 0x62, 0x2a, 0x08, 0x85, 0xef,
0x04, 0xbe, 0xb2, 0x5d, 0x5f, 0xc8, 0x68, 0x6b, 0x1c, 0xdc, 0xd3, 0x43, 0x36, 0x3b, 0xf4, 0xc3,
0x8d, 0x06, 0x76, 0x1b, 0x4a, 0xae, 0x3f, 0x14, 0x17, 0x64, 0x7f, 0x61, 0xf7, 0xba, 0x51, 0x55,
0xef, 0x4f, 0x55, 0x38, 0x55, 0x3d, 0x14, 0x71, 0xcd, 0xb0, 0xbe, 0xce, 0x41, 0x59, 0x87, 0x12,
0xbb, 0x09, 0xc5, 0x89, 0x50, 0x36, 0xcd, 0x5f, 0xdf, 0xae, 0xea, 0x2d, 0x55, 0x36, 0x27, 0x14,
0xa3, 0x74, 0x12, 0x4c, 0xd1, 0xf7, 0xf9, 0x34, 0x4a, 0x1f, 0x23, 0xc2, 0x8d, 0x80, 0xfd, 0x1f,
0x54, 0x7c, 0xa1, 0xce, 0x03, 0x79, 0x46, 0x3e, 0x6a, 0xea, 0xb0, 0x38, 0x14, 0xea, 0x71, 0x30,
0x14, 0x3c, 0x96, 0xb1, 0xbb, 0x50, 0x8d, 0x84, 0x33, 0x95, 0xae, 0x9a, 0x91, 0xbf, 0x9a, 0xdb,
0x2d, 0x0a, 0x56, 0x83, 0x11, 0x39, 0x61, 0xb0, 0x3b, 0x50, 0x8b, 0x84, 0x23, 0x85, 0x12, 0xfe,
0x73, 0xf2, 0x5f, 0x7d, 0x7b, 0xd9, 0xd0, 0xa5, 0x50, 0x5d, 0xff, 0x39, 0x4f, 0xe5, 0xd6, 0xd7,
0x79, 0x28, 0xa2, 0xcd, 0x8c, 0x41, 0xd1, 0x96, 0x63, 0x9d, 0x51, 0x35, 0x4e, 0x6d, 0xd6, 0x82,
0x02, 0xea, 0xc8, 0x13, 0x84, 0x4d, 0x44, 0x9c, 0xf3, 0xa1, 0xd9, 0x50, 0x6c, 0xe2, 0xb8, 0x69,
0x24, 0xa4, 0xd9, 0x47, 0x6a, 0xb3, 0xdb, 0x50, 0x0b, 0x65, 0x70, 0x31, 0x7b, 0xa6, 0x2d, 0x48,
0xa3, 0x14, 0x41, 0x34, 0xa0, 0x1a, 0x9a, 0x16, 0xdb, 0x00, 0x10, 0x17, 0x4a, 0xda, 0x07, 0x41,
0xa4, 0xa2, 0x76, 0x99, 0xac, 0xa5, 0xb8, 0x47, 0xa0, 0x77, 0xc4, 0x33, 0x52, 0xb6, 0x02, 0xd5,
0xd3, 0x20, 0x52, 0xbe, 0x3d, 0x11, 0x94, 0x21, 0x35, 0x9e, 0xf4, 0x99, 0x05, 0xe5, 0xa9, 0xe7,
0x4e, 0x5c, 0xd5, 0xae, 0xa5, 0x3a, 0x8e, 0x09, 0xe1, 0x46, 0x82, 0x51, 0xec, 0x8c, 0x65, 0x30,
0x0d, 0x8f, 0x6c, 0x29, 0x7c, 0x45, 0xf9, 0x53, 0xe3, 0x73, 0x18, 0xfb, 0x14, 0xde, 0x91, 0x62,
0x12, 0x3c, 0x17, 0xb4, 0x51, 0x03, 0x35, 0x3d, 0x89, 0x38, 0x3a, 0x36, 0x72, 0x9f, 0x0b, 0xca,
0xa1, 0x2a, 0x7f, 0x39, 0xc1, 0xba, 0x0b, 0x65, 0x6d, 0x37, 0xba, 0x05, 0x5b, 0x26, 0x53, 0xa8,
0x8d, 0x19, 0xd2, 0x3b, 0x8a, 0x33, 0xa4, 0x77, 0x64, 0x75, 0xa0, 0xac, 0x2d, 0x44, 0xf6, 0x21,
0xae, 0xca, 0xb0, 0xb1, 0x8d, 0xd8, 0x20, 0x18, 0x29, 0x1d, 0x91, 0x9c, 0xda, 0xa4, 0xd5, 0x96,
0xda, 0xff, 0x05, 0x4e, 0x6d, 0xeb, 0x21, 0xd4, 0x92, 0x9d, 0xa5, 0x29, 0x3a, 0x46, 0x4d, 0xbe,
0xd7, 0xc1, 0x01, 0xe4, 0x2e, 0x3d, 0x29, 0xb5, 0xd1, 0x8d, 0x41, 0xa8, 0xdc, 0xc0, 0xb7, 0x3d,
0x52, 0x54, 0xe5, 0x49, 0xdf, 0xfa, 0x53, 0x01, 0x4a, 0xb4, 0x30, 0xb6, 0x8e, 0x19, 0x11, 0x4e,
0xf5, 0x0a, 0x0a, 0xbb, 0xcc, 0x64, 0x04, 0x50, 0xee, 0x25, 0x09, 0x81, 0x79, 0xb8, 0x82, 0xd1,
0xe9, 0x09, 0x47, 0x05, 0xd2, 0xcc, 0x93, 0xf4, 0x71, 0xfe, 0x21, 0x66, 0xa8, 0x0e, 0x18, 0x6a,
0xb3, 0x3b, 0x50, 0x0e, 0x28, 0xad, 0x28, 0x66, 0x5e, 0x92, 0x6c, 0x86, 0x82, 0xca, 0xa5, 0xb0,
0x87, 0x81, 0xef, 0xcd, 0x28, 0x92, 0xaa, 0x3c, 0xe9, 0x63, 0xa0, 0x53, 0x1e, 0x3d, 0x99, 0x85,
0xfa, 0x58, 0x6d, 0xea, 0x40, 0x7f, 0x1c, 0x83, 0x3c, 0x95, 0xe3, 0xc1, 0xf9, 0x64, 0x12, 0x8e,
0xa2, 0x7e, 0xa8, 0xda, 0xd7, 0xd3, 0x90, 0x8c, 0x31, 0x9e, 0x48, 0x91, 0xe9, 0xd8, 0xce, 0xa9,
0x40, 0xe6, 0x8d, 0x94, 0xb9, 0x67, 0x30, 0x9e, 0x48, 0xd3, 0x4c, 0x43, 0xea, 0x9b, 0x44, 0xcd,
0x64, 0x1a, 0x72, 0x53, 0x39, 0x46, 0xe8, 0x60, 0x70, 0x80, 0xcc, 0xb7, 0xd2, 0xd3, 0x5d, 0x23,
0xdc, 0x48, 0xf4, 0x6a, 0xa3, 0xa9, 0xa7, 0x7a, 0x9d, 0xf6, 0xdb, 0xda, 0x95, 0x71, 0x9f, 0xfd,
0x3f, 0x34, 0xf0, 0x24, 0x13, 0xbe, 0x22, 0x4b, 0xda, 0x6d, 0x5a, 0xf0, 0x9b, 0xc9, 0x82, 0xf7,
0x32, 0x42, 0x3e, 0x47, 0xb5, 0x56, 0xd3, 0xb5, 0xe3, 0x8e, 0x44, 0xee, 0xcf, 0x74, 0xa8, 0x15,
0x38, 0xb5, 0xad, 0x1e, 0x54, 0xe3, 0xd5, 0x5d, 0x8a, 0xa0, 0x7b, 0x50, 0x89, 0x4e, 0x6d, 0xe9,
0xfa, 0x63, 0xda, 0xdc, 0xe6, 0xf6, 0xf5, 0xc4, 0x19, 0x03, 0x8d, 0xe3, 0x02, 0x62, 0x8e, 0x15,
0xc4, 0xd1, 0x78, 0x95, 0xae, 0x16, 0x14, 0xa6, 0xee, 0x90, 0xf4, 0x2c, 0x73, 0x6c, 0x22, 0x32,
0x76, 0x75, 0x3c, 0x2f, 0x73, 0x6c, 0xa2, 0x7d, 0x93, 0x60, 0xa8, 0xaf, 0xdb, 0x65, 0x4e, 0xed,
0xb9, 0x88, 0x2d, 0x2d, 0x44, 0xac, 0x17, 0xbb, 0xf5, 0xbf, 0x32, 0xdb, 0xaf, 0x72, 0x50, 0x8d,
0x6b, 0x04, 0xbc, 0xa9, 0xdc, 0xa1, 0xf0, 0x95, 0x3b, 0x72, 0x85, 0x34, 0x13, 0x67, 0x10, 0x76,
0x0f, 0x4a, 0xb6, 0x52, 0x32, 0x3e, 0xff, 0xdf, 0xce, 0x16, 0x18, 0x9b, 0x3b, 0x28, 0xe9, 0xfa,
0x4a, 0xce, 0xb8, 0x66, 0xad, 0x7c, 0x02, 0x90, 0x82, 0x68, 0xeb, 0x99, 0x98, 0x19, 0xad, 0xd8,
0x64, 0x37, 0xa0, 0xf4, 0xdc, 0xf6, 0xa6, 0x71, 0x32, 0xeb, 0xce, 0x83, 0xfc, 0x27, 0x39, 0xeb,
0x0f, 0x79, 0xa8, 0x98, 0x82, 0x83, 0xdd, 0x85, 0x0a, 0x15, 0x1c, 0xc6, 0xa2, 0xab, 0x33, 0x37,
0xa6, 0xb0, 0xad, 0xa4, 0x92, 0xca, 0xd8, 0x68, 0x54, 0xe9, 0x8a, 0xca, 0xd8, 0x98, 0xd6, 0x55,
0x85, 0xa1, 0x18, 0x99, 0x92, 0xa9, 0x49, 0x05, 0x8a, 0x18, 0xb9, 0xbe, 0x8b, 0xfe, 0xe1, 0x28,
0x62, 0x77, 0xe3, 0x55, 0x17, 0x49, 0xe3, 0x5b, 0x59, 0x8d, 0x97, 0x17, 0xdd, 0x83, 0x7a, 0x66,
0x9a, 0x2b, 0x56, 0xfd, 0x7e, 0x76, 0xd5, 0x66, 0x4a, 0x52, 0xa7, 0xeb, 0xbd, 0xd4, 0x0b, 0xff,
0x86, 0xff, 0x3e, 0x06, 0x48, 0x55, 0xfe, 0xf0, 0x93, 0xcf, 0xfa, 0x7d, 0x01, 0xa0, 0x1f, 0xe2,
0xf5, 0x39, 0xb4, 0xe9, 0xc2, 0x6f, 0xb8, 0x63, 0x3f, 0x90, 0xe2, 0x19, 0x9d, 0x10, 0x34, 0xbe,
0xca, 0xeb, 0x1a, 0xa3, 0x8c, 0x61, 0x3b, 0x50, 0x1f, 0x8a, 0xc8, 0x91, 0x2e, 0x05, 0x94, 0x71,
0xfa, 0x2d, 0x5c, 0x53, 0xaa, 0x67, 0xb3, 0x93, 0x32, 0xb4, 0xaf, 0xb2, 0x63, 0xd8, 0x36, 0x34,
0xc4, 0x45, 0x18, 0x48, 0x65, 0x66, 0xd1, 0x75, 0xe9, 0x35, 0x5d, 0xe1, 0x22, 0xae, 0x4f, 0x80,
0xba, 0x48, 0x3b, 0xcc, 0x86, 0xa2, 0x63, 0x87, 0x91, 0xa9, 0x06, 0xda, 0x0b, 0xf3, 0xed, 0xd9,
0xa1, 0x76, 0xda, 0xee, 0x47, 0xb8, 0xd6, 0x5f, 0xfc, 0xf5, 0xd6, 0x9d, 0x4c, 0x09, 0x35, 0x09,
0x4e, 0x66, 0x5b, 0x14, 0x2f, 0x67, 0xae, 0xda, 0x9a, 0x2a, 0xd7, 0xdb, 0xb2, 0x43, 0x17, 0xd5,
0xe1, 0xc0, 0x5e, 0x87, 0x93, 0x6a, 0xf6, 0x09, 0x34, 0x43, 0x19, 0x8c, 0xa5, 0x88, 0xa2, 0x67,
0x74, 0xa1, 0x9a, 0x42, 0xf7, 0x0d, 0x73, 0xf1, 0x93, 0xe4, 0x33, 0x14, 0xf0, 0xe5, 0x30, 0xdb,
0x5d, 0xf9, 0x11, 0xb4, 0x16, 0x57, 0xfc, 0x3a, 0xbb, 0xb7, 0x72, 0x1f, 0x6a, 0xc9, 0x0a, 0x5e,
0x35, 0xb0, 0x9a, 0xdd, 0xf6, 0xdf, 0xe5, 0xa0, 0xac, 0xf3, 0x91, 0xdd, 0x87, 0x9a, 0x17, 0x38,
0x36, 0x1a, 0x10, 0x7f, 0x54, 0xbc, 0x93, 0xa6, 0xeb, 0xe6, 0xa3, 0x58, 0xa6, 0xf7, 0x23, 0xe5,
0x62, 0x78, 0xba, 0xfe, 0x28, 0x88, 0xf3, 0xa7, 0x99, 0x0e, 0xea, 0xf9, 0xa3, 0x80, 0x6b, 0xe1,
0xca, 0x43, 0x68, 0xce, 0xab, 0xb8, 0xc2, 0xce, 0xf7, 0xe6, 0x03, 0x9d, 0x2e, 0x92, 0x64, 0x50,
0xd6, 0xec, 0xfb, 0x50, 0x4b, 0x70, 0xb6, 0x71, 0xd9, 0xf0, 0x46, 0x76, 0x64, 0xc6, 0x56, 0xeb,
0x97, 0x39, 0x80, 0xd4, 0x36, 0x3c, 0xe7, 0xf0, 0xf3, 0xc5, 0x4f, 0x0b, 0x8f, 0xa4, 0x4f, 0xf7,
0xb6, 0xad, 0x6c, 0xb2, 0xa5, 0xc1, 0xa9, 0xcd, 0x36, 0x01, 0x86, 0x49, 0xae, 0xbf, 0xe4, 0x04,
0xc8, 0x30, 0x50, 0xbf, 0x67, 0xfb, 0xe3, 0xa9, 0x3d, 0x16, 0xa6, 0x3a, 0x4c, 0xfa, 0x56, 0x1f,
0xaa, 0xb1, 0x85, 0x6c, 0x0d, 0xea, 0x91, 0xb1, 0x0a, 0x2b, 0x70, 0x34, 0xa5, 0xc4, 0xb3, 0x10,
0x56, 0xd2, 0xd2, 0xf6, 0xc7, 0x62, 0xae, 0x92, 0xe6, 0x88, 0x70, 0x23, 0xb0, 0xbe, 0x80, 0x12,
0x01, 0x98, 0xbd, 0x91, 0xb2, 0xa5, 0x32, 0x45, 0xb9, 0xae, 0x3b, 0x83, 0x88, 0x4c, 0xda, 0x2d,
0x62, 0x7c, 0x73, 0x4d, 0x60, 0xef, 0x63, 0x75, 0x3b, 0x34, 0xee, 0xbe, 0x8a, 0x87, 0x62, 0xeb,
0x53, 0xa8, 0xc6, 0x30, 0x7a, 0xc5, 0x73, 0x7d, 0x61, 0x4c, 0xa4, 0x36, 0x7e, 0xcc, 0x38, 0xa7,
0xb6, 0xb4, 0x1d, 0x25, 0x74, 0xf9, 0x53, 0xe2, 0x29, 0x60, 0xbd, 0x07, 0xf5, 0x4c, 0x52, 0x62,
0x2c, 0x3e, 0xa5, 0x3d, 0xd6, 0x47, 0x83, 0xee, 0x58, 0x9f, 0xc1, 0xf2, 0x5c, 0x82, 0xe0, 0x4d,
0xe6, 0x0e, 0xe3, 0x9b, 0x4c, 0xdf, 0x52, 0x97, 0xaa, 0x38, 0x06, 0xc5, 0x73, 0x61, 0x9f, 0x99,
0x0a, 0x8e, 0xda, 0xd6, 0x6f, 0xf1, 0x9b, 0x2d, 0xae, 0xac, 0xff, 0x17, 0xe0, 0x54, 0xa9, 0xf0,
0x19, 0x95, 0xda, 0x46, 0x59, 0x0d, 0x11, 0x62, 0xb0, 0x5b, 0x50, 0xc7, 0x4e, 0x64, 0xe4, 0x5a,
0x35, 0x8d, 0x88, 0x34, 0xe1, 0x7f, 0xa0, 0x36, 0x4a, 0x86, 0x17, 0x4c, 0x7c, 0xc4, 0xa3, 0xdf,
0x81, 0xaa, 0x1f, 0x18, 0x99, 0xde, 0xdb, 0x8a, 0x1f, 0x24, 0xe3, 0x6c, 0xcf, 0x33, 0xb2, 0x92,
0x1e, 0x67, 0x7b, 0x1e, 0x09, 0xad, 0x3b, 0xf0, 0xc6, 0xa5, 0xaf, 0x4f, 0xf6, 0x16, 0x94, 0x47,
0xae, 0xa7, 0xe8, 0xc6, 0xc2, 0x2f, 0x0d, 0xd3, 0xb3, 0xfe, 0x91, 0x03, 0x48, 0x63, 0x0b, 0x53,
0x06, 0xaf, 0x1e, 0xe4, 0x34, 0xf4, 0x55, 0xe3, 0x41, 0x75, 0x62, 0x0e, 0x31, 0x13, 0x19, 0x37,
0xe7, 0xe3, 0x71, 0x33, 0x3e, 0xe3, 0xf4, 0xf1, 0xb6, 0x6d, 0x8e, 0xb7, 0xd7, 0xf9, 0x42, 0x4c,
0x66, 0xa0, 0x02, 0x2e, 0xfb, 0x60, 0x00, 0x69, 0xae, 0x73, 0x23, 0x59, 0x79, 0x08, 0xcb, 0x73,
0x53, 0xfe, 0xc0, 0x0b, 0x2d, 0x3d, 0x8c, 0xb3, 0x89, 0xbe, 0x0d, 0x65, 0xfd, 0xd2, 0xc0, 0xd6,
0xa1, 0x62, 0x3b, 0x3a, 0xc7, 0x33, 0xe7, 0x0c, 0x0a, 0x77, 0x08, 0xe6, 0xb1, 0xd8, 0xfa, 0x73,
0x1e, 0x20, 0xc5, 0x5f, 0xa3, 0x8a, 0x7f, 0x00, 0xcd, 0x48, 0x38, 0x81, 0x3f, 0xb4, 0xe5, 0x8c,
0xa4, 0xe6, 0x53, 0xf8, 0xaa, 0x21, 0x0b, 0xcc, 0x4c, 0x45, 0x5f, 0x78, 0x75, 0x45, 0xbf, 0x0e,
0x45, 0x27, 0x08, 0x67, 0xe6, 0xde, 0x62, 0xf3, 0x0b, 0xd9, 0x0b, 0xc2, 0xd9, 0xc1, 0x12, 0x27,
0x06, 0xdb, 0x84, 0xf2, 0xe4, 0x8c, 0xde, 0x5e, 0xf4, 0x37, 0xe4, 0x8d, 0x79, 0xee, 0xe3, 0x33,
0x6c, 0x1f, 0x2c, 0x71, 0xc3, 0x62, 0x77, 0xa0, 0x34, 0x39, 0x1b, 0xba, 0xd2, 0xdc, 0x3c, 0xd7,
0x17, 0xe9, 0x1d, 0x57, 0xd2, 0x53, 0x0b, 0x72, 0x98, 0x05, 0x79, 0x39, 0x31, 0x0f, 0x2d, 0xad,
0x05, 0x6f, 0x4e, 0x0e, 0x96, 0x78, 0x5e, 0x4e, 0x76, 0xab, 0x50, 0xd6, 0x7e, 0xb5, 0xfe, 0x58,
0x84, 0xe6, 0xbc, 0x95, 0xb8, 0xb3, 0x91, 0x74, 0xe2, 0x9d, 0x8d, 0xa4, 0x93, 0x7c, 0xec, 0xe4,
0x33, 0x1f, 0x3b, 0x16, 0x94, 0x82, 0x73, 0x5f, 0xc8, 0xec, 0x23, 0xd3, 0xde, 0x69, 0x70, 0xee,
0x63, 0xd5, 0xac, 0x45, 0x73, 0x45, 0x68, 0xc9, 0x14, 0xa1, 0xef, 0xc3, 0xf2, 0x28, 0xf0, 0xbc,
0xe0, 0x7c, 0x30, 0x9b, 0x78, 0xae, 0x7f, 0x66, 0x2a, 0xd1, 0x79, 0x90, 0xad, 0xc3, 0xb5, 0xa1,
0x2b, 0xd1, 0x1c, 0x53, 0xfd, 0x47, 0xb4, 0xf6, 0x2a, 0x5f, 0x84, 0xd9, 0xe7, 0xb0, 0x66, 0x2b,
0x25, 0x26, 0xa1, 0x3a, 0xf6, 0x43, 0xdb, 0x39, 0xeb, 0x04, 0x0e, 0x65, 0xe1, 0x24, 0xb4, 0x95,
0x7b, 0xe2, 0x7a, 0xae, 0x9a, 0x91, 0x33, 0xaa, 0xfc, 0x95, 0x3c, 0xf6, 0x01, 0x34, 0x1d, 0x29,
0x6c, 0x25, 0x3a, 0x22, 0x52, 0x47, 0xb6, 0x3a, 0x6d, 0x57, 0x69, 0xe4, 0x02, 0x8a, 0x6b, 0xb0,
0xd1, 0xda, 0x2f, 0x5c, 0x6f, 0xe8, 0xe0, 0x67, 0x6b, 0x4d, 0xaf, 0x61, 0x0e, 0x64, 0x9b, 0xc0,
0x08, 0xe8, 0x4e, 0x42, 0x35, 0x4b, 0xa8, 0x40, 0xd4, 0x2b, 0x24, 0x78, 0xe0, 0x2a, 0x77, 0x22,
0x22, 0x65, 0x4f, 0x42, 0xfa, 0x22, 0x2f, 0xf0, 0x14, 0x60, 0xb7, 0xa1, 0xe5, 0xfa, 0x8e, 0x37,
0x1d, 0x8a, 0x67, 0x21, 0x2e, 0x44, 0xfa, 0x51, 0xbb, 0x41, 0xa7, 0xca, 0x35, 0x83, 0x1f, 0x19,
0x18, 0xa9, 0xe2, 0x62, 0x81, 0xba, 0xac, 0xa9, 0x06, 0x4f, 0xa8, 0xfb, 0xb0, 0x6a, 0x7b, 0xe7,
0xf6, 0x2c, 0xe2, 0x22, 0xf4, 0x6c, 0x47, 0x74, 0x2f, 0xdc, 0x48, 0xb9, 0xfe, 0x38, 0x5e, 0x6a,
0xd4, 0x6e, 0x92, 0xbd, 0xaf, 0x60, 0x59, 0x5f, 0xe6, 0xa0, 0xb5, 0x18, 0xc0, 0xb8, 0xfd, 0x21,
0x3a, 0xd1, 0x7c, 0xfc, 0x63, 0x3b, 0x09, 0x89, 0x7c, 0x26, 0x24, 0xe2, 0x3b, 0xb9, 0x90, 0xb9,
0x93, 0x93, 0xf0, 0x2a, 0xbe, 0x3c, 0xbc, 0xe6, 0x1c, 0x56, 0x5a, 0x70, 0x98, 0xf5, 0x9b, 0x1c,
0x5c, 0x5b, 0x48, 0x92, 0x1f, 0x6c, 0xd1, 0x1a, 0xd4, 0x27, 0xf6, 0x99, 0xd0, 0x4f, 0x27, 0x91,
0xb9, 0x8a, 0xb2, 0xd0, 0x7f, 0xc0, 0x3e, 0x1f, 0x1a, 0xd9, 0xcc, 0xbc, 0xd2, 0xb6, 0x38, 0xd0,
0x0e, 0x03, 0xb5, 0x1f, 0x4c, 0xcd, 0x9d, 0x1e, 0x07, 0x5a, 0x0c, 0x5e, 0x0e, 0xc7, 0xc2, 0x15,
0xe1, 0x68, 0x1d, 0x42, 0x35, 0x36, 0x90, 0xdd, 0x32, 0x6f, 0x5b, 0xb9, 0xf4, 0xc9, 0xf6, 0x38,
0x12, 0x12, 0x6d, 0xd7, 0x0f, 0x5d, 0xef, 0x42, 0x49, 0xd7, 0xba, 0xf9, 0xcb, 0x0c, 0x2d, 0xb1,
0x06, 0x50, 0x31, 0x08, 0xdb, 0x80, 0xf2, 0xc9, 0x2c, 0x79, 0xe7, 0x31, 0xc7, 0x0e, 0xf6, 0x87,
0x86, 0x81, 0x67, 0x99, 0x66, 0xb0, 0x1b, 0x50, 0x3c, 0x99, 0xf5, 0x3a, 0xfa, 0xeb, 0x15, 0x4f,
0x44, 0xec, 0xed, 0x96, 0xb5, 0x41, 0xd6, 0x23, 0x68, 0x64, 0xc7, 0x25, 0x05, 0x42, 0x2e, 0x53,
0x20, 0x24, 0x47, 0x7f, 0xfe, 0x55, 0x9f, 0x31, 0x1f, 0x03, 0xd0, 0x4b, 0xf4, 0xeb, 0x7e, 0xfe,
0x7c, 0x08, 0x15, 0xf3, 0x82, 0xcd, 0x3e, 0x58, 0x78, 0x91, 0x6f, 0x26, 0xcf, 0xdb, 0x73, 0xcf,
0xf2, 0xd6, 0x03, 0x2c, 0x84, 0xcf, 0x85, 0xec, 0xb8, 0xa3, 0xd1, 0xeb, 0x4e, 0xf7, 0x00, 0x9a,
0xc7, 0x61, 0xf8, 0xaf, 0x8d, 0xfd, 0x29, 0x94, 0xf5, 0x43, 0x3a, 0x8e, 0xf1, 0xd0, 0x02, 0xb3,
0x07, 0x4c, 0x17, 0xcb, 0x59, 0x93, 0xb8, 0x26, 0x20, 0x73, 0x8a, 0xf3, 0x99, 0xcd, 0x25, 0xe6,
0xbc, 0x01, 0x5c, 0x13, 0x36, 0xd6, 0xa1, 0x62, 0xde, 0x6c, 0x59, 0x0d, 0x4a, 0xc7, 0x87, 0x83,
0xee, 0x93, 0xd6, 0x12, 0xab, 0x42, 0xf1, 0xa0, 0x3f, 0x78, 0xd2, 0xca, 0x61, 0xeb, 0xb0, 0x7f,
0xd8, 0x6d, 0xe5, 0x37, 0x6e, 0x43, 0x23, 0xfb, 0x6a, 0xcb, 0xea, 0x50, 0x19, 0xec, 0x1c, 0x76,
0x76, 0xfb, 0x3f, 0x69, 0x2d, 0xb1, 0x06, 0x54, 0x7b, 0x87, 0x83, 0xee, 0xde, 0x31, 0xef, 0xb6,
0x72, 0x1b, 0x3f, 0x86, 0x5a, 0xf2, 0x90, 0x85, 0x1a, 0x76, 0x7b, 0x87, 0x9d, 0xd6, 0x12, 0x03,
0x28, 0x0f, 0xba, 0x7b, 0xbc, 0x8b, 0x7a, 0x2b, 0x50, 0x18, 0x0c, 0x0e, 0x5a, 0x79, 0x9c, 0x75,
0x6f, 0x67, 0xef, 0xa0, 0xdb, 0x2a, 0x60, 0xf3, 0xc9, 0xe3, 0xa3, 0xfd, 0x41, 0xab, 0xb8, 0xf1,
0x21, 0xbc, 0x71, 0xe9, 0x65, 0x08, 0x67, 0xec, 0x74, 0xf7, 0x77, 0x8e, 0x1f, 0xa1, 0x89, 0x65,
0xc8, 0xf7, 0x0f, 0xb5, 0xa2, 0xfe, 0xfe, 0x7e, 0x2b, 0xbf, 0xf1, 0x31, 0x5c, 0x5b, 0x78, 0xda,
0xa1, 0x09, 0x0f, 0x76, 0x78, 0x17, 0x27, 0xaf, 0x43, 0xe5, 0x88, 0xf7, 0x9e, 0xee, 0x3c, 0xe9,
0xb6, 0x72, 0x28, 0x78, 0xd4, 0xdf, 0x7b, 0xd8, 0xed, 0xb4, 0xf2, 0xbb, 0x37, 0xbf, 0x7a, 0xb1,
0x9a, 0xfb, 0xe6, 0xc5, 0x6a, 0xee, 0xdb, 0x17, 0xab, 0xb9, 0xbf, 0xbd, 0x58, 0xcd, 0x7d, 0xf9,
0xfd, 0xea, 0xd2, 0x37, 0xdf, 0xaf, 0x2e, 0x7d, 0xfb, 0xfd, 0xea, 0xd2, 0x49, 0x99, 0xfe, 0xbd,
0xf9, 0xe8, 0x9f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xef, 0xce, 0x68, 0x38, 0xfd, 0x19, 0x00, 0x00,
}
func (m *Op) Marshal() (dAtA []byte, err error) {
@ -4969,6 +4979,16 @@ func (m *FileActionCopy) MarshalToSizedBuffer(dAtA []byte) (int, error) {
_ = i
var l int
_ = l
if m.AlwaysReplaceExistingDestPaths {
i--
if m.AlwaysReplaceExistingDestPaths {
dAtA[i] = 1
} else {
dAtA[i] = 0
}
i--
dAtA[i] = 0x70
}
if len(m.ExcludePatterns) > 0 {
for iNdEx := len(m.ExcludePatterns) - 1; iNdEx >= 0; iNdEx-- {
i -= len(m.ExcludePatterns[iNdEx])
@ -6463,6 +6483,9 @@ func (m *FileActionCopy) Size() (n int) {
n += 1 + l + sovOps(uint64(l))
}
}
if m.AlwaysReplaceExistingDestPaths {
n += 2
}
return n
}
@ -12391,6 +12414,26 @@ func (m *FileActionCopy) Unmarshal(dAtA []byte) error {
}
m.ExcludePatterns = append(m.ExcludePatterns, string(dAtA[iNdEx:postIndex]))
iNdEx = postIndex
case 14:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field AlwaysReplaceExistingDestPaths", wireType)
}
var v int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowOps
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
v |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
m.AlwaysReplaceExistingDestPaths = bool(v != 0)
default:
iNdEx = preIndex
skippy, err := skipOps(dAtA[iNdEx:])
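The generated code above wires the new bool through protobuf field number 14: the marshaller writes the tag byte 0x70, Size charges 2 bytes (tag plus varint), and the unmarshaller's shift loop decodes the varint back into AlwaysReplaceExistingDestPaths. A minimal standalone sketch (not part of the vendored code) of the tag arithmetic:

package main

import "fmt"

func main() {
	// Protobuf tag byte = (field number << 3) | wire type.
	// AlwaysReplaceExistingDestPaths is field 14 with varint wire type 0,
	// which is why the generated Marshal code emits 0x70 before the bool value.
	const fieldNumber = 14
	const wireTypeVarint = 0
	fmt.Printf("0x%x\n", fieldNumber<<3|wireTypeVarint) // prints 0x70
}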

View File

@ -340,6 +340,8 @@ message FileActionCopy {
repeated string include_patterns = 12;
// exclude files/dir matching any of these patterns (even if they match an include pattern)
repeated string exclude_patterns = 13;
// alwaysReplaceExistingDestPaths replaces an existing dest path whose type differs from the src path, instead of the default behavior of returning an error
bool alwaysReplaceExistingDestPaths = 14;
}
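A hedged usage sketch for the new field, using only names visible in this diff; the generated package is assumed to be imported as pb:

// Sketch only: struct and field names come from the generated ops.pb.go above.
action := &pb.FileActionCopy{
	ExcludePatterns:                []string{"*.tmp"},
	AlwaysReplaceExistingDestPaths: true, // replace a dest path whose type differs from src instead of erroring
}
_ = action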
message FileActionMkFile {

View File

@ -1,6 +1,6 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.31.0
// protoc-gen-go v1.33.0
// protoc v3.11.4
// source: stack.proto

View File

@ -32,7 +32,6 @@ type detector struct {
}
var ServiceName string
var Recorder *TraceRecorder
var detectors map[string]detector
var once sync.Once
@ -116,45 +115,32 @@ func detectExporter[T any](envVar string, fn func(d ExporterDetector) (T, bool,
return exp, nil
}
func getExporters() (sdktrace.SpanExporter, sdkmetric.Exporter, error) {
texp, mexp, err := detectExporters()
if err != nil {
return nil, nil, err
}
if Recorder != nil {
Recorder.SpanExporter = texp
texp = Recorder
}
return texp, mexp, nil
}
func detect() error {
tp = noop.NewTracerProvider()
mp = sdkmetric.NewMeterProvider()
texp, mexp, err := getExporters()
texp, mexp, err := detectExporters()
if err != nil || (texp == nil && mexp == nil) {
return err
}
res := Resource()
// enable log with traceID when valid exporter
if texp != nil {
if texp != nil || Recorder != nil {
// enable log with traceID when a valid exporter is used
bklog.EnableLogWithTraceID(true)
sp := sdktrace.NewBatchSpanProcessor(texp)
if Recorder != nil {
Recorder.flush = sp.ForceFlush
}
sdktp := sdktrace.NewTracerProvider(
sdktrace.WithSpanProcessor(sp),
sdktpopts := []sdktrace.TracerProviderOption{
sdktrace.WithResource(res),
)
}
if texp != nil {
sdktpopts = append(sdktpopts, sdktrace.WithBatcher(texp))
}
if Recorder != nil {
sp := sdktrace.NewSimpleSpanProcessor(Recorder)
sdktpopts = append(sdktpopts, sdktrace.WithSpanProcessor(sp))
}
sdktp := sdktrace.NewTracerProvider(sdktpopts...)
closers = append(closers, sdktp.Shutdown)
exporter.SpanExporter = texp
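The change above assembles the tracer provider from a conditional option slice: the detected exporter is attached with WithBatcher, while the Recorder gets a simple (synchronous) span processor so recorded spans are available without waiting for a batch flush. A minimal sketch of the same pattern, assuming the standard go.opentelemetry.io/otel/sdk/trace import, with exp and rec standing in for the detected exporter and the recorder:

// Sketch only: either exp or rec may be nil.
opts := []sdktrace.TracerProviderOption{sdktrace.WithResource(res)}
if exp != nil {
	opts = append(opts, sdktrace.WithBatcher(exp)) // batched export to the real backend
}
if rec != nil {
	// synchronous processor: spans reach the recorder as soon as they end
	opts = append(opts, sdktrace.WithSpanProcessor(sdktrace.NewSimpleSpanProcessor(rec)))
}
tp := sdktrace.NewTracerProvider(opts...)
_ = tp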

View File

@ -5,18 +5,31 @@ import (
"sync"
"time"
"github.com/pkg/errors"
sdktrace "go.opentelemetry.io/otel/sdk/trace"
"go.opentelemetry.io/otel/sdk/trace/tracetest"
"go.opentelemetry.io/otel/trace"
"golang.org/x/sync/semaphore"
)
type TraceRecorder struct {
sdktrace.SpanExporter
var Recorder *TraceRecorder
mu sync.Mutex
type TraceRecorder struct {
// sem is a binary semaphore for this struct.
// This is used instead of sync.Mutex because it allows
// for context cancellation to work properly.
sem *semaphore.Weighted
// shutdown function for the gc.
shutdownGC func(err error)
// done channel that marks when background goroutines
// are closed.
done chan struct{}
// track traces and listeners for traces.
m map[trace.TraceID]*stubs
listeners map[trace.TraceID]int
flush func(context.Context) error
}
type stubs struct {
@ -26,37 +39,52 @@ type stubs struct {
func NewTraceRecorder() *TraceRecorder {
tr := &TraceRecorder{
sem: semaphore.NewWeighted(1),
done: make(chan struct{}),
m: map[trace.TraceID]*stubs{},
listeners: map[trace.TraceID]int{},
}
go func() {
t := time.NewTimer(60 * time.Second)
for {
<-t.C
tr.gc()
t.Reset(50 * time.Second)
}
}()
ctx, cancel := context.WithCancelCause(context.Background())
go tr.gcLoop(ctx)
tr.shutdownGC = cancel
return tr
}
func (r *TraceRecorder) Record(traceID trace.TraceID) func() []tracetest.SpanStub {
r.mu.Lock()
defer r.mu.Unlock()
// Record signals to the TraceRecorder that it should track spans associated with the current
// trace and returns a function that will return these spans.
//
// If the TraceRecorder is nil or there is no valid active span, the returned function
// will be nil to signal that the trace cannot be recorded.
func (r *TraceRecorder) Record(ctx context.Context) (func() []tracetest.SpanStub, error) {
if r == nil {
return nil, nil
}
spanCtx := trace.SpanContextFromContext(ctx)
if !spanCtx.IsValid() {
return nil, nil
}
if err := r.sem.Acquire(ctx, 1); err != nil {
return nil, err
}
defer r.sem.Release(1)
traceID := spanCtx.TraceID()
r.listeners[traceID]++
var once sync.Once
var spans []tracetest.SpanStub
var (
once sync.Once
spans []tracetest.SpanStub
)
return func() []tracetest.SpanStub {
once.Do(func() {
if r.flush != nil {
r.flush(context.TODO())
if err := r.sem.Acquire(context.Background(), 1); err != nil {
return
}
r.mu.Lock()
defer r.mu.Unlock()
defer r.sem.Release(1)
if v, ok := r.m[traceID]; ok {
spans = v.spans
@ -67,26 +95,46 @@ func (r *TraceRecorder) Record(traceID trace.TraceID) func() []tracetest.SpanStub
}
})
return spans
}, nil
}
func (r *TraceRecorder) gcLoop(ctx context.Context) {
defer close(r.done)
ticker := time.NewTicker(time.Minute)
defer ticker.Stop()
for {
select {
case <-ctx.Done():
return
case now := <-ticker.C:
r.gc(ctx, now)
}
}
}
func (r *TraceRecorder) gc() {
r.mu.Lock()
defer r.mu.Unlock()
func (r *TraceRecorder) gc(ctx context.Context, now time.Time) {
if err := r.sem.Acquire(ctx, 1); err != nil {
return
}
defer r.sem.Release(1)
now := time.Now()
for k, s := range r.m {
if _, ok := r.listeners[k]; ok {
continue
}
if now.Sub(s.last) > 60*time.Second {
if now.Sub(s.last) > time.Minute {
delete(r.m, k)
}
}
}
func (r *TraceRecorder) ExportSpans(ctx context.Context, spans []sdktrace.ReadOnlySpan) error {
r.mu.Lock()
if err := r.sem.Acquire(ctx, 1); err != nil {
return err
}
defer r.sem.Release(1)
now := time.Now()
for _, s := range spans {
@ -99,17 +147,18 @@ func (r *TraceRecorder) ExportSpans(ctx context.Context, spans []sdktrace.ReadOn
v.last = now
v.spans = append(v.spans, ss)
}
r.mu.Unlock()
if r.SpanExporter == nil {
return nil
}
return r.SpanExporter.ExportSpans(ctx, spans)
return nil
}
func (r *TraceRecorder) Shutdown(ctx context.Context) error {
if r.SpanExporter == nil {
// Initiate the shutdown of the gc loop.
r.shutdownGC(errors.WithStack(context.Canceled))
// Wait for it to be done or the context is canceled.
select {
case <-r.done:
return nil
case <-ctx.Done():
return context.Cause(ctx)
}
return r.SpanExporter.Shutdown(ctx)
}
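Record now takes a context instead of a trace ID and is safe on a nil receiver; a hedged usage sketch, assuming it lives in the same package so TraceRecorder and tracetest are already imported:

// Sketch only: collects span stubs for the trace active in ctx.
func collectSpans(ctx context.Context, rec *TraceRecorder) ([]tracetest.SpanStub, error) {
	getSpans, err := rec.Record(ctx)
	if err != nil || getSpans == nil {
		return nil, err // canceled, nil recorder, or no active span
	}
	// ... run the traced work here, then flush and return what was recorded ...
	return getSpans(), nil
}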

View File

@ -0,0 +1,61 @@
package tracing
import (
"context"
"strings"
"go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc"
"google.golang.org/grpc/stats"
)
func ServerStatsHandler(opts ...otelgrpc.Option) stats.Handler {
handler := otelgrpc.NewServerHandler(opts...)
return &statsFilter{
inner: handler,
filter: defaultStatsFilter,
}
}
func ClientStatsHandler(opts ...otelgrpc.Option) stats.Handler {
handler := otelgrpc.NewClientHandler(opts...)
return &statsFilter{
inner: handler,
filter: defaultStatsFilter,
}
}
type contextKey int
const filterContextKey contextKey = iota
type statsFilter struct {
inner stats.Handler
filter func(info *stats.RPCTagInfo) bool
}
func (s *statsFilter) TagRPC(ctx context.Context, info *stats.RPCTagInfo) context.Context {
if s.filter(info) {
return context.WithValue(ctx, filterContextKey, struct{}{})
}
return s.inner.TagRPC(ctx, info)
}
func (s *statsFilter) HandleRPC(ctx context.Context, rpcStats stats.RPCStats) {
if ctx.Value(filterContextKey) != nil {
return
}
s.inner.HandleRPC(ctx, rpcStats)
}
func (s *statsFilter) TagConn(ctx context.Context, info *stats.ConnTagInfo) context.Context {
return s.inner.TagConn(ctx, info)
}
func (s *statsFilter) HandleConn(ctx context.Context, connStats stats.ConnStats) {
s.inner.HandleConn(ctx, connStats)
}
func defaultStatsFilter(info *stats.RPCTagInfo) bool {
return strings.HasSuffix(info.FullMethodName, "opentelemetry.proto.collector.trace.v1.TraceService/Export") ||
strings.HasSuffix(info.FullMethodName, "Health/Check")
}