vendor: github.com/aws/aws-sdk-go-v2/config v1.26.6

vendor github.com/aws/aws-sdk-go-v2/config v1.26.6 and related dependencies.

Signed-off-by: Sebastiaan van Stijn <github@gone.nl>
Sebastiaan van Stijn
2024-02-05 18:08:03 +01:00
parent 089982153f
commit 43ed470208
190 changed files with 12340 additions and 13837 deletions


@ -0,0 +1,45 @@
package auth
import (
"github.com/aws/smithy-go/auth"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// HTTPAuthScheme is the SDK's internal implementation of smithyhttp.AuthScheme
// for pre-existing implementations where the signer was added to client
// config. SDK clients will key off of this type and ensure per-operation
// updates to those signers persist on the scheme itself.
type HTTPAuthScheme struct {
schemeID string
signer smithyhttp.Signer
}
var _ smithyhttp.AuthScheme = (*HTTPAuthScheme)(nil)
// NewHTTPAuthScheme returns an auth scheme instance with the given config.
func NewHTTPAuthScheme(schemeID string, signer smithyhttp.Signer) *HTTPAuthScheme {
return &HTTPAuthScheme{
schemeID: schemeID,
signer: signer,
}
}
// SchemeID identifies the auth scheme.
func (s *HTTPAuthScheme) SchemeID() string {
return s.schemeID
}
// IdentityResolver gets the identity resolver for the auth scheme.
func (s *HTTPAuthScheme) IdentityResolver(o auth.IdentityResolverOptions) auth.IdentityResolver {
return o.GetIdentityResolver(s.schemeID)
}
// Signer gets the signer for the auth scheme.
func (s *HTTPAuthScheme) Signer() smithyhttp.Signer {
return s.signer
}
// WithSigner returns a new instance of the auth scheme with the updated signer.
func (s *HTTPAuthScheme) WithSigner(signer smithyhttp.Signer) *HTTPAuthScheme {
return NewHTTPAuthScheme(s.schemeID, signer)
}
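
Usage sketch (not part of the vendored file): constructing a scheme and rebinding it to another signer. The no-op signer and the "aws.auth#sigv4" scheme ID are illustrative only, and the snippet assumes it sits in this package with additional context, fmt, and github.com/aws/smithy-go imports.

// noopSigner is an illustrative smithyhttp.Signer that performs no signing.
type noopSigner struct{}

func (noopSigner) SignRequest(context.Context, *smithyhttp.Request, auth.Identity, smithy.Properties) error {
	return nil
}

func exampleHTTPAuthScheme() {
	scheme := NewHTTPAuthScheme("aws.auth#sigv4", noopSigner{}) // scheme ID is illustrative
	rebound := scheme.WithSigner(noopSigner{})                  // new scheme value, same scheme ID
	fmt.Println(scheme.SchemeID(), rebound.SchemeID())
}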


@ -0,0 +1,191 @@
package auth
import (
"context"
"fmt"
smithy "github.com/aws/smithy-go"
"github.com/aws/smithy-go/middleware"
)
// SigV4 is a constant representing
// Authentication Scheme Signature Version 4
const SigV4 = "sigv4"
// SigV4A is a constant representing
// Authentication Scheme Signature Version 4A
const SigV4A = "sigv4a"
// SigV4S3Express identifies the S3 S3Express auth scheme.
const SigV4S3Express = "sigv4-s3express"
// None is a constant representing the
// None Authentication Scheme
const None = "none"
// SupportedSchemes is a data structure
// that indicates the list of supported AWS
// authentication schemes
var SupportedSchemes = map[string]bool{
SigV4: true,
SigV4A: true,
SigV4S3Express: true,
None: true,
}
// AuthenticationScheme is a representation of
// AWS authentication schemes
type AuthenticationScheme interface {
isAuthenticationScheme()
}
// AuthenticationSchemeV4 is an AWS SigV4 representation
type AuthenticationSchemeV4 struct {
Name string
SigningName *string
SigningRegion *string
DisableDoubleEncoding *bool
}
func (a *AuthenticationSchemeV4) isAuthenticationScheme() {}
// AuthenticationSchemeV4A is an AWS SigV4A representation
type AuthenticationSchemeV4A struct {
Name string
SigningName *string
SigningRegionSet []string
DisableDoubleEncoding *bool
}
func (a *AuthenticationSchemeV4A) isAuthenticationScheme() {}
// AuthenticationSchemeNone is a representation for the none auth scheme
type AuthenticationSchemeNone struct{}
func (a *AuthenticationSchemeNone) isAuthenticationScheme() {}
// NoAuthenticationSchemesFoundError is used in signaling
// that no authentication schemes have been specified.
type NoAuthenticationSchemesFoundError struct{}
func (e *NoAuthenticationSchemesFoundError) Error() string {
return fmt.Sprint("No authentication schemes specified.")
}
// UnSupportedAuthenticationSchemeSpecifiedError is used in
// signaling that only unsupported authentication schemes
// were specified.
type UnSupportedAuthenticationSchemeSpecifiedError struct {
UnsupportedSchemes []string
}
func (e *UnSupportedAuthenticationSchemeSpecifiedError) Error() string {
return fmt.Sprint("Unsupported authentication scheme specified.")
}
// GetAuthenticationSchemes extracts the relevant authentication scheme data
// into a custom strongly typed Go data structure.
func GetAuthenticationSchemes(p *smithy.Properties) ([]AuthenticationScheme, error) {
var result []AuthenticationScheme
if !p.Has("authSchemes") {
return nil, &NoAuthenticationSchemesFoundError{}
}
authSchemes, _ := p.Get("authSchemes").([]interface{})
var unsupportedSchemes []string
for _, scheme := range authSchemes {
authScheme, _ := scheme.(map[string]interface{})
version := authScheme["name"].(string)
switch version {
case SigV4, SigV4S3Express:
v4Scheme := AuthenticationSchemeV4{
Name: version,
SigningName: getSigningName(authScheme),
SigningRegion: getSigningRegion(authScheme),
DisableDoubleEncoding: getDisableDoubleEncoding(authScheme),
}
result = append(result, AuthenticationScheme(&v4Scheme))
case SigV4A:
v4aScheme := AuthenticationSchemeV4A{
Name: SigV4A,
SigningName: getSigningName(authScheme),
SigningRegionSet: getSigningRegionSet(authScheme),
DisableDoubleEncoding: getDisableDoubleEncoding(authScheme),
}
result = append(result, AuthenticationScheme(&v4aScheme))
case None:
noneScheme := AuthenticationSchemeNone{}
result = append(result, AuthenticationScheme(&noneScheme))
default:
unsupportedSchemes = append(unsupportedSchemes, authScheme["name"].(string))
continue
}
}
if len(result) == 0 {
return nil, &UnSupportedAuthenticationSchemeSpecifiedError{
UnsupportedSchemes: unsupportedSchemes,
}
}
return result, nil
}
type disableDoubleEncoding struct{}
// SetDisableDoubleEncoding sets or modifies the disable double encoding option
// on the context.
//
// Scoped to stack values. Use github.com/aws/smithy-go/middleware#ClearStackValues
// to clear all stack values.
func SetDisableDoubleEncoding(ctx context.Context, value bool) context.Context {
return middleware.WithStackValue(ctx, disableDoubleEncoding{}, value)
}
// GetDisableDoubleEncoding retrieves the disable double encoding option
// from the context.
//
// Scoped to stack values. Use github.com/aws/smithy-go/middleware#ClearStackValues
// to clear all stack values.
func GetDisableDoubleEncoding(ctx context.Context) (value bool, ok bool) {
value, ok = middleware.GetStackValue(ctx, disableDoubleEncoding{}).(bool)
return value, ok
}
func getSigningName(authScheme map[string]interface{}) *string {
signingName, ok := authScheme["signingName"].(string)
if !ok || signingName == "" {
return nil
}
return &signingName
}
func getSigningRegionSet(authScheme map[string]interface{}) []string {
untypedSigningRegionSet, ok := authScheme["signingRegionSet"].([]interface{})
if !ok {
return nil
}
signingRegionSet := []string{}
for _, item := range untypedSigningRegionSet {
signingRegionSet = append(signingRegionSet, item.(string))
}
return signingRegionSet
}
func getSigningRegion(authScheme map[string]interface{}) *string {
signingRegion, ok := authScheme["signingRegion"].(string)
if !ok || signingRegion == "" {
return nil
}
return &signingRegion
}
func getDisableDoubleEncoding(authScheme map[string]interface{}) *bool {
disableDoubleEncoding, ok := authScheme["disableDoubleEncoding"].(bool)
if !ok {
return nil
}
return &disableDoubleEncoding
}
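
Usage sketch (not part of the vendored file): feeding a hand-built property bag through GetAuthenticationSchemes. It assumes smithy.Properties.Set, the counterpart of the Get/Has calls above, and the "signingName"/"signingRegion" values are illustrative.

func exampleGetAuthenticationSchemes() {
	var props smithy.Properties
	// Shape mirrors what endpoint rule evaluation stores under "authSchemes".
	props.Set("authSchemes", []interface{}{
		map[string]interface{}{
			"name":          SigV4,
			"signingName":   "s3",
			"signingRegion": "us-east-1",
		},
	})

	schemes, err := GetAuthenticationSchemes(&props)
	if err != nil {
		panic(err)
	}
	if v4, ok := schemes[0].(*AuthenticationSchemeV4); ok {
		fmt.Println(v4.Name, *v4.SigningName, *v4.SigningRegion) // sigv4 s3 us-east-1
	}
}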


@ -0,0 +1,43 @@
package smithy
import (
"context"
"fmt"
"time"
"github.com/aws/smithy-go"
"github.com/aws/smithy-go/auth"
"github.com/aws/smithy-go/auth/bearer"
)
// BearerTokenAdapter adapts smithy bearer.Token to smithy auth.Identity.
type BearerTokenAdapter struct {
Token bearer.Token
}
var _ auth.Identity = (*BearerTokenAdapter)(nil)
// Expiration returns the time of expiration for the token.
func (v *BearerTokenAdapter) Expiration() time.Time {
return v.Token.Expires
}
// BearerTokenProviderAdapter adapts smithy bearer.TokenProvider to smithy
// auth.IdentityResolver.
type BearerTokenProviderAdapter struct {
Provider bearer.TokenProvider
}
var _ (auth.IdentityResolver) = (*BearerTokenProviderAdapter)(nil)
// GetIdentity retrieves a bearer token using the underlying provider.
func (v *BearerTokenProviderAdapter) GetIdentity(ctx context.Context, _ smithy.Properties) (
auth.Identity, error,
) {
token, err := v.Provider.RetrieveBearerToken(ctx)
if err != nil {
return nil, fmt.Errorf("get token: %w", err)
}
return &BearerTokenAdapter{Token: token}, nil
}
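
Usage sketch (not part of the vendored file): adapting a fixed bearer token so it flows through the identity-resolver interface. The inline provider and token value are illustrative, and bearer.Token's Value field is assumed.

// staticTokenProvider is an illustrative bearer.TokenProvider returning a fixed token.
type staticTokenProvider struct{ token bearer.Token }

func (p staticTokenProvider) RetrieveBearerToken(context.Context) (bearer.Token, error) {
	return p.token, nil
}

func exampleBearerTokenAdapter(ctx context.Context) {
	resolver := &BearerTokenProviderAdapter{
		Provider: staticTokenProvider{token: bearer.Token{Value: "example-token"}},
	}
	identity, err := resolver.GetIdentity(ctx, smithy.Properties{})
	if err != nil {
		panic(err)
	}
	fmt.Println(identity.(*BearerTokenAdapter).Token.Value) // example-token
}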


@ -0,0 +1,35 @@
package smithy
import (
"context"
"fmt"
"github.com/aws/smithy-go"
"github.com/aws/smithy-go/auth"
"github.com/aws/smithy-go/auth/bearer"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// BearerTokenSignerAdapter adapts smithy bearer.Signer to smithy http
// auth.Signer.
type BearerTokenSignerAdapter struct {
Signer bearer.Signer
}
var _ (smithyhttp.Signer) = (*BearerTokenSignerAdapter)(nil)
// SignRequest signs the request with the provided bearer token.
func (v *BearerTokenSignerAdapter) SignRequest(ctx context.Context, r *smithyhttp.Request, identity auth.Identity, _ smithy.Properties) error {
ca, ok := identity.(*BearerTokenAdapter)
if !ok {
return fmt.Errorf("unexpected identity type: %T", identity)
}
signed, err := v.Signer.SignWithBearerToken(ctx, ca.Token, r)
if err != nil {
return fmt.Errorf("sign request: %w", err)
}
*r = *signed.(*smithyhttp.Request)
return nil
}


@ -0,0 +1,46 @@
package smithy
import (
"context"
"fmt"
"time"
"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/smithy-go"
"github.com/aws/smithy-go/auth"
)
// CredentialsAdapter adapts aws.Credentials to auth.Identity.
type CredentialsAdapter struct {
Credentials aws.Credentials
}
var _ auth.Identity = (*CredentialsAdapter)(nil)
// Expiration returns the time of expiration for the credentials.
func (v *CredentialsAdapter) Expiration() time.Time {
return v.Credentials.Expires
}
// CredentialsProviderAdapter adapts aws.CredentialsProvider to auth.IdentityResolver.
type CredentialsProviderAdapter struct {
Provider aws.CredentialsProvider
}
var _ (auth.IdentityResolver) = (*CredentialsProviderAdapter)(nil)
// GetIdentity retrieves AWS credentials using the underlying provider.
func (v *CredentialsProviderAdapter) GetIdentity(ctx context.Context, _ smithy.Properties) (
auth.Identity, error,
) {
if v.Provider == nil {
return &CredentialsAdapter{Credentials: aws.Credentials{}}, nil
}
creds, err := v.Provider.Retrieve(ctx)
if err != nil {
return nil, fmt.Errorf("get credentials: %w", err)
}
return &CredentialsAdapter{Credentials: creds}, nil
}
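
Usage sketch (not part of the vendored file): wrapping a fixed-credentials provider so its output can be consumed as an auth.Identity. The inline provider and key ID are illustrative.

// staticCredentialsProvider is an illustrative aws.CredentialsProvider returning fixed credentials.
type staticCredentialsProvider struct{ creds aws.Credentials }

func (p staticCredentialsProvider) Retrieve(context.Context) (aws.Credentials, error) {
	return p.creds, nil
}

func exampleCredentialsAdapter(ctx context.Context) {
	resolver := &CredentialsProviderAdapter{
		Provider: staticCredentialsProvider{creds: aws.Credentials{AccessKeyID: "AKID"}},
	}
	identity, err := resolver.GetIdentity(ctx, smithy.Properties{})
	if err != nil {
		panic(err)
	}
	fmt.Println(identity.(*CredentialsAdapter).Credentials.AccessKeyID) // AKID
}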


@ -0,0 +1,2 @@
// Package smithy adapts concrete AWS auth and signing types to the generic smithy versions.
package smithy


@ -0,0 +1,53 @@
package smithy
import (
"context"
"fmt"
v4 "github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/internal/sdk"
"github.com/aws/smithy-go"
"github.com/aws/smithy-go/auth"
"github.com/aws/smithy-go/logging"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// V4SignerAdapter adapts v4.HTTPSigner to smithy http.Signer.
type V4SignerAdapter struct {
Signer v4.HTTPSigner
Logger logging.Logger
LogSigning bool
}
var _ (smithyhttp.Signer) = (*V4SignerAdapter)(nil)
// SignRequest signs the request with the provided identity.
func (v *V4SignerAdapter) SignRequest(ctx context.Context, r *smithyhttp.Request, identity auth.Identity, props smithy.Properties) error {
ca, ok := identity.(*CredentialsAdapter)
if !ok {
return fmt.Errorf("unexpected identity type: %T", identity)
}
name, ok := smithyhttp.GetSigV4SigningName(&props)
if !ok {
return fmt.Errorf("sigv4 signing name is required")
}
region, ok := smithyhttp.GetSigV4SigningRegion(&props)
if !ok {
return fmt.Errorf("sigv4 signing region is required")
}
hash := v4.GetPayloadHash(ctx)
err := v.Signer.SignHTTP(ctx, ca.Credentials, r.Request, hash, name, region, sdk.NowTime(), func(o *v4.SignerOptions) {
o.DisableURIPathEscaping, _ = smithyhttp.GetDisableDoubleEncoding(&props)
o.Logger = v.Logger
o.LogSigning = v.LogSigning
})
if err != nil {
return fmt.Errorf("sign http: %w", err)
}
return nil
}


@ -1,3 +1,100 @@
# v1.2.10 (2024-01-04)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.2.9 (2023-12-07)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.2.8 (2023-12-01)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.2.7 (2023-11-30)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.2.6 (2023-11-29)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.2.5 (2023-11-28.2)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.2.4 (2023-11-20)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.2.3 (2023-11-15)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.2.2 (2023-11-09)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.2.1 (2023-11-01)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.2.0 (2023-10-31)
* **Feature**: **BREAKING CHANGE**: Bump minimum go version to 1.19 per the revised [go version support policy](https://aws.amazon.com/blogs/developer/aws-sdk-for-go-aligns-with-go-release-policy-on-supported-runtimes/).
* **Dependency Update**: Updated to the latest SDK module versions
# v1.1.43 (2023-10-12)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.1.42 (2023-10-06)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.1.41 (2023-08-21)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.1.40 (2023-08-18)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.1.39 (2023-08-17)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.1.38 (2023-08-07)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.1.37 (2023-07-31)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.1.36 (2023-07-28)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.1.35 (2023-07-13)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.1.34 (2023-06-13)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.1.33 (2023-04-24)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.1.32 (2023-04-07)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.1.31 (2023-03-21)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.1.30 (2023-03-10)
* **Dependency Update**: Updated to the latest SDK module versions


@ -0,0 +1,57 @@
package configsources
import (
"context"
)
// ServiceBaseEndpointProvider is needed to search for all providers
// that provide a configured service endpoint
type ServiceBaseEndpointProvider interface {
GetServiceBaseEndpoint(ctx context.Context, sdkID string) (string, bool, error)
}
// IgnoreConfiguredEndpointsProvider is needed to search for all providers
// that provide a flag to disable configured endpoints.
//
// Currently duplicated from github.com/aws/aws-sdk-go-v2/config because
// service packages cannot import github.com/aws/aws-sdk-go-v2/config
// due to the resulting import cycle.
type IgnoreConfiguredEndpointsProvider interface {
GetIgnoreConfiguredEndpoints(ctx context.Context) (bool, bool, error)
}
// GetIgnoreConfiguredEndpoints is used to determine whether the configured
// endpoints feature should be disabled.
//
// Currently duplicated from github.com/aws/aws-sdk-go-v2/config because
// service packages cannot import github.com/aws/aws-sdk-go-v2/config
// due to the resulting import cycle.
func GetIgnoreConfiguredEndpoints(ctx context.Context, configs []interface{}) (value bool, found bool, err error) {
for _, cfg := range configs {
if p, ok := cfg.(IgnoreConfiguredEndpointsProvider); ok {
value, found, err = p.GetIgnoreConfiguredEndpoints(ctx)
if err != nil || found {
break
}
}
}
return
}
// ResolveServiceBaseEndpoint is used to retrieve service endpoints from configured sources
// while allowing for configured endpoints to be disabled
func ResolveServiceBaseEndpoint(ctx context.Context, sdkID string, configs []interface{}) (value string, found bool, err error) {
if val, found, _ := GetIgnoreConfiguredEndpoints(ctx, configs); found && val {
return "", false, nil
}
for _, cs := range configs {
if p, ok := cs.(ServiceBaseEndpointProvider); ok {
value, found, err = p.GetServiceBaseEndpoint(context.Background(), sdkID)
if err != nil || found {
break
}
}
}
return
}
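
Usage sketch (not part of the vendored file): a config source serving a fixed base endpoint for one SDK ID, resolved through ResolveServiceBaseEndpoint. The "S3" SDK ID and endpoint URL are illustrative.

// staticEndpointSource is an illustrative ServiceBaseEndpointProvider.
type staticEndpointSource struct{}

func (staticEndpointSource) GetServiceBaseEndpoint(_ context.Context, sdkID string) (string, bool, error) {
	if sdkID == "S3" { // illustrative SDK ID
		return "https://s3.example.internal", true, nil
	}
	return "", false, nil
}

func exampleResolveServiceBaseEndpoint(ctx context.Context) (string, bool) {
	endpoint, found, _ := ResolveServiceBaseEndpoint(ctx, "S3", []interface{}{staticEndpointSource{}})
	return endpoint, found // "https://s3.example.internal", true
}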


@ -3,4 +3,4 @@
package configsources
// goModuleVersion is the tagged release for this module
const goModuleVersion = "1.1.30"
const goModuleVersion = "1.2.10"


@ -0,0 +1,94 @@
package awsrulesfn
import (
"strings"
)
// ARN provides AWS ARN components broken out into a data structure.
type ARN struct {
Partition string
Service string
Region string
AccountId string
ResourceId OptionalStringSlice
}
const (
arnDelimiters = ":"
resourceDelimiters = "/:"
arnSections = 6
arnPrefix = "arn:"
// zero-indexed
sectionPartition = 1
sectionService = 2
sectionRegion = 3
sectionAccountID = 4
sectionResource = 5
)
// ParseARN returns an [ARN] value parsed from the input string provided. If
// the ARN cannot be parsed, nil is returned.
func ParseARN(input string) *ARN {
if !strings.HasPrefix(input, arnPrefix) {
return nil
}
sections := strings.SplitN(input, arnDelimiters, arnSections)
if numSections := len(sections); numSections != arnSections {
return nil
}
if sections[sectionPartition] == "" {
return nil
}
if sections[sectionService] == "" {
return nil
}
if sections[sectionResource] == "" {
return nil
}
return &ARN{
Partition: sections[sectionPartition],
Service: sections[sectionService],
Region: sections[sectionRegion],
AccountId: sections[sectionAccountID],
ResourceId: splitResource(sections[sectionResource]),
}
}
// splitResource splits the resource components by the ARN resource delimiters.
func splitResource(v string) []string {
var parts []string
var offset int
for offset <= len(v) {
idx := strings.IndexAny(v[offset:], "/:")
if idx < 0 {
parts = append(parts, v[offset:])
break
}
parts = append(parts, v[offset:idx+offset])
offset += idx + 1
}
return parts
}
// OptionalStringSlice provides a helper to safely index a string slice that
// may be out of bounds, returning a pointer to the string at the given index
// if the index is valid and nil otherwise.
type OptionalStringSlice []string
// Get returns a string pointer of the string at index i if the index is valid.
// Otherwise returns nil.
func (s OptionalStringSlice) Get(i int) *string {
if i < 0 || i >= len(s) {
return nil
}
v := s[i]
return &v
}
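
Usage sketch (not part of the vendored file): parsing an IAM ARN and reading the split resource components through the OptionalStringSlice helper.

func exampleParseARN() (service string, firstResourcePart *string) {
	arn := ParseARN("arn:aws:iam::123456789012:user/Development/product_1234")
	if arn == nil {
		return "", nil
	}
	// Partition "aws", Service "iam", Region "", AccountId "123456789012",
	// ResourceId split on "/:" into ["user", "Development", "product_1234"].
	return arn.Service, arn.ResourceId.Get(0)
}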


@ -0,0 +1,3 @@
// Package awsrulesfn provides AWS focused endpoint rule functions for
// evaluating endpoint resolution rules.
package awsrulesfn


@ -0,0 +1,7 @@
//go:build codegen
// +build codegen
package awsrulesfn
//go:generate go run -tags codegen ./internal/partition/codegen.go -model partitions.json -output partitions.go
//go:generate gofmt -w -s .


@ -0,0 +1,51 @@
package awsrulesfn
import (
"net"
"strings"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// IsVirtualHostableS3Bucket reports whether the input is a DNS-compatible
// bucket name that can be used with Amazon S3 virtual hosted style addressing.
// Similar to [rulesfn.IsValidHostLabel] with the added restrictions that each
// label must be 3 to 63 characters long, all lowercase, and not formatted as
// an IP address.
func IsVirtualHostableS3Bucket(input string, allowSubDomains bool) bool {
// input should not be formatted as an IP address
// NOTE: this will technically trip up on IPv6 hosts with zone IDs, but
// validation further down will catch that anyway (it's guaranteed to have
// unfriendly characters % and : if that's the case)
if net.ParseIP(input) != nil {
return false
}
var labels []string
if allowSubDomains {
labels = strings.Split(input, ".")
} else {
labels = []string{input}
}
for _, label := range labels {
// validate special length constraints
if l := len(label); l < 3 || l > 63 {
return false
}
// Validate no capital letters
for _, r := range label {
if r >= 'A' && r <= 'Z' {
return false
}
}
// Validate valid host label
if !smithyhttp.ValidHostLabel(label) {
return false
}
}
return true
}
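
Usage sketch (not part of the vendored file): the bucket names below are illustrative and exercise the length, case, IP, and subdomain checks.

func exampleIsVirtualHostableS3Bucket() []bool {
	return []bool{
		IsVirtualHostableS3Bucket("my-bucket-name", false),     // true: 3-63 chars, lowercase, valid label
		IsVirtualHostableS3Bucket("docs.example.bucket", true), // true: each label checked when subdomains allowed
		IsVirtualHostableS3Bucket("My-Bucket", false),          // false: contains uppercase letters
		IsVirtualHostableS3Bucket("192.168.5.4", false),        // false: formatted as an IP address
	}
}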


@ -0,0 +1,75 @@
package awsrulesfn
import "regexp"
// Partition provides the metadata describing an AWS partition.
type Partition struct {
ID string `json:"id"`
Regions map[string]RegionOverrides `json:"regions"`
RegionRegex string `json:"regionRegex"`
DefaultConfig PartitionConfig `json:"outputs"`
}
// PartitionConfig provides the endpoint metadata for an AWS region or partition.
type PartitionConfig struct {
Name string `json:"name"`
DnsSuffix string `json:"dnsSuffix"`
DualStackDnsSuffix string `json:"dualStackDnsSuffix"`
SupportsFIPS bool `json:"supportsFIPS"`
SupportsDualStack bool `json:"supportsDualStack"`
}
type RegionOverrides struct {
Name *string `json:"name"`
DnsSuffix *string `json:"dnsSuffix"`
DualStackDnsSuffix *string `json:"dualStackDnsSuffix"`
SupportsFIPS *bool `json:"supportsFIPS"`
SupportsDualStack *bool `json:"supportsDualStack"`
}
const defaultPartition = "aws"
func getPartition(partitions []Partition, region string) *PartitionConfig {
for _, partition := range partitions {
if v, ok := partition.Regions[region]; ok {
p := mergeOverrides(partition.DefaultConfig, v)
return &p
}
}
for _, partition := range partitions {
regionRegex := regexp.MustCompile(partition.RegionRegex)
if regionRegex.MatchString(region) {
v := partition.DefaultConfig
return &v
}
}
for _, partition := range partitions {
if partition.ID == defaultPartition {
v := partition.DefaultConfig
return &v
}
}
return nil
}
func mergeOverrides(into PartitionConfig, from RegionOverrides) PartitionConfig {
if from.Name != nil {
into.Name = *from.Name
}
if from.DnsSuffix != nil {
into.DnsSuffix = *from.DnsSuffix
}
if from.DualStackDnsSuffix != nil {
into.DualStackDnsSuffix = *from.DualStackDnsSuffix
}
if from.SupportsFIPS != nil {
into.SupportsFIPS = *from.SupportsFIPS
}
if from.SupportsDualStack != nil {
into.SupportsDualStack = *from.SupportsDualStack
}
return into
}


@ -0,0 +1,381 @@
// Code generated by endpoint/awsrulesfn/internal/partition. DO NOT EDIT.
package awsrulesfn
// GetPartition returns an AWS [Partition] for the region provided. If the
// partition cannot be determined nil will be returned.
func GetPartition(region string) *PartitionConfig {
return getPartition(partitions, region)
}
var partitions = []Partition{
{
ID: "aws",
RegionRegex: "^(us|eu|ap|sa|ca|me|af|il)\\-\\w+\\-\\d+$",
DefaultConfig: PartitionConfig{
Name: "aws",
DnsSuffix: "amazonaws.com",
DualStackDnsSuffix: "api.aws",
SupportsFIPS: true,
SupportsDualStack: true,
},
Regions: map[string]RegionOverrides{
"af-south-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"ap-east-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"ap-northeast-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"ap-northeast-2": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"ap-northeast-3": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"ap-south-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"ap-south-2": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"ap-southeast-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"ap-southeast-2": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"ap-southeast-3": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"ap-southeast-4": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"aws-global": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"ca-central-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"eu-central-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"eu-central-2": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"eu-north-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"eu-south-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"eu-south-2": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"eu-west-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"eu-west-2": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"eu-west-3": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"il-central-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"me-central-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"me-south-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"sa-east-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"us-east-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"us-east-2": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"us-west-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"us-west-2": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
},
},
{
ID: "aws-cn",
RegionRegex: "^cn\\-\\w+\\-\\d+$",
DefaultConfig: PartitionConfig{
Name: "aws-cn",
DnsSuffix: "amazonaws.com.cn",
DualStackDnsSuffix: "api.amazonwebservices.com.cn",
SupportsFIPS: true,
SupportsDualStack: true,
},
Regions: map[string]RegionOverrides{
"aws-cn-global": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"cn-north-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"cn-northwest-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
},
},
{
ID: "aws-us-gov",
RegionRegex: "^us\\-gov\\-\\w+\\-\\d+$",
DefaultConfig: PartitionConfig{
Name: "aws-us-gov",
DnsSuffix: "amazonaws.com",
DualStackDnsSuffix: "api.aws",
SupportsFIPS: true,
SupportsDualStack: true,
},
Regions: map[string]RegionOverrides{
"aws-us-gov-global": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"us-gov-east-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"us-gov-west-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
},
},
{
ID: "aws-iso",
RegionRegex: "^us\\-iso\\-\\w+\\-\\d+$",
DefaultConfig: PartitionConfig{
Name: "aws-iso",
DnsSuffix: "c2s.ic.gov",
DualStackDnsSuffix: "c2s.ic.gov",
SupportsFIPS: true,
SupportsDualStack: false,
},
Regions: map[string]RegionOverrides{
"aws-iso-global": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"us-iso-east-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"us-iso-west-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
},
},
{
ID: "aws-iso-b",
RegionRegex: "^us\\-isob\\-\\w+\\-\\d+$",
DefaultConfig: PartitionConfig{
Name: "aws-iso-b",
DnsSuffix: "sc2s.sgov.gov",
DualStackDnsSuffix: "sc2s.sgov.gov",
SupportsFIPS: true,
SupportsDualStack: false,
},
Regions: map[string]RegionOverrides{
"aws-iso-b-global": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
"us-isob-east-1": {
Name: nil,
DnsSuffix: nil,
DualStackDnsSuffix: nil,
SupportsFIPS: nil,
SupportsDualStack: nil,
},
},
},
{
ID: "aws-iso-e",
RegionRegex: "^eu\\-isoe\\-\\w+\\-\\d+$",
DefaultConfig: PartitionConfig{
Name: "aws-iso-e",
DnsSuffix: "cloud.adc-e.uk",
DualStackDnsSuffix: "cloud.adc-e.uk",
SupportsFIPS: true,
SupportsDualStack: false,
},
Regions: map[string]RegionOverrides{},
},
{
ID: "aws-iso-f",
RegionRegex: "^us\\-isof\\-\\w+\\-\\d+$",
DefaultConfig: PartitionConfig{
Name: "aws-iso-f",
DnsSuffix: "csp.hci.ic.gov",
DualStackDnsSuffix: "csp.hci.ic.gov",
SupportsFIPS: true,
SupportsDualStack: false,
},
Regions: map[string]RegionOverrides{},
},
}
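
Usage sketch (not part of the vendored file): region lookups against the generated table above; nil checks are omitted because both regions appear in the table.

func exampleGetPartition() (string, string) {
	usWest2 := GetPartition("us-west-2")   // resolved via the "aws" partition
	cnNorth1 := GetPartition("cn-north-1") // resolved via the "aws-cn" partition
	return usWest2.DnsSuffix, cnNorth1.DualStackDnsSuffix
	// "amazonaws.com", "api.amazonwebservices.com.cn"
}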


@ -0,0 +1,216 @@
{
"partitions" : [ {
"id" : "aws",
"outputs" : {
"dnsSuffix" : "amazonaws.com",
"dualStackDnsSuffix" : "api.aws",
"implicitGlobalRegion" : "us-east-1",
"name" : "aws",
"supportsDualStack" : true,
"supportsFIPS" : true
},
"regionRegex" : "^(us|eu|ap|sa|ca|me|af|il)\\-\\w+\\-\\d+$",
"regions" : {
"af-south-1" : {
"description" : "Africa (Cape Town)"
},
"ap-east-1" : {
"description" : "Asia Pacific (Hong Kong)"
},
"ap-northeast-1" : {
"description" : "Asia Pacific (Tokyo)"
},
"ap-northeast-2" : {
"description" : "Asia Pacific (Seoul)"
},
"ap-northeast-3" : {
"description" : "Asia Pacific (Osaka)"
},
"ap-south-1" : {
"description" : "Asia Pacific (Mumbai)"
},
"ap-south-2" : {
"description" : "Asia Pacific (Hyderabad)"
},
"ap-southeast-1" : {
"description" : "Asia Pacific (Singapore)"
},
"ap-southeast-2" : {
"description" : "Asia Pacific (Sydney)"
},
"ap-southeast-3" : {
"description" : "Asia Pacific (Jakarta)"
},
"ap-southeast-4" : {
"description" : "Asia Pacific (Melbourne)"
},
"aws-global" : {
"description" : "AWS Standard global region"
},
"ca-central-1" : {
"description" : "Canada (Central)"
},
"ca-west-1" : {
"description" : "Canada West (Calgary)"
},
"eu-central-1" : {
"description" : "Europe (Frankfurt)"
},
"eu-central-2" : {
"description" : "Europe (Zurich)"
},
"eu-north-1" : {
"description" : "Europe (Stockholm)"
},
"eu-south-1" : {
"description" : "Europe (Milan)"
},
"eu-south-2" : {
"description" : "Europe (Spain)"
},
"eu-west-1" : {
"description" : "Europe (Ireland)"
},
"eu-west-2" : {
"description" : "Europe (London)"
},
"eu-west-3" : {
"description" : "Europe (Paris)"
},
"il-central-1" : {
"description" : "Israel (Tel Aviv)"
},
"me-central-1" : {
"description" : "Middle East (UAE)"
},
"me-south-1" : {
"description" : "Middle East (Bahrain)"
},
"sa-east-1" : {
"description" : "South America (Sao Paulo)"
},
"us-east-1" : {
"description" : "US East (N. Virginia)"
},
"us-east-2" : {
"description" : "US East (Ohio)"
},
"us-west-1" : {
"description" : "US West (N. California)"
},
"us-west-2" : {
"description" : "US West (Oregon)"
}
}
}, {
"id" : "aws-cn",
"outputs" : {
"dnsSuffix" : "amazonaws.com.cn",
"dualStackDnsSuffix" : "api.amazonwebservices.com.cn",
"implicitGlobalRegion" : "cn-northwest-1",
"name" : "aws-cn",
"supportsDualStack" : true,
"supportsFIPS" : true
},
"regionRegex" : "^cn\\-\\w+\\-\\d+$",
"regions" : {
"aws-cn-global" : {
"description" : "AWS China global region"
},
"cn-north-1" : {
"description" : "China (Beijing)"
},
"cn-northwest-1" : {
"description" : "China (Ningxia)"
}
}
}, {
"id" : "aws-us-gov",
"outputs" : {
"dnsSuffix" : "amazonaws.com",
"dualStackDnsSuffix" : "api.aws",
"implicitGlobalRegion" : "us-gov-west-1",
"name" : "aws-us-gov",
"supportsDualStack" : true,
"supportsFIPS" : true
},
"regionRegex" : "^us\\-gov\\-\\w+\\-\\d+$",
"regions" : {
"aws-us-gov-global" : {
"description" : "AWS GovCloud (US) global region"
},
"us-gov-east-1" : {
"description" : "AWS GovCloud (US-East)"
},
"us-gov-west-1" : {
"description" : "AWS GovCloud (US-West)"
}
}
}, {
"id" : "aws-iso",
"outputs" : {
"dnsSuffix" : "c2s.ic.gov",
"dualStackDnsSuffix" : "c2s.ic.gov",
"implicitGlobalRegion" : "us-iso-east-1",
"name" : "aws-iso",
"supportsDualStack" : false,
"supportsFIPS" : true
},
"regionRegex" : "^us\\-iso\\-\\w+\\-\\d+$",
"regions" : {
"aws-iso-global" : {
"description" : "AWS ISO (US) global region"
},
"us-iso-east-1" : {
"description" : "US ISO East"
},
"us-iso-west-1" : {
"description" : "US ISO WEST"
}
}
}, {
"id" : "aws-iso-b",
"outputs" : {
"dnsSuffix" : "sc2s.sgov.gov",
"dualStackDnsSuffix" : "sc2s.sgov.gov",
"implicitGlobalRegion" : "us-isob-east-1",
"name" : "aws-iso-b",
"supportsDualStack" : false,
"supportsFIPS" : true
},
"regionRegex" : "^us\\-isob\\-\\w+\\-\\d+$",
"regions" : {
"aws-iso-b-global" : {
"description" : "AWS ISOB (US) global region"
},
"us-isob-east-1" : {
"description" : "US ISOB East (Ohio)"
}
}
}, {
"id" : "aws-iso-e",
"outputs" : {
"dnsSuffix" : "cloud.adc-e.uk",
"dualStackDnsSuffix" : "cloud.adc-e.uk",
"implicitGlobalRegion" : "eu-isoe-west-1",
"name" : "aws-iso-e",
"supportsDualStack" : false,
"supportsFIPS" : true
},
"regionRegex" : "^eu\\-isoe\\-\\w+\\-\\d+$",
"regions" : { }
}, {
"id" : "aws-iso-f",
"outputs" : {
"dnsSuffix" : "csp.hci.ic.gov",
"dualStackDnsSuffix" : "csp.hci.ic.gov",
"implicitGlobalRegion" : "us-isof-south-1",
"name" : "aws-iso-f",
"supportsDualStack" : false,
"supportsFIPS" : true
},
"regionRegex" : "^us\\-isof\\-\\w+\\-\\d+$",
"regions" : { }
} ],
"version" : "1.1"
}


@ -0,0 +1,201 @@
package endpoints
import (
"fmt"
"regexp"
"strings"
"github.com/aws/aws-sdk-go-v2/aws"
)
const (
defaultProtocol = "https"
defaultSigner = "v4"
)
var (
protocolPriority = []string{"https", "http"}
signerPriority = []string{"v4"}
)
// Options provide configuration needed to direct how endpoints are resolved.
type Options struct {
// Disable usage of HTTPS (TLS / SSL)
DisableHTTPS bool
}
// Partitions is a slice of partition
type Partitions []Partition
// ResolveEndpoint resolves a service endpoint for the given region and options.
func (ps Partitions) ResolveEndpoint(region string, opts Options) (aws.Endpoint, error) {
if len(ps) == 0 {
return aws.Endpoint{}, fmt.Errorf("no partitions found")
}
for i := 0; i < len(ps); i++ {
if !ps[i].canResolveEndpoint(region) {
continue
}
return ps[i].ResolveEndpoint(region, opts)
}
// Fall back to the first partition when resolving the endpoint.
return ps[0].ResolveEndpoint(region, opts)
}
// Partition is an AWS partition description for a service and its region endpoints.
type Partition struct {
ID string
RegionRegex *regexp.Regexp
PartitionEndpoint string
IsRegionalized bool
Defaults Endpoint
Endpoints Endpoints
}
func (p Partition) canResolveEndpoint(region string) bool {
_, ok := p.Endpoints[region]
return ok || p.RegionRegex.MatchString(region)
}
// ResolveEndpoint resolves a service endpoint for the given region and options.
func (p Partition) ResolveEndpoint(region string, options Options) (resolved aws.Endpoint, err error) {
if len(region) == 0 && len(p.PartitionEndpoint) != 0 {
region = p.PartitionEndpoint
}
e, _ := p.endpointForRegion(region)
return e.resolve(p.ID, region, p.Defaults, options), nil
}
func (p Partition) endpointForRegion(region string) (Endpoint, bool) {
if e, ok := p.Endpoints[region]; ok {
return e, true
}
if !p.IsRegionalized {
return p.Endpoints[p.PartitionEndpoint], region == p.PartitionEndpoint
}
// No matching endpoint was found; return a blank Endpoint that
// will be used for generic endpoint creation.
return Endpoint{}, false
}
// Endpoints is a map of service config regions to endpoints
type Endpoints map[string]Endpoint
// CredentialScope is the credential scope of a region and service
type CredentialScope struct {
Region string
Service string
}
// Endpoint is a service endpoint description
type Endpoint struct {
// True if the endpoint cannot be resolved for this partition/region/service
Unresolveable aws.Ternary
Hostname string
Protocols []string
CredentialScope CredentialScope
SignatureVersions []string `json:"signatureVersions"`
}
func (e Endpoint) resolve(partition, region string, def Endpoint, options Options) aws.Endpoint {
var merged Endpoint
merged.mergeIn(def)
merged.mergeIn(e)
e = merged
var u string
if e.Unresolveable != aws.TrueTernary {
// Only attempt to resolve the endpoint if it can be resolved.
hostname := strings.Replace(e.Hostname, "{region}", region, 1)
scheme := getEndpointScheme(e.Protocols, options.DisableHTTPS)
u = scheme + "://" + hostname
}
signingRegion := e.CredentialScope.Region
if len(signingRegion) == 0 {
signingRegion = region
}
signingName := e.CredentialScope.Service
return aws.Endpoint{
URL: u,
PartitionID: partition,
SigningRegion: signingRegion,
SigningName: signingName,
SigningMethod: getByPriority(e.SignatureVersions, signerPriority, defaultSigner),
}
}
func (e *Endpoint) mergeIn(other Endpoint) {
if other.Unresolveable != aws.UnknownTernary {
e.Unresolveable = other.Unresolveable
}
if len(other.Hostname) > 0 {
e.Hostname = other.Hostname
}
if len(other.Protocols) > 0 {
e.Protocols = other.Protocols
}
if len(other.CredentialScope.Region) > 0 {
e.CredentialScope.Region = other.CredentialScope.Region
}
if len(other.CredentialScope.Service) > 0 {
e.CredentialScope.Service = other.CredentialScope.Service
}
if len(other.SignatureVersions) > 0 {
e.SignatureVersions = other.SignatureVersions
}
}
func getEndpointScheme(protocols []string, disableHTTPS bool) string {
if disableHTTPS {
return "http"
}
return getByPriority(protocols, protocolPriority, defaultProtocol)
}
func getByPriority(s []string, p []string, def string) string {
if len(s) == 0 {
return def
}
for i := 0; i < len(p); i++ {
for j := 0; j < len(s); j++ {
if s[j] == p[i] {
return s[j]
}
}
}
return s[0]
}
// MapFIPSRegion extracts the intrinsic AWS region from one that may have an
// embedded FIPS microformat.
func MapFIPSRegion(region string) string {
const fipsInfix = "-fips-"
const fipsPrefix = "fips-"
const fipsSuffix = "-fips"
if strings.Contains(region, fipsInfix) ||
strings.Contains(region, fipsPrefix) ||
strings.Contains(region, fipsSuffix) {
region = strings.ReplaceAll(region, fipsInfix, "-")
region = strings.ReplaceAll(region, fipsPrefix, "")
region = strings.ReplaceAll(region, fipsSuffix, "")
}
return region
}
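
Usage sketch (not part of the vendored file): the FIPS prefix and suffix forms are stripped, while a plain region passes through unchanged.

func exampleMapFIPSRegion() []string {
	return []string{
		MapFIPSRegion("fips-us-gov-west-1"), // "us-gov-west-1" (prefix form)
		MapFIPSRegion("us-east-1-fips"),     // "us-east-1" (suffix form)
		MapFIPSRegion("us-east-1"),          // "us-east-1" (unchanged)
	}
}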


@ -1,3 +1,100 @@
# v2.5.10 (2024-01-04)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.5.9 (2023-12-07)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.5.8 (2023-12-01)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.5.7 (2023-11-30)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.5.6 (2023-11-29)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.5.5 (2023-11-28.2)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.5.4 (2023-11-20)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.5.3 (2023-11-15)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.5.2 (2023-11-09)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.5.1 (2023-11-01)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.5.0 (2023-10-31)
* **Feature**: **BREAKING CHANGE**: Bump minimum go version to 1.19 per the revised [go version support policy](https://aws.amazon.com/blogs/developer/aws-sdk-for-go-aligns-with-go-release-policy-on-supported-runtimes/).
* **Dependency Update**: Updated to the latest SDK module versions
# v2.4.37 (2023-10-12)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.4.36 (2023-10-06)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.4.35 (2023-08-21)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.4.34 (2023-08-18)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.4.33 (2023-08-17)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.4.32 (2023-08-07)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.4.31 (2023-07-31)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.4.30 (2023-07-28)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.4.29 (2023-07-13)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.4.28 (2023-06-13)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.4.27 (2023-04-24)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.4.26 (2023-04-07)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.4.25 (2023-03-21)
* **Dependency Update**: Updated to the latest SDK module versions
# v2.4.24 (2023-03-10)
* **Dependency Update**: Updated to the latest SDK module versions


@ -3,4 +3,4 @@
package endpoints
// goModuleVersion is the tagged release for this module
const goModuleVersion = "2.4.24"
const goModuleVersion = "2.5.10"


@ -1,3 +1,98 @@
# v1.7.3 (2024-01-22)
* **Bug Fix**: Remove invalid escaping of shared config values. All values in the shared config file will now be interpreted literally, save for fully-quoted strings which are unwrapped for legacy reasons.
# v1.7.2 (2023-12-08)
* **Bug Fix**: Correct loading of [services *] sections into shared config.
# v1.7.1 (2023-11-16)
* **Bug Fix**: Fix recognition of trailing comments in shared config properties. # or ; separators that aren't preceded by whitespace at the end of a property value should be considered part of it.
# v1.7.0 (2023-11-13)
* **Feature**: Replace the legacy config parser with a modern, less-strict implementation. Parsing failures within a section will now simply ignore the invalid line rather than silently drop the entire section.
# v1.6.0 (2023-11-09.2)
* **Feature**: BREAKFIX: In order to support subproperty parsing, invalid property definitions must not be ignored
# v1.5.2 (2023-11-09)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.5.1 (2023-11-07)
* **Bug Fix**: Fix subproperty performance regression
# v1.5.0 (2023-11-01)
* **Feature**: Adds support for configured endpoints via environment variables and the AWS shared configuration file.
* **Dependency Update**: Updated to the latest SDK module versions
# v1.4.0 (2023-10-31)
* **Feature**: **BREAKING CHANGE**: Bump minimum go version to 1.19 per the revised [go version support policy](https://aws.amazon.com/blogs/developer/aws-sdk-for-go-aligns-with-go-release-policy-on-supported-runtimes/).
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.45 (2023-10-12)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.44 (2023-10-06)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.43 (2023-09-22)
* **Bug Fix**: Fixed a bug where merging `max_attempts` or `duration_seconds` fields across shared config files with invalid values would silently default them to 0.
* **Bug Fix**: Move type assertion of config values out of the parsing stage, which resolves an issue where the contents of a profile would silently be dropped with certain numeric formats.
# v1.3.42 (2023-08-21)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.41 (2023-08-18)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.40 (2023-08-17)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.39 (2023-08-07)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.38 (2023-07-31)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.37 (2023-07-28)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.36 (2023-07-13)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.35 (2023-06-13)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.34 (2023-04-24)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.33 (2023-04-07)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.32 (2023-03-21)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.31 (2023-03-10)
* **Dependency Update**: Updated to the latest SDK module versions


@ -1,120 +0,0 @@
package ini
// ASTKind represents different states in the parse table
// and the type of AST that is being constructed
type ASTKind int
// ASTKind* is used in the parse table to transition between
// the different states
const (
ASTKindNone = ASTKind(iota)
ASTKindStart
ASTKindExpr
ASTKindEqualExpr
ASTKindStatement
ASTKindSkipStatement
ASTKindExprStatement
ASTKindSectionStatement
ASTKindNestedSectionStatement
ASTKindCompletedNestedSectionStatement
ASTKindCommentStatement
ASTKindCompletedSectionStatement
)
func (k ASTKind) String() string {
switch k {
case ASTKindNone:
return "none"
case ASTKindStart:
return "start"
case ASTKindExpr:
return "expr"
case ASTKindStatement:
return "stmt"
case ASTKindSectionStatement:
return "section_stmt"
case ASTKindExprStatement:
return "expr_stmt"
case ASTKindCommentStatement:
return "comment"
case ASTKindNestedSectionStatement:
return "nested_section_stmt"
case ASTKindCompletedSectionStatement:
return "completed_stmt"
case ASTKindSkipStatement:
return "skip"
default:
return ""
}
}
// AST interface allows us to determine what kind of node we
// are on and casting may not need to be necessary.
//
// The root is always the first node in Children
type AST struct {
Kind ASTKind
Root Token
RootToken bool
Children []AST
}
func newAST(kind ASTKind, root AST, children ...AST) AST {
return AST{
Kind: kind,
Children: append([]AST{root}, children...),
}
}
func newASTWithRootToken(kind ASTKind, root Token, children ...AST) AST {
return AST{
Kind: kind,
Root: root,
RootToken: true,
Children: children,
}
}
// AppendChild will append to the list of children an AST has.
func (a *AST) AppendChild(child AST) {
a.Children = append(a.Children, child)
}
// GetRoot will return the root AST which can be the first entry
// in the children list or a token.
func (a *AST) GetRoot() AST {
if a.RootToken {
return *a
}
if len(a.Children) == 0 {
return AST{}
}
return a.Children[0]
}
// GetChildren will return the current AST's list of children
func (a *AST) GetChildren() []AST {
if len(a.Children) == 0 {
return []AST{}
}
if a.RootToken {
return a.Children
}
return a.Children[1:]
}
// SetChildren will set and override all children of the AST.
func (a *AST) SetChildren(children []AST) {
if a.RootToken {
a.Children = children
} else {
a.Children = append(a.Children[:1], children...)
}
}
// Start is used to indicate the starting state of the parse table.
var Start = newAST(ASTKindStart, AST{})


@ -1,11 +0,0 @@
package ini
var commaRunes = []rune(",")
func isComma(b rune) bool {
return b == ','
}
func newCommaToken() Token {
return newToken(TokenComma, commaRunes, NoneType)
}


@ -1,35 +0,0 @@
package ini
// isComment will return whether or not the next byte(s) is a
// comment.
func isComment(b []rune) bool {
if len(b) == 0 {
return false
}
switch b[0] {
case ';':
return true
case '#':
return true
}
return false
}
// newCommentToken will create a comment token and
// return how many bytes were read.
func newCommentToken(b []rune) (Token, int, error) {
i := 0
for ; i < len(b); i++ {
if b[i] == '\n' {
break
}
if len(b)-i > 2 && b[i] == '\r' && b[i+1] == '\n' {
break
}
}
return newToken(TokenComment, b[:i], NoneType), i, nil
}


@ -1,6 +0,0 @@
package ini
import (
// internal/ini module was carved out of this module
_ "github.com/aws/aws-sdk-go-v2"
)


@ -1,43 +0,0 @@
// Package ini is an LL(1) parser for configuration files.
//
// Example:
// sections, err := ini.OpenFile("/path/to/file")
// if err != nil {
// panic(err)
// }
//
// profile := "foo"
// section, ok := sections.GetSection(profile)
// if !ok {
// fmt.Printf("section %q could not be found", profile)
// }
//
// Below is the BNF that describes this parser
//
// Grammar:
// stmt -> section | stmt'
// stmt' -> epsilon | expr
// expr -> value (stmt)* | equal_expr (stmt)*
// equal_expr -> value ( ':' | '=' ) equal_expr'
// equal_expr' -> number | string | quoted_string
// quoted_string -> " quoted_string'
// quoted_string' -> string quoted_string_end
// quoted_string_end -> "
//
// section -> [ section'
// section' -> section_value section_close
// section_value -> number | string_subset | boolean | quoted_string_subset
// quoted_string_subset -> " quoted_string_subset'
// quoted_string_subset' -> string_subset quoted_string_end
// quoted_string_subset -> "
// section_close -> ]
//
// value -> number | string_subset | boolean
// string -> ? UTF-8 Code-Points except '\n' (U+000A) and '\r\n' (U+000D U+000A) ?
// string_subset -> ? Code-points excepted by <string> grammar except ':' (U+003A), '=' (U+003D), '[' (U+005B), and ']' (U+005D) ?
//
// SkipState will skip (NL WS)+
//
// comment -> # comment' | ; comment'
// comment' -> epsilon | value
package ini


@ -1,4 +0,0 @@
package ini
// emptyToken is used to satisfy the Token interface
var emptyToken = newToken(TokenNone, []rune{}, NoneType)


@ -1,24 +0,0 @@
package ini
// newExpression will return an expression AST.
// Expr represents an expression
//
// grammar:
// expr -> string | number
func newExpression(tok Token) AST {
return newASTWithRootToken(ASTKindExpr, tok)
}
func newEqualExpr(left AST, tok Token) AST {
return newASTWithRootToken(ASTKindEqualExpr, tok, left)
}
// EqualExprKey will return a LHS value in the equal expr
func EqualExprKey(ast AST) string {
children := ast.GetChildren()
if len(children) == 0 || ast.Kind != ASTKindEqualExpr {
return ""
}
return string(children[0].Root.Raw())
}


@ -3,4 +3,4 @@
package ini
// goModuleVersion is the tagged release for this module
const goModuleVersion = "1.3.31"
const goModuleVersion = "1.7.3"


@ -1,13 +1,26 @@
// Package ini implements parsing of the AWS shared config file.
//
// Example:
// sections, err := ini.OpenFile("/path/to/file")
// if err != nil {
// panic(err)
// }
//
// profile := "foo"
// section, ok := sections.GetSection(profile)
// if !ok {
// fmt.Printf("section %q could not be found", profile)
// }
package ini
import (
"fmt"
"io"
"os"
"strings"
)
// OpenFile takes a path to a given file, and will open and parse
// that file.
// OpenFile parses shared config from the given file path.
func OpenFile(path string) (sections Sections, err error) {
f, oerr := os.Open(path)
if oerr != nil {
@ -26,33 +39,18 @@ func OpenFile(path string) (sections Sections, err error) {
return Parse(f, path)
}
// Parse will parse the given file using the shared config
// visitor.
func Parse(f io.Reader, path string) (Sections, error) {
tree, err := ParseAST(f)
// Parse parses shared config from the given reader.
func Parse(r io.Reader, path string) (Sections, error) {
contents, err := io.ReadAll(r)
if err != nil {
return Sections{}, err
return Sections{}, fmt.Errorf("read all: %v", err)
}
v := NewDefaultVisitor(path)
if err = Walk(tree, v); err != nil {
return Sections{}, err
}
return v.Sections, nil
}
// ParseBytes will parse the given bytes and return the parsed sections.
func ParseBytes(b []byte) (Sections, error) {
tree, err := ParseASTBytes(b)
lines := strings.Split(string(contents), "\n")
tokens, err := tokenize(lines)
if err != nil {
return Sections{}, err
return Sections{}, fmt.Errorf("tokenize: %v", err)
}
v := NewDefaultVisitor("")
if err = Walk(tree, v); err != nil {
return Sections{}, err
}
return v.Sections, nil
return parse(tokens, path), nil
}


@ -1,157 +0,0 @@
package ini
import (
"bytes"
"io"
"io/ioutil"
)
// TokenType represents the various different tokens types
type TokenType int
func (t TokenType) String() string {
switch t {
case TokenNone:
return "none"
case TokenLit:
return "literal"
case TokenSep:
return "sep"
case TokenOp:
return "op"
case TokenWS:
return "ws"
case TokenNL:
return "newline"
case TokenComment:
return "comment"
case TokenComma:
return "comma"
default:
return ""
}
}
// TokenType enums
const (
TokenNone = TokenType(iota)
TokenLit
TokenSep
TokenComma
TokenOp
TokenWS
TokenNL
TokenComment
)
type iniLexer struct{}
// Tokenize will return a list of tokens during lexical analysis of the
// io.Reader.
func (l *iniLexer) Tokenize(r io.Reader) ([]Token, error) {
b, err := ioutil.ReadAll(r)
if err != nil {
return nil, &UnableToReadFile{Err: err}
}
return l.tokenize(b)
}
func (l *iniLexer) tokenize(b []byte) ([]Token, error) {
runes := bytes.Runes(b)
var err error
n := 0
tokenAmount := countTokens(runes)
tokens := make([]Token, tokenAmount)
count := 0
for len(runes) > 0 && count < tokenAmount {
switch {
case isWhitespace(runes[0]):
tokens[count], n, err = newWSToken(runes)
case isComma(runes[0]):
tokens[count], n = newCommaToken(), 1
case isComment(runes):
tokens[count], n, err = newCommentToken(runes)
case isNewline(runes):
tokens[count], n, err = newNewlineToken(runes)
case isSep(runes):
tokens[count], n, err = newSepToken(runes)
case isOp(runes):
tokens[count], n, err = newOpToken(runes)
default:
tokens[count], n, err = newLitToken(runes)
}
if err != nil {
return nil, err
}
count++
runes = runes[n:]
}
return tokens[:count], nil
}
func countTokens(runes []rune) int {
count, n := 0, 0
var err error
for len(runes) > 0 {
switch {
case isWhitespace(runes[0]):
_, n, err = newWSToken(runes)
case isComma(runes[0]):
_, n = newCommaToken(), 1
case isComment(runes):
_, n, err = newCommentToken(runes)
case isNewline(runes):
_, n, err = newNewlineToken(runes)
case isSep(runes):
_, n, err = newSepToken(runes)
case isOp(runes):
_, n, err = newOpToken(runes)
default:
_, n, err = newLitToken(runes)
}
if err != nil {
return 0
}
count++
runes = runes[n:]
}
return count + 1
}
// Token indicates a metadata about a given value.
type Token struct {
t TokenType
ValueType ValueType
base int
raw []rune
}
var emptyValue = Value{}
func newToken(t TokenType, raw []rune, v ValueType) Token {
return Token{
t: t,
raw: raw,
ValueType: v,
}
}
// Raw return the raw runes that were consumed
func (tok Token) Raw() []rune {
return tok.raw
}
// Type returns the token type
func (tok Token) Type() TokenType {
return tok.t
}


@ -1,349 +0,0 @@
package ini
import (
"fmt"
"io"
)
// ParseState represents the current state of the parser.
type ParseState uint
// State enums for the parse table
const (
InvalidState ParseState = iota
// stmt -> value stmt'
StatementState
// stmt' -> MarkComplete | op stmt
StatementPrimeState
// value -> number | string | boolean | quoted_string
ValueState
// section -> [ section'
OpenScopeState
// section' -> value section_close
SectionState
// section_close -> ]
CloseScopeState
// SkipState will skip (NL WS)+
SkipState
// SkipTokenState will skip any token and push the previous
// state onto the stack.
SkipTokenState
// comment -> # comment' | ; comment'
// comment' -> MarkComplete | value
CommentState
// MarkComplete state will complete statements and move that
// to the completed AST list
MarkCompleteState
// TerminalState signifies that the tokens have been fully parsed
TerminalState
)
// parseTable is a state machine to dictate the grammar above.
var parseTable = map[ASTKind]map[TokenType]ParseState{
ASTKindStart: {
TokenLit: StatementState,
TokenSep: OpenScopeState,
TokenWS: SkipTokenState,
TokenNL: SkipTokenState,
TokenComment: CommentState,
TokenNone: TerminalState,
},
ASTKindCommentStatement: {
TokenLit: StatementState,
TokenSep: OpenScopeState,
TokenWS: SkipTokenState,
TokenNL: SkipTokenState,
TokenComment: CommentState,
TokenNone: MarkCompleteState,
},
ASTKindExpr: {
TokenOp: StatementPrimeState,
TokenLit: ValueState,
TokenSep: OpenScopeState,
TokenWS: ValueState,
TokenNL: SkipState,
TokenComment: CommentState,
TokenNone: MarkCompleteState,
},
ASTKindEqualExpr: {
TokenLit: ValueState,
TokenSep: ValueState,
TokenOp: ValueState,
TokenWS: SkipTokenState,
TokenNL: SkipState,
},
ASTKindStatement: {
TokenLit: SectionState,
TokenSep: CloseScopeState,
TokenWS: SkipTokenState,
TokenNL: SkipTokenState,
TokenComment: CommentState,
TokenNone: MarkCompleteState,
},
ASTKindExprStatement: {
TokenLit: ValueState,
TokenSep: ValueState,
TokenOp: ValueState,
TokenWS: ValueState,
TokenNL: MarkCompleteState,
TokenComment: CommentState,
TokenNone: TerminalState,
TokenComma: SkipState,
},
ASTKindSectionStatement: {
TokenLit: SectionState,
TokenOp: SectionState,
TokenSep: CloseScopeState,
TokenWS: SectionState,
TokenNL: SkipTokenState,
},
ASTKindCompletedSectionStatement: {
TokenWS: SkipTokenState,
TokenNL: SkipTokenState,
TokenLit: StatementState,
TokenSep: OpenScopeState,
TokenComment: CommentState,
TokenNone: MarkCompleteState,
},
ASTKindSkipStatement: {
TokenLit: StatementState,
TokenSep: OpenScopeState,
TokenWS: SkipTokenState,
TokenNL: SkipTokenState,
TokenComment: CommentState,
TokenNone: TerminalState,
},
}
// ParseAST will parse input from an io.Reader using
// an LL(1) parser.
func ParseAST(r io.Reader) ([]AST, error) {
lexer := iniLexer{}
tokens, err := lexer.Tokenize(r)
if err != nil {
return []AST{}, err
}
return parse(tokens)
}
// ParseASTBytes will parse input from a byte slice using
// an LL(1) parser.
func ParseASTBytes(b []byte) ([]AST, error) {
lexer := iniLexer{}
tokens, err := lexer.tokenize(b)
if err != nil {
return []AST{}, err
}
return parse(tokens)
}
func parse(tokens []Token) ([]AST, error) {
start := Start
stack := newParseStack(3, len(tokens))
stack.Push(start)
s := newSkipper()
loop:
for stack.Len() > 0 {
k := stack.Pop()
var tok Token
if len(tokens) == 0 {
// this occurs when all the tokens have been processed
// but reduction of what's left on the stack needs to
// occur.
tok = emptyToken
} else {
tok = tokens[0]
}
step := parseTable[k.Kind][tok.Type()]
if s.ShouldSkip(tok) {
// being in a skip state with no tokens will break out of
// the parse loop since there is nothing left to process.
if len(tokens) == 0 {
break loop
}
// if should skip is true, we skip the tokens until should skip is set to false.
step = SkipTokenState
}
switch step {
case TerminalState:
// Finished parsing. Push what should be the last
// statement to the stack. If there is anything left
// on the stack, an error in parsing has occurred.
if k.Kind != ASTKindStart {
stack.MarkComplete(k)
}
break loop
case SkipTokenState:
// When skipping a token, the previous state was popped off the stack.
// To maintain the correct state, the previous state will be pushed
// onto the stack.
stack.Push(k)
case StatementState:
if k.Kind != ASTKindStart {
stack.MarkComplete(k)
}
expr := newExpression(tok)
stack.Push(expr)
case StatementPrimeState:
if tok.Type() != TokenOp {
stack.MarkComplete(k)
continue
}
if k.Kind != ASTKindExpr {
return nil, NewParseError(
fmt.Sprintf("invalid expression: expected Expr type, but found %T type", k),
)
}
k = trimSpaces(k)
expr := newEqualExpr(k, tok)
stack.Push(expr)
case ValueState:
// ValueState requires the previous state to either be an equal expression
// or an expression statement.
switch k.Kind {
case ASTKindEqualExpr:
// assigning a value to some key
k.AppendChild(newExpression(tok))
stack.Push(newExprStatement(k))
case ASTKindExpr:
k.Root.raw = append(k.Root.raw, tok.Raw()...)
stack.Push(k)
case ASTKindExprStatement:
root := k.GetRoot()
children := root.GetChildren()
if len(children) == 0 {
return nil, NewParseError(
fmt.Sprintf("invalid expression: AST contains no children %s", k.Kind),
)
}
rhs := children[len(children)-1]
if rhs.Root.ValueType != QuotedStringType {
rhs.Root.ValueType = StringType
rhs.Root.raw = append(rhs.Root.raw, tok.Raw()...)
}
children[len(children)-1] = rhs
root.SetChildren(children)
stack.Push(k)
}
case OpenScopeState:
if !runeCompare(tok.Raw(), openBrace) {
return nil, NewParseError("expected '['")
}
// If OpenScopeState is not at the start, we must mark the previous ast as complete
//
// for example: if previous ast was a skip statement;
// we should mark it as complete before we create a new statement
if k.Kind != ASTKindStart {
stack.MarkComplete(k)
}
stmt := newStatement()
stack.Push(stmt)
case CloseScopeState:
if !runeCompare(tok.Raw(), closeBrace) {
return nil, NewParseError("expected ']'")
}
k = trimSpaces(k)
stack.Push(newCompletedSectionStatement(k))
case SectionState:
var stmt AST
switch k.Kind {
case ASTKindStatement:
// If there are multiple literals inside of a scope declaration,
// then the current token's raw value will be appended to the Name.
//
// This handles cases like [ profile default ]
//
// k will represent a SectionStatement with the children representing
// the label of the section
stmt = newSectionStatement(tok)
case ASTKindSectionStatement:
k.Root.raw = append(k.Root.raw, tok.Raw()...)
stmt = k
default:
return nil, NewParseError(
fmt.Sprintf("invalid statement: expected statement: %v", k.Kind),
)
}
stack.Push(stmt)
case MarkCompleteState:
if k.Kind != ASTKindStart {
stack.MarkComplete(k)
}
if stack.Len() == 0 {
stack.Push(start)
}
case SkipState:
stack.Push(newSkipStatement(k))
s.Skip()
case CommentState:
if k.Kind == ASTKindStart {
stack.Push(k)
} else {
stack.MarkComplete(k)
}
stmt := newCommentStatement(tok)
stack.Push(stmt)
default:
return nil, NewParseError(
fmt.Sprintf("invalid state with ASTKind %v and TokenType %v",
k.Kind, tok.Type()))
}
if len(tokens) > 0 {
tokens = tokens[1:]
}
}
// this occurs when a statement has not been completed
if stack.top > 1 {
return nil, NewParseError(fmt.Sprintf("incomplete ini expression"))
}
// returns a sublist which excludes the start symbol
return stack.List(), nil
}
// trimSpaces will trim spaces on the left and right hand side of
// the literal.
func trimSpaces(k AST) AST {
// trim left hand side of spaces
for i := 0; i < len(k.Root.raw); i++ {
if !isWhitespace(k.Root.raw[i]) {
break
}
k.Root.raw = k.Root.raw[1:]
i--
}
// trim right hand side of spaces
for i := len(k.Root.raw) - 1; i >= 0; i-- {
if !isWhitespace(k.Root.raw[i]) {
break
}
k.Root.raw = k.Root.raw[:len(k.Root.raw)-1]
}
return k
}

View File

@ -1,336 +0,0 @@
package ini
import (
"fmt"
"strconv"
"strings"
"unicode"
)
var (
runesTrue = []rune("true")
runesFalse = []rune("false")
)
var literalValues = [][]rune{
runesTrue,
runesFalse,
}
func isBoolValue(b []rune) bool {
for _, lv := range literalValues {
if isCaselessLitValue(lv, b) {
return true
}
}
return false
}
func isLitValue(want, have []rune) bool {
if len(have) < len(want) {
return false
}
for i := 0; i < len(want); i++ {
if want[i] != have[i] {
return false
}
}
return true
}
// isCaselessLitValue is a caseless value comparison, assumes want is already lower-cased for efficiency.
func isCaselessLitValue(want, have []rune) bool {
if len(have) < len(want) {
return false
}
for i := 0; i < len(want); i++ {
if want[i] != unicode.ToLower(have[i]) {
return false
}
}
return true
}
// isNumberValue will return whether or not the leading characters in
// a byte slice are a number. A number is delimited by whitespace or
// the newline token.
//
// A number is defined to be in a binary, octal, decimal (int | float), hex format,
// or in scientific notation.
func isNumberValue(b []rune) bool {
negativeIndex := 0
helper := numberHelper{}
needDigit := false
for i := 0; i < len(b); i++ {
negativeIndex++
switch b[i] {
case '-':
if helper.IsNegative() || negativeIndex != 1 {
return false
}
helper.Determine(b[i])
needDigit = true
continue
case 'e', 'E':
if err := helper.Determine(b[i]); err != nil {
return false
}
negativeIndex = 0
needDigit = true
continue
case 'b':
if helper.numberFormat == hex {
break
}
fallthrough
case 'o', 'x':
needDigit = true
if i == 0 {
return false
}
fallthrough
case '.':
if err := helper.Determine(b[i]); err != nil {
return false
}
needDigit = true
continue
}
if i > 0 && (isNewline(b[i:]) || isWhitespace(b[i])) {
return !needDigit
}
if !helper.CorrectByte(b[i]) {
return false
}
needDigit = false
}
return !needDigit
}
func isValid(b []rune) (bool, int, error) {
if len(b) == 0 {
// TODO: should probably return an error
return false, 0, nil
}
return isValidRune(b[0]), 1, nil
}
func isValidRune(r rune) bool {
return r != ':' && r != '=' && r != '[' && r != ']' && r != ' ' && r != '\n'
}
// ValueType is an enum that will signify what type
// the Value is
type ValueType int
func (v ValueType) String() string {
switch v {
case NoneType:
return "NONE"
case DecimalType:
return "FLOAT"
case IntegerType:
return "INT"
case StringType:
return "STRING"
case BoolType:
return "BOOL"
}
return ""
}
// ValueType enums
const (
NoneType = ValueType(iota)
DecimalType
IntegerType
StringType
QuotedStringType
BoolType
)
// Value is a union container
type Value struct {
Type ValueType
raw []rune
integer int64
decimal float64
boolean bool
str string
}
func newValue(t ValueType, base int, raw []rune) (Value, error) {
v := Value{
Type: t,
raw: raw,
}
var err error
switch t {
case DecimalType:
v.decimal, err = strconv.ParseFloat(string(raw), 64)
case IntegerType:
if base != 10 {
raw = raw[2:]
}
v.integer, err = strconv.ParseInt(string(raw), base, 64)
case StringType:
v.str = string(raw)
case QuotedStringType:
v.str = string(raw[1 : len(raw)-1])
case BoolType:
v.boolean = isCaselessLitValue(runesTrue, v.raw)
}
// issue 2253
//
// if the value trying to be parsed is too large, then we will use
// the 'StringType' and raw value instead.
if nerr, ok := err.(*strconv.NumError); ok && nerr.Err == strconv.ErrRange {
v.Type = StringType
v.str = string(raw)
err = nil
}
return v, err
}
// NewStringValue returns a Value type generated using a string input.
func NewStringValue(str string) (Value, error) {
return newValue(StringType, 10, []rune(str))
}
// NewIntValue returns a Value type generated using an int64 input.
func NewIntValue(i int64) (Value, error) {
v := strconv.FormatInt(i, 10)
return newValue(IntegerType, 10, []rune(v))
}
func (v Value) String() string {
switch v.Type {
case DecimalType:
return fmt.Sprintf("decimal: %f", v.decimal)
case IntegerType:
return fmt.Sprintf("integer: %d", v.integer)
case StringType:
return fmt.Sprintf("string: %s", string(v.raw))
case QuotedStringType:
return fmt.Sprintf("quoted string: %s", string(v.raw))
case BoolType:
return fmt.Sprintf("bool: %t", v.boolean)
default:
return "union not set"
}
}
func newLitToken(b []rune) (Token, int, error) {
n := 0
var err error
token := Token{}
if b[0] == '"' {
n, err = getStringValue(b)
if err != nil {
return token, n, err
}
token = newToken(TokenLit, b[:n], QuotedStringType)
} else if isNumberValue(b) {
var base int
base, n, err = getNumericalValue(b)
if err != nil {
return token, 0, err
}
value := b[:n]
vType := IntegerType
if contains(value, '.') || hasExponent(value) {
vType = DecimalType
}
token = newToken(TokenLit, value, vType)
token.base = base
} else if isBoolValue(b) {
n, err = getBoolValue(b)
token = newToken(TokenLit, b[:n], BoolType)
} else {
n, err = getValue(b)
token = newToken(TokenLit, b[:n], StringType)
}
return token, n, err
}
// IntValue returns an integer value
func (v Value) IntValue() int64 {
return v.integer
}
// FloatValue returns a float value
func (v Value) FloatValue() float64 {
return v.decimal
}
// BoolValue returns a bool value
func (v Value) BoolValue() bool {
return v.boolean
}
func isTrimmable(r rune) bool {
switch r {
case '\n', ' ':
return true
}
return false
}
// StringValue returns the string value
func (v Value) StringValue() string {
switch v.Type {
case StringType:
return strings.TrimFunc(string(v.raw), isTrimmable)
case QuotedStringType:
// preserve all characters in the quotes
return string(removeEscapedCharacters(v.raw[1 : len(v.raw)-1]))
default:
return strings.TrimFunc(string(v.raw), isTrimmable)
}
}
func contains(runes []rune, c rune) bool {
for i := 0; i < len(runes); i++ {
if runes[i] == c {
return true
}
}
return false
}
func runeCompare(v1 []rune, v2 []rune) bool {
if len(v1) != len(v2) {
return false
}
for i := 0; i < len(v1); i++ {
if v1[i] != v2[i] {
return false
}
}
return true
}

View File

@ -1,30 +0,0 @@
package ini
func isNewline(b []rune) bool {
if len(b) == 0 {
return false
}
if b[0] == '\n' {
return true
}
if len(b) < 2 {
return false
}
return b[0] == '\r' && b[1] == '\n'
}
func newNewlineToken(b []rune) (Token, int, error) {
i := 1
if b[0] == '\r' && isNewline(b[1:]) {
i++
}
if !isNewline([]rune(b[:i])) {
return emptyToken, 0, NewParseError("invalid new line token")
}
return newToken(TokenNL, b[:i], NoneType), i, nil
}

View File

@ -1,152 +0,0 @@
package ini
import (
"bytes"
"fmt"
"strconv"
)
const (
none = numberFormat(iota)
binary
octal
decimal
hex
exponent
)
type numberFormat int
// numberHelper is used to dictate what format a number is in
// and what to do for negative values. Since -1e-4 is a valid
// number, we cannot just simply check for duplicate negatives.
type numberHelper struct {
numberFormat numberFormat
negative bool
negativeExponent bool
}
func (b numberHelper) Exists() bool {
return b.numberFormat != none
}
func (b numberHelper) IsNegative() bool {
return b.negative || b.negativeExponent
}
func (b *numberHelper) Determine(c rune) error {
if b.Exists() {
return NewParseError(fmt.Sprintf("multiple number formats: 0%v", string(c)))
}
switch c {
case 'b':
b.numberFormat = binary
case 'o':
b.numberFormat = octal
case 'x':
b.numberFormat = hex
case 'e', 'E':
b.numberFormat = exponent
case '-':
if b.numberFormat != exponent {
b.negative = true
} else {
b.negativeExponent = true
}
case '.':
b.numberFormat = decimal
default:
return NewParseError(fmt.Sprintf("invalid number character: %v", string(c)))
}
return nil
}
func (b numberHelper) CorrectByte(c rune) bool {
switch {
case b.numberFormat == binary:
if !isBinaryByte(c) {
return false
}
case b.numberFormat == octal:
if !isOctalByte(c) {
return false
}
case b.numberFormat == hex:
if !isHexByte(c) {
return false
}
case b.numberFormat == decimal:
if !isDigit(c) {
return false
}
case b.numberFormat == exponent:
if !isDigit(c) {
return false
}
case b.negativeExponent:
if !isDigit(c) {
return false
}
case b.negative:
if !isDigit(c) {
return false
}
default:
if !isDigit(c) {
return false
}
}
return true
}
func (b numberHelper) Base() int {
switch b.numberFormat {
case binary:
return 2
case octal:
return 8
case hex:
return 16
default:
return 10
}
}
func (b numberHelper) String() string {
buf := bytes.Buffer{}
i := 0
switch b.numberFormat {
case binary:
i++
buf.WriteString(strconv.Itoa(i) + ": binary format\n")
case octal:
i++
buf.WriteString(strconv.Itoa(i) + ": octal format\n")
case hex:
i++
buf.WriteString(strconv.Itoa(i) + ": hex format\n")
case exponent:
i++
buf.WriteString(strconv.Itoa(i) + ": exponent format\n")
default:
i++
buf.WriteString(strconv.Itoa(i) + ": integer format\n")
}
if b.negative {
i++
buf.WriteString(strconv.Itoa(i) + ": negative format\n")
}
if b.negativeExponent {
i++
buf.WriteString(strconv.Itoa(i) + ": negative exponent format\n")
}
return buf.String()
}

View File

@ -1,39 +0,0 @@
package ini
import (
"fmt"
)
var (
equalOp = []rune("=")
equalColonOp = []rune(":")
)
func isOp(b []rune) bool {
if len(b) == 0 {
return false
}
switch b[0] {
case '=':
return true
case ':':
return true
default:
return false
}
}
func newOpToken(b []rune) (Token, int, error) {
tok := Token{}
switch b[0] {
case '=':
tok = newToken(TokenOp, equalOp, NoneType)
case ':':
tok = newToken(TokenOp, equalColonOp, NoneType)
default:
return tok, 0, NewParseError(fmt.Sprintf("unexpected op type, %v", b[0]))
}
return tok, 1, nil
}

View File

@ -0,0 +1,109 @@
package ini
import (
"fmt"
"strings"
)
func parse(tokens []lineToken, path string) Sections {
parser := &parser{
path: path,
sections: NewSections(),
}
parser.parse(tokens)
return parser.sections
}
type parser struct {
csection, ckey string // current state
path string // source file path
sections Sections // parse result
}
func (p *parser) parse(tokens []lineToken) {
for _, otok := range tokens {
switch tok := otok.(type) {
case *lineTokenProfile:
p.handleProfile(tok)
case *lineTokenProperty:
p.handleProperty(tok)
case *lineTokenSubProperty:
p.handleSubProperty(tok)
case *lineTokenContinuation:
p.handleContinuation(tok)
}
}
}
func (p *parser) handleProfile(tok *lineTokenProfile) {
name := tok.Name
if tok.Type != "" {
name = fmt.Sprintf("%s %s", tok.Type, tok.Name)
}
p.ckey = ""
p.csection = name
if _, ok := p.sections.container[name]; !ok {
p.sections.container[name] = NewSection(name)
}
}
func (p *parser) handleProperty(tok *lineTokenProperty) {
if p.csection == "" {
return // LEGACY: don't error on "global" properties
}
p.ckey = tok.Key
if _, ok := p.sections.container[p.csection].values[tok.Key]; ok {
section := p.sections.container[p.csection]
section.Logs = append(p.sections.container[p.csection].Logs,
fmt.Sprintf(
"For profile: %v, overriding %v value, with a %v value found in a duplicate profile defined later in the same file %v. \n",
p.csection, tok.Key, tok.Key, p.path,
),
)
p.sections.container[p.csection] = section
}
p.sections.container[p.csection].values[tok.Key] = Value{
str: tok.Value,
}
p.sections.container[p.csection].SourceFile[tok.Key] = p.path
}
func (p *parser) handleSubProperty(tok *lineTokenSubProperty) {
if p.csection == "" {
return // LEGACY: don't error on "global" properties
}
if p.ckey == "" || p.sections.container[p.csection].values[p.ckey].str != "" {
// This is an "orphaned" subproperty, either because it's at
// the beginning of a section or because the last property's
// value isn't empty. Either way we're lenient here and
// "promote" this to a normal property.
p.handleProperty(&lineTokenProperty{
Key: tok.Key,
Value: strings.TrimSpace(trimPropertyComment(tok.Value)),
})
return
}
if p.sections.container[p.csection].values[p.ckey].mp == nil {
p.sections.container[p.csection].values[p.ckey] = Value{
mp: map[string]string{},
}
}
p.sections.container[p.csection].values[p.ckey].mp[tok.Key] = tok.Value
}
func (p *parser) handleContinuation(tok *lineTokenContinuation) {
if p.ckey == "" {
return
}
value, _ := p.sections.container[p.csection].values[p.ckey]
if value.str != "" && value.mp == nil {
value.str = fmt.Sprintf("%s\n%s", value.str, tok.Value)
}
p.sections.container[p.csection].values[p.ckey] = value
}
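// A minimal usage sketch (not part of the vendored files), assuming it lives in
// package ini alongside the code above, e.g. in a test: it shows how tokenize
// and parse fit together to turn raw config lines into Sections. The config
// path is hypothetical.
func exampleParseSketch() {
lines := []string{
"[profile dev]",
"region = us-west-2",
}
// classify each raw line into a lineToken
tokens, err := tokenize(lines)
if err != nil {
panic(err)
}
// build the Sections result, recording the (hypothetical) source path
sections := parse(tokens, "/path/to/config")
if sec, ok := sections.GetSection("profile dev"); ok {
fmt.Println(sec.String("region")) // prints "us-west-2"
}
}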

View File

@ -1,19 +0,0 @@
package ini
// ParseError is an error which is returned during any part of
// the parsing process.
type ParseError struct {
msg string
}
// NewParseError will return a new ParseError where message
// is the description of the error.
func NewParseError(message string) *ParseError {
return &ParseError{
msg: message,
}
}
func (err *ParseError) Error() string {
return err.msg
}

View File

@ -1,60 +0,0 @@
package ini
import (
"bytes"
"fmt"
)
// ParseStack is a stack that contains a container, the stack portion,
// and the list which is the list of ASTs that have been successfully
// parsed.
type ParseStack struct {
top int
container []AST
list []AST
index int
}
func newParseStack(sizeContainer, sizeList int) ParseStack {
return ParseStack{
container: make([]AST, sizeContainer),
list: make([]AST, sizeList),
}
}
// Pop will return and truncate the last container element.
func (s *ParseStack) Pop() AST {
s.top--
return s.container[s.top]
}
// Push will add the new AST to the container
func (s *ParseStack) Push(ast AST) {
s.container[s.top] = ast
s.top++
}
// MarkComplete will append the AST to the list of completed statements
func (s *ParseStack) MarkComplete(ast AST) {
s.list[s.index] = ast
s.index++
}
// List will return the completed statements
func (s ParseStack) List() []AST {
return s.list[:s.index]
}
// Len will return the length of the container
func (s *ParseStack) Len() int {
return s.top
}
func (s ParseStack) String() string {
buf := bytes.Buffer{}
for i, node := range s.list {
buf.WriteString(fmt.Sprintf("%d: %v\n", i+1, node))
}
return buf.String()
}

View File

@ -0,0 +1,157 @@
package ini
import (
"sort"
)
// Sections is a map of Section structures that represent
// a configuration.
type Sections struct {
container map[string]Section
}
// NewSections returns empty ini Sections
func NewSections() Sections {
return Sections{
container: make(map[string]Section, 0),
}
}
// GetSection will return section p. If section p does not exist,
// false will be returned in the second parameter.
func (t Sections) GetSection(p string) (Section, bool) {
v, ok := t.container[p]
return v, ok
}
// HasSection denotes whether Sections contains a section with the
// provided name.
func (t Sections) HasSection(p string) bool {
_, ok := t.container[p]
return ok
}
// SetSection sets a section value for provided section name.
func (t Sections) SetSection(p string, v Section) Sections {
t.container[p] = v
return t
}
// DeleteSection deletes a section entry/value for the provided section name.
func (t Sections) DeleteSection(p string) {
delete(t.container, p)
}
// values represents a map of union values.
type values map[string]Value
// List will return a list of all sections that were successfully
// parsed.
func (t Sections) List() []string {
keys := make([]string, len(t.container))
i := 0
for k := range t.container {
keys[i] = k
i++
}
sort.Strings(keys)
return keys
}
// Section contains a name and values. This represent
// a sectioned entry in a configuration file.
type Section struct {
// Name is the Section profile name
Name string
// values are the values within parsed profile
values values
// Errors is the list of errors
Errors []error
// Logs is the list of logs
Logs []string
// SourceFile is the INI Source file from where this section
// was retrieved. The key is the property, value is the
// source file the property was retrieved from.
SourceFile map[string]string
}
// NewSection returns an initialized section for the name
func NewSection(name string) Section {
return Section{
Name: name,
values: values{},
SourceFile: map[string]string{},
}
}
// List will return a list of all
// keys in values
func (t Section) List() []string {
keys := make([]string, len(t.values))
i := 0
for k := range t.values {
keys[i] = k
i++
}
sort.Strings(keys)
return keys
}
// UpdateSourceFile updates source file for a property to provided filepath.
func (t Section) UpdateSourceFile(property string, filepath string) {
t.SourceFile[property] = filepath
}
// UpdateValue updates value for a provided key with provided value
func (t Section) UpdateValue(k string, v Value) error {
t.values[k] = v
return nil
}
// Has will return whether or not an entry exists in a given section
func (t Section) Has(k string) bool {
_, ok := t.values[k]
return ok
}
// ValueType will return what type the union is set to. If
// k was not found, the NoneType will be returned.
func (t Section) ValueType(k string) (ValueType, bool) {
v, ok := t.values[k]
return v.Type, ok
}
// Bool returns a bool value at k
func (t Section) Bool(k string) (bool, bool) {
return t.values[k].BoolValue()
}
// Int returns an integer value at k
func (t Section) Int(k string) (int64, bool) {
return t.values[k].IntValue()
}
// Map returns a map value at k
func (t Section) Map(k string) map[string]string {
return t.values[k].MapValue()
}
// Float64 returns a float value at k
func (t Section) Float64(k string) (float64, bool) {
return t.values[k].FloatValue()
}
// String returns the string value at k
func (t Section) String(k string) string {
_, ok := t.values[k]
if !ok {
return ""
}
return t.values[k].StringValue()
}
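// A small sketch (not part of the vendored files), assuming it lives in package
// ini: it populates a Section directly through the exported helpers above and
// reads the value back. The source path is hypothetical.
func exampleSectionSketch() (int64, bool) {
sec := NewSection("profile dev")
v, _ := NewStringValue("8") // stored as a raw string
_ = sec.UpdateValue("max_attempts", v)
sec.UpdateSourceFile("max_attempts", "/path/to/config") // hypothetical path
// Int parses the stored string lazily; returns (8, true) here
return sec.Int("max_attempts")
}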

View File

@ -1,41 +0,0 @@
package ini
import (
"fmt"
)
var (
emptyRunes = []rune{}
)
func isSep(b []rune) bool {
if len(b) == 0 {
return false
}
switch b[0] {
case '[', ']':
return true
default:
return false
}
}
var (
openBrace = []rune("[")
closeBrace = []rune("]")
)
func newSepToken(b []rune) (Token, int, error) {
tok := Token{}
switch b[0] {
case '[':
tok = newToken(TokenSep, openBrace, NoneType)
case ']':
tok = newToken(TokenSep, closeBrace, NoneType)
default:
return tok, 0, NewParseError(fmt.Sprintf("unexpected sep type, %v", b[0]))
}
return tok, 1, nil
}

View File

@ -1,45 +0,0 @@
package ini
// skipper is used to skip certain blocks of an ini file.
// Currently skipper is used to skip nested blocks of ini
// files. See example below
//
// [ foo ]
// nested = ; this section will be skipped
// a=b
// c=d
// bar=baz ; this will be included
type skipper struct {
shouldSkip bool
TokenSet bool
prevTok Token
}
func newSkipper() skipper {
return skipper{
prevTok: emptyToken,
}
}
func (s *skipper) ShouldSkip(tok Token) bool {
// should skip state will be modified only if previous token was new line (NL);
// and the current token is not WhiteSpace (WS).
if s.shouldSkip &&
s.prevTok.Type() == TokenNL &&
tok.Type() != TokenWS {
s.Continue()
return false
}
s.prevTok = tok
return s.shouldSkip
}
func (s *skipper) Skip() {
s.shouldSkip = true
}
func (s *skipper) Continue() {
s.shouldSkip = false
s.prevTok = emptyToken
}

View File

@ -1,35 +0,0 @@
package ini
// Statement is an empty AST mostly used for transitioning states.
func newStatement() AST {
return newAST(ASTKindStatement, AST{})
}
// SectionStatement represents a section AST
func newSectionStatement(tok Token) AST {
return newASTWithRootToken(ASTKindSectionStatement, tok)
}
// ExprStatement represents a completed expression AST
func newExprStatement(ast AST) AST {
return newAST(ASTKindExprStatement, ast)
}
// CommentStatement represents a comment in the ini definition.
//
// grammar:
// comment -> #comment' | ;comment'
// comment' -> epsilon | value
func newCommentStatement(tok Token) AST {
return newAST(ASTKindCommentStatement, newExpression(tok))
}
// CompletedSectionStatement represents a completed section
func newCompletedSectionStatement(ast AST) AST {
return newAST(ASTKindCompletedSectionStatement, ast)
}
// SkipStatement is used to skip whole statements
func newSkipStatement(ast AST) AST {
return newAST(ASTKindSkipStatement, ast)
}

View File

@ -0,0 +1,89 @@
package ini
import (
"strings"
)
func trimProfileComment(s string) string {
r, _, _ := strings.Cut(s, "#")
r, _, _ = strings.Cut(r, ";")
return r
}
func trimPropertyComment(s string) string {
r, _, _ := strings.Cut(s, " #")
r, _, _ = strings.Cut(r, " ;")
r, _, _ = strings.Cut(r, "\t#")
r, _, _ = strings.Cut(r, "\t;")
return r
}
// assumes no surrounding comment
func splitProperty(s string) (string, string, bool) {
equalsi := strings.Index(s, "=")
coloni := strings.Index(s, ":") // LEGACY: also supported for property assignment
sep := "="
if equalsi == -1 || coloni != -1 && coloni < equalsi {
sep = ":"
}
k, v, ok := strings.Cut(s, sep)
if !ok {
return "", "", false
}
return strings.TrimSpace(k), strings.TrimSpace(v), true
}
// assumes no surrounding comment, whitespace, or profile brackets
func splitProfile(s string) (string, string) {
var first int
for i, r := range s {
if isLineSpace(r) {
if first == 0 {
first = i
}
} else {
if first != 0 {
return s[:first], s[i:]
}
}
}
if first == 0 {
return "", s // type component is effectively blank
}
return "", ""
}
func isLineSpace(r rune) bool {
return r == ' ' || r == '\t'
}
func unquote(s string) string {
if isSingleQuoted(s) || isDoubleQuoted(s) {
return s[1 : len(s)-1]
}
return s
}
// applies various legacy conversions to property values:
// - remove wrapping single/double quotes
func legacyStrconv(s string) string {
s = unquote(s)
return s
}
func isSingleQuoted(s string) bool {
return hasAffixes(s, "'", "'")
}
func isDoubleQuoted(s string) bool {
return hasAffixes(s, `"`, `"`)
}
func isBracketed(s string) bool {
return hasAffixes(s, "[", "]")
}
func hasAffixes(s, left, right string) bool {
return strings.HasPrefix(s, left) && strings.HasSuffix(s, right)
}
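// A brief sketch (not part of the vendored files), assuming it lives in package
// ini: it shows what the string helpers above produce for a typical property
// line with a trailing comment and quoted value.
func exampleSplitSketch() (string, string, bool) {
line := `region = "us-west-2" ; trailing comment`
trimmed := trimPropertyComment(line) // `region = "us-west-2"`
trimmed = strings.TrimRight(trimmed, " \t")
k, v, ok := splitProperty(trimmed) // "region", `"us-west-2"`, true
return k, legacyStrconv(v), ok // wrapping quotes removed: "us-west-2"
}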

View File

@ -0,0 +1,32 @@
package ini
type lineToken interface {
isLineToken()
}
type lineTokenProfile struct {
Type string
Name string
}
func (*lineTokenProfile) isLineToken() {}
type lineTokenProperty struct {
Key string
Value string
}
func (*lineTokenProperty) isLineToken() {}
type lineTokenContinuation struct {
Value string
}
func (*lineTokenContinuation) isLineToken() {}
type lineTokenSubProperty struct {
Key string
Value string
}
func (*lineTokenSubProperty) isLineToken() {}

View File

@ -0,0 +1,92 @@
package ini
import (
"strings"
)
func tokenize(lines []string) ([]lineToken, error) {
tokens := make([]lineToken, 0, len(lines))
for _, line := range lines {
if len(strings.TrimSpace(line)) == 0 || isLineComment(line) {
continue
}
if tok := asProfile(line); tok != nil {
tokens = append(tokens, tok)
} else if tok := asProperty(line); tok != nil {
tokens = append(tokens, tok)
} else if tok := asSubProperty(line); tok != nil {
tokens = append(tokens, tok)
} else if tok := asContinuation(line); tok != nil {
tokens = append(tokens, tok)
} // unrecognized tokens are effectively ignored
}
return tokens, nil
}
func isLineComment(line string) bool {
trimmed := strings.TrimLeft(line, " \t")
return strings.HasPrefix(trimmed, "#") || strings.HasPrefix(trimmed, ";")
}
func asProfile(line string) *lineTokenProfile { // " [ type name ] ; comment"
trimmed := strings.TrimSpace(trimProfileComment(line)) // "[ type name ]"
if !isBracketed(trimmed) {
return nil
}
trimmed = trimmed[1 : len(trimmed)-1] // " type name " (or just " name ")
trimmed = strings.TrimSpace(trimmed) // "type name" / "name"
typ, name := splitProfile(trimmed)
return &lineTokenProfile{
Type: typ,
Name: name,
}
}
func asProperty(line string) *lineTokenProperty {
if isLineSpace(rune(line[0])) {
return nil
}
trimmed := trimPropertyComment(line)
trimmed = strings.TrimRight(trimmed, " \t")
k, v, ok := splitProperty(trimmed)
if !ok {
return nil
}
return &lineTokenProperty{
Key: strings.ToLower(k), // LEGACY: normalize key case
Value: legacyStrconv(v), // LEGACY: see func docs
}
}
func asSubProperty(line string) *lineTokenSubProperty {
if !isLineSpace(rune(line[0])) {
return nil
}
// comments on sub-properties are included in the value
trimmed := strings.TrimLeft(line, " \t")
k, v, ok := splitProperty(trimmed)
if !ok {
return nil
}
return &lineTokenSubProperty{ // same LEGACY constraints as in normal property
Key: strings.ToLower(k),
Value: legacyStrconv(v),
}
}
func asContinuation(line string) *lineTokenContinuation {
if !isLineSpace(rune(line[0])) {
return nil
}
// includes comments like sub-properties
trimmed := strings.TrimLeft(line, " \t")
return &lineTokenContinuation{
Value: trimmed,
}
}
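// A sketch (not part of the vendored files), assuming it lives in package ini:
// it shows how individual lines are classified by the helpers above before
// tokenize hands them to the parser.
func exampleClassifySketch() []lineToken {
return []lineToken{
asProfile("[profile dev]"), // &lineTokenProfile{Type: "profile", Name: "dev"}
asProperty("s3 ="), // &lineTokenProperty{Key: "s3", Value: ""}
asSubProperty(" max_attempts = 3"), // &lineTokenSubProperty{Key: "max_attempts", Value: "3"}
asContinuation(" continued value"), // &lineTokenContinuation{Value: "continued value"}
}
}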

View File

@ -0,0 +1,93 @@
package ini
import (
"fmt"
"strconv"
"strings"
)
// ValueType is an enum that will signify what type
// the Value is
type ValueType int
func (v ValueType) String() string {
switch v {
case NoneType:
return "NONE"
case StringType:
return "STRING"
}
return ""
}
// ValueType enums
const (
NoneType = ValueType(iota)
StringType
QuotedStringType
)
// Value is a union container
type Value struct {
Type ValueType
str string
mp map[string]string
}
// NewStringValue returns a Value type generated using a string input.
func NewStringValue(str string) (Value, error) {
return Value{str: str}, nil
}
func (v Value) String() string {
switch v.Type {
case StringType:
return fmt.Sprintf("string: %s", string(v.str))
case QuotedStringType:
return fmt.Sprintf("quoted string: %s", string(v.str))
default:
return "union not set"
}
}
// MapValue returns a map value for sub properties
func (v Value) MapValue() map[string]string {
return v.mp
}
// IntValue returns an integer value
func (v Value) IntValue() (int64, bool) {
i, err := strconv.ParseInt(string(v.str), 0, 64)
if err != nil {
return 0, false
}
return i, true
}
// FloatValue returns a float value
func (v Value) FloatValue() (float64, bool) {
f, err := strconv.ParseFloat(string(v.str), 64)
if err != nil {
return 0, false
}
return f, true
}
// BoolValue returns a bool value
func (v Value) BoolValue() (bool, bool) {
// we don't use ParseBool as it recognizes more than what we've
// historically supported
if strings.EqualFold(v.str, "true") {
return true, true
} else if strings.EqualFold(v.str, "false") {
return false, true
}
return false, false
}
// StringValue returns the string value
func (v Value) StringValue() string {
return v.str
}
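// A short sketch (not part of the vendored files), assuming it lives in package
// ini: all numeric and boolean conversions on Value are performed lazily from
// the stored string, and each reports whether the conversion succeeded.
func exampleValueSketch() {
v, _ := NewStringValue("0x2A")
i, ok := v.IntValue() // 42, true: base inferred by strconv.ParseInt with base 0
f, ok2 := v.FloatValue() // 0, false: "0x2A" is not a valid float
b, ok3 := v.BoolValue() // false, false: only "true"/"false" are recognized
fmt.Println(i, ok, f, ok2, b, ok3, v.StringValue())
}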

View File

@ -1,284 +0,0 @@
package ini
import (
"fmt"
)
// getStringValue will return a quoted string and the number
// of bytes read
//
// an error will be returned if the string is not properly formatted
func getStringValue(b []rune) (int, error) {
if b[0] != '"' {
return 0, NewParseError("strings must start with '\"'")
}
endQuote := false
i := 1
for ; i < len(b) && !endQuote; i++ {
if escaped := isEscaped(b[:i], b[i]); b[i] == '"' && !escaped {
endQuote = true
break
} else if escaped {
/*c, err := getEscapedByte(b[i])
if err != nil {
return 0, err
}
b[i-1] = c
b = append(b[:i], b[i+1:]...)
i--*/
continue
}
}
if !endQuote {
return 0, NewParseError("missing '\"' in string value")
}
return i + 1, nil
}
// getBoolValue will return a boolean and the number
// of bytes read
//
// an error will be returned if the boolean is not of a correct
// value
func getBoolValue(b []rune) (int, error) {
if len(b) < 4 {
return 0, NewParseError("invalid boolean value")
}
n := 0
for _, lv := range literalValues {
if len(lv) > len(b) {
continue
}
if isCaselessLitValue(lv, b) {
n = len(lv)
}
}
if n == 0 {
return 0, NewParseError("invalid boolean value")
}
return n, nil
}
// getNumericalValue will return a numerical string, the number
// of bytes read, and the base of the number
//
// an error will be returned if the number is not of a correct
// value
func getNumericalValue(b []rune) (int, int, error) {
if !isDigit(b[0]) {
return 0, 0, NewParseError("invalid digit value")
}
i := 0
helper := numberHelper{}
loop:
for negativeIndex := 0; i < len(b); i++ {
negativeIndex++
if !isDigit(b[i]) {
switch b[i] {
case '-':
if helper.IsNegative() || negativeIndex != 1 {
return 0, 0, NewParseError("parse error '-'")
}
n := getNegativeNumber(b[i:])
i += (n - 1)
helper.Determine(b[i])
continue
case '.':
if err := helper.Determine(b[i]); err != nil {
return 0, 0, err
}
case 'e', 'E':
if err := helper.Determine(b[i]); err != nil {
return 0, 0, err
}
negativeIndex = 0
case 'b':
if helper.numberFormat == hex {
break
}
fallthrough
case 'o', 'x':
if i == 0 && b[i] != '0' {
return 0, 0, NewParseError("incorrect base format, expected leading '0'")
}
if i != 1 {
return 0, 0, NewParseError(fmt.Sprintf("incorrect base format found %s at %d index", string(b[i]), i))
}
if err := helper.Determine(b[i]); err != nil {
return 0, 0, err
}
default:
if isWhitespace(b[i]) {
break loop
}
if isNewline(b[i:]) {
break loop
}
if !(helper.numberFormat == hex && isHexByte(b[i])) {
if i+2 < len(b) && !isNewline(b[i:i+2]) {
return 0, 0, NewParseError("invalid numerical character")
} else if !isNewline([]rune{b[i]}) {
return 0, 0, NewParseError("invalid numerical character")
}
break loop
}
}
}
}
return helper.Base(), i, nil
}
// isDigit will return whether or not something is an integer
func isDigit(b rune) bool {
return b >= '0' && b <= '9'
}
func hasExponent(v []rune) bool {
return contains(v, 'e') || contains(v, 'E')
}
func isBinaryByte(b rune) bool {
switch b {
case '0', '1':
return true
default:
return false
}
}
func isOctalByte(b rune) bool {
switch b {
case '0', '1', '2', '3', '4', '5', '6', '7':
return true
default:
return false
}
}
func isHexByte(b rune) bool {
if isDigit(b) {
return true
}
return (b >= 'A' && b <= 'F') ||
(b >= 'a' && b <= 'f')
}
func getValue(b []rune) (int, error) {
i := 0
for i < len(b) {
if isNewline(b[i:]) {
break
}
if isOp(b[i:]) {
break
}
valid, n, err := isValid(b[i:])
if err != nil {
return 0, err
}
if !valid {
break
}
i += n
}
return i, nil
}
// getNegativeNumber will return the number of characters that make up a
// negative number (a leading '-' followed by digits) at the start of a
// byte slice. It iterates through the characters until a non-digit is found.
func getNegativeNumber(b []rune) int {
if b[0] != '-' {
return 0
}
i := 1
for ; i < len(b); i++ {
if !isDigit(b[i]) {
return i
}
}
return i
}
// isEscaped will return whether or not the character is an escaped
// character.
func isEscaped(value []rune, b rune) bool {
if len(value) == 0 {
return false
}
switch b {
case '\'': // single quote
case '"': // quote
case 'n': // newline
case 't': // tab
case '\\': // backslash
default:
return false
}
return value[len(value)-1] == '\\'
}
func getEscapedByte(b rune) (rune, error) {
switch b {
case '\'': // single quote
return '\'', nil
case '"': // quote
return '"', nil
case 'n': // newline
return '\n', nil
case 't': // tab
return '\t', nil
case '\\': // backslash
return '\\', nil
default:
return b, NewParseError(fmt.Sprintf("invalid escaped character %c", b))
}
}
func removeEscapedCharacters(b []rune) []rune {
for i := 0; i < len(b); i++ {
if isEscaped(b[:i], b[i]) {
c, err := getEscapedByte(b[i])
if err != nil {
return b
}
b[i-1] = c
b = append(b[:i], b[i+1:]...)
i--
}
}
return b
}

View File

@ -1,269 +0,0 @@
package ini
import (
"fmt"
"sort"
"strings"
)
// Visitor is an interface used by walkers that will
// traverse an array of ASTs.
type Visitor interface {
VisitExpr(AST) error
VisitStatement(AST) error
}
// DefaultVisitor is used to visit statements and expressions
// and ensure that they are both of the correct format.
// In addition, upon visiting this will build sections and populate
// the Sections field which can be used to retrieve profile
// configuration.
type DefaultVisitor struct {
// scope is the profile which is being visited
scope string
// path is the file path which the visitor is visiting
path string
// Sections defines list of the profile section
Sections Sections
}
// NewDefaultVisitor returns a DefaultVisitor. It takes in a filepath
// which points to the file it is visiting.
func NewDefaultVisitor(filepath string) *DefaultVisitor {
return &DefaultVisitor{
Sections: Sections{
container: map[string]Section{},
},
path: filepath,
}
}
// VisitExpr visits expressions...
func (v *DefaultVisitor) VisitExpr(expr AST) error {
t := v.Sections.container[v.scope]
if t.values == nil {
t.values = values{}
}
if t.SourceFile == nil {
t.SourceFile = make(map[string]string, 0)
}
switch expr.Kind {
case ASTKindExprStatement:
opExpr := expr.GetRoot()
switch opExpr.Kind {
case ASTKindEqualExpr:
children := opExpr.GetChildren()
if len(children) <= 1 {
return NewParseError("unexpected token type")
}
rhs := children[1]
// The right-hand value side of the equality expression is allowed to contain '[', ']', ':', '=' in the values.
// If the token is not either a literal or one of the token types that identifies those four additional
// tokens then error.
if !(rhs.Root.Type() == TokenLit || rhs.Root.Type() == TokenOp || rhs.Root.Type() == TokenSep) {
return NewParseError("unexpected token type")
}
key := EqualExprKey(opExpr)
val, err := newValue(rhs.Root.ValueType, rhs.Root.base, rhs.Root.Raw())
if err != nil {
return err
}
// lower case key to standardize
k := strings.ToLower(key)
// identify if the section already had this key, append log on section
if t.Has(k) {
t.Logs = append(t.Logs,
fmt.Sprintf("For profile: %v, overriding %v value, "+
"with a %v value found in a duplicate profile defined later in the same file %v. \n",
t.Name, k, k, v.path))
}
// assign the value
t.values[k] = val
// update the source file path for region
t.SourceFile[k] = v.path
default:
return NewParseError(fmt.Sprintf("unsupported expression %v", expr))
}
default:
return NewParseError(fmt.Sprintf("unsupported expression %v", expr))
}
v.Sections.container[v.scope] = t
return nil
}
// VisitStatement visits statements...
func (v *DefaultVisitor) VisitStatement(stmt AST) error {
switch stmt.Kind {
case ASTKindCompletedSectionStatement:
child := stmt.GetRoot()
if child.Kind != ASTKindSectionStatement {
return NewParseError(fmt.Sprintf("unsupported child statement: %T", child))
}
name := string(child.Root.Raw())
// trim start and end space
name = strings.TrimSpace(name)
// if has prefix "profile " + [ws+] + "profile-name",
// we standardize by removing the [ws+] between prefix and profile-name.
if strings.HasPrefix(name, "profile ") {
names := strings.SplitN(name, " ", 2)
name = names[0] + " " + strings.TrimLeft(names[1], " ")
}
// attach profile name on section
if !v.Sections.HasSection(name) {
v.Sections.container[name] = NewSection(name)
}
v.scope = name
default:
return NewParseError(fmt.Sprintf("unsupported statement: %s", stmt.Kind))
}
return nil
}
// Sections is a map of Section structures that represent
// a configuration.
type Sections struct {
container map[string]Section
}
// NewSections returns empty ini Sections
func NewSections() Sections {
return Sections{
container: make(map[string]Section, 0),
}
}
// GetSection will return section p. If section p does not exist,
// false will be returned in the second parameter.
func (t Sections) GetSection(p string) (Section, bool) {
v, ok := t.container[p]
return v, ok
}
// HasSection denotes whether Sections contains a section with the
// provided name.
func (t Sections) HasSection(p string) bool {
_, ok := t.container[p]
return ok
}
// SetSection sets a section value for provided section name.
func (t Sections) SetSection(p string, v Section) Sections {
t.container[p] = v
return t
}
// DeleteSection deletes a section entry/value for the provided section name.
func (t Sections) DeleteSection(p string) {
delete(t.container, p)
}
// values represents a map of union values.
type values map[string]Value
// List will return a list of all sections that were successfully
// parsed.
func (t Sections) List() []string {
keys := make([]string, len(t.container))
i := 0
for k := range t.container {
keys[i] = k
i++
}
sort.Strings(keys)
return keys
}
// Section contains a name and values. This represent
// a sectioned entry in a configuration file.
type Section struct {
// Name is the Section profile name
Name string
// values are the values within parsed profile
values values
// Errors is the list of errors
Errors []error
// Logs is the list of logs
Logs []string
// SourceFile is the INI Source file from where this section
// was retrieved. The key is the property, value is the
// source file the property was retrieved from.
SourceFile map[string]string
}
// NewSection returns an initialized section for the name
func NewSection(name string) Section {
return Section{
Name: name,
values: values{},
SourceFile: map[string]string{},
}
}
// UpdateSourceFile updates source file for a property to provided filepath.
func (t Section) UpdateSourceFile(property string, filepath string) {
t.SourceFile[property] = filepath
}
// UpdateValue updates value for a provided key with provided value
func (t Section) UpdateValue(k string, v Value) error {
t.values[k] = v
return nil
}
// Has will return whether or not an entry exists in a given section
func (t Section) Has(k string) bool {
_, ok := t.values[k]
return ok
}
// ValueType will return what type the union is set to. If
// k was not found, the NoneType will be returned.
func (t Section) ValueType(k string) (ValueType, bool) {
v, ok := t.values[k]
return v.Type, ok
}
// Bool returns a bool value at k
func (t Section) Bool(k string) bool {
return t.values[k].BoolValue()
}
// Int returns an integer value at k
func (t Section) Int(k string) int64 {
return t.values[k].IntValue()
}
// Float64 returns a float value at k
func (t Section) Float64(k string) float64 {
return t.values[k].FloatValue()
}
// String returns the string value at k
func (t Section) String(k string) string {
_, ok := t.values[k]
if !ok {
return ""
}
return t.values[k].StringValue()
}

View File

@ -1,25 +0,0 @@
package ini
// Walk will traverse the AST using the v, the Visitor.
func Walk(tree []AST, v Visitor) error {
for _, node := range tree {
switch node.Kind {
case ASTKindExpr,
ASTKindExprStatement:
if err := v.VisitExpr(node); err != nil {
return err
}
case ASTKindStatement,
ASTKindCompletedSectionStatement,
ASTKindNestedSectionStatement,
ASTKindCompletedNestedSectionStatement:
if err := v.VisitStatement(node); err != nil {
return err
}
}
}
return nil
}

View File

@ -1,24 +0,0 @@
package ini
import (
"unicode"
)
// isWhitespace will return whether or not the character is
// a whitespace character.
//
// Whitespace is defined as a space or tab.
func isWhitespace(c rune) bool {
return unicode.IsSpace(c) && c != '\n' && c != '\r'
}
func newWSToken(b []rune) (Token, int, error) {
i := 0
for ; i < len(b); i++ {
if !isWhitespace(b[i]) {
break
}
}
return newToken(TokenWS, b[:i], NoneType), i, nil
}