vendor: update buildkit with typed errors support
Signed-off-by: Tonis Tiigi <tonistiigi@gmail.com>

vendor/github.com/json-iterator/go/README.md (generated, vendored, 36 changed lines)

@@ -1,5 +1,5 @@
 [](https://sourcegraph.com/github.com/json-iterator/go?badge)
-[](http://godoc.org/github.com/json-iterator/go)
+[](https://pkg.go.dev/github.com/json-iterator/go)
 [](https://travis-ci.org/json-iterator/go)
 [](https://codecov.io/gh/json-iterator/go)
 [](https://goreportcard.com/report/github.com/json-iterator/go)
@@ -18,16 +18,16 @@ Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/githu
 
 Raw Result (easyjson requires static code generation)
 
-| | ns/op | allocation bytes | allocation times |
-| --- | --- | --- | --- |
-| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
-| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
-| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
-| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
-| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
-| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
+|                 | ns/op       | allocation bytes | allocation times |
+| --------------- | ----------- | ---------------- | ---------------- |
+| std decode      | 35510 ns/op | 1960 B/op        | 99 allocs/op     |
+| easyjson decode | 8499 ns/op  | 160 B/op         | 4 allocs/op      |
+| jsoniter decode | 5623 ns/op  | 160 B/op         | 3 allocs/op      |
+| std encode      | 2213 ns/op  | 712 B/op         | 5 allocs/op      |
+| easyjson encode | 883 ns/op   | 576 B/op         | 3 allocs/op      |
+| jsoniter encode | 837 ns/op   | 384 B/op         | 4 allocs/op      |
 
-Always benchmark with your own workload.
+Always benchmark with your own workload.
 The result depends heavily on the data input.
 
 # Usage
@@ -41,10 +41,10 @@ import "encoding/json"
 json.Marshal(&data)
 ```
 
-with
+with
 
 ```go
-import "github.com/json-iterator/go"
+import jsoniter "github.com/json-iterator/go"
 
 var json = jsoniter.ConfigCompatibleWithStandardLibrary
 json.Marshal(&data)
@@ -60,7 +60,7 @@ json.Unmarshal(input, &data)
 with
 
 ```go
-import "github.com/json-iterator/go"
+import jsoniter "github.com/json-iterator/go"
 
 var json = jsoniter.ConfigCompatibleWithStandardLibrary
 json.Unmarshal(input, &data)
@@ -78,10 +78,10 @@ go get github.com/json-iterator/go
 
 Contributors
 
-* [thockin](https://github.com/thockin)
-* [mattn](https://github.com/mattn)
-* [cch123](https://github.com/cch123)
-* [Oleg Shaldybin](https://github.com/olegshaldybin)
-* [Jason Toffaletti](https://github.com/toffaletti)
+- [thockin](https://github.com/thockin)
+- [mattn](https://github.com/mattn)
+- [cch123](https://github.com/cch123)
+- [Oleg Shaldybin](https://github.com/olegshaldybin)
+- [Jason Toffaletti](https://github.com/toffaletti)
 
 Report issue or pull request, or email taowen@gmail.com, or [](https://gitter.im/json-iterator/Lobby)
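
For orientation, the usage that the README hunks above document is the drop-in replacement pattern sketched below. The sketch is not part of the commit, and the Person type is invented for the example.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Person is an invented type for this illustration.
type Person struct {
	Name string `json:"name"`
	Age  int    `json:"age"`
}

func main() {
	// ConfigCompatibleWithStandardLibrary mimics encoding/json behaviour,
	// so existing json.Marshal / json.Unmarshal call sites keep working
	// after swapping the import as shown in the README diff.
	var json = jsoniter.ConfigCompatibleWithStandardLibrary

	data, err := json.Marshal(Person{Name: "ann", Age: 30})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(data)) // {"name":"ann","age":30}

	var p Person
	if err := json.Unmarshal(data, &p); err != nil {
		panic(err)
	}
	fmt.Println(p.Name, p.Age) // ann 30
}
```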

vendor/github.com/json-iterator/go/any_str.go (generated, vendored, 4 changed lines)

@@ -64,7 +64,6 @@ func (any *stringAny) ToInt64() int64 {
 
     flag := 1
     startPos := 0
-    endPos := 0
     if any.val[0] == '+' || any.val[0] == '-' {
         startPos = 1
     }
@@ -73,6 +72,7 @@ func (any *stringAny) ToInt64() int64 {
         flag = -1
     }
 
+    endPos := startPos
     for i := startPos; i < len(any.val); i++ {
         if any.val[i] >= '0' && any.val[i] <= '9' {
             endPos = i + 1
@@ -98,7 +98,6 @@ func (any *stringAny) ToUint64() uint64 {
     }
 
     startPos := 0
-    endPos := 0
 
     if any.val[0] == '-' {
         return 0
@@ -107,6 +106,7 @@ func (any *stringAny) ToUint64() uint64 {
         startPos = 1
     }
 
+    endPos := startPos
     for i := startPos; i < len(any.val); i++ {
         if any.val[i] >= '0' && any.val[i] <= '9' {
             endPos = i + 1

vendor/github.com/json-iterator/go/config.go (generated, vendored, 4 changed lines)

@@ -183,11 +183,11 @@ func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
     encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
         rawMessage := *(*json.RawMessage)(ptr)
         iter := cfg.BorrowIterator([]byte(rawMessage))
+        defer cfg.ReturnIterator(iter)
         iter.Read()
-        if iter.Error != nil {
+        if iter.Error != nil && iter.Error != io.EOF {
             stream.WriteRaw("null")
         } else {
-            cfg.ReturnIterator(iter)
             stream.WriteRaw(string(rawMessage))
         }
     }, func(ptr unsafe.Pointer) bool {
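
The validateJsonRawMessage encoder touched above is the path that replaces an unparsable json.RawMessage with null. A hedged sketch of the observable behaviour, assuming ValidateJsonRawMessage is enabled as in ConfigCompatibleWithStandardLibrary; the wrapper type is invented for this note.

```go
package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// wrapper is an invented type holding a raw message.
type wrapper struct {
	Raw json.RawMessage `json:"raw"`
}

func main() {
	api := jsoniter.ConfigCompatibleWithStandardLibrary // ValidateJsonRawMessage is on here
	out, err := api.Marshal(wrapper{Raw: json.RawMessage("{not valid json")})
	// The invalid raw message is expected to be emitted as null.
	fmt.Println(string(out), err) // expected: {"raw":null} <nil>
}
```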

vendor/github.com/json-iterator/go/iter.go (generated, vendored, 27 changed lines)

@@ -74,6 +74,7 @@ type Iterator struct {
     buf []byte
     head int
     tail int
+    depth int
     captureStartedAt int
     captured []byte
     Error error
@@ -88,6 +89,7 @@ func NewIterator(cfg API) *Iterator {
         buf: nil,
         head: 0,
         tail: 0,
+        depth: 0,
     }
 }
 
@@ -99,6 +101,7 @@ func Parse(cfg API, reader io.Reader, bufSize int) *Iterator {
         buf: make([]byte, bufSize),
         head: 0,
         tail: 0,
+        depth: 0,
     }
 }
 
@@ -110,6 +113,7 @@ func ParseBytes(cfg API, input []byte) *Iterator {
         buf: input,
         head: 0,
         tail: len(input),
+        depth: 0,
     }
 }
 
@@ -128,6 +132,7 @@ func (iter *Iterator) Reset(reader io.Reader) *Iterator {
     iter.reader = reader
     iter.head = 0
     iter.tail = 0
+    iter.depth = 0
     return iter
 }
 
@@ -137,6 +142,7 @@ func (iter *Iterator) ResetBytes(input []byte) *Iterator {
     iter.buf = input
     iter.head = 0
     iter.tail = len(input)
+    iter.depth = 0
     return iter
 }
 
@@ -320,3 +326,24 @@ func (iter *Iterator) Read() interface{} {
         return nil
     }
 }
+
+// limit maximum depth of nesting, as allowed by https://tools.ietf.org/html/rfc7159#section-9
+const maxDepth = 10000
+
+func (iter *Iterator) incrementDepth() (success bool) {
+    iter.depth++
+    if iter.depth <= maxDepth {
+        return true
+    }
+    iter.ReportError("incrementDepth", "exceeded max depth")
+    return false
+}
+
+func (iter *Iterator) decrementDepth() (success bool) {
+    iter.depth--
+    if iter.depth >= 0 {
+        return true
+    }
+    iter.ReportError("decrementDepth", "unexpected negative nesting")
+    return false
+}
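
The maxDepth guard added above caps how deeply nested a document may be before decoding reports an error. A minimal sketch of the expected effect, assuming the default limit of 10000; it is not part of the commit.

```go
package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// 10001 nested arrays should exceed the maxDepth of 10000 added in iter.go.
	deep := strings.Repeat("[", 10001) + strings.Repeat("]", 10001)

	var v interface{}
	err := jsoniter.Unmarshal([]byte(deep), &v)
	fmt.Println(err) // expected: an "exceeded max depth" error instead of unbounded recursion
}
```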

vendor/github.com/json-iterator/go/iter_array.go (generated, vendored, 10 changed lines)

@@ -28,26 +28,32 @@ func (iter *Iterator) ReadArray() (ret bool) {
 func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
     c := iter.nextToken()
     if c == '[' {
+        if !iter.incrementDepth() {
+            return false
+        }
         c = iter.nextToken()
         if c != ']' {
             iter.unreadByte()
             if !callback(iter) {
+                iter.decrementDepth()
                 return false
             }
             c = iter.nextToken()
             for c == ',' {
                 if !callback(iter) {
+                    iter.decrementDepth()
                     return false
                 }
                 c = iter.nextToken()
             }
             if c != ']' {
                 iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
+                iter.decrementDepth()
                 return false
             }
-            return true
+            return iter.decrementDepth()
         }
-        return true
+        return iter.decrementDepth()
     }
     if c == 'n' {
         iter.skipThreeBytes('u', 'l', 'l')

vendor/github.com/json-iterator/go/iter_object.go (generated, vendored, 28 changed lines)

@@ -112,6 +112,9 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
     c := iter.nextToken()
     var field string
     if c == '{' {
+        if !iter.incrementDepth() {
+            return false
+        }
         c = iter.nextToken()
         if c == '"' {
             iter.unreadByte()
@@ -121,6 +124,7 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
                 iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
             }
             if !callback(iter, field) {
+                iter.decrementDepth()
                 return false
             }
             c = iter.nextToken()
@@ -131,20 +135,23 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
                     iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
                 }
                 if !callback(iter, field) {
+                    iter.decrementDepth()
                     return false
                 }
                 c = iter.nextToken()
             }
             if c != '}' {
                 iter.ReportError("ReadObjectCB", `object not ended with }`)
+                iter.decrementDepth()
                 return false
             }
-            return true
+            return iter.decrementDepth()
         }
         if c == '}' {
-            return true
+            return iter.decrementDepth()
         }
-        iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c}))
+        iter.ReportError("ReadObjectCB", `expect " after {, but found `+string([]byte{c}))
+        iter.decrementDepth()
         return false
     }
     if c == 'n' {
@@ -159,15 +166,20 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
 func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
     c := iter.nextToken()
     if c == '{' {
+        if !iter.incrementDepth() {
+            return false
+        }
         c = iter.nextToken()
         if c == '"' {
             iter.unreadByte()
             field := iter.ReadString()
             if iter.nextToken() != ':' {
                 iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+                iter.decrementDepth()
                 return false
             }
             if !callback(iter, field) {
+                iter.decrementDepth()
                 return false
             }
             c = iter.nextToken()
@@ -175,23 +187,27 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
                 field = iter.ReadString()
                 if iter.nextToken() != ':' {
                     iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+                    iter.decrementDepth()
                     return false
                 }
                 if !callback(iter, field) {
+                    iter.decrementDepth()
                     return false
                 }
                 c = iter.nextToken()
             }
             if c != '}' {
                 iter.ReportError("ReadMapCB", `object not ended with }`)
+                iter.decrementDepth()
                 return false
             }
-            return true
+            return iter.decrementDepth()
         }
         if c == '}' {
-            return true
+            return iter.decrementDepth()
         }
-        iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
+        iter.ReportError("ReadMapCB", `expect " after {, but found `+string([]byte{c}))
+        iter.decrementDepth()
         return false
     }
     if c == 'n' {

vendor/github.com/json-iterator/go/iter_skip_sloppy.go (generated, vendored, 19 changed lines)

@@ -22,6 +22,9 @@ func (iter *Iterator) skipNumber() {
 
 func (iter *Iterator) skipArray() {
     level := 1
+    if !iter.incrementDepth() {
+        return
+    }
     for {
         for i := iter.head; i < iter.tail; i++ {
             switch iter.buf[i] {
@@ -31,8 +34,14 @@ func (iter *Iterator) skipArray() {
                 i = iter.head - 1 // it will be i++ soon
             case '[': // If open symbol, increase level
                 level++
+                if !iter.incrementDepth() {
+                    return
+                }
             case ']': // If close symbol, increase level
                 level--
+                if !iter.decrementDepth() {
+                    return
+                }
 
                 // If we have returned to the original level, we're done
                 if level == 0 {
@@ -50,6 +59,10 @@ func (iter *Iterator) skipArray() {
 
 func (iter *Iterator) skipObject() {
     level := 1
+    if !iter.incrementDepth() {
+        return
+    }
+
     for {
         for i := iter.head; i < iter.tail; i++ {
             switch iter.buf[i] {
@@ -59,8 +72,14 @@ func (iter *Iterator) skipObject() {
                 i = iter.head - 1 // it will be i++ soon
             case '{': // If open symbol, increase level
                 level++
+                if !iter.incrementDepth() {
+                    return
+                }
             case '}': // If close symbol, increase level
                 level--
+                if !iter.decrementDepth() {
+                    return
+                }
 
                 // If we have returned to the original level, we're done
                 if level == 0 {

vendor/github.com/json-iterator/go/reflect.go (generated, vendored, 5 changed lines)

@@ -60,6 +60,7 @@ func (b *ctx) append(prefix string) *ctx {
 
 // ReadVal copy the underlying JSON into go interface, same as json.Unmarshal
 func (iter *Iterator) ReadVal(obj interface{}) {
+    depth := iter.depth
     cacheKey := reflect2.RTypeOf(obj)
     decoder := iter.cfg.getDecoderFromCache(cacheKey)
     if decoder == nil {
@@ -76,6 +77,10 @@ func (iter *Iterator) ReadVal(obj interface{}) {
         return
     }
     decoder.Decode(ptr, iter)
+    if iter.depth != depth {
+        iter.ReportError("ReadVal", "unexpected mismatched nesting")
+        return
+    }
 }
 
 // WriteVal copy the go interface into underlying JSON, same as json.Marshal

vendor/github.com/json-iterator/go/reflect_extension.go (generated, vendored, 6 changed lines)

@@ -341,10 +341,10 @@ func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
         if ctx.onlyTaggedField && !hastag && !field.Anonymous() {
             continue
         }
-        tagParts := strings.Split(tag, ",")
-        if tag == "-" {
+        if tag == "-" || field.Name() == "_" {
             continue
         }
+        tagParts := strings.Split(tag, ",")
         if field.Anonymous() && (tag == "" || tagParts[0] == "") {
             if field.Type().Kind() == reflect.Struct {
                 structDescriptor := describeStruct(ctx, field.Type())
@@ -475,7 +475,7 @@ func calcFieldNames(originalFieldName string, tagProvidedFieldName string, whole
         fieldNames = []string{tagProvidedFieldName}
     }
     // private?
-    isNotExported := unicode.IsLower(rune(originalFieldName[0]))
+    isNotExported := unicode.IsLower(rune(originalFieldName[0])) || originalFieldName[0] == '_'
     if isNotExported {
         fieldNames = []string{}
     }
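
The describeStruct and calcFieldNames hunks above make jsoniter treat a field named `_` like an unexported field, alongside the existing handling of the `-` tag. A hedged illustration with an invented struct:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// example is an invented struct: both the blank `_` field and the
// `json:"-"` field should be left out of the encoded output.
type example struct {
	Kept    string `json:"kept"`
	Skipped string `json:"-"`
	_       struct{}
}

func main() {
	out, err := jsoniter.Marshal(example{Kept: "x", Skipped: "y"})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // expected: {"kept":"x"}
}
```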

vendor/github.com/json-iterator/go/reflect_map.go (generated, vendored, 94 changed lines)

@@ -49,6 +49,33 @@ func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
             return decoder
         }
     }
+
+    ptrType := reflect2.PtrTo(typ)
+    if ptrType.Implements(unmarshalerType) {
+        return &referenceDecoder{
+            &unmarshalerDecoder{
+                valType: ptrType,
+            },
+        }
+    }
+    if typ.Implements(unmarshalerType) {
+        return &unmarshalerDecoder{
+            valType: typ,
+        }
+    }
+    if ptrType.Implements(textUnmarshalerType) {
+        return &referenceDecoder{
+            &textUnmarshalerDecoder{
+                valType: ptrType,
+            },
+        }
+    }
+    if typ.Implements(textUnmarshalerType) {
+        return &textUnmarshalerDecoder{
+            valType: typ,
+        }
+    }
+
     switch typ.Kind() {
     case reflect.String:
         return decoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
@@ -63,31 +90,6 @@ func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
         typ = reflect2.DefaultTypeOfKind(typ.Kind())
         return &numericMapKeyDecoder{decoderOfType(ctx, typ)}
     default:
-        ptrType := reflect2.PtrTo(typ)
-        if ptrType.Implements(unmarshalerType) {
-            return &referenceDecoder{
-                &unmarshalerDecoder{
-                    valType: ptrType,
-                },
-            }
-        }
-        if typ.Implements(unmarshalerType) {
-            return &unmarshalerDecoder{
-                valType: typ,
-            }
-        }
-        if ptrType.Implements(textUnmarshalerType) {
-            return &referenceDecoder{
-                &textUnmarshalerDecoder{
-                    valType: ptrType,
-                },
-            }
-        }
-        if typ.Implements(textUnmarshalerType) {
-            return &textUnmarshalerDecoder{
-                valType: typ,
-            }
-        }
         return &lazyErrorDecoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
     }
 }
@@ -103,6 +105,19 @@ func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
             return encoder
         }
     }
+
+    if typ == textMarshalerType {
+        return &directTextMarshalerEncoder{
+            stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+        }
+    }
+    if typ.Implements(textMarshalerType) {
+        return &textMarshalerEncoder{
+            valType: typ,
+            stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+        }
+    }
+
     switch typ.Kind() {
     case reflect.String:
         return encoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
@@ -117,17 +132,6 @@ func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
         typ = reflect2.DefaultTypeOfKind(typ.Kind())
         return &numericMapKeyEncoder{encoderOfType(ctx, typ)}
     default:
-        if typ == textMarshalerType {
-            return &directTextMarshalerEncoder{
-                stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
-            }
-        }
-        if typ.Implements(textMarshalerType) {
-            return &textMarshalerEncoder{
-                valType: typ,
-                stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
-            }
-        }
         if typ.Kind() == reflect.Interface {
             return &dynamicMapKeyEncoder{ctx, typ}
         }
@@ -163,10 +167,6 @@ func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
     if c == '}' {
         return
     }
-    if c != '"' {
-        iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
-        return
-    }
     iter.unreadByte()
     key := decoder.keyType.UnsafeNew()
     decoder.keyDecoder.Decode(key, iter)
@@ -249,6 +249,10 @@ type mapEncoder struct {
 }
 
 func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+    if *(*unsafe.Pointer)(ptr) == nil {
+        stream.WriteNil()
+        return
+    }
     stream.WriteObjectStart()
     iter := encoder.mapType.UnsafeIterate(ptr)
     for i := 0; iter.HasNext(); i++ {
@@ -286,16 +290,17 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
     stream.WriteObjectStart()
     mapIter := encoder.mapType.UnsafeIterate(ptr)
     subStream := stream.cfg.BorrowStream(nil)
+    subStream.Attachment = stream.Attachment
     subIter := stream.cfg.BorrowIterator(nil)
     keyValues := encodedKeyValues{}
     for mapIter.HasNext() {
-        subStream.buf = make([]byte, 0, 64)
         key, elem := mapIter.UnsafeNext()
+        subStreamIndex := subStream.Buffered()
         encoder.keyEncoder.Encode(key, subStream)
         if subStream.Error != nil && subStream.Error != io.EOF && stream.Error == nil {
             stream.Error = subStream.Error
         }
-        encodedKey := subStream.Buffer()
+        encodedKey := subStream.Buffer()[subStreamIndex:]
         subIter.ResetBytes(encodedKey)
         decodedKey := subIter.ReadString()
         if stream.indention > 0 {
@@ -306,7 +311,7 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
         encoder.elemEncoder.Encode(elem, subStream)
         keyValues = append(keyValues, encodedKV{
             key: decodedKey,
-            keyValue: subStream.Buffer(),
+            keyValue: subStream.Buffer()[subStreamIndex:],
         })
     }
     sort.Sort(keyValues)
@@ -316,6 +321,9 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
         }
         stream.Write(keyValue.keyValue)
     }
+    if subStream.Error != nil && stream.Error == nil {
+        stream.Error = subStream.Error
+    }
     stream.WriteObjectEnd()
     stream.cfg.ReturnStream(subStream)
     stream.cfg.ReturnIterator(subIter)
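
The sortKeysMapEncoder changes above reuse one borrowed sub-stream and slice it at subStreamIndex instead of allocating a fresh buffer per key. From the caller's side this is still the encoder behind SortMapKeys, as in the sketch below (not part of the diff):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// SortMapKeys routes map encoding through sortKeysMapEncoder,
	// the function modified in the hunks above.
	api := jsoniter.Config{SortMapKeys: true}.Froze()

	out, err := api.Marshal(map[string]int{"b": 2, "a": 1, "c": 3})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // expected: {"a":1,"b":2,"c":3}
}
```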

vendor/github.com/json-iterator/go/reflect_marshaler.go (generated, vendored, 12 changed lines)

@@ -3,8 +3,9 @@ package jsoniter
 import (
     "encoding"
     "encoding/json"
-    "github.com/modern-go/reflect2"
     "unsafe"
+
+    "github.com/modern-go/reflect2"
 )
 
 var marshalerType = reflect2.TypeOfPtr((*json.Marshaler)(nil)).Elem()
@@ -93,10 +94,17 @@ func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
         stream.WriteNil()
         return
     }
-    bytes, err := json.Marshal(obj)
+    marshaler := obj.(json.Marshaler)
+    bytes, err := marshaler.MarshalJSON()
     if err != nil {
         stream.Error = err
     } else {
+        // html escape was already done by jsoniter
+        // but the extra '\n' should be trimed
+        l := len(bytes)
+        if l > 0 && bytes[l-1] == '\n' {
+            bytes = bytes[:l-1]
+        }
         stream.Write(bytes)
     }
 }
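
After the marshalerEncoder hunk above, a value's own MarshalJSON output is written directly, with a trailing newline trimmed, instead of being routed through json.Marshal again. A hedged sketch with an invented json.Marshaler implementation:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// shouted is an invented type whose MarshalJSON appends an exclamation mark.
type shouted string

func (s shouted) MarshalJSON() ([]byte, error) {
	return []byte(fmt.Sprintf("%q", string(s)+"!")), nil
}

func main() {
	out, err := jsoniter.Marshal(map[string]shouted{"k": "v"})
	if err != nil {
		panic(err)
	}
	// marshalerEncoder is expected to write the MarshalJSON bytes as-is.
	fmt.Println(string(out)) // expected: {"k":"v!"}
}
```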

vendor/github.com/json-iterator/go/reflect_optional.go (generated, vendored, 4 changed lines)

@@ -2,7 +2,6 @@ package jsoniter
 
 import (
     "github.com/modern-go/reflect2"
-    "reflect"
     "unsafe"
 )
 
@@ -10,9 +9,6 @@ func decoderOfOptional(ctx *ctx, typ reflect2.Type) ValDecoder {
     ptrType := typ.(*reflect2.UnsafePtrType)
     elemType := ptrType.Elem()
     decoder := decoderOfType(ctx, elemType)
-    if ctx.prefix == "" && elemType.Kind() == reflect.Ptr {
-        return &dereferenceDecoder{elemType, decoder}
-    }
     return &OptionalDecoder{elemType, decoder}
 }
 

vendor/github.com/json-iterator/go/reflect_struct_decoder.go (generated, vendored, 66 changed lines)

@@ -500,16 +500,20 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
     if !iter.readObjectStart() {
         return
     }
+    if !iter.incrementDepth() {
+        return
+    }
     var c byte
     for c = ','; c == ','; c = iter.nextToken() {
         decoder.decodeOneField(ptr, iter)
     }
-    if iter.Error != nil && iter.Error != io.EOF {
+    if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
         iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
     }
     if c != '}' {
         iter.ReportError("struct Decode", `expect }, but found `+string([]byte{c}))
     }
+    iter.decrementDepth()
 }
 
 func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *Iterator) {
@@ -571,6 +575,9 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
     if !iter.readObjectStart() {
         return
     }
+    if !iter.incrementDepth() {
+        return
+    }
     for {
         if iter.readFieldHash() == decoder.fieldHash {
             decoder.fieldDecoder.Decode(ptr, iter)
@@ -581,9 +588,10 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
             break
         }
     }
-    if iter.Error != nil && iter.Error != io.EOF {
+    if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
         iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
     }
+    iter.decrementDepth()
 }
 
 type twoFieldsStructDecoder struct {
@@ -598,6 +606,9 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
     if !iter.readObjectStart() {
         return
     }
+    if !iter.incrementDepth() {
+        return
+    }
     for {
         switch iter.readFieldHash() {
         case decoder.fieldHash1:
@@ -611,9 +622,10 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
             break
         }
     }
-    if iter.Error != nil && iter.Error != io.EOF {
+    if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
         iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
     }
+    iter.decrementDepth()
 }
 
 type threeFieldsStructDecoder struct {
@@ -630,6 +642,9 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
     if !iter.readObjectStart() {
         return
     }
+    if !iter.incrementDepth() {
+        return
+    }
     for {
         switch iter.readFieldHash() {
         case decoder.fieldHash1:
@@ -645,9 +660,10 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
             break
         }
     }
-    if iter.Error != nil && iter.Error != io.EOF {
+    if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
         iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
     }
+    iter.decrementDepth()
 }
 
 type fourFieldsStructDecoder struct {
@@ -666,6 +682,9 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
     if !iter.readObjectStart() {
         return
     }
+    if !iter.incrementDepth() {
+        return
+    }
     for {
         switch iter.readFieldHash() {
         case decoder.fieldHash1:
@@ -683,9 +702,10 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
             break
         }
     }
-    if iter.Error != nil && iter.Error != io.EOF {
+    if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
         iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
     }
+    iter.decrementDepth()
 }
 
 type fiveFieldsStructDecoder struct {
@@ -706,6 +726,9 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
     if !iter.readObjectStart() {
         return
     }
+    if !iter.incrementDepth() {
+        return
+    }
     for {
         switch iter.readFieldHash() {
         case decoder.fieldHash1:
@@ -725,9 +748,10 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
             break
         }
     }
-    if iter.Error != nil && iter.Error != io.EOF {
+    if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
         iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
     }
+    iter.decrementDepth()
 }
 
 type sixFieldsStructDecoder struct {
@@ -750,6 +774,9 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
     if !iter.readObjectStart() {
         return
     }
+    if !iter.incrementDepth() {
+        return
+    }
     for {
         switch iter.readFieldHash() {
         case decoder.fieldHash1:
@@ -771,9 +798,10 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
             break
         }
     }
-    if iter.Error != nil && iter.Error != io.EOF {
+    if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
         iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
     }
+    iter.decrementDepth()
 }
 
 type sevenFieldsStructDecoder struct {
@@ -798,6 +826,9 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
     if !iter.readObjectStart() {
         return
     }
+    if !iter.incrementDepth() {
+        return
+    }
     for {
         switch iter.readFieldHash() {
         case decoder.fieldHash1:
@@ -821,9 +852,10 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
             break
         }
     }
-    if iter.Error != nil && iter.Error != io.EOF {
+    if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
         iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
     }
+    iter.decrementDepth()
 }
 
 type eightFieldsStructDecoder struct {
@@ -850,6 +882,9 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
     if !iter.readObjectStart() {
         return
     }
+    if !iter.incrementDepth() {
+        return
+    }
     for {
         switch iter.readFieldHash() {
         case decoder.fieldHash1:
@@ -875,9 +910,10 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
             break
         }
     }
-    if iter.Error != nil && iter.Error != io.EOF {
+    if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
         iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
     }
+    iter.decrementDepth()
 }
 
 type nineFieldsStructDecoder struct {
@@ -906,6 +942,9 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
     if !iter.readObjectStart() {
         return
     }
+    if !iter.incrementDepth() {
+        return
+    }
     for {
         switch iter.readFieldHash() {
         case decoder.fieldHash1:
@@ -933,9 +972,10 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
             break
         }
     }
-    if iter.Error != nil && iter.Error != io.EOF {
+    if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
         iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
     }
+    iter.decrementDepth()
 }
 
 type tenFieldsStructDecoder struct {
@@ -966,6 +1006,9 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
     if !iter.readObjectStart() {
         return
     }
+    if !iter.incrementDepth() {
+        return
+    }
     for {
         switch iter.readFieldHash() {
         case decoder.fieldHash1:
@@ -995,9 +1038,10 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
             break
         }
     }
-    if iter.Error != nil && iter.Error != io.EOF {
+    if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
         iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
     }
+    iter.decrementDepth()
 }
 
 type structFieldDecoder struct {

vendor/github.com/json-iterator/go/reflect_struct_encoder.go (generated, vendored, 1 changed line)

@@ -200,6 +200,7 @@ type stringModeStringEncoder struct {
 
 func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
     tempStream := encoder.cfg.BorrowStream(nil)
+    tempStream.Attachment = stream.Attachment
     defer encoder.cfg.ReturnStream(tempStream)
     encoder.elemEncoder.Encode(ptr, tempStream)
     stream.WriteString(string(tempStream.Buffer()))

vendor/github.com/json-iterator/go/stream.go (generated, vendored, 5 changed lines)

@@ -103,14 +103,14 @@ func (stream *Stream) Flush() error {
     if stream.Error != nil {
         return stream.Error
     }
-    n, err := stream.out.Write(stream.buf)
+    _, err := stream.out.Write(stream.buf)
     if err != nil {
         if stream.Error == nil {
             stream.Error = err
         }
         return err
     }
-    stream.buf = stream.buf[n:]
+    stream.buf = stream.buf[:0]
     return nil
 }
 
@@ -177,7 +177,6 @@ func (stream *Stream) WriteEmptyObject() {
 func (stream *Stream) WriteMore() {
     stream.writeByte(',')
     stream.writeIndention(0)
-    stream.Flush()
 }
 
 // WriteArrayStart write [ with possible indention
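
For context on the Flush change above: after a successful write the buffer is now reset with buf[:0] rather than re-sliced with buf[n:], and WriteMore no longer flushes implicitly. A minimal usage sketch, not part of the commit:

```go
package main

import (
	"bytes"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var buf bytes.Buffer
	stream := jsoniter.ConfigDefault.BorrowStream(&buf)
	defer jsoniter.ConfigDefault.ReturnStream(stream)

	stream.WriteObjectStart()
	stream.WriteObjectField("hello")
	stream.WriteString("world")
	stream.WriteObjectEnd()

	// An explicit Flush moves the buffered bytes to the underlying writer.
	if err := stream.Flush(); err != nil {
		panic(err)
	}
	fmt.Println(buf.String()) // expected: {"hello":"world"}
}
```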