mirror of https://github.com/json-iterator/go.git synced 2025-06-15 22:50:24 +02:00

25 Commits
1.1.3 ... 1.1.4

SHA1 Message Date
ab8a2e0c74 fix #276 allow rename when set naming strategy 2018-07-01 15:16:28 +08:00
2fbdfbb595 merge 2018-07-01 13:06:34 +08:00
720ab8dc7f add tests for #283 2018-07-01 13:05:25 +08:00
f2b4162afb Merge pull request #285 from nikhita/fix-case-sensitivity
Fix case sensitivity
2018-06-12 13:28:35 -07:00
3830516ed0 Fix case sensitivity for nested fields 2018-06-12 11:27:24 +05:30
7cceb6c2e3 Merge pull request #282 from caesarxuchao/optional-case-sensitivity
Make case sensitivity optional
2018-06-10 17:13:47 +08:00
b92cf78708 Make case sensitivity optional. Fix
https://github.com/kubernetes/kubernetes/issues/64612
2018-06-07 21:01:05 -07:00
8744d7c5c7 \n should not be ignored in base64 decode 2018-05-26 09:43:29 +08:00
37cc313d18 fix #274, unescape before base64 decode 2018-05-26 09:38:52 +08:00
2ddf6d7582 Merge pull request #266 from ceshihao/fix_base64_with_whitespace
fix base64 contains newline case
2018-04-24 08:46:23 +08:00
6a6742f0a2 fix base64 contains newline characters \r or \n 2018-04-23 23:10:55 +08:00
6c702ce12a fix #264 check io.EOF when test decoder.More 2018-04-20 16:10:56 +08:00
f88871b601 fix #263, support empty string as 0 in fuzz mode 2018-04-18 16:34:54 +08:00
f246f80f14 fix #260, support rename for extra.SupportPrivateFields 2018-04-18 16:28:55 +08:00
51dd70305b add more test for #252 2018-04-18 16:22:47 +08:00
a949c42748 fix #261 should load from reader 2018-04-18 16:11:14 +08:00
f89479f5c0 Merge pull request #257 from ash2k/release-writer
Release writer to enable GC
2018-04-08 08:25:46 +08:00
b858ec296c Release writer to enable GC 2018-04-07 21:40:08 +10:00
885a41a0a6 Merge branch 'master' of https://github.com/json-iterator/go 2018-04-03 13:41:12 +08:00
9e9a97040e always benchmark yourself 2018-04-03 13:41:01 +08:00
fb4d53e4cc Merge pull request #255 from bboreham/error-test
Add a test for input errors, and fix one bug that it finds
2018-04-02 13:50:44 +08:00
b53656d459 Check that a struct ends with closing brace 2018-04-01 22:02:44 +00:00
8f27a81d90 Add a test for input errors
Send various malformed JSON strings into the decoder for each type,
and check we get an error each time.
2018-04-01 22:01:21 +00:00
4930b053b8 explicit test case sensitive for #252 2018-03-24 22:38:32 +08:00
06e0f9391e fix #250 case insensitive field match 2018-03-20 21:43:30 +08:00
21 changed files with 370 additions and 39 deletions

Gopkg.lock (generated)

@@ -10,12 +10,12 @@
 [[projects]]
   name = "github.com/modern-go/reflect2"
   packages = ["."]
-  revision = "1df9eeb2bb81f327b96228865c5687bc2194af3f"
-  version = "1.0.0"
+  revision = "4b7aa43c6742a2c18fdef89dd197aaae7dac7ccd"
+  version = "1.0.1"

 [solve-meta]
   analyzer-name = "dep"
   analyzer-version = 1
-  inputs-digest = "ac7003b5a981716353a43055ab7d4c5357403cb30a60de2dbdeb446c1544beaa"
+  inputs-digest = "ea54a775e5a354cb015502d2e7aa4b74230fc77e894f34a838b268c25ec8eeb8"
   solver-name = "gps-cdcl"
   solver-version = 1

@@ -23,4 +23,4 @@ ignored = ["github.com/davecgh/go-spew*","github.com/google/gofuzz*","github.com
 [[constraint]]
   name = "github.com/modern-go/reflect2"
-  version = "1.0.0"
+  version = "1.0.1"

@@ -31,6 +31,9 @@ Raw Result (easyjson requires static code generation)
 | easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
 | jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
+Always benchmark with your own workload.
+The result depends heavily on the data input.

 # Usage

 100% compatibility with standard lib

@@ -77,7 +77,14 @@ func (adapter *Decoder) Decode(obj interface{}) error {

 // More is there more?
 func (adapter *Decoder) More() bool {
-	return adapter.iter.head != adapter.iter.tail
+	iter := adapter.iter
+	if iter.Error != nil {
+		return false
+	}
+	if iter.head != iter.tail {
+		return true
+	}
+	return iter.loadMore()
 }

 // Buffered remaining buffer
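
With this change More reports not only data already sitting in the buffer but also values that can still be loaded from the underlying reader, and it returns false as soon as the iterator carries an error. A minimal sketch of the streaming pattern this supports; the input literal is made up for illustration, the API calls (NewDecoder, More, Decode) are the ones touched above:

package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	dec := jsoniter.NewDecoder(strings.NewReader(`{"n":1} {"n":2}`))
	for dec.More() { // true while another value can still be read from the reader
		var v map[string]int
		if err := dec.Decode(&v); err != nil {
			break
		}
		fmt.Println(v)
	}
}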

@@ -2,9 +2,10 @@ package test

 import (
 	"encoding/json"
+	"testing"

 	"github.com/json-iterator/go"
 	"github.com/stretchr/testify/require"
-	"testing"
 )

 func Test_use_number_for_unmarshal(t *testing.T) {
@@ -45,3 +46,129 @@
 	should.Nil(err)
 	should.Equal(`123456789123456789123456789`, output)
 }
type caseSensitiveStruct struct {
A string `json:"a"`
B string `json:"b,omitempty"`
C *C `json:"C,omitempty"`
}
type C struct {
D int64 `json:"D,omitempty"`
E *E `json:"e,omitempty"`
}
type E struct {
F string `json:"F,omitempty"`
}
func Test_CaseSensitive(t *testing.T) {
should := require.New(t)
testCases := []struct {
input string
expectedOutput string
caseSensitive bool
}{
{
input: `{"A":"foo","B":"bar"}`,
expectedOutput: `{"a":"foo","b":"bar"}`,
caseSensitive: false,
},
{
input: `{"a":"foo","b":"bar"}`,
expectedOutput: `{"a":"foo","b":"bar"}`,
caseSensitive: true,
},
{
input: `{"a":"foo","b":"bar","C":{"D":10}}`,
expectedOutput: `{"a":"foo","b":"bar","C":{"D":10}}`,
caseSensitive: true,
},
{
input: `{"a":"foo","B":"bar","c":{"d":10}}`,
expectedOutput: `{"a":"foo"}`,
caseSensitive: true,
},
{
input: `{"a":"foo","C":{"d":10}}`,
expectedOutput: `{"a":"foo","C":{}}`,
caseSensitive: true,
},
{
input: `{"a":"foo","C":{"D":10,"e":{"f":"baz"}}}`,
expectedOutput: `{"a":"foo","C":{"D":10,"e":{}}}`,
caseSensitive: true,
},
{
input: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
expectedOutput: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
caseSensitive: true,
},
{
input: `{"A":"foo","c":{"d":10,"E":{"f":"baz"}}}`,
expectedOutput: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
caseSensitive: false,
},
}
for _, tc := range testCases {
val := caseSensitiveStruct{}
err := jsoniter.Config{CaseSensitive: tc.caseSensitive}.Froze().UnmarshalFromString(tc.input, &val)
should.Nil(err)
output, err := jsoniter.MarshalToString(val)
should.Nil(err)
should.Equal(tc.expectedOutput, output)
}
}
type structWithElevenFields struct {
A string `json:"A,omitempty"`
B string `json:"B,omitempty"`
C string `json:"C,omitempty"`
D string `json:"d,omitempty"`
E string `json:"e,omitempty"`
F string `json:"f,omitempty"`
G string `json:"g,omitempty"`
H string `json:"h,omitempty"`
I string `json:"i,omitempty"`
J string `json:"j,omitempty"`
K string `json:"k,omitempty"`
}
func Test_CaseSensitive_MoreThanTenFields(t *testing.T) {
should := require.New(t)
testCases := []struct {
input string
expectedOutput string
caseSensitive bool
}{
{
input: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6","g":"7","h":"8","i":"9","j":"10","k":"11"}`,
expectedOutput: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6","g":"7","h":"8","i":"9","j":"10","k":"11"}`,
caseSensitive: true,
},
{
input: `{"a":"1","b":"2","c":"3","D":"4","E":"5","F":"6"}`,
expectedOutput: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6"}`,
caseSensitive: false,
},
{
input: `{"A":"1","b":"2","d":"4","E":"5"}`,
expectedOutput: `{"A":"1","d":"4"}`,
caseSensitive: true,
},
}
for _, tc := range testCases {
val := structWithElevenFields{}
err := jsoniter.Config{CaseSensitive: tc.caseSensitive}.Froze().UnmarshalFromString(tc.input, &val)
should.Nil(err)
output, err := jsoniter.MarshalToString(val)
should.Nil(err)
should.Equal(tc.expectedOutput, output)
}
}

@@ -56,3 +56,9 @@ func Test_use_number(t *testing.T) {
 	should.Nil(decoder2.Decode(&obj2))
 	should.Equal(json.Number("123"), obj2)
 }

+func Test_decoder_more(t *testing.T) {
+	should := require.New(t)
+	decoder := jsoniter.NewDecoder(bytes.NewBufferString("abcde"))
+	should.True(decoder.More())
+}

@@ -2,12 +2,13 @@ package jsoniter

 import (
 	"encoding/json"
-	"github.com/modern-go/concurrent"
-	"github.com/modern-go/reflect2"
 	"io"
 	"reflect"
 	"sync"
 	"unsafe"
+
+	"github.com/modern-go/concurrent"
+	"github.com/modern-go/reflect2"
 )

 // Config customize how the API should behave.
@@ -23,6 +24,7 @@ type Config struct {
 	OnlyTaggedField               bool
 	ValidateJsonRawMessage        bool
 	ObjectFieldMustBeSimpleString bool
+	CaseSensitive                 bool
 }

 // API the public interface of this package.
@@ -75,6 +77,7 @@ type frozenConfig struct {
 	extensions    []Extension
 	streamPool    *sync.Pool
 	iteratorPool  *sync.Pool
+	caseSensitive bool
 }

 func (cfg *frozenConfig) initCache() {
@@ -128,6 +131,7 @@ func (cfg Config) Froze() API {
 		objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
 		onlyTaggedField:               cfg.OnlyTaggedField,
 		disallowUnknownFields:         cfg.DisallowUnknownFields,
+		caseSensitive:                 cfg.CaseSensitive,
 	}
 	api.streamPool = &sync.Pool{
 		New: func() interface{} {
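
CaseSensitive defaults to false, so the existing case-insensitive matching (as in encoding/json) is unchanged unless a config opts in. A small sketch using only the API added in this diff; the struct and inputs are invented for illustration:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type user struct {
	Name string `json:"name"`
}

func main() {
	strict := jsoniter.Config{CaseSensitive: true}.Froze()

	var u user
	// With CaseSensitive set, "Name" no longer matches the lower-case "name" tag.
	_ = strict.UnmarshalFromString(`{"Name":"foo"}`, &u)
	fmt.Printf("%q\n", u.Name) // ""

	// The default API keeps the old case-insensitive matching.
	_ = jsoniter.ConfigCompatibleWithStandardLibrary.UnmarshalFromString(`{"Name":"foo"}`, &u)
	fmt.Printf("%q\n", u.Name) // "foo"
}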

@@ -217,6 +217,9 @@ func (decoder *fuzzyIntegerDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.It
 	default:
 		iter.ReportError("fuzzyIntegerDecoder", "not number or string")
 	}
+	if len(str) == 0 {
+		str = "0"
+	}
 	newIter := iter.Pool().BorrowIterator([]byte(str))
 	defer iter.Pool().ReturnIterator(newIter)
 	isFloat := strings.IndexByte(str, '.') != -1

@@ -37,6 +37,8 @@ func Test_any_to_int64(t *testing.T) {
 	should.Equal(int64(10), val)
 	should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
 	should.Equal(int64(10), val)
+	should.Nil(jsoniter.UnmarshalFromString(`""`, &val))
+	should.Equal(int64(0), val)

 	// bool part
 	should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
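
The empty-string-as-zero behavior applies once the fuzzy decoders from the extra package are registered globally, which is what this test file relies on. A sketch of the user-facing effect; the program is illustrative, RegisterFuzzyDecoders is the existing registration entry point:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
	"github.com/json-iterator/go/extra"
)

func main() {
	extra.RegisterFuzzyDecoders()

	var n int64
	// In fuzzy mode an empty string now decodes to 0 instead of failing (#263).
	if err := jsoniter.UnmarshalFromString(`""`, &n); err != nil {
		panic(err)
	}
	fmt.Println(n) // 0
}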

@@ -2,6 +2,7 @@ package extra

 import (
 	"github.com/json-iterator/go"
+	"strings"
 	"unicode"
 )

@@ -17,6 +18,16 @@ type namingStrategyExtension struct {
 func (extension *namingStrategyExtension) UpdateStructDescriptor(structDescriptor *jsoniter.StructDescriptor) {
 	for _, binding := range structDescriptor.Fields {
+		tag, hastag := binding.Field.Tag().Lookup("json")
+		if hastag {
+			tagParts := strings.Split(tag, ",")
+			if tagParts[0] == "-" {
+				continue // hidden field
+			}
+			if tagParts[0] != "" {
+				continue // field explicitly named
+			}
+		}
 		binding.ToNames = []string{extension.translate(binding.Field.Name())}
 		binding.FromNames = []string{extension.translate(binding.Field.Name())}
 	}
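
The tests in the next file cover an explicit rename and a bare `,omitempty` tag; the remaining path is a field hidden with `json:"-"`, which the strategy now also leaves untouched. A sketch of that case (not taken from the diff; struct and values are invented):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
	"github.com/json-iterator/go/extra"
)

func main() {
	extra.SetNamingStrategy(extra.LowerCaseWithUnderscores)

	out, _ := jsoniter.Marshal(struct {
		UserName string
		Password string `json:"-"` // stays hidden; the strategy no longer renames it
	}{UserName: "taowen", Password: "secret"})
	fmt.Println(string(out)) // {"user_name":"taowen"}
}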

@@ -21,3 +21,30 @@ func Test_lower_case_with_underscores(t *testing.T) {
 	should.Nil(err)
 	should.Equal(`{"user_name":"taowen","first_language":"Chinese"}`, string(output))
 }

+func Test_set_naming_strategy_with_overrides(t *testing.T) {
+	should := require.New(t)
+	SetNamingStrategy(LowerCaseWithUnderscores)
+	output, err := jsoniter.Marshal(struct {
+		UserName      string `json:"UserName"`
+		FirstLanguage string
+	}{
+		UserName:      "taowen",
+		FirstLanguage: "Chinese",
+	})
+	should.Nil(err)
+	should.Equal(`{"UserName":"taowen","first_language":"Chinese"}`, string(output))
+}
+
+func Test_set_naming_strategy_with_omitempty(t *testing.T) {
+	should := require.New(t)
+	SetNamingStrategy(LowerCaseWithUnderscores)
+	output, err := jsoniter.Marshal(struct {
+		UserName      string
+		FirstLanguage string `json:",omitempty"`
+	}{
+		UserName: "taowen",
+	})
+	should.Nil(err)
+	should.Equal(`{"user_name":"taowen"}`, string(output))
+}
}

@@ -2,6 +2,7 @@ package extra

 import (
 	"github.com/json-iterator/go"
+	"strings"
 	"unicode"
 )

@@ -18,8 +19,36 @@ func (extension *privateFieldsExtension) UpdateStructDescriptor(structDescriptor
 	for _, binding := range structDescriptor.Fields {
 		isPrivate := unicode.IsLower(rune(binding.Field.Name()[0]))
 		if isPrivate {
+			tag, hastag := binding.Field.Tag().Lookup("json")
+			if !hastag {
 				binding.FromNames = []string{binding.Field.Name()}
 				binding.ToNames = []string{binding.Field.Name()}
+				continue
+			}
+			tagParts := strings.Split(tag, ",")
+			names := calcFieldNames(binding.Field.Name(), tagParts[0], tag)
+			binding.FromNames = names
+			binding.ToNames = names
 		}
 	}
 }

+func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string {
+	// ignore?
+	if wholeTag == "-" {
+		return []string{}
+	}
+	// rename?
+	var fieldNames []string
+	if tagProvidedFieldName == "" {
+		fieldNames = []string{originalFieldName}
+	} else {
+		fieldNames = []string{tagProvidedFieldName}
+	}
+	// private?
+	isNotExported := unicode.IsLower(rune(originalFieldName[0]))
+	if isNotExported {
+		fieldNames = []string{}
+	}
+	return fieldNames
+}
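
A sketch of what the extension is expected to do after this change, assuming the logic above: an untagged private field is still encoded under its own name, while a private field tagged `json:"-"` stays hidden. The struct and values are invented, and the exact marshalling behavior is not verified against this revision:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
	"github.com/json-iterator/go/extra"
)

func main() {
	extra.SupportPrivateFields()

	out, _ := jsoniter.Marshal(struct {
		visible string
		hidden  string `json:"-"`
	}{visible: "a", hidden: "b"})
	fmt.Println(string(out)) // {"visible":"a"}
}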

@@ -60,7 +60,7 @@ func (iter *Iterator) readFieldHash() int64 {
 			if b == '\\' {
 				iter.head = i
 				for _, b := range iter.readStringSlowPath() {
-					if 'A' <= b && b <= 'Z' {
+					if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
 						b += 'a' - 'A'
 					}
 					hash ^= int64(b)
@@ -82,7 +82,7 @@ func (iter *Iterator) readFieldHash() int64 {
 				}
 				return hash
 			}
-			if 'A' <= b && b <= 'Z' {
+			if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
 				b += 'a' - 'A'
 			}
 			hash ^= int64(b)
@@ -95,10 +95,14 @@ func (iter *Iterator) readFieldHash() int64 {
 	}
 }

-func calcHash(str string) int64 {
+func calcHash(str string, caseSensitive bool) int64 {
 	hash := int64(0x811c9dc5)
 	for _, b := range str {
-		hash ^= int64(unicode.ToLower(b))
+		if caseSensitive {
+			hash ^= int64(b)
+		} else {
+			hash ^= int64(unicode.ToLower(b))
+		}
 		hash *= 0x1000193
 	}
 	return int64(hash)
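
calcHash is FNV-1a over the field-name bytes; lowercasing each byte first is what makes the default lookup case-insensitive, and skipping it is what the new flag buys. A self-contained illustration built from the function in the hunk above:

package main

import (
	"fmt"
	"unicode"
)

func calcHash(str string, caseSensitive bool) int64 {
	hash := int64(0x811c9dc5) // FNV offset basis
	for _, b := range str {
		if caseSensitive {
			hash ^= int64(b)
		} else {
			hash ^= int64(unicode.ToLower(b))
		}
		hash *= 0x1000193 // FNV prime
	}
	return int64(hash)
}

func main() {
	// Case-insensitive mode: names differing only in case collide by design.
	fmt.Println(calcHash("UserName", false) == calcHash("username", false)) // true
	// Case-sensitive mode: they hash differently, so only exact names match.
	fmt.Println(calcHash("UserName", true) == calcHash("username", true)) // false
}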

@@ -3,9 +3,10 @@ package misc_tests
 import (
 	"bytes"
 	"encoding/json"
+	"testing"

 	"github.com/json-iterator/go"
 	"github.com/stretchr/testify/require"
-	"testing"
 )

 func Test_empty_array(t *testing.T) {
@@ -168,6 +169,17 @@ func Test_decode_byte_array_from_base64(t *testing.T) {
 	should.Equal([]byte{1, 2, 3}, data)
 }

+func Test_decode_byte_array_from_base64_with_newlines(t *testing.T) {
+	should := require.New(t)
+	data := []byte{}
+	err := json.Unmarshal([]byte(`"A\rQ\nID"`), &data)
+	should.Nil(err)
+	should.Equal([]byte{1, 2, 3}, data)
+	err = jsoniter.Unmarshal([]byte(`"A\rQ\nID"`), &data)
+	should.Nil(err)
+	should.Equal([]byte{1, 2, 3}, data)
+}

 func Test_decode_byte_array_from_array(t *testing.T) {
 	should := require.New(t)
 	data := []byte{}

@@ -23,6 +23,7 @@ func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
 }

 func (cfg *frozenConfig) ReturnStream(stream *Stream) {
+	stream.out = nil
 	stream.Error = nil
 	stream.Attachment = nil
 	cfg.streamPool.Put(stream)

@@ -2,9 +2,10 @@ package jsoniter

 import (
 	"fmt"
-	"github.com/modern-go/reflect2"
 	"reflect"
 	"unsafe"
+
+	"github.com/modern-go/reflect2"
 )

 // ValDecoder is an internal type registered to cache as needed.
@@ -40,6 +41,14 @@ type ctx struct {
 	decoders map[reflect2.Type]ValDecoder
 }

+func (b *ctx) caseSensitive() bool {
+	if b.frozenConfig == nil {
+		// default is case-insensitive
+		return false
+	}
+	return b.frozenConfig.caseSensitive
+}
+
 func (b *ctx) append(prefix string) *ctx {
 	return &ctx{
 		frozenConfig: b.frozenConfig,

@@ -2,10 +2,11 @@ package jsoniter

 import (
 	"encoding/base64"
-	"github.com/modern-go/reflect2"
 	"reflect"
 	"strconv"
 	"unsafe"
+
+	"github.com/modern-go/reflect2"
 )

 const ptrSize = 32 << uintptr(^uintptr(0)>>63)
@@ -416,16 +417,11 @@ func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
 	}
 	switch iter.WhatIsNext() {
 	case StringValue:
-		encoding := base64.StdEncoding
-		src := iter.SkipAndReturnBytes()
-		src = src[1 : len(src)-1]
-		decodedLen := encoding.DecodedLen(len(src))
-		dst := make([]byte, decodedLen)
-		len, err := encoding.Decode(dst, src)
+		src := iter.ReadString()
+		dst, err := base64.StdEncoding.DecodeString(src)
 		if err != nil {
 			iter.ReportError("decode base64", err.Error())
 		} else {
-			dst = dst[:len]
 			codec.sliceType.UnsafeSet(ptr, unsafe.Pointer(&dst))
 		}
 	case ArrayValue:
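
Reading the value with ReadString resolves JSON escapes before base64 decoding, and base64.StdEncoding.DecodeString itself ignores \r and \n, which is what the tests in this changeset rely on. A sketch of the user-visible effect; the payload is the one used by the test earlier in the diff:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var data []byte
	// Escaped characters inside the base64 payload no longer break decoding (#266, #274).
	err := jsoniter.Unmarshal([]byte(`"A\rQ\nID"`), &data)
	fmt.Println(data, err) // [1 2 3] <nil>
}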

@@ -2,10 +2,11 @@ package jsoniter

 import (
 	"fmt"
-	"github.com/modern-go/reflect2"
 	"io"
 	"strings"
 	"unsafe"
+
+	"github.com/modern-go/reflect2"
 )

 func decoderOfStruct(ctx *ctx, typ reflect2.Type) ValDecoder {
@@ -31,6 +32,15 @@ func decoderOfStruct(ctx *ctx, typ reflect2.Type) ValDecoder {
 	for k, binding := range bindings {
 		fields[k] = binding.Decoder.(*structFieldDecoder)
 	}
+
+	if !ctx.caseSensitive() {
+		for k, binding := range bindings {
+			if _, found := fields[strings.ToLower(k)]; !found {
+				fields[strings.ToLower(k)] = binding.Decoder.(*structFieldDecoder)
+			}
+		}
+	}
+
 	return createStructDecoder(ctx, typ, fields)
 }
@@ -41,12 +51,13 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 	knownHash := map[int64]struct{}{
 		0: {},
 	}

 	switch len(fields) {
 	case 0:
 		return &skipObjectDecoder{typ}
 	case 1:
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -60,7 +71,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder1 *structFieldDecoder
 		var fieldDecoder2 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -83,7 +94,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder2 *structFieldDecoder
 		var fieldDecoder3 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -114,7 +125,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder3 *structFieldDecoder
 		var fieldDecoder4 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -151,7 +162,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder4 *structFieldDecoder
 		var fieldDecoder5 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -194,7 +205,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder5 *structFieldDecoder
 		var fieldDecoder6 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -243,7 +254,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder6 *structFieldDecoder
 		var fieldDecoder7 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -298,7 +309,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder7 *structFieldDecoder
 		var fieldDecoder8 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -359,7 +370,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder8 *structFieldDecoder
 		var fieldDecoder9 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -426,7 +437,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder9 *structFieldDecoder
 		var fieldDecoder10 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -489,13 +500,16 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
 	if !iter.readObjectStart() {
 		return
 	}
-	decoder.decodeOneField(ptr, iter)
-	for iter.nextToken() == ',' {
+	var c byte
+	for c = ','; c == ','; c = iter.nextToken() {
 		decoder.decodeOneField(ptr, iter)
 	}
 	if iter.Error != nil && iter.Error != io.EOF {
 		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
 	}
+	if c != '}' {
+		iter.ReportError("struct Decode", `expect }, but found `+string([]byte{c}))
+	}
 }

 func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *Iterator) {
@@ -505,13 +519,13 @@ func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *It
 		fieldBytes := iter.ReadStringAsSlice()
 		field = *(*string)(unsafe.Pointer(&fieldBytes))
 		fieldDecoder = decoder.fields[field]
-		if fieldDecoder == nil {
+		if fieldDecoder == nil && !iter.cfg.caseSensitive {
 			fieldDecoder = decoder.fields[strings.ToLower(field)]
 		}
 	} else {
 		field = iter.ReadString()
 		fieldDecoder = decoder.fields[field]
-		if fieldDecoder == nil {
+		if fieldDecoder == nil && !iter.cfg.caseSensitive {
 			fieldDecoder = decoder.fields[strings.ToLower(field)]
 		}
 	}
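
Because the field loop now records the last token, an object that is not closed with `}` is reported as an error instead of being accepted silently. A sketch of the effect, using one of the malformed inputs exercised by the new error test below; the struct is invented:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var obj struct {
		X string `json:"x"`
	}
	// Truncated object: the closing brace is missing, so decoding now fails.
	err := jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(`{"x": "y"`), &obj)
	fmt.Println(err != nil) // true
}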

value_tests/error_test.go (new file)

@@ -0,0 +1,36 @@
package test

import (
	"github.com/json-iterator/go"
	"github.com/stretchr/testify/require"
	"reflect"
	"testing"
)

func Test_errorInput(t *testing.T) {
	for _, testCase := range unmarshalCases {
		if testCase.obj != nil {
			continue
		}
		valType := reflect.TypeOf(testCase.ptr).Elem()
		t.Run(valType.String(), func(t *testing.T) {
			for _, data := range []string{
				`x`,
				`n`,
				`nul`,
				`{x}`,
				`{"x"}`,
				`{"x": "y"x}`,
				`{"x": "y"`,
				`{"x": "y", "a"}`,
				`[`,
				`[{"x": "y"}`,
			} {
				ptrVal := reflect.New(valType)
				ptr := ptrVal.Interface()
				err := jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(data), ptr)
				require.Error(t, err, "on input %q", data)
			}
		})
	}
}

@@ -20,5 +20,8 @@ func init() {
 	}, unmarshalCase{
 		ptr:   (*[]byte)(nil),
 		input: `"aGVsbG8="`,
+	}, unmarshalCase{
+		ptr:   (*[]byte)(nil),
+		input: `"c3ViamVjdHM\/X2Q9MQ=="`,
 	})
 }

@@ -63,6 +63,43 @@ func init() {
 			d *time.Timer
 		})(nil),
 		input: `{"a": 444, "b":"bad", "C":256, "d":{"not":"a timer"}}`,
+	}, unmarshalCase{
+		ptr: (*struct {
+			A string
+			B string
+			C string
+			D string
+			E string
+			F string
+			G string
+			H string
+			I string
+			J string
+			K string
+		})(nil),
+		input: `{"a":"1","b":"2","c":"3","d":"4","e":"5","f":"6","g":"7","h":"8","i":"9","j":"10","k":"11"}`,
+	}, unmarshalCase{
+		ptr: (*struct {
+			T float64 `json:"T"`
+		})(nil),
+		input: `{"t":10.0}`,
+	}, unmarshalCase{
+		ptr: (*struct {
+			T float64 `json:"T"`
+		})(nil),
+		input: `{"T":10.0}`,
+	}, unmarshalCase{
+		ptr: (*struct {
+			T float64 `json:"t"`
+		})(nil),
+		input: `{"T":10.0}`,
+	}, unmarshalCase{
+		ptr: (*struct {
+			KeyString string       `json:"key_string"`
+			Type      string       `json:"type"`
+			Asks      [][2]float64 `json:"asks"`
+		})(nil),
+		input: `{"key_string": "KEYSTRING","type": "TYPE","asks": [[1e+66,1]]}`,
 	})

 	marshalCases = append(marshalCases,
 		struct {