mirror of https://github.com/json-iterator/go.git synced 2025-06-15 22:50:24 +02:00

24 Commits

SHA1 Message Date
0ff49de124 update README 2019-03-06 22:29:09 +08:00
5bc9320502 Merge pull request #316 from proemergotech/master
fix #308 do NOT skip embedded structs without tag when OnlyTaggedField is set to true
2019-02-08 15:56:42 +08:00
f64ce68b6e Merge pull request #338 from dvrkps/master
Clean go vet error and ineffassign warnings.
2019-01-14 23:53:30 +08:00
2d42ff74dd Merge pull request #337 from denverdino/fix-encode-with-MarshalJSON
Fix the incompatible encoding #336
2019-01-14 23:52:16 +08:00
3a023a5fbc clean readPositiveFloat64 2019-01-10 18:00:15 +01:00
16aef10b2b clean readPositiveFloat32 2019-01-10 17:55:28 +01:00
ae4c002f78 rename ExampleMyKey 2019-01-10 17:51:25 +01:00
e4aa2ec063 Fix the incompatible encoding 2019-01-03 18:19:22 +08:00
d05f387f50 fix #317, try parse as BigFloat if overflow 2018-11-12 14:45:56 +08:00
a9403d25cd fix #308 do NOT skip embedded structs without tag when OnlyTaggedField is set to true 2018-10-29 12:00:51 +01:00
05d041de10 fix #313 support json marshaller type as map key 2018-10-24 23:28:41 +08:00
5916df66b3 fix #311 handle nil any 2018-10-24 21:05:37 +08:00
2433035e51 Merge pull request #304 from Quasilyte/quasilyte/emptyFallthrough
use multi-value case clause instead of fallthrough
2018-09-14 09:48:43 +08:00
6dfc0bf2dd Merge pull request #305 from Quasilyte/quasilyte/assignOp
simplify `x = x <op> y` to `x <op>= y`
2018-09-14 09:48:25 +08:00
b9be8dd373 Merge pull request #306 from Quasilyte/quasilyte/underef
remove redundant dereferencing expressions
2018-09-14 09:47:54 +08:00
b8d78b6aaf Merge pull request #307 from Quasilyte/quasilyte/commentedOutCode
any_tests: remove commented-out code
2018-09-14 09:47:25 +08:00
7109b5e7dd any_tests: remove commented-out code
Found using https://go-critic.github.io/overview#commentedOutCode-ref
2018-09-13 21:57:53 +03:00
4cc76529e8 remove redundant dereferencing expressions
Found using https://go-critic.github.io/overview#underef-ref
2018-09-13 21:51:59 +03:00
c5ddac9dc3 simplify x = x <op> y to x <op>= y
Found using https://go-critic.github.io/overview#assignOp-ref
2018-09-13 21:48:13 +03:00
f76d712086 use multi-value case clause instead of fallthrough
Found using https://go-critic.github.io/overview#emptyFallthrough-ref
2018-09-13 21:43:37 +03:00
1624edc445 fix #295 decoder more was not compatible with standard library 2018-08-06 14:07:27 +08:00
5d789e5e02 fix #291 omit empty was not handled properly for json raw message 2018-08-06 13:58:33 +08:00
0260c89b54 fix #286 calcHash should use byte not rune to calc hash 2018-08-06 13:23:06 +08:00
10a568c511 fix #293 copy extensions 2018-07-22 11:51:51 +08:00
25 changed files with 396 additions and 90 deletions

View File

@ -10,10 +10,6 @@ A high-performance 100% compatible drop-in replacement of "encoding/json"
You can also use thrift like JSON using [thrift-iterator](https://github.com/thrift-iterator/go)
```
Go developers, please join us. DiDi Chuxing Platform Technology Department, taowen@didichuxing.com
```
# Benchmark
![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png)

View File

@ -81,10 +81,12 @@ func (adapter *Decoder) More() bool {
if iter.Error != nil {
return false
}
if iter.head != iter.tail {
return true
c := iter.nextToken()
if c == 0 {
return false
}
return iter.loadMore()
iter.unreadByte()
return c != ']' && c != '}'
}
// Buffered remaining buffer
@ -98,7 +100,7 @@ func (adapter *Decoder) Buffered() io.Reader {
func (adapter *Decoder) UseNumber() {
cfg := adapter.iter.cfg.configBeforeFrozen
cfg.UseNumber = true
adapter.iter.cfg = cfg.frozeWithCacheReuse()
adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
}
// DisallowUnknownFields causes the Decoder to return an error when the destination
@ -107,7 +109,7 @@ func (adapter *Decoder) UseNumber() {
func (adapter *Decoder) DisallowUnknownFields() {
cfg := adapter.iter.cfg.configBeforeFrozen
cfg.DisallowUnknownFields = true
adapter.iter.cfg = cfg.frozeWithCacheReuse()
adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
}
// NewEncoder same as json.NewEncoder
@ -132,14 +134,14 @@ func (adapter *Encoder) Encode(val interface{}) error {
func (adapter *Encoder) SetIndent(prefix, indent string) {
config := adapter.stream.cfg.configBeforeFrozen
config.IndentionStep = len(indent)
adapter.stream.cfg = config.frozeWithCacheReuse()
adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
}
// SetEscapeHTML escape html by default, set to false to disable
func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
config := adapter.stream.cfg.configBeforeFrozen
config.EscapeHTML = escapeHTML
adapter.stream.cfg = config.frozeWithCacheReuse()
adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
}
// Valid reports whether data is a valid JSON encoding.
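
As a quick illustration of the reworked More above, here is a minimal sketch of draining a newline-terminated stream; the Record type and the input literal are assumptions, mirroring the flow1/flow2 streaming tests further down:

```go
package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

// Record is a hypothetical payload type for this sketch.
type Record struct {
	A string
}

func main() {
	// Concatenated JSON values followed by a trailing newline. With the
	// token-based More above, the loop stops cleanly at end of input
	// instead of treating leftover whitespace as another value.
	dec := jsoniter.NewDecoder(strings.NewReader("{\"A\":\"hello\"}\n{\"A\":\"hello\"}\n"))
	for dec.More() {
		var r Record
		if err := dec.Decode(&r); err != nil {
			fmt.Println("decode error:", err)
			return
		}
		fmt.Println(r.A)
	}
}
```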

any.go
View File

@ -312,6 +312,10 @@ func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
any := *(*Any)(ptr)
if any == nil {
stream.WriteNil()
return
}
any.WriteTo(stream)
}
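
A small usage sketch of the nil guard above; the Holder type is invented for illustration, and the expected output matches the Test_nil_any case added later in this change set:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Holder is a hypothetical struct whose Any field is deliberately left nil.
type Holder struct {
	A jsoniter.Any
}

func main() {
	// Without the guard, WriteTo was called on a nil Any; with it, the
	// field is written as JSON null.
	out, err := jsoniter.Marshal(&Holder{})
	fmt.Println(string(out), err) // {"A":null} <nil>
}
```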

View File

@ -82,10 +82,8 @@ func Test_read_float_to_any(t *testing.T) {
should := require.New(t)
any := jsoniter.WrapFloat64(12.3)
anyFloat64 := float64(12.3)
//negaAnyFloat64 := float64(-1.1)
any2 := jsoniter.WrapFloat64(-1.1)
should.Equal(float64(12.3), any.ToFloat64())
//should.Equal("12.3", any.ToString())
should.True(any.ToBool())
should.Equal(float32(anyFloat64), any.ToFloat32())
should.Equal(int(anyFloat64), any.ToInt())

View File

@ -118,6 +118,4 @@ func Test_object_wrapper_any_get_all(t *testing.T) {
should.Contains(any.Keys(), "Field1")
should.Contains(any.Keys(), "Field2")
should.NotContains(any.Keys(), "Field3")
//should.Contains(any.GetObject()["Field1"].GetArray()[0], 1)
}

View File

@ -8,24 +8,24 @@ import (
)
var stringConvertMap = map[string]string{
"null": "",
"321.1": "321.1",
`"1.1"`: "1.1",
`"-123.1"`: "-123.1",
"0.0": "0.0",
"0": "0",
`"0"`: "0",
`"0.0"`: "0.0",
`"00.0"`: "00.0",
"true": "true",
"false": "false",
`"true"`: "true",
`"false"`: "false",
`"true123"`: "true123",
`"+1"`: "+1",
"[]": "[]",
"[1,2]": "[1,2]",
"{}": "{}",
"null": "",
"321.1": "321.1",
`"1.1"`: "1.1",
`"-123.1"`: "-123.1",
"0.0": "0.0",
"0": "0",
`"0"`: "0",
`"0.0"`: "0.0",
`"00.0"`: "00.0",
"true": "true",
"false": "false",
`"true"`: "true",
`"false"`: "false",
`"true123"`: "true123",
`"+1"`: "+1",
"[]": "[]",
"[1,2]": "[1,2]",
"{}": "{}",
`{"a":1, "stream":true}`: `{"a":1, "stream":true}`,
}

View File

@ -172,3 +172,58 @@ func Test_CaseSensitive_MoreThanTenFields(t *testing.T) {
should.Equal(tc.expectedOutput, output)
}
}
type onlyTaggedFieldStruct struct {
A string `json:"a"`
B string
FSimpl F `json:"f_simpl"`
ISimpl I
FPtr *F `json:"f_ptr"`
IPtr *I
F
*I
}
type F struct {
G string `json:"g"`
H string
}
type I struct {
J string `json:"j"`
K string
}
func Test_OnlyTaggedField(t *testing.T) {
should := require.New(t)
obj := onlyTaggedFieldStruct{
A: "a",
B: "b",
FSimpl: F{G: "g", H: "h"},
ISimpl: I{J: "j", K: "k"},
FPtr: &F{G: "g", H: "h"},
IPtr: &I{J: "j", K: "k"},
F: F{G: "g", H: "h"},
I: &I{J: "j", K: "k"},
}
output, err := jsoniter.Config{OnlyTaggedField: true}.Froze().Marshal(obj)
should.Nil(err)
m := make(map[string]interface{})
err = jsoniter.Unmarshal(output, &m)
should.Nil(err)
should.Equal(map[string]interface{}{
"a": "a",
"f_simpl": map[string]interface{}{
"g": "g",
},
"f_ptr": map[string]interface{}{
"g": "g",
},
"g": "g",
"j": "j",
}, m)
}

View File

@ -0,0 +1,36 @@
package test
import (
"bytes"
"encoding/json"
"github.com/json-iterator/go"
"testing"
"github.com/stretchr/testify/require"
)
type Foo struct {
Bar interface{}
}
func (f Foo) MarshalJSON() ([]byte, error) {
var buf bytes.Buffer
err := json.NewEncoder(&buf).Encode(f.Bar)
return buf.Bytes(), err
}
// Standard Encoder has trailing newline.
func TestEncodeMarshalJSON(t *testing.T) {
foo := Foo {
Bar: 123,
}
should := require.New(t)
var buf, stdbuf bytes.Buffer
enc := jsoniter.ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
enc.Encode(foo)
stdenc := json.NewEncoder(&stdbuf)
stdenc.Encode(foo)
should.Equal(stdbuf.Bytes(), buf.Bytes())
}

View File

@ -74,7 +74,9 @@ type frozenConfig struct {
disallowUnknownFields bool
decoderCache *concurrent.Map
encoderCache *concurrent.Map
extensions []Extension
encoderExtension Extension
decoderExtension Extension
extraExtensions []Extension
streamPool *sync.Pool
iteratorPool *sync.Pool
caseSensitive bool
@ -158,22 +160,21 @@ func (cfg Config) Froze() API {
if cfg.ValidateJsonRawMessage {
api.validateJsonRawMessage(encoderExtension)
}
if len(encoderExtension) > 0 {
api.extensions = append(api.extensions, encoderExtension)
}
if len(decoderExtension) > 0 {
api.extensions = append(api.extensions, decoderExtension)
}
api.encoderExtension = encoderExtension
api.decoderExtension = decoderExtension
api.configBeforeFrozen = cfg
return api
}
func (cfg Config) frozeWithCacheReuse() *frozenConfig {
func (cfg Config) frozeWithCacheReuse(extraExtensions []Extension) *frozenConfig {
api := getFrozenConfigFromCache(cfg)
if api != nil {
return api
}
api = cfg.Froze().(*frozenConfig)
for _, extension := range extraExtensions {
api.RegisterExtension(extension)
}
addFrozenConfigToCache(cfg, api)
return api
}
@ -190,7 +191,7 @@ func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
stream.WriteRaw(string(rawMessage))
}
}, func(ptr unsafe.Pointer) bool {
return false
return len(*((*json.RawMessage)(ptr))) == 0
}}
extension[reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()] = encoder
extension[reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()] = encoder
@ -219,7 +220,9 @@ func (cfg *frozenConfig) getTagKey() string {
}
func (cfg *frozenConfig) RegisterExtension(extension Extension) {
cfg.extensions = append(cfg.extensions, extension)
cfg.extraExtensions = append(cfg.extraExtensions, extension)
copied := cfg.configBeforeFrozen
cfg.configBeforeFrozen = copied
}
type lossyFloat32Encoder struct {
@ -314,7 +317,7 @@ func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]
}
newCfg := cfg.configBeforeFrozen
newCfg.IndentionStep = len(indent)
return newCfg.frozeWithCacheReuse().Marshal(v)
return newCfg.frozeWithCacheReuse(cfg.extraExtensions).Marshal(v)
}
func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error {
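
To see why frozeWithCacheReuse now takes extraExtensions, here is a hedged sketch of a registered extension surviving the re-freeze that Encoder.SetIndent triggers; the lowerCaseExtension type is invented for illustration:

```go
package main

import (
	"bytes"
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

// lowerCaseExtension lower-cases every emitted field name. It embeds
// DummyExtension so only UpdateStructDescriptor needs to be implemented.
type lowerCaseExtension struct {
	jsoniter.DummyExtension
}

func (e *lowerCaseExtension) UpdateStructDescriptor(sd *jsoniter.StructDescriptor) {
	for _, binding := range sd.Fields {
		binding.ToNames = []string{strings.ToLower(binding.Field.Name())}
	}
}

func main() {
	api := jsoniter.Config{}.Froze()
	api.RegisterExtension(&lowerCaseExtension{})

	var buf bytes.Buffer
	enc := api.NewEncoder(&buf)
	// SetIndent re-freezes the config; because the registered extensions are
	// now threaded through frozeWithCacheReuse, the extension still applies
	// to the indented encoder.
	enc.SetIndent("", "  ")
	_ = enc.Encode(struct{ Name string }{Name: "value"})
	fmt.Print(buf.String()) // field emitted as "name", even after re-freezing
}
```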

View File

@ -95,7 +95,7 @@ func ExampleGet() {
// Crimson
}
func ExampleMapKey() {
func ExampleMyKey() {
hello := MyKey("hello")
output, _ := Marshal(map[*MyKey]string{&hello: "world"})
fmt.Println(string(output))

View File

@ -1,6 +1,7 @@
package test
import (
"bytes"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"strconv"
@ -98,3 +99,92 @@ func Test_read_custom_interface(t *testing.T) {
should.Nil(err)
should.Equal("hello", val.Hello())
}
const flow1 = `
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}`
const flow2 = `
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
`
type (
Type1 struct {
A string
}
Type2 struct {
A string
}
)
func (t *Type2) UnmarshalJSON(data []byte) error {
return nil
}
func (t *Type2) MarshalJSON() ([]byte, error) {
return nil, nil
}
func TestType1NoFinalLF(t *testing.T) {
reader := bytes.NewReader([]byte(flow1))
dec := jsoniter.NewDecoder(reader)
i := 0
for dec.More() {
data := &Type1{}
if err := dec.Decode(data); err != nil {
t.Errorf("at %v got %v", i, err)
}
i++
}
}
func TestType1FinalLF(t *testing.T) {
reader := bytes.NewReader([]byte(flow2))
dec := jsoniter.NewDecoder(reader)
i := 0
for dec.More() {
data := &Type1{}
if err := dec.Decode(data); err != nil {
t.Errorf("at %v got %v", i, err)
}
i++
}
}
func TestType2NoFinalLF(t *testing.T) {
reader := bytes.NewReader([]byte(flow1))
dec := jsoniter.NewDecoder(reader)
i := 0
for dec.More() {
data := &Type2{}
if err := dec.Decode(data); err != nil {
t.Errorf("at %v got %v", i, err)
}
i++
}
}
func TestType2FinalLF(t *testing.T) {
reader := bytes.NewReader([]byte(flow2))
dec := jsoniter.NewDecoder(reader)
i := 0
for dec.More() {
data := &Type2{}
if err := dec.Decode(data); err != nil {
t.Errorf("at %v got %v", i, err)
}
i++
}
}

View File

@ -153,7 +153,7 @@ func (codec *binaryAsStringCodec) Decode(ptr unsafe.Pointer, iter *jsoniter.Iter
}
b4 := rawBytes[i+3]
b5 := rawBytes[i+4]
i = i + 4
i += 4
b = readHex(iter, b4, b5)
}
bytes = append(bytes, b)
@ -178,7 +178,7 @@ func readHex(iter *jsoniter.Iterator, b1, b2 byte) byte {
iter.ReportError("read hex", "expects 0~9 or a~f, but found "+string([]byte{b1}))
return 0
}
ret = ret * 16
ret *= 16
if b2 >= '0' && b2 <= '9' {
ret = b2 - '0'
} else if b2 >= 'a' && b2 <= 'f' {

View File

@ -77,14 +77,12 @@ func (iter *Iterator) ReadFloat32() (ret float32) {
}
func (iter *Iterator) readPositiveFloat32() (ret float32) {
value := uint64(0)
c := byte(' ')
i := iter.head
// first char
if i == iter.tail {
return iter.readFloat32SlowPath()
}
c = iter.buf[i]
c := iter.buf[i]
i++
ind := floatDigits[c]
switch ind {
@ -107,7 +105,7 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
return
}
}
value = uint64(ind)
value := uint64(ind)
// chars before dot
non_decimal_loop:
for ; i < iter.tail; i++ {
@ -145,9 +143,7 @@ non_decimal_loop:
}
// too many decimal places
return iter.readFloat32SlowPath()
case invalidCharForNumber:
fallthrough
case dotInNumber:
case invalidCharForNumber, dotInNumber:
return iter.readFloat32SlowPath()
}
decimalPlaces++
@ -218,14 +214,12 @@ func (iter *Iterator) ReadFloat64() (ret float64) {
}
func (iter *Iterator) readPositiveFloat64() (ret float64) {
value := uint64(0)
c := byte(' ')
i := iter.head
// first char
if i == iter.tail {
return iter.readFloat64SlowPath()
}
c = iter.buf[i]
c := iter.buf[i]
i++
ind := floatDigits[c]
switch ind {
@ -248,7 +242,7 @@ func (iter *Iterator) readPositiveFloat64() (ret float64) {
return
}
}
value = uint64(ind)
value := uint64(ind)
// chars before dot
non_decimal_loop:
for ; i < iter.tail; i++ {
@ -286,9 +280,7 @@ non_decimal_loop:
}
// too many decimal places
return iter.readFloat64SlowPath()
case invalidCharForNumber:
fallthrough
case dotInNumber:
case invalidCharForNumber, dotInNumber:
return iter.readFloat64SlowPath()
}
decimalPlaces++

View File

@ -2,7 +2,7 @@ package jsoniter
import (
"fmt"
"unicode"
"strings"
)
// ReadObject read one field from object.
@ -96,13 +96,12 @@ func (iter *Iterator) readFieldHash() int64 {
}
func calcHash(str string, caseSensitive bool) int64 {
if !caseSensitive {
str = strings.ToLower(str)
}
hash := int64(0x811c9dc5)
for _, b := range str {
if caseSensitive {
hash ^= int64(b)
} else {
hash ^= int64(unicode.ToLower(b))
}
for _, b := range []byte(str) {
hash ^= int64(b)
hash *= 0x1000193
}
return int64(hash)
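
To make the byte-versus-rune change concrete, here is a standalone sketch of the same FNV-1a loop; the fieldHash name is made up, but the constants and the up-front ToLower come from the diff above:

```go
package main

import (
	"fmt"
	"strings"
)

// fieldHash mirrors calcHash above: FNV-1a over the raw bytes of the field
// name, lower-casing the whole string first when matching is case-insensitive.
func fieldHash(s string, caseSensitive bool) int64 {
	if !caseSensitive {
		s = strings.ToLower(s)
	}
	hash := int64(0x811c9dc5)
	for _, b := range []byte(s) { // iterate bytes, not runes, so the hash
		hash ^= int64(b)          // matches how field names are read
		hash *= 0x1000193
	}
	return hash
}

func main() {
	// Case-insensitive mode maps "Field1" and "field1" to the same hash.
	fmt.Println(fieldHash("Field1", false) == fieldHash("field1", false)) // true
}
```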

View File

@ -2,12 +2,22 @@
package jsoniter
import "fmt"
import (
"fmt"
"io"
)
func (iter *Iterator) skipNumber() {
if !iter.trySkipNumber() {
iter.unreadByte()
iter.ReadFloat32()
if iter.Error != nil && iter.Error != io.EOF {
return
}
iter.ReadFloat64()
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = nil
iter.ReadBigFloat()
}
}
}
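
A minimal sketch of what the fallback chain buys; 1.122e+250 is the unmarshal case added near the end of this change set, and it previously failed because the strict skipNumber validated numbers as float32:

```go
package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Capturing the raw bytes forces a strict skip of the number. With the
	// ReadFloat64 / ReadBigFloat fallback above, a value far beyond float32
	// range is still accepted as valid JSON.
	var raw json.RawMessage
	err := jsoniter.Unmarshal([]byte(`1.122e+250`), &raw)
	fmt.Println(string(raw), err) // 1.122e+250 <nil>
}
```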

View File

@ -114,7 +114,7 @@ func Test_overwrite_interface_value_with_nil(t *testing.T) {
err := json.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
should.NoError(err)
should.Equal(42, (*(wrapper.Payload.(*Payload))).Value)
should.Equal(42, wrapper.Payload.(*Payload).Value)
err = json.Unmarshal([]byte(`{"payload": null}`), &wrapper)
should.NoError(err)
@ -128,7 +128,7 @@ func Test_overwrite_interface_value_with_nil(t *testing.T) {
err = jsoniter.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
should.Equal(nil, err)
should.Equal(42, (*(wrapper.Payload.(*Payload))).Value)
should.Equal(42, wrapper.Payload.(*Payload).Value)
err = jsoniter.Unmarshal([]byte(`{"payload": null}`), &wrapper)
should.Equal(nil, err)

View File

@ -120,7 +120,8 @@ func decoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
for _, extension := range extensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
for _, extension := range ctx.extensions {
decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
for _, extension := range ctx.extraExtensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
return decoder
@ -222,7 +223,8 @@ func encoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
for _, extension := range extensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
for _, extension := range ctx.extensions {
encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
for _, extension := range ctx.extraExtensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
return encoder

View File

@ -246,7 +246,8 @@ func getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
for _, extension := range extensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
for _, extension := range ctx.extensions {
decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
for _, extension := range ctx.extraExtensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
}
@ -259,14 +260,18 @@ func _getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
return decoder
}
}
for _, extension := range ctx.extensions {
decoder := ctx.decoderExtension.CreateDecoder(typ)
if decoder != nil {
return decoder
}
for _, extension := range ctx.extraExtensions {
decoder := extension.CreateDecoder(typ)
if decoder != nil {
return decoder
}
}
typeName := typ.String()
decoder := typeDecoders[typeName]
decoder = typeDecoders[typeName]
if decoder != nil {
return decoder
}
@ -286,7 +291,8 @@ func getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
for _, extension := range extensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
for _, extension := range ctx.extensions {
encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
for _, extension := range ctx.extraExtensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
}
@ -300,14 +306,18 @@ func _getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
return encoder
}
}
for _, extension := range ctx.extensions {
encoder := ctx.encoderExtension.CreateEncoder(typ)
if encoder != nil {
return encoder
}
for _, extension := range ctx.extraExtensions {
encoder := extension.CreateEncoder(typ)
if encoder != nil {
return encoder
}
}
typeName := typ.String()
encoder := typeEncoders[typeName]
encoder = typeEncoders[typeName]
if encoder != nil {
return encoder
}
@ -328,7 +338,7 @@ func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
for i := 0; i < structType.NumField(); i++ {
field := structType.Field(i)
tag, hastag := field.Tag().Lookup(ctx.getTagKey())
if ctx.onlyTaggedField && !hastag {
if ctx.onlyTaggedField && !hastag && !field.Anonymous() {
continue
}
tagParts := strings.Split(tag, ",")
@ -393,7 +403,9 @@ func createStructDescriptor(ctx *ctx, typ reflect2.Type, bindings []*Binding, em
for _, extension := range extensions {
extension.UpdateStructDescriptor(structDescriptor)
}
for _, extension := range ctx.extensions {
ctx.encoderExtension.UpdateStructDescriptor(structDescriptor)
ctx.decoderExtension.UpdateStructDescriptor(structDescriptor)
for _, extension := range ctx.extraExtensions {
extension.UpdateStructDescriptor(structDescriptor)
}
processTags(structDescriptor, ctx.frozenConfig)

View File

@ -39,7 +39,11 @@ func encoderOfMap(ctx *ctx, typ reflect2.Type) ValEncoder {
}
func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
for _, extension := range ctx.extensions {
decoder := ctx.decoderExtension.CreateMapKeyDecoder(typ)
if decoder != nil {
return decoder
}
for _, extension := range ctx.extraExtensions {
decoder := extension.CreateMapKeyDecoder(typ)
if decoder != nil {
return decoder
@ -60,14 +64,26 @@ func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
return &numericMapKeyDecoder{decoderOfType(ctx, typ)}
default:
ptrType := reflect2.PtrTo(typ)
if ptrType.Implements(textMarshalerType) {
if ptrType.Implements(unmarshalerType) {
return &referenceDecoder{
&unmarshalerDecoder{
valType: ptrType,
},
}
}
if typ.Implements(unmarshalerType) {
return &unmarshalerDecoder{
valType: typ,
}
}
if ptrType.Implements(textUnmarshalerType) {
return &referenceDecoder{
&textUnmarshalerDecoder{
valType: ptrType,
},
}
}
if typ.Implements(textMarshalerType) {
if typ.Implements(textUnmarshalerType) {
return &textUnmarshalerDecoder{
valType: typ,
}
@ -77,7 +93,11 @@ func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
}
func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
for _, extension := range ctx.extensions {
encoder := ctx.encoderExtension.CreateMapKeyEncoder(typ)
if encoder != nil {
return encoder
}
for _, extension := range ctx.extraExtensions {
encoder := extension.CreateMapKeyEncoder(typ)
if encoder != nil {
return encoder

View File

@ -93,8 +93,7 @@ func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteNil()
return
}
marshaler := obj.(json.Marshaler)
bytes, err := marshaler.MarshalJSON()
bytes, err := json.Marshal(obj)
if err != nil {
stream.Error = err
} else {
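
A short sketch of the compatibility this change restores: a MarshalJSON implementation that emits a trailing newline (as json.NewEncoder does) is now normalized through json.Marshal, matching the standard library; the Payload type mirrors the Foo test earlier in this change set:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Payload returns non-compact JSON (with a trailing newline) from MarshalJSON.
type Payload struct {
	Bar interface{}
}

func (p Payload) MarshalJSON() ([]byte, error) {
	var buf bytes.Buffer
	err := json.NewEncoder(&buf).Encode(p.Bar) // appends '\n'
	return buf.Bytes(), err
}

func main() {
	stdOut, _ := json.Marshal(Payload{Bar: 123})
	iterOut, _ := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(Payload{Bar: 123})
	fmt.Printf("%q %q\n", stdOut, iterOut) // "123" "123": the newline is compacted away
}
```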

View File

@ -13,7 +13,7 @@ func init() {
`{"hello":{}}`, // valid
`{"hello":{}}}`, // invalid
`{"hello": { "hello": 1}}`, // valid
`{abc}`, // invalid
`{abc}`, // invalid
},
})
}

View File

@ -145,6 +145,9 @@ func init() {
(*struct {
Field bool `json:",omitempty,string"`
})(nil),
(*struct {
Field bool `json:"中文"`
})(nil),
)
}

View File

@ -224,3 +224,13 @@ func Test_EmptyInput(t *testing.T) {
t.Errorf("Expected error")
}
}
type Foo struct {
A jsoniter.Any
}
func Test_nil_any(t *testing.T) {
should := require.New(t)
data, _ := jsoniter.Marshal(&Foo{})
should.Equal(`{"A":null}`, string(data))
}

View File

@ -2,7 +2,9 @@ package test
import (
"encoding/json"
"fmt"
"math/big"
"time"
)
func init() {
@ -27,6 +29,8 @@ func init() {
nilMap,
&nilMap,
map[string]*json.RawMessage{"hello": pRawMessage(json.RawMessage("[]"))},
map[Date]bool{{}: true},
map[Date2]bool{{}: true},
)
unmarshalCases = append(unmarshalCases, unmarshalCase{
ptr: (*map[string]string)(nil),
@ -37,6 +41,20 @@ func init() {
}, unmarshalCase{
ptr: (*map[string]*json.RawMessage)(nil),
input: "{\"test\":[{\"key\":\"value\"}]}",
}, unmarshalCase{
ptr: (*map[Date]bool)(nil),
input: `{
"2018-12-12": true,
"2018-12-13": true,
"2018-12-14": true
}`,
}, unmarshalCase{
ptr: (*map[Date2]bool)(nil),
input: `{
"2018-12-12": true,
"2018-12-13": true,
"2018-12-14": true
}`,
})
}
@ -49,3 +67,51 @@ type MyString string
func (ms MyString) Hello() string {
return string(ms)
}
type Date struct {
time.Time
}
func (d *Date) UnmarshalJSON(b []byte) error {
dateStr := string(b) // something like `"2017-08-20"`
if dateStr == "null" {
return nil
}
t, err := time.Parse(`"2006-01-02"`, dateStr)
if err != nil {
return fmt.Errorf("cant parse date: %#v", err)
}
d.Time = t
return nil
}
func (d *Date) MarshalJSON() ([]byte, error) {
return []byte(d.Time.Format("2006-01-02")), nil
}
type Date2 struct {
time.Time
}
func (d Date2) UnmarshalJSON(b []byte) error {
dateStr := string(b) // something like `"2017-08-20"`
if dateStr == "null" {
return nil
}
t, err := time.Parse(`"2006-01-02"`, dateStr)
if err != nil {
return fmt.Errorf("cant parse date: %#v", err)
}
d.Time = t
return nil
}
func (d Date2) MarshalJSON() ([]byte, error) {
return []byte(d.Time.Format("2006-01-02")), nil
}

View File

@ -1,13 +1,24 @@
package test
import "encoding/json"
import (
"encoding/json"
)
func init() {
marshalCases = append(marshalCases,
json.RawMessage("{}"),
struct {
Env string `json:"env"`
Extra json.RawMessage `json:"extra,omitempty"`
}{
Env: "jfdk",
},
)
unmarshalCases = append(unmarshalCases, unmarshalCase{
ptr: (*json.RawMessage)(nil),
input: `[1,2,3]`,
}, unmarshalCase{
ptr: (*json.RawMessage)(nil),
input: `1.122e+250`,
})
}
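
Finally, a sketch of the omitempty behavior that the len-based IsEmpty check in the config.go hunk above restores for raw messages; the struct literal mirrors the marshal case just added:

```go
package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// With IsEmpty reporting len == 0 for raw messages, an unset Extra field
	// tagged omitempty is dropped from the output, as with encoding/json.
	val := struct {
		Env   string          `json:"env"`
		Extra json.RawMessage `json:"extra,omitempty"`
	}{Env: "jfdk"}
	out, err := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(val)
	fmt.Println(string(out), err) // {"env":"jfdk"} <nil>
}
```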