Mirror of https://github.com/json-iterator/go.git, synced 2025-06-15 22:50:24 +02:00
Compare commits
93 Commits
| SHA1 |
|---|
| acfec88f7a |
| e88512faf8 |
| b681149eae |
| d1af7639b3 |
| 7c9f8c2d20 |
| f814d6c0f1 |
| aba8654400 |
| a1c9557592 |
| 44a7e7340d |
| 2834c7e43c |
| d296277d5c |
| dc11f49689 |
| 83f7b825b3 |
| 03217c3e97 |
| 908eaed151 |
| eec24895fe |
| 1ba732a07d |
| 819acad769 |
| 695ec2b83b |
| 028e2ef2bd |
| 976454858b |
| 27518f6661 |
| 94869abf43 |
| 459f0e30ae |
| 0039f4ac3d |
| fb5614a4ca |
| f71b9090aa |
| 08047c174c |
| 68347ec4d6 |
| 0fd91468bb |
| 1bc9828b4f |
| 24c3d57281 |
| 0ff49de124 |
| 5bc9320502 |
| f64ce68b6e |
| 2d42ff74dd |
| 3a023a5fbc |
| 16aef10b2b |
| ae4c002f78 |
| e4aa2ec063 |
| d05f387f50 |
| a9403d25cd |
| 05d041de10 |
| 5916df66b3 |
| 2433035e51 |
| 6dfc0bf2dd |
| b9be8dd373 |
| b8d78b6aaf |
| 7109b5e7dd |
| 4cc76529e8 |
| c5ddac9dc3 |
| f76d712086 |
| 1624edc445 |
| 5d789e5e02 |
| 0260c89b54 |
| 10a568c511 |
| ab8a2e0c74 |
| 2fbdfbb595 |
| 720ab8dc7f |
| f2b4162afb |
| 3830516ed0 |
| 7cceb6c2e3 |
| b92cf78708 |
| 8744d7c5c7 |
| 37cc313d18 |
| 2ddf6d7582 |
| 6a6742f0a2 |
| 6c702ce12a |
| f88871b601 |
| f246f80f14 |
| 51dd70305b |
| a949c42748 |
| f89479f5c0 |
| b858ec296c |
| 885a41a0a6 |
| 9e9a97040e |
| fb4d53e4cc |
| b53656d459 |
| 8f27a81d90 |
| 4930b053b8 |
| 06e0f9391e |
| ca39e5af3e |
| 39acec93e0 |
| 25fa392355 |
| d51e841de0 |
| 3353055b2a |
| 455b3f8bb8 |
| 2a93f9003e |
| 9472474ffd |
| ad83167dc6 |
| fff342fd04 |
| 8d6662b81b |
| a377e2656b |
Gopkg.lock (generated, 40 changed lines)

@@ -2,44 +2,20 @@


 [[projects]]
-name = "github.com/davecgh/go-spew"
+name = "github.com/modern-go/concurrent"
-packages = ["spew"]
-revision = "346938d642f2ec3594ed81d874461961cd0faa76"
-version = "v1.1.0"
-
-[[projects]]
-branch = "master"
-name = "github.com/google/gofuzz"
 packages = ["."]
-revision = "24818f796faf91cd76ec7bddd72458fbced7a6c1"
+revision = "e0a39a4cb4216ea8db28e22a69f4ec25610d513a"
+version = "1.0.0"

 [[projects]]
-name = "github.com/pmezard/go-difflib"
+name = "github.com/modern-go/reflect2"
-packages = ["difflib"]
+packages = ["."]
-revision = "792786c7400a136282c1664665ae0a8db921c6c2"
+revision = "4b7aa43c6742a2c18fdef89dd197aaae7dac7ccd"
-version = "v1.0.0"
+version = "1.0.1"

-[[projects]]
-name = "github.com/stretchr/testify"
-packages = [
-"assert",
-"require"
-]
-revision = "12b6f73e6084dad08a7c6e575284b177ecafbc71"
-version = "v1.2.1"
-
-[[projects]]
-name = "github.com/v2pro/plz"
-packages = [
-"concurrent",
-"reflect2"
-]
-revision = "10fc95fad3224a032229e59f6e7023137d82b526"
-version = "0.9.1"

 [solve-meta]
 analyzer-name = "dep"
 analyzer-version = 1
-inputs-digest = "64fe3937a1afce5cb551c06ff7109065c971643e082512243d1071bab428ff14"
+inputs-digest = "ea54a775e5a354cb015502d2e7aa4b74230fc77e894f34a838b268c25ec8eeb8"
 solver-name = "gps-cdcl"
 solver-version = 1
Gopkg.toml (17 changed lines)

@@ -19,19 +19,8 @@
 # name = "github.com/x/y"
 # version = "2.4.0"

+ignored = ["github.com/davecgh/go-spew*","github.com/google/gofuzz*","github.com/stretchr/testify*"]

 [[constraint]]
-name = "github.com/davecgh/go-spew"
+name = "github.com/modern-go/reflect2"
-version = "1.1.0"
+version = "1.0.1"
-
-[[constraint]]
-branch = "master"
-name = "github.com/google/gofuzz"
-
-[[constraint]]
-name = "github.com/stretchr/testify"
-version = "1.1.4"
-
-[[constraint]]
-name = "github.com/v2pro/plz"
-version = "0.9.1"
@@ -10,10 +10,6 @@ A high-performance 100% compatible drop-in replacement of "encoding/json"

 You can also use thrift like JSON using [thrift-iterator](https://github.com/thrift-iterator/go)

-```
-Go开发者们请加入我们,滴滴出行平台技术部 taowen@didichuxing.com
-```
-
 # Benchmark

 

@@ -31,6 +27,9 @@ Raw Result (easyjson requires static code generation)
 | easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
 | jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |

+Always benchmark with your own workload.
+The result depends heavily on the data input.
+
 # Usage

 100% compatibility with standard lib
adapter.go (21 changed lines)

@@ -16,7 +16,7 @@ func Unmarshal(data []byte, v interface{}) error {
 return ConfigDefault.Unmarshal(data, v)
 }

-// UnmarshalFromString convenient method to read from string instead of []byte
+// UnmarshalFromString is a convenient method to read from string instead of []byte
 func UnmarshalFromString(str string, v interface{}) error {
 return ConfigDefault.UnmarshalFromString(str, v)
 }

@@ -77,7 +77,16 @@ func (adapter *Decoder) Decode(obj interface{}) error {

 // More is there more?
 func (adapter *Decoder) More() bool {
-return adapter.iter.head != adapter.iter.tail
+iter := adapter.iter
+if iter.Error != nil {
+return false
+}
+c := iter.nextToken()
+if c == 0 {
+return false
+}
+iter.unreadByte()
+return c != ']' && c != '}'
 }

 // Buffered remaining buffer

@@ -91,7 +100,7 @@ func (adapter *Decoder) Buffered() io.Reader {
 func (adapter *Decoder) UseNumber() {
 cfg := adapter.iter.cfg.configBeforeFrozen
 cfg.UseNumber = true
-adapter.iter.cfg = cfg.frozeWithCacheReuse()
+adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
 }

 // DisallowUnknownFields causes the Decoder to return an error when the destination

@@ -100,7 +109,7 @@ func (adapter *Decoder) UseNumber() {
 func (adapter *Decoder) DisallowUnknownFields() {
 cfg := adapter.iter.cfg.configBeforeFrozen
 cfg.DisallowUnknownFields = true
-adapter.iter.cfg = cfg.frozeWithCacheReuse()
+adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
 }

 // NewEncoder same as json.NewEncoder

@@ -125,14 +134,14 @@ func (adapter *Encoder) Encode(val interface{}) error {
 func (adapter *Encoder) SetIndent(prefix, indent string) {
 config := adapter.stream.cfg.configBeforeFrozen
 config.IndentionStep = len(indent)
-adapter.stream.cfg = config.frozeWithCacheReuse()
+adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
 }

 // SetEscapeHTML escape html by default, set to false to disable
 func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
 config := adapter.stream.cfg.configBeforeFrozen
 config.EscapeHTML = escapeHTML
-adapter.stream.cfg = config.frozeWithCacheReuse()
+adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
 }

 // Valid reports whether data is a valid JSON encoding.
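The rewritten More above peeks at the next token instead of only checking the internal buffer, so a Decoder that reads several concatenated JSON documents now reports correctly whether another value follows. A minimal sketch of that streaming pattern (the stream content and struct are illustrative, not taken from this change set):

```go
package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

type Msg struct {
	A string
}

func main() {
	// Two JSON documents back to back, no trailing newline.
	dec := jsoniter.NewDecoder(strings.NewReader(`{"A":"hello"}{"A":"world"}`))
	for dec.More() { // false at EOF, on iterator error, or at a closing ']'/'}'
		var m Msg
		if err := dec.Decode(&m); err != nil {
			fmt.Println("decode error:", err)
			break
		}
		fmt.Println(m.A)
	}
}
```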
any.go (8 changed lines)

@@ -3,11 +3,11 @@ package jsoniter
 import (
 "errors"
 "fmt"
+"github.com/modern-go/reflect2"
 "io"
 "reflect"
-"unsafe"
-"github.com/v2pro/plz/reflect2"
 "strconv"
+"unsafe"
 )

 // Any generic object representation.

@@ -312,6 +312,10 @@ func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {

 func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
 any := *(*Any)(ptr)
+if any == nil {
+stream.WriteNil()
+return
+}
 any.WriteTo(stream)
 }
@@ -3,8 +3,8 @@ package any_tests
 import (
 "testing"

-"github.com/stretchr/testify/require"
 "github.com/json-iterator/go"
+"github.com/stretchr/testify/require"
 )

 func Test_read_empty_array_as_any(t *testing.T) {

@@ -4,8 +4,8 @@ import (
 "fmt"
 "testing"

-"github.com/stretchr/testify/require"
 "github.com/json-iterator/go"
+"github.com/stretchr/testify/require"
 )

 var boolConvertMap = map[string]bool{

@@ -3,8 +3,8 @@ package any_tests
 import (
 "testing"

-"github.com/stretchr/testify/require"
 "github.com/json-iterator/go"
+"github.com/stretchr/testify/require"
 )

 var floatConvertMap = map[string]float64{

@@ -82,10 +82,8 @@ func Test_read_float_to_any(t *testing.T) {
 should := require.New(t)
 any := jsoniter.WrapFloat64(12.3)
 anyFloat64 := float64(12.3)
-//negaAnyFloat64 := float64(-1.1)
 any2 := jsoniter.WrapFloat64(-1.1)
 should.Equal(float64(12.3), any.ToFloat64())
-//should.Equal("12.3", any.ToString())
 should.True(any.ToBool())
 should.Equal(float32(anyFloat64), any.ToFloat32())
 should.Equal(int(anyFloat64), any.ToInt())

@@ -4,8 +4,8 @@ import (
 "fmt"
 "testing"

-"github.com/stretchr/testify/require"
 "github.com/json-iterator/go"
+"github.com/stretchr/testify/require"
 )

 var intConvertMap = map[string]int{

@@ -1,9 +1,9 @@
 package any_tests

 import (
+"github.com/json-iterator/go"
 "github.com/stretchr/testify/require"
 "testing"
-"github.com/json-iterator/go"
 )

 func Test_wrap_map(t *testing.T) {

@@ -1,9 +1,9 @@
 package any_tests

 import (
+"github.com/json-iterator/go"
 "github.com/stretchr/testify/require"
 "testing"
-"github.com/json-iterator/go"
 )

 func Test_read_null_as_any(t *testing.T) {

@@ -3,8 +3,8 @@ package any_tests
 import (
 "testing"

-"github.com/stretchr/testify/require"
 "github.com/json-iterator/go"
+"github.com/stretchr/testify/require"
 )

 func Test_read_object_as_any(t *testing.T) {

@@ -118,6 +118,4 @@ func Test_object_wrapper_any_get_all(t *testing.T) {
 should.Contains(any.Keys(), "Field1")
 should.Contains(any.Keys(), "Field2")
 should.NotContains(any.Keys(), "Field3")
-
-//should.Contains(any.GetObject()["Field1"].GetArray()[0], 1)
 }

@@ -3,29 +3,29 @@ package any_tests
 import (
 "testing"

-"github.com/stretchr/testify/require"
 "github.com/json-iterator/go"
+"github.com/stretchr/testify/require"
 )

 var stringConvertMap = map[string]string{
 "null": "",
 "321.1": "321.1",
 `"1.1"`: "1.1",
 `"-123.1"`: "-123.1",
 "0.0": "0.0",
 "0": "0",
 `"0"`: "0",
 `"0.0"`: "0.0",
 `"00.0"`: "00.0",
 "true": "true",
 "false": "false",
 `"true"`: "true",
 `"false"`: "false",
 `"true123"`: "true123",
 `"+1"`: "+1",
 "[]": "[]",
 "[1,2]": "[1,2]",
 "{}": "{}",
 `{"a":1, "stream":true}`: `{"a":1, "stream":true}`,
 }

@@ -3,8 +3,8 @@ package any_tests
 import (
 "testing"

-"github.com/stretchr/testify/require"
 "github.com/json-iterator/go"
+"github.com/stretchr/testify/require"
 )

 // if must be valid is useless, just drop this test

@@ -3,8 +3,8 @@ package any_tests
 import (
 "testing"

-"github.com/stretchr/testify/require"
 "github.com/json-iterator/go"
+"github.com/stretchr/testify/require"
 )

 func Test_wrap_and_valuetype_everything(t *testing.T) {
@@ -1,10 +1,11 @@
 package test

 import (
-"testing"
-"github.com/stretchr/testify/require"
-"github.com/json-iterator/go"
 "encoding/json"
+"testing"
+
+"github.com/json-iterator/go"
+"github.com/stretchr/testify/require"
 )

 func Test_use_number_for_unmarshal(t *testing.T) {

@@ -23,7 +24,6 @@ func Test_customize_float_marshal(t *testing.T) {
 should.Equal("1.234568", str)
 }

-
 func Test_customize_tag_key(t *testing.T) {

 type TestObject struct {

@@ -45,4 +45,185 @@ func Test_read_large_number_as_interface(t *testing.T) {
 output, err := jsoniter.MarshalToString(val)
 should.Nil(err)
 should.Equal(`123456789123456789123456789`, output)
 }
+
+type caseSensitiveStruct struct {
+A string `json:"a"`
+B string `json:"b,omitempty"`
+C *C `json:"C,omitempty"`
+}
+
+type C struct {
+D int64 `json:"D,omitempty"`
+E *E `json:"e,omitempty"`
+}
+
+type E struct {
+F string `json:"F,omitempty"`
+}
+
+func Test_CaseSensitive(t *testing.T) {
+should := require.New(t)
+
+testCases := []struct {
+input string
+expectedOutput string
+caseSensitive bool
+}{
+{
+input: `{"A":"foo","B":"bar"}`,
+expectedOutput: `{"a":"foo","b":"bar"}`,
+caseSensitive: false,
+},
+{
+input: `{"a":"foo","b":"bar"}`,
+expectedOutput: `{"a":"foo","b":"bar"}`,
+caseSensitive: true,
+},
+{
+input: `{"a":"foo","b":"bar","C":{"D":10}}`,
+expectedOutput: `{"a":"foo","b":"bar","C":{"D":10}}`,
+caseSensitive: true,
+},
+{
+input: `{"a":"foo","B":"bar","c":{"d":10}}`,
+expectedOutput: `{"a":"foo"}`,
+caseSensitive: true,
+},
+{
+input: `{"a":"foo","C":{"d":10}}`,
+expectedOutput: `{"a":"foo","C":{}}`,
+caseSensitive: true,
+},
+{
+input: `{"a":"foo","C":{"D":10,"e":{"f":"baz"}}}`,
+expectedOutput: `{"a":"foo","C":{"D":10,"e":{}}}`,
+caseSensitive: true,
+},
+{
+input: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
+expectedOutput: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
+caseSensitive: true,
+},
+{
+input: `{"A":"foo","c":{"d":10,"E":{"f":"baz"}}}`,
+expectedOutput: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
+caseSensitive: false,
+},
+}
+
+for _, tc := range testCases {
+val := caseSensitiveStruct{}
+err := jsoniter.Config{CaseSensitive: tc.caseSensitive}.Froze().UnmarshalFromString(tc.input, &val)
+should.Nil(err)
+
+output, err := jsoniter.MarshalToString(val)
+should.Nil(err)
+should.Equal(tc.expectedOutput, output)
+}
+}
+
+type structWithElevenFields struct {
+A string `json:"A,omitempty"`
+B string `json:"B,omitempty"`
+C string `json:"C,omitempty"`
+D string `json:"d,omitempty"`
+E string `json:"e,omitempty"`
+F string `json:"f,omitempty"`
+G string `json:"g,omitempty"`
+H string `json:"h,omitempty"`
+I string `json:"i,omitempty"`
+J string `json:"j,omitempty"`
+K string `json:"k,omitempty"`
+}
+
+func Test_CaseSensitive_MoreThanTenFields(t *testing.T) {
+should := require.New(t)
+
+testCases := []struct {
+input string
+expectedOutput string
+caseSensitive bool
+}{
+{
+input: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6","g":"7","h":"8","i":"9","j":"10","k":"11"}`,
+expectedOutput: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6","g":"7","h":"8","i":"9","j":"10","k":"11"}`,
+caseSensitive: true,
+},
+{
+input: `{"a":"1","b":"2","c":"3","D":"4","E":"5","F":"6"}`,
+expectedOutput: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6"}`,
+caseSensitive: false,
+},
+{
+input: `{"A":"1","b":"2","d":"4","E":"5"}`,
+expectedOutput: `{"A":"1","d":"4"}`,
+caseSensitive: true,
+},
+}
+
+for _, tc := range testCases {
+val := structWithElevenFields{}
+err := jsoniter.Config{CaseSensitive: tc.caseSensitive}.Froze().UnmarshalFromString(tc.input, &val)
+should.Nil(err)
+
+output, err := jsoniter.MarshalToString(val)
+should.Nil(err)
+should.Equal(tc.expectedOutput, output)
+}
+}
+
+type onlyTaggedFieldStruct struct {
+A string `json:"a"`
+B string
+FSimpl F `json:"f_simpl"`
+ISimpl I
+FPtr *F `json:"f_ptr"`
+IPtr *I
+F
+*I
+}
+
+type F struct {
+G string `json:"g"`
+H string
+}
+
+type I struct {
+J string `json:"j"`
+K string
+}
+
+func Test_OnlyTaggedField(t *testing.T) {
+should := require.New(t)
+
+obj := onlyTaggedFieldStruct{
+A: "a",
+B: "b",
+FSimpl: F{G: "g", H: "h"},
+ISimpl: I{J: "j", K: "k"},
+FPtr: &F{G: "g", H: "h"},
+IPtr: &I{J: "j", K: "k"},
+F: F{G: "g", H: "h"},
+I: &I{J: "j", K: "k"},
+}
+
+output, err := jsoniter.Config{OnlyTaggedField: true}.Froze().Marshal(obj)
+should.Nil(err)
+
+m := make(map[string]interface{})
+err = jsoniter.Unmarshal(output, &m)
+should.Nil(err)
+
+should.Equal(map[string]interface{}{
+"a": "a",
+"f_simpl": map[string]interface{}{
+"g": "g",
+},
+"f_ptr": map[string]interface{}{
+"g": "g",
+},
+"g": "g",
+"j": "j",
+}, m)
+}
@@ -2,11 +2,11 @@ package test

 import (
 "bytes"
-"github.com/stretchr/testify/require"
-"testing"
-"github.com/json-iterator/go"
-"io/ioutil"
 "encoding/json"
+"github.com/json-iterator/go"
+"github.com/stretchr/testify/require"
+"io/ioutil"
+"testing"
 )

 func Test_disallowUnknownFields(t *testing.T) {

@@ -18,7 +18,6 @@ func Test_disallowUnknownFields(t *testing.T) {
 should.Error(decoder.Decode(&obj))
 }

-
 func Test_new_decoder(t *testing.T) {
 should := require.New(t)
 decoder1 := json.NewDecoder(bytes.NewBufferString(`[1][2]`))

@@ -57,3 +56,9 @@ func Test_use_number(t *testing.T) {
 should.Nil(decoder2.Decode(&obj2))
 should.Equal(json.Number("123"), obj2)
 }
+
+func Test_decoder_more(t *testing.T) {
+should := require.New(t)
+decoder := jsoniter.NewDecoder(bytes.NewBufferString("abcde"))
+should.True(decoder.More())
+}
@@ -8,8 +8,8 @@ import (
 "testing"
 "unicode/utf8"

-"github.com/stretchr/testify/require"
 "github.com/json-iterator/go"
+"github.com/stretchr/testify/require"
 )

 func Test_new_encoder(t *testing.T) {
@@ -1,11 +1,11 @@
 package test

 import (
-"testing"
-"github.com/stretchr/testify/require"
 "bytes"
-"github.com/json-iterator/go"
 "encoding/json"
+"github.com/json-iterator/go"
+"github.com/stretchr/testify/require"
+"testing"
 )

 // Standard Encoder has trailing newline.

@@ -17,4 +17,4 @@ func TestEncoderHasTrailingNewline(t *testing.T) {
 stdenc := json.NewEncoder(&stdbuf)
 stdenc.Encode(1)
 should.Equal(stdbuf.Bytes(), buf.Bytes())
 }
@@ -2,9 +2,9 @@ package test

 import (
 "encoding/json"
+"github.com/json-iterator/go"
 "github.com/stretchr/testify/require"
 "testing"
-"github.com/json-iterator/go"
 )

 func Test_marshal_indent(t *testing.T) {
api_tests/marshal_json_escape_test.go (new file, 47 lines)

@@ -0,0 +1,47 @@
+package test
+
+import (
+"bytes"
+"encoding/json"
+"testing"
+
+jsoniter "github.com/json-iterator/go"
+"github.com/stretchr/testify/require"
+)
+
+var marshalConfig = jsoniter.Config{
+EscapeHTML: false,
+SortMapKeys: true,
+ValidateJsonRawMessage: true,
+}.Froze()
+
+type Container struct {
+Bar interface{}
+}
+
+func (c *Container) MarshalJSON() ([]byte, error) {
+return marshalConfig.Marshal(&c.Bar)
+}
+
+func TestEncodeEscape(t *testing.T) {
+should := require.New(t)
+
+container := &Container{
+Bar: []string{"123<ab>", "ooo"},
+}
+out, err := marshalConfig.Marshal(container)
+should.Nil(err)
+bufout := string(out)
+
+var stdbuf bytes.Buffer
+stdenc := json.NewEncoder(&stdbuf)
+stdenc.SetEscapeHTML(false)
+err = stdenc.Encode(container)
+should.Nil(err)
+stdout := string(stdbuf.Bytes())
+if stdout[len(stdout)-1:] == "\n" {
+stdout = stdout[:len(stdout)-1]
+}
+
+should.Equal(stdout, bufout)
+}
api_tests/marshal_json_test.go (new file, 36 lines)

@@ -0,0 +1,36 @@
+package test
+
+import (
+"bytes"
+"encoding/json"
+"github.com/json-iterator/go"
+"testing"
+"github.com/stretchr/testify/require"
+)
+
+
+type Foo struct {
+Bar interface{}
+}
+
+func (f Foo) MarshalJSON() ([]byte, error) {
+var buf bytes.Buffer
+err := json.NewEncoder(&buf).Encode(f.Bar)
+return buf.Bytes(), err
+}
+
+
+// Standard Encoder has trailing newline.
+func TestEncodeMarshalJSON(t *testing.T) {
+
+foo := Foo {
+Bar: 123,
+}
+should := require.New(t)
+var buf, stdbuf bytes.Buffer
+enc := jsoniter.ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
+enc.Encode(foo)
+stdenc := json.NewEncoder(&stdbuf)
+stdenc.Encode(foo)
+should.Equal(stdbuf.Bytes(), buf.Bytes())
+}
@@ -2,10 +2,10 @@ package test

 import (
 "encoding/json"
+"github.com/json-iterator/go"
 "io/ioutil"
 "os"
 "testing"
-"github.com/json-iterator/go"
 )

 //func Test_large_file(t *testing.T) {
config.go (97 changed lines)

@@ -3,9 +3,12 @@ package jsoniter
 import (
 "encoding/json"
 "io"
-"unsafe"
+"reflect"
-"github.com/v2pro/plz/reflect2"
 "sync"
+"unsafe"
+
+"github.com/modern-go/concurrent"
+"github.com/modern-go/reflect2"
 )

 // Config customize how the API should behave.

@@ -21,6 +24,7 @@ type Config struct {
 OnlyTaggedField bool
 ValidateJsonRawMessage bool
 ObjectFieldMustBeSimpleString bool
+CaseSensitive bool
 }

 // API the public interface of this package.

@@ -38,6 +42,8 @@ type API interface {
 NewDecoder(reader io.Reader) *Decoder
 Valid(data []byte) bool
 RegisterExtension(extension Extension)
+DecoderOf(typ reflect2.Type) ValDecoder
+EncoderOf(typ reflect2.Type) ValEncoder
 }

 // ConfigDefault the default API

@@ -59,6 +65,66 @@ var ConfigFastest = Config{
 ObjectFieldMustBeSimpleString: true, // do not unescape object field
 }.Froze()

+type frozenConfig struct {
+configBeforeFrozen Config
+sortMapKeys bool
+indentionStep int
+objectFieldMustBeSimpleString bool
+onlyTaggedField bool
+disallowUnknownFields bool
+decoderCache *concurrent.Map
+encoderCache *concurrent.Map
+encoderExtension Extension
+decoderExtension Extension
+extraExtensions []Extension
+streamPool *sync.Pool
+iteratorPool *sync.Pool
+caseSensitive bool
+}
+
+func (cfg *frozenConfig) initCache() {
+cfg.decoderCache = concurrent.NewMap()
+cfg.encoderCache = concurrent.NewMap()
+}
+
+func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
+cfg.decoderCache.Store(cacheKey, decoder)
+}
+
+func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
+cfg.encoderCache.Store(cacheKey, encoder)
+}
+
+func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
+decoder, found := cfg.decoderCache.Load(cacheKey)
+if found {
+return decoder.(ValDecoder)
+}
+return nil
+}
+
+func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
+encoder, found := cfg.encoderCache.Load(cacheKey)
+if found {
+return encoder.(ValEncoder)
+}
+return nil
+}
+
+var cfgCache = concurrent.NewMap()
+
+func getFrozenConfigFromCache(cfg Config) *frozenConfig {
+obj, found := cfgCache.Load(cfg)
+if found {
+return obj.(*frozenConfig)
+}
+return nil
+}
+
+func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
+cfgCache.Store(cfg, frozenConfig)
+}
+
 // Froze forge API from config
 func (cfg Config) Froze() API {
 api := &frozenConfig{

@@ -67,6 +133,7 @@ func (cfg Config) Froze() API {
 objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
 onlyTaggedField: cfg.OnlyTaggedField,
 disallowUnknownFields: cfg.DisallowUnknownFields,
+caseSensitive: cfg.CaseSensitive,
 }
 api.streamPool = &sync.Pool{
 New: func() interface{} {

@@ -93,22 +160,21 @@ func (cfg Config) Froze() API {
 if cfg.ValidateJsonRawMessage {
 api.validateJsonRawMessage(encoderExtension)
 }
-if len(encoderExtension) > 0 {
+api.encoderExtension = encoderExtension
-api.extensions = append(api.extensions, encoderExtension)
+api.decoderExtension = decoderExtension
-}
-if len(decoderExtension) > 0 {
-api.extensions = append(api.extensions, decoderExtension)
-}
 api.configBeforeFrozen = cfg
 return api
 }

-func (cfg Config) frozeWithCacheReuse() *frozenConfig {
+func (cfg Config) frozeWithCacheReuse(extraExtensions []Extension) *frozenConfig {
 api := getFrozenConfigFromCache(cfg)
 if api != nil {
 return api
 }
 api = cfg.Froze().(*frozenConfig)
+for _, extension := range extraExtensions {
+api.RegisterExtension(extension)
+}
 addFrozenConfigToCache(cfg, api)
 return api
 }

@@ -125,7 +191,7 @@ func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
 stream.WriteRaw(string(rawMessage))
 }
 }, func(ptr unsafe.Pointer) bool {
-return false
+return len(*((*json.RawMessage)(ptr))) == 0
 }}
 extension[reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()] = encoder
 extension[reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()] = encoder

@@ -133,6 +199,11 @@ func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {

 func (cfg *frozenConfig) useNumber(extension DecoderExtension) {
 extension[reflect2.TypeOfPtr((*interface{})(nil)).Elem()] = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
+exitingValue := *((*interface{})(ptr))
+if exitingValue != nil && reflect.TypeOf(exitingValue).Kind() == reflect.Ptr {
+iter.ReadVal(exitingValue)
+return
+}
 if iter.WhatIsNext() == NumberValue {
 *((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
 } else {

@@ -149,7 +220,9 @@ func (cfg *frozenConfig) getTagKey() string {
 }

 func (cfg *frozenConfig) RegisterExtension(extension Extension) {
-cfg.extensions = append(cfg.extensions, extension)
+cfg.extraExtensions = append(cfg.extraExtensions, extension)
+copied := cfg.configBeforeFrozen
+cfg.configBeforeFrozen = copied
 }

 type lossyFloat32Encoder struct {

@@ -244,7 +317,7 @@ func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]
 }
 newCfg := cfg.configBeforeFrozen
 newCfg.IndentionStep = len(indent)
-return newCfg.frozeWithCacheReuse().Marshal(v)
+return newCfg.frozeWithCacheReuse(cfg.extraExtensions).Marshal(v)
 }

 func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error {
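The frozenConfig now tracks registered extensions in extraExtensions and threads them through frozeWithCacheReuse, so reconfiguring an existing Decoder or Encoder (UseNumber, SetIndent, SetEscapeHTML) no longer drops extensions registered on the API, and the new CaseSensitive flag is plumbed alongside. A minimal sketch of the behaviour this enables; the extension type and payload struct here are hypothetical, used only for illustration:

```go
package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

// upperFieldExtension is a hypothetical extension that upper-cases struct
// field names in output; it exists only to show extension registration.
type upperFieldExtension struct {
	jsoniter.DummyExtension
}

func (e *upperFieldExtension) UpdateStructDescriptor(sd *jsoniter.StructDescriptor) {
	for _, binding := range sd.Fields {
		binding.ToNames = []string{strings.ToUpper(binding.Field.Name())}
	}
}

type payload struct {
	Name string
}

func main() {
	api := jsoniter.Config{SortMapKeys: true}.Froze()
	api.RegisterExtension(&upperFieldExtension{})

	out, _ := api.MarshalToString(payload{Name: "x"})
	fmt.Println(out) // {"NAME":"x"}, with the extension applied

	// A Decoder created from the same API keeps the extension even after
	// UseNumber() rebuilds the frozen config internally.
	dec := api.NewDecoder(strings.NewReader(`{"NAME":"y"}`))
	dec.UseNumber()
	var p payload
	_ = dec.Decode(&p)
	fmt.Println(p.Name) // y
}
```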
Deleted file:

@@ -1,64 +0,0 @@
-//+build go1.9
-
-package jsoniter
-
-import (
-"sync"
-)
-
-type frozenConfig struct {
-configBeforeFrozen Config
-sortMapKeys bool
-indentionStep int
-objectFieldMustBeSimpleString bool
-onlyTaggedField bool
-disallowUnknownFields bool
-decoderCache sync.Map
-encoderCache sync.Map
-extensions []Extension
-streamPool *sync.Pool
-iteratorPool *sync.Pool
-}
-
-func (cfg *frozenConfig) initCache() {
-cfg.decoderCache = sync.Map{}
-cfg.encoderCache = sync.Map{}
-}
-
-func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
-cfg.decoderCache.Store(cacheKey, decoder)
-}
-
-func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
-cfg.encoderCache.Store(cacheKey, encoder)
-}
-
-func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
-decoder, found := cfg.decoderCache.Load(cacheKey)
-if found {
-return decoder.(ValDecoder)
-}
-return nil
-}
-
-func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
-encoder, found := cfg.encoderCache.Load(cacheKey)
-if found {
-return encoder.(ValEncoder)
-}
-return nil
-}
-
-var cfgCache = &sync.Map{}
-
-func getFrozenConfigFromCache(cfg Config) *frozenConfig {
-obj, found := cfgCache.Load(cfg)
-if found {
-return obj.(*frozenConfig)
-}
-return nil
-}
-
-func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
-cfgCache.Store(cfg, frozenConfig)
-}
Deleted file:

@@ -1,70 +0,0 @@
-//+build !go1.9
-
-package jsoniter
-
-import (
-"sync"
-)
-
-type frozenConfig struct {
-configBeforeFrozen Config
-sortMapKeys bool
-indentionStep int
-objectFieldMustBeSimpleString bool
-onlyTaggedField bool
-disallowUnknownFields bool
-cacheLock *sync.RWMutex
-decoderCache map[uintptr]ValDecoder
-encoderCache map[uintptr]ValEncoder
-extensions []Extension
-streamPool *sync.Pool
-iteratorPool *sync.Pool
-}
-
-func (cfg *frozenConfig) initCache() {
-cfg.cacheLock = &sync.RWMutex{}
-cfg.decoderCache = map[uintptr]ValDecoder{}
-cfg.encoderCache = map[uintptr]ValEncoder{}
-}
-
-func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
-cfg.cacheLock.Lock()
-cfg.decoderCache[cacheKey] = decoder
-cfg.cacheLock.Unlock()
-}
-
-func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
-cfg.cacheLock.Lock()
-cfg.encoderCache[cacheKey] = encoder
-cfg.cacheLock.Unlock()
-}
-
-func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
-cfg.cacheLock.RLock()
-decoder, _ := cfg.decoderCache[cacheKey].(ValDecoder)
-cfg.cacheLock.RUnlock()
-return decoder
-}
-
-func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
-cfg.cacheLock.RLock()
-encoder, _ := cfg.encoderCache[cacheKey].(ValEncoder)
-cfg.cacheLock.RUnlock()
-return encoder
-}
-
-var cfgCacheLock = &sync.RWMutex{}
-var cfgCache = map[Config]*frozenConfig{}
-
-func getFrozenConfigFromCache(cfg Config) *frozenConfig {
-cfgCacheLock.RLock()
-frozenConfig := cfgCache[cfg]
-cfgCacheLock.RUnlock()
-return frozenConfig
-}
-
-func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
-cfgCacheLock.Lock()
-cfgCache[cfg] = frozenConfig
-cfgCacheLock.Unlock()
-}
@@ -3,6 +3,7 @@ package jsoniter
 import (
 "fmt"
 "os"
+"strings"
 )

 func ExampleMarshal() {

@@ -93,3 +94,28 @@ func ExampleGet() {
 // Output:
 // Crimson
 }
+
+func ExampleMyKey() {
+hello := MyKey("hello")
+output, _ := Marshal(map[*MyKey]string{&hello: "world"})
+fmt.Println(string(output))
+obj := map[*MyKey]string{}
+Unmarshal(output, &obj)
+for k, v := range obj {
+fmt.Println(*k, v)
+}
+// Output:
+// {"Hello":"world"}
+// Hel world
+}
+
+type MyKey string
+
+func (m *MyKey) MarshalText() ([]byte, error) {
+return []byte(strings.Replace(string(*m), "h", "H", -1)), nil
+}
+
+func (m *MyKey) UnmarshalText(text []byte) error {
+*m = MyKey(text[:3])
+return nil
+}
@@ -1,12 +1,14 @@
 package test

 import (
-"testing"
+"bytes"
-"unsafe"
+"fmt"
-"time"
 "github.com/json-iterator/go"
 "github.com/stretchr/testify/require"
 "strconv"
+"testing"
+"time"
+"unsafe"
 )

 func Test_customize_type_decoder(t *testing.T) {

@@ -46,6 +48,38 @@ func Test_customize_byte_array_encoder(t *testing.T) {
 should.Equal(`"abc"`, str)
 }

+type CustomEncoderAttachmentTestStruct struct {
+Value int32 `json:"value"`
+}
+
+type CustomEncoderAttachmentTestStructEncoder struct {}
+
+func (c *CustomEncoderAttachmentTestStructEncoder) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
+attachVal, ok := stream.Attachment.(int)
+stream.WriteRaw(`"`)
+stream.WriteRaw(fmt.Sprintf("%t %d", ok, attachVal))
+stream.WriteRaw(`"`)
+}
+
+func (c *CustomEncoderAttachmentTestStructEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+return false
+}
+
+func Test_custom_encoder_attachment(t *testing.T) {
+
+jsoniter.RegisterTypeEncoder("test.CustomEncoderAttachmentTestStruct", &CustomEncoderAttachmentTestStructEncoder{})
+expectedValue := 17
+should := require.New(t)
+buf := &bytes.Buffer{}
+stream := jsoniter.NewStream(jsoniter.Config{SortMapKeys: true}.Froze(), buf, 4096)
+stream.Attachment = expectedValue
+val := map[string]CustomEncoderAttachmentTestStruct{"a": {}}
+stream.WriteVal(val)
+stream.Flush()
+should.Nil(stream.Error)
+should.Equal("{\"a\":\"true 17\"}", buf.String())
+}
+
 func Test_customize_field_decoder(t *testing.T) {
 type Tom struct {
 field1 string

@@ -61,7 +95,6 @@ func Test_customize_field_decoder(t *testing.T) {
 }
 }

-
 func Test_recursive_empty_interface_customization(t *testing.T) {
 t.Skip()
 var obj interface{}

@@ -98,4 +131,93 @@ func Test_read_custom_interface(t *testing.T) {
 err := jsoniter.UnmarshalFromString(`"hello"`, &val)
 should.Nil(err)
 should.Equal("hello", val.Hello())
 }
+
+const flow1 = `
+{"A":"hello"}
+{"A":"hello"}
+{"A":"hello"}
+{"A":"hello"}
+{"A":"hello"}`
+
+const flow2 = `
+{"A":"hello"}
+{"A":"hello"}
+{"A":"hello"}
+{"A":"hello"}
+{"A":"hello"}
+`
+
+type (
+Type1 struct {
+A string
+}
+
+Type2 struct {
+A string
+}
+)
+
+func (t *Type2) UnmarshalJSON(data []byte) error {
+return nil
+}
+
+func (t *Type2) MarshalJSON() ([]byte, error) {
+return nil, nil
+}
+
+func TestType1NoFinalLF(t *testing.T) {
+reader := bytes.NewReader([]byte(flow1))
+dec := jsoniter.NewDecoder(reader)
+
+i := 0
+for dec.More() {
+data := &Type1{}
+if err := dec.Decode(data); err != nil {
+t.Errorf("at %v got %v", i, err)
+}
+i++
+}
+}
+
+func TestType1FinalLF(t *testing.T) {
+reader := bytes.NewReader([]byte(flow2))
+dec := jsoniter.NewDecoder(reader)
+
+i := 0
+for dec.More() {
+data := &Type1{}
+if err := dec.Decode(data); err != nil {
+t.Errorf("at %v got %v", i, err)
+}
+i++
+}
+}
+
+func TestType2NoFinalLF(t *testing.T) {
+reader := bytes.NewReader([]byte(flow1))
+dec := jsoniter.NewDecoder(reader)
+
+i := 0
+for dec.More() {
+data := &Type2{}
+if err := dec.Decode(data); err != nil {
+t.Errorf("at %v got %v", i, err)
+}
+i++
+}
+}
+
+func TestType2FinalLF(t *testing.T) {
+reader := bytes.NewReader([]byte(flow2))
+dec := jsoniter.NewDecoder(reader)
+
+i := 0
+for dec.More() {
+data := &Type2{}
+if err := dec.Decode(data); err != nil {
+t.Errorf("at %v got %v", i, err)
+}
+i++
+}
+}
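The attachment test above exercises the new Stream.Attachment field, which lets a caller park request-scoped data on the stream so a custom ValEncoder can read it instead of relying on package-level state. A condensed, hedged sketch of that pattern; the Price type, encoder, and expected output are hypothetical illustrations:

```go
package main

import (
	"bytes"
	"fmt"
	"unsafe"

	jsoniter "github.com/json-iterator/go"
)

// Price is a hypothetical type used only to illustrate Stream.Attachment.
type Price int64

type priceEncoder struct{}

func (priceEncoder) IsEmpty(ptr unsafe.Pointer) bool { return false }

func (priceEncoder) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
	// The caller sets request-scoped data on the stream before WriteVal.
	currency, _ := stream.Attachment.(string)
	stream.WriteString(fmt.Sprintf("%d %s", *(*Price)(ptr), currency))
}

func main() {
	// The type name is the fully qualified Go type, as in the test above.
	jsoniter.RegisterTypeEncoder("main.Price", priceEncoder{})

	buf := &bytes.Buffer{}
	stream := jsoniter.NewStream(jsoniter.ConfigDefault, buf, 512)
	stream.Attachment = "EUR" // visible to every encoder invoked by WriteVal
	stream.WriteVal(map[string]Price{"total": 42})
	stream.Flush()
	fmt.Println(buf.String(), stream.Error) // expected: {"total":"42 EUR"} <nil>
}
```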
@@ -1,11 +1,13 @@
 package test

 import (
-"unsafe"
+"github.com/json-iterator/go"
+"github.com/modern-go/reflect2"
+"github.com/stretchr/testify/require"
+"reflect"
 "strconv"
 "testing"
-"github.com/stretchr/testify/require"
+"unsafe"
-"github.com/json-iterator/go"
 )

 type TestObject1 struct {

@@ -46,6 +48,53 @@ func Test_customize_field_by_extension(t *testing.T) {
 should.Equal(`{"field-1":100}`, str)
 }

+func Test_customize_map_key_encoder(t *testing.T) {
+should := require.New(t)
+cfg := jsoniter.Config{}.Froze()
+cfg.RegisterExtension(&testMapKeyExtension{})
+m := map[int]int{1: 2}
+output, err := cfg.MarshalToString(m)
+should.NoError(err)
+should.Equal(`{"2":2}`, output)
+m = map[int]int{}
+should.NoError(cfg.UnmarshalFromString(output, &m))
+should.Equal(map[int]int{1: 2}, m)
+}
+
+type testMapKeyExtension struct {
+jsoniter.DummyExtension
+}
+
+func (extension *testMapKeyExtension) CreateMapKeyEncoder(typ reflect2.Type) jsoniter.ValEncoder {
+if typ.Kind() == reflect.Int {
+return &funcEncoder{
+fun: func(ptr unsafe.Pointer, stream *jsoniter.Stream) {
+stream.WriteRaw(`"`)
+stream.WriteInt(*(*int)(ptr) + 1)
+stream.WriteRaw(`"`)
+},
+}
+}
+return nil
+}
+
+func (extension *testMapKeyExtension) CreateMapKeyDecoder(typ reflect2.Type) jsoniter.ValDecoder {
+if typ.Kind() == reflect.Int {
+return &funcDecoder{
+fun: func(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
+i, err := strconv.Atoi(iter.ReadString())
+if err != nil {
+iter.ReportError("read map key", err.Error())
+return
+}
+i--
+*(*int)(ptr) = i
+},
+}
+}
+return nil
+}
+
 type funcDecoder struct {
 fun jsoniter.DecoderFunc
 }
238
extra/binary_as_string_codec.go
Normal file
238
extra/binary_as_string_codec.go
Normal file
@ -0,0 +1,238 @@
|
|||||||
|
package extra
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/json-iterator/go"
|
||||||
|
"github.com/modern-go/reflect2"
|
||||||
|
"unicode/utf8"
|
||||||
|
"unsafe"
|
||||||
|
)
|
||||||
|
|
||||||
|
// safeSet holds the value true if the ASCII character with the given array
|
||||||
|
// position can be represented inside a JSON string without any further
|
||||||
|
// escaping.
|
||||||
|
//
|
||||||
|
// All values are true except for the ASCII control characters (0-31), the
|
||||||
|
// double quote ("), and the backslash character ("\").
|
||||||
|
var safeSet = [utf8.RuneSelf]bool{
|
||||||
|
' ': true,
|
||||||
|
'!': true,
|
||||||
|
'"': false,
|
||||||
|
'#': true,
|
||||||
|
'$': true,
|
||||||
|
'%': true,
|
||||||
|
'&': true,
|
||||||
|
'\'': true,
|
||||||
|
'(': true,
|
||||||
|
')': true,
|
||||||
|
'*': true,
|
||||||
|
'+': true,
|
||||||
|
',': true,
|
||||||
|
'-': true,
|
||||||
|
'.': true,
|
||||||
|
'/': true,
|
||||||
|
'0': true,
|
||||||
|
'1': true,
|
||||||
|
'2': true,
|
||||||
|
'3': true,
|
||||||
|
'4': true,
|
||||||
|
'5': true,
|
||||||
|
'6': true,
|
||||||
|
'7': true,
|
||||||
|
'8': true,
|
||||||
|
'9': true,
|
||||||
|
':': true,
|
||||||
|
';': true,
|
||||||
|
'<': true,
|
||||||
|
'=': true,
|
||||||
|
'>': true,
|
||||||
|
'?': true,
|
||||||
|
'@': true,
|
||||||
|
'A': true,
|
||||||
|
'B': true,
|
||||||
|
'C': true,
|
||||||
|
'D': true,
|
||||||
|
'E': true,
|
||||||
|
'F': true,
|
||||||
|
'G': true,
|
||||||
|
'H': true,
|
||||||
|
'I': true,
|
||||||
|
'J': true,
|
||||||
|
'K': true,
|
||||||
|
'L': true,
|
||||||
|
'M': true,
|
||||||
|
'N': true,
|
||||||
|
'O': true,
|
||||||
|
'P': true,
|
||||||
|
'Q': true,
|
||||||
|
'R': true,
|
||||||
|
'S': true,
|
||||||
|
'T': true,
|
||||||
|
'U': true,
|
||||||
|
'V': true,
|
||||||
|
'W': true,
|
||||||
|
'X': true,
|
||||||
|
'Y': true,
|
||||||
|
'Z': true,
|
||||||
|
'[': true,
|
||||||
|
'\\': false,
|
||||||
|
']': true,
|
||||||
|
'^': true,
|
||||||
|
'_': true,
|
||||||
|
'`': true,
|
||||||
|
'a': true,
|
||||||
|
'b': true,
|
||||||
|
'c': true,
|
||||||
|
'd': true,
|
||||||
|
'e': true,
|
||||||
|
'f': true,
|
||||||
|
'g': true,
|
||||||
|
'h': true,
|
||||||
|
'i': true,
|
||||||
|
'j': true,
|
||||||
|
'k': true,
|
||||||
|
'l': true,
|
||||||
|
'm': true,
|
||||||
|
'n': true,
|
||||||
|
'o': true,
|
||||||
|
'p': true,
|
||||||
|
'q': true,
|
||||||
|
'r': true,
|
||||||
|
's': true,
|
||||||
|
't': true,
|
||||||
|
'u': true,
|
||||||
|
'v': true,
|
||||||
|
'w': true,
|
||||||
|
'x': true,
|
||||||
|
'y': true,
|
||||||
|
'z': true,
|
||||||
|
'{': true,
|
||||||
|
'|': true,
|
||||||
|
'}': true,
|
||||||
|
'~': true,
|
||||||
|
'\u007f': true,
|
||||||
|
}
|
||||||
|
|
||||||
|
var binaryType = reflect2.TypeOfPtr((*[]byte)(nil)).Elem()
|
||||||
|
|
||||||
|
type BinaryAsStringExtension struct {
|
||||||
|
jsoniter.DummyExtension
|
||||||
|
}
|
||||||
|
|
||||||
|
func (extension *BinaryAsStringExtension) CreateEncoder(typ reflect2.Type) jsoniter.ValEncoder {
|
||||||
|
if typ == binaryType {
|
||||||
|
return &binaryAsStringCodec{}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (extension *BinaryAsStringExtension) CreateDecoder(typ reflect2.Type) jsoniter.ValDecoder {
|
||||||
|
if typ == binaryType {
|
||||||
|
return &binaryAsStringCodec{}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type binaryAsStringCodec struct {
|
||||||
|
}
|
||||||
|
|
||||||
|
func (codec *binaryAsStringCodec) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
|
||||||
|
rawBytes := iter.ReadStringAsSlice()
|
||||||
|
bytes := make([]byte, 0, len(rawBytes))
|
||||||
|
for i := 0; i < len(rawBytes); i++ {
|
||||||
|
b := rawBytes[i]
|
||||||
|
if b == '\\' {
|
||||||
|
b2 := rawBytes[i+1]
|
||||||
|
if b2 != '\\' {
|
||||||
|
iter.ReportError("decode binary as string", `\\x is only supported escape`)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
b3 := rawBytes[i+2]
|
||||||
|
if b3 != 'x' {
|
||||||
|
iter.ReportError("decode binary as string", `\\x is only supported escape`)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
b4 := rawBytes[i+3]
|
||||||
|
b5 := rawBytes[i+4]
|
||||||
|
i += 4
|
||||||
|
b = readHex(iter, b4, b5)
|
||||||
|
}
|
||||||
|
bytes = append(bytes, b)
|
||||||
|
}
|
||||||
|
*(*[]byte)(ptr) = bytes
|
||||||
|
}
|
||||||
|
func (codec *binaryAsStringCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||||
|
return len(*((*[]byte)(ptr))) == 0
|
||||||
|
}
|
||||||
|
func (codec *binaryAsStringCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
|
||||||
|
newBuffer := writeBytes(stream.Buffer(), *(*[]byte)(ptr))
|
||||||
|
stream.SetBuffer(newBuffer)
|
||||||
|
}
|
||||||
|
|
||||||
|
func readHex(iter *jsoniter.Iterator, b1, b2 byte) byte {
	var ret byte
	if b1 >= '0' && b1 <= '9' {
		ret = b1 - '0'
	} else if b1 >= 'a' && b1 <= 'f' {
		ret = b1 - 'a' + 10
	} else {
		iter.ReportError("read hex", "expects 0~9 or a~f, but found "+string([]byte{b1}))
		return 0
	}
	// shift the high nibble, then add the low nibble
	ret *= 16
	if b2 >= '0' && b2 <= '9' {
		ret += b2 - '0'
	} else if b2 >= 'a' && b2 <= 'f' {
		ret += b2 - 'a' + 10
	} else {
		iter.ReportError("read hex", "expects 0~9 or a~f, but found "+string([]byte{b2}))
		return 0
	}
	return ret
}
var hex = "0123456789abcdef"

func writeBytes(space []byte, s []byte) []byte {
	space = append(space, '"')
	// write string, the fast path, without utf8 and escape support
	var i int
	var c byte
	for i, c = range s {
		if c < utf8.RuneSelf && safeSet[c] {
			space = append(space, c)
		} else {
			break
		}
	}
	if i == len(s)-1 {
		space = append(space, '"')
		return space
	}
	return writeBytesSlowPath(space, s[i:])
}

func writeBytesSlowPath(space []byte, s []byte) []byte {
	start := 0
	// for the remaining parts, we process them char by char
	var i int
	var b byte
	for i, b = range s {
		if b >= utf8.RuneSelf {
			space = append(space, '\\', '\\', 'x', hex[b>>4], hex[b&0xF])
			start = i + 1
			continue
		}
		if safeSet[b] {
			continue
		}
		if start < i {
			space = append(space, s[start:i]...)
		}
		space = append(space, '\\', '\\', 'x', hex[b>>4], hex[b&0xF])
		start = i + 1
	}
	if start < len(s) {
		space = append(space, s[start:]...)
	}
	return append(space, '"')
}
32 extra/binary_as_string_codec_test.go Normal file
@ -0,0 +1,32 @@
package extra

import (
	"github.com/json-iterator/go"
	"github.com/stretchr/testify/require"
	"testing"
)

func init() {
	jsoniter.RegisterExtension(&BinaryAsStringExtension{})
}

func TestBinaryAsStringCodec(t *testing.T) {
	t.Run("safe set", func(t *testing.T) {
		should := require.New(t)
		output, err := jsoniter.Marshal([]byte("hello"))
		should.NoError(err)
		should.Equal(`"hello"`, string(output))
		var val []byte
		should.NoError(jsoniter.Unmarshal(output, &val))
		should.Equal(`hello`, string(val))
	})
	t.Run("non safe set", func(t *testing.T) {
		should := require.New(t)
		output, err := jsoniter.Marshal([]byte{1, 2, 3, 15})
		should.NoError(err)
		should.Equal(`"\\x01\\x02\\x03\\x0f"`, string(output))
		var val []byte
		should.NoError(jsoniter.Unmarshal(output, &val))
		should.Equal([]byte{1, 2, 3, 15}, val)
	})
}
@ -9,7 +9,7 @@ import (
 	"unsafe"

 	"github.com/json-iterator/go"
-	"github.com/v2pro/plz/reflect2"
+	"github.com/modern-go/reflect2"
 )

 const maxUint = ^uint(0)
@ -183,6 +183,9 @@ func (decoder *fuzzyStringDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
 		*((*string)(ptr)) = string(number)
 	case jsoniter.StringValue:
 		*((*string)(ptr)) = iter.ReadString()
+	case jsoniter.NilValue:
+		iter.Skip()
+		*((*string)(ptr)) = ""
 	default:
 		iter.ReportError("fuzzyStringDecoder", "not number or string")
 	}
@ -208,9 +211,15 @@ func (decoder *fuzzyIntegerDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
 		} else {
 			str = "0"
 		}
+	case jsoniter.NilValue:
+		iter.Skip()
+		str = "0"
 	default:
 		iter.ReportError("fuzzyIntegerDecoder", "not number or string")
 	}
+	if len(str) == 0 {
+		str = "0"
+	}
 	newIter := iter.Pool().BorrowIterator([]byte(str))
 	defer iter.Pool().ReturnIterator(newIter)
 	isFloat := strings.IndexByte(str, '.') != -1
@ -244,6 +253,9 @@ func (decoder *fuzzyFloat32Decoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
 		} else {
 			*((*float32)(ptr)) = 0
 		}
+	case jsoniter.NilValue:
+		iter.Skip()
+		*((*float32)(ptr)) = 0
 	default:
 		iter.ReportError("fuzzyFloat32Decoder", "not number or string")
 	}
@ -273,7 +285,10 @@ func (decoder *fuzzyFloat64Decoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
 		} else {
 			*((*float64)(ptr)) = 0
 		}
+	case jsoniter.NilValue:
+		iter.Skip()
+		*((*float64)(ptr)) = 0
 	default:
-		iter.ReportError("fuzzyFloat32Decoder", "not number or string")
+		iter.ReportError("fuzzyFloat64Decoder", "not number or string")
 	}
 }
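Reviewer note (not part of the diff): a minimal sketch of what the new NilValue branches do once the fuzzy decoders are registered. It assumes the exported extra.RegisterFuzzyDecoders helper and plain jsoniter.Unmarshal; with these branches, JSON null decodes into zero values instead of reporting "not number or string".

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
	"github.com/json-iterator/go/extra"
)

func main() {
	extra.RegisterFuzzyDecoders()
	var s string
	var n int
	var f float64
	// null now hits the NilValue cases and yields zero values.
	_ = jsoniter.Unmarshal([]byte(`null`), &s)
	_ = jsoniter.Unmarshal([]byte(`null`), &n)
	_ = jsoniter.Unmarshal([]byte(`null`), &f)
	fmt.Printf("%q %d %v\n", s, n, f) // "" 0 0
}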
@ -37,6 +37,8 @@ func Test_any_to_int64(t *testing.T) {
 	should.Equal(int64(10), val)
 	should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
 	should.Equal(int64(10), val)
+	should.Nil(jsoniter.UnmarshalFromString(`""`, &val))
+	should.Equal(int64(0), val)

 	// bool part
 	should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
@ -357,3 +359,35 @@ func Test_bad_case(t *testing.T) {
 	should := require.New(t)
 	should.Nil(err)
 }
+
+func Test_null_to_string(t *testing.T) {
+	should := require.New(t)
+	body := []byte(`null`)
+	var message string
+	err := jsoniter.Unmarshal(body, &message)
+	should.NoError(err)
+}
+
+func Test_null_to_int(t *testing.T) {
+	should := require.New(t)
+	body := []byte(`null`)
+	var message int
+	err := jsoniter.Unmarshal(body, &message)
+	should.NoError(err)
+}
+
+func Test_null_to_float32(t *testing.T) {
+	should := require.New(t)
+	body := []byte(`null`)
+	var message float32
+	err := jsoniter.Unmarshal(body, &message)
+	should.NoError(err)
+}
+
+func Test_null_to_float64(t *testing.T) {
+	should := require.New(t)
+	body := []byte(`null`)
+	var message float64
+	err := jsoniter.Unmarshal(body, &message)
+	should.NoError(err)
+}
@ -2,6 +2,7 @@ package extra

 import (
 	"github.com/json-iterator/go"
+	"strings"
 	"unicode"
 )
@ -17,6 +18,16 @@ type namingStrategyExtension struct {

 func (extension *namingStrategyExtension) UpdateStructDescriptor(structDescriptor *jsoniter.StructDescriptor) {
 	for _, binding := range structDescriptor.Fields {
+		tag, hastag := binding.Field.Tag().Lookup("json")
+		if hastag {
+			tagParts := strings.Split(tag, ",")
+			if tagParts[0] == "-" {
+				continue // hidden field
+			}
+			if tagParts[0] != "" {
+				continue // field explicitly named
+			}
+		}
 		binding.ToNames = []string{extension.translate(binding.Field.Name())}
 		binding.FromNames = []string{extension.translate(binding.Field.Name())}
 	}
@ -21,3 +21,30 @@ func Test_lower_case_with_underscores(t *testing.T) {
 	should.Nil(err)
 	should.Equal(`{"user_name":"taowen","first_language":"Chinese"}`, string(output))
 }
+
+func Test_set_naming_strategy_with_overrides(t *testing.T) {
+	should := require.New(t)
+	SetNamingStrategy(LowerCaseWithUnderscores)
+	output, err := jsoniter.Marshal(struct {
+		UserName      string `json:"UserName"`
+		FirstLanguage string
+	}{
+		UserName:      "taowen",
+		FirstLanguage: "Chinese",
+	})
+	should.Nil(err)
+	should.Equal(`{"UserName":"taowen","first_language":"Chinese"}`, string(output))
+}
+
+func Test_set_naming_strategy_with_omitempty(t *testing.T) {
+	should := require.New(t)
+	SetNamingStrategy(LowerCaseWithUnderscores)
+	output, err := jsoniter.Marshal(struct {
+		UserName      string
+		FirstLanguage string `json:",omitempty"`
+	}{
+		UserName: "taowen",
+	})
+	should.Nil(err)
+	should.Equal(`{"user_name":"taowen"}`, string(output))
+}
@ -2,6 +2,7 @@ package extra

 import (
 	"github.com/json-iterator/go"
+	"strings"
 	"unicode"
 )
@ -18,8 +19,36 @@ func (extension *privateFieldsExtension) UpdateStructDescriptor(structDescriptor *jsoniter.StructDescriptor) {
 	for _, binding := range structDescriptor.Fields {
 		isPrivate := unicode.IsLower(rune(binding.Field.Name()[0]))
 		if isPrivate {
-			binding.FromNames = []string{binding.Field.Name()}
-			binding.ToNames = []string{binding.Field.Name()}
+			tag, hastag := binding.Field.Tag().Lookup("json")
+			if !hastag {
+				binding.FromNames = []string{binding.Field.Name()}
+				binding.ToNames = []string{binding.Field.Name()}
+				continue
+			}
+			tagParts := strings.Split(tag, ",")
+			names := calcFieldNames(binding.Field.Name(), tagParts[0], tag)
+			binding.FromNames = names
+			binding.ToNames = names
 		}
 	}
 }
+
+func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string {
+	// ignore?
+	if wholeTag == "-" {
+		return []string{}
+	}
+	// rename?
+	var fieldNames []string
+	if tagProvidedFieldName == "" {
+		fieldNames = []string{originalFieldName}
+	} else {
+		fieldNames = []string{tagProvidedFieldName}
+	}
+	// private?
+	isNotExported := unicode.IsLower(rune(originalFieldName[0]))
+	if isNotExported {
+		fieldNames = []string{}
+	}
+	return fieldNames
+}
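Reviewer note (not part of the diff): a small usage sketch for the tag-aware private-field handling above. It assumes the extra.SupportPrivateFields helper that this package exposes; the json tag on the unexported field is the part the new calcFieldNames path makes effective.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
	"github.com/json-iterator/go/extra"
)

func main() {
	extra.SupportPrivateFields()
	var v struct {
		secret string `json:"pass"` // unexported, but renamed via the tag
	}
	_ = jsoniter.Unmarshal([]byte(`{"pass":"hunter2"}`), &v)
	fmt.Println(v.secret) // hunter2
}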
11 go.mod Normal file
@ -0,0 +1,11 @@
module github.com/json-iterator/go

go 1.12

require (
	github.com/davecgh/go-spew v1.1.1
	github.com/google/gofuzz v1.0.0
	github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421
	github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742
	github.com/stretchr/testify v1.3.0
)
14 go.sum Normal file
@ -0,0 +1,14 @@
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
27 iter.go
@ -74,6 +74,7 @@ type Iterator struct {
 	buf              []byte
 	head             int
 	tail             int
+	depth            int
 	captureStartedAt int
 	captured         []byte
 	Error            error
@ -88,6 +89,7 @@ func NewIterator(cfg API) *Iterator {
 		buf:   nil,
 		head:  0,
 		tail:  0,
+		depth: 0,
 	}
 }
@ -99,6 +101,7 @@ func Parse(cfg API, reader io.Reader, bufSize int) *Iterator {
 		buf:   make([]byte, bufSize),
 		head:  0,
 		tail:  0,
+		depth: 0,
 	}
 }
@ -110,6 +113,7 @@ func ParseBytes(cfg API, input []byte) *Iterator {
 		buf:   input,
 		head:  0,
 		tail:  len(input),
+		depth: 0,
 	}
 }
@ -128,6 +132,7 @@ func (iter *Iterator) Reset(reader io.Reader) *Iterator {
 	iter.reader = reader
 	iter.head = 0
 	iter.tail = 0
+	iter.depth = 0
 	return iter
 }
@ -137,6 +142,7 @@ func (iter *Iterator) ResetBytes(input []byte) *Iterator {
 	iter.buf = input
 	iter.head = 0
 	iter.tail = len(input)
+	iter.depth = 0
 	return iter
 }
@ -320,3 +326,24 @@ func (iter *Iterator) Read() interface{} {
 		return nil
 	}
 }
+
+// limit maximum depth of nesting, as allowed by https://tools.ietf.org/html/rfc7159#section-9
+const maxDepth = 10000
+
+func (iter *Iterator) incrementDepth() (success bool) {
+	iter.depth++
+	if iter.depth <= maxDepth {
+		return true
+	}
+	iter.ReportError("incrementDepth", "exceeded max depth")
+	return false
+}
+
+func (iter *Iterator) decrementDepth() (success bool) {
+	iter.depth--
+	if iter.depth >= 0 {
+		return true
+	}
+	iter.ReportError("decrementDepth", "unexpected negative nesting")
+	return false
+}
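Reviewer note (not part of the diff): a quick sketch of the depth guard in action, mirroring the deep-nesting test added later in this change set. Exceeding maxDepth (10000) now fails with an "exceeded max depth" error instead of recursing without bound.

package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// 10001 nested arrays is one level past maxDepth.
	deep := strings.Repeat("[", 10001) + strings.Repeat("]", 10001)
	var v interface{}
	err := jsoniter.Unmarshal([]byte(deep), &v)
	fmt.Println(err) // error mentioning "max depth"
}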
@ -28,26 +28,32 @@ func (iter *Iterator) ReadArray() (ret bool) {
 func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
 	c := iter.nextToken()
 	if c == '[' {
+		if !iter.incrementDepth() {
+			return false
+		}
 		c = iter.nextToken()
 		if c != ']' {
 			iter.unreadByte()
 			if !callback(iter) {
+				iter.decrementDepth()
 				return false
 			}
 			c = iter.nextToken()
 			for c == ',' {
 				if !callback(iter) {
+					iter.decrementDepth()
 					return false
 				}
 				c = iter.nextToken()
 			}
 			if c != ']' {
 				iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
+				iter.decrementDepth()
 				return false
 			}
-			return true
+			return iter.decrementDepth()
 		}
-		return true
+		return iter.decrementDepth()
 	}
 	if c == 'n' {
 		iter.skipThreeBytes('u', 'l', 'l')
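Reviewer note (not part of the diff): ReadArrayCB keeps its public behaviour; the depth counter is incremented on '[' and balanced on every exit path. A minimal caller sketch:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[1,2,3]`)
	sum := 0
	iter.ReadArrayCB(func(it *jsoniter.Iterator) bool {
		sum += it.ReadInt() // called once per element
		return true
	})
	fmt.Println(sum, iter.Error) // 6 <nil>
}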
@ -77,14 +77,12 @@ func (iter *Iterator) ReadFloat32() (ret float32) {
 }

 func (iter *Iterator) readPositiveFloat32() (ret float32) {
-	value := uint64(0)
-	c := byte(' ')
 	i := iter.head
 	// first char
 	if i == iter.tail {
 		return iter.readFloat32SlowPath()
 	}
-	c = iter.buf[i]
+	c := iter.buf[i]
 	i++
 	ind := floatDigits[c]
 	switch ind {
@ -107,7 +105,7 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
 			return
 		}
 	}
-	value = uint64(ind)
+	value := uint64(ind)
 	// chars before dot
 non_decimal_loop:
 	for ; i < iter.tail; i++ {
@ -145,9 +143,7 @@ non_decimal_loop:
 			}
 			// too many decimal places
 			return iter.readFloat32SlowPath()
-		case invalidCharForNumber:
-			fallthrough
-		case dotInNumber:
+		case invalidCharForNumber, dotInNumber:
 			return iter.readFloat32SlowPath()
 		}
 		decimalPlaces++
@ -218,14 +214,12 @@ func (iter *Iterator) ReadFloat64() (ret float64) {
 }

 func (iter *Iterator) readPositiveFloat64() (ret float64) {
-	value := uint64(0)
-	c := byte(' ')
 	i := iter.head
 	// first char
 	if i == iter.tail {
 		return iter.readFloat64SlowPath()
 	}
-	c = iter.buf[i]
+	c := iter.buf[i]
 	i++
 	ind := floatDigits[c]
 	switch ind {
@ -248,7 +242,7 @@ func (iter *Iterator) readPositiveFloat64() (ret float64) {
 			return
 		}
 	}
-	value = uint64(ind)
+	value := uint64(ind)
 	// chars before dot
 non_decimal_loop:
 	for ; i < iter.tail; i++ {
@ -286,9 +280,7 @@ non_decimal_loop:
 			}
 			// too many decimal places
 			return iter.readFloat64SlowPath()
-		case invalidCharForNumber:
-			fallthrough
-		case dotInNumber:
+		case invalidCharForNumber, dotInNumber:
 			return iter.readFloat64SlowPath()
 		}
 		decimalPlaces++
@ -2,7 +2,7 @@ package jsoniter

 import (
 	"fmt"
-	"unicode"
+	"strings"
 )

 // ReadObject read one field from object.
@ -60,7 +60,7 @@ func (iter *Iterator) readFieldHash() int64 {
 			if b == '\\' {
 				iter.head = i
 				for _, b := range iter.readStringSlowPath() {
-					if 'A' <= b && b <= 'Z' {
+					if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
 						b += 'a' - 'A'
 					}
 					hash ^= int64(b)
@ -82,7 +82,7 @@ func (iter *Iterator) readFieldHash() int64 {
 				}
 				return hash
 			}
-			if 'A' <= b && b <= 'Z' {
+			if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
 				b += 'a' - 'A'
 			}
 			hash ^= int64(b)
@ -95,10 +95,13 @@ func (iter *Iterator) readFieldHash() int64 {
 	}
 }

-func calcHash(str string) int64 {
+func calcHash(str string, caseSensitive bool) int64 {
+	if !caseSensitive {
+		str = strings.ToLower(str)
+	}
 	hash := int64(0x811c9dc5)
-	for _, b := range str {
-		hash ^= int64(unicode.ToLower(b))
+	for _, b := range []byte(str) {
+		hash ^= int64(b)
 		hash *= 0x1000193
 	}
 	return int64(hash)
@ -109,6 +112,9 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
 	c := iter.nextToken()
 	var field string
 	if c == '{' {
+		if !iter.incrementDepth() {
+			return false
+		}
 		c = iter.nextToken()
 		if c == '"' {
 			iter.unreadByte()
@ -118,6 +124,7 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
 				iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
 			}
 			if !callback(iter, field) {
+				iter.decrementDepth()
 				return false
 			}
 			c = iter.nextToken()
@ -128,20 +135,23 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
 					iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
 				}
 				if !callback(iter, field) {
+					iter.decrementDepth()
 					return false
 				}
 				c = iter.nextToken()
 			}
 			if c != '}' {
 				iter.ReportError("ReadObjectCB", `object not ended with }`)
+				iter.decrementDepth()
 				return false
 			}
-			return true
+			return iter.decrementDepth()
 		}
 		if c == '}' {
-			return true
+			return iter.decrementDepth()
 		}
 		iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c}))
+		iter.decrementDepth()
 		return false
 	}
 	if c == 'n' {
@ -156,15 +166,20 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
 func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
 	c := iter.nextToken()
 	if c == '{' {
+		if !iter.incrementDepth() {
+			return false
+		}
 		c = iter.nextToken()
 		if c == '"' {
 			iter.unreadByte()
 			field := iter.ReadString()
 			if iter.nextToken() != ':' {
 				iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+				iter.decrementDepth()
 				return false
 			}
 			if !callback(iter, field) {
+				iter.decrementDepth()
 				return false
 			}
 			c = iter.nextToken()
@ -172,23 +187,27 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
 				field = iter.ReadString()
 				if iter.nextToken() != ':' {
 					iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+					iter.decrementDepth()
 					return false
 				}
 				if !callback(iter, field) {
+					iter.decrementDepth()
 					return false
 				}
 				c = iter.nextToken()
 			}
 			if c != '}' {
 				iter.ReportError("ReadMapCB", `object not ended with }`)
+				iter.decrementDepth()
 				return false
 			}
-			return true
+			return iter.decrementDepth()
 		}
 		if c == '}' {
-			return true
+			return iter.decrementDepth()
 		}
 		iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
+		iter.decrementDepth()
 		return false
 	}
 	if c == 'n' {
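Reviewer note (not part of the diff): the !iter.cfg.caseSensitive checks above only matter when the matching Config option is enabled; this sketch assumes the Config.CaseSensitive flag that cfg.caseSensitive is wired to.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type user struct {
	Name string `json:"Name"`
}

func main() {
	var a, b user
	// Default config hashes field names case-insensitively, so "name" still
	// matches the "Name" tag; a case-sensitive config leaves b.Name empty.
	_ = jsoniter.ConfigDefault.Unmarshal([]byte(`{"name":"x"}`), &a)
	_ = jsoniter.Config{CaseSensitive: true}.Froze().Unmarshal([]byte(`{"name":"x"}`), &b)
	fmt.Printf("%q %q\n", a.Name, b.Name) // "x" ""
}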
25 iter_skip.go
@ -37,17 +37,24 @@ func (iter *Iterator) SkipAndReturnBytes() []byte {
 	return iter.stopCapture()
 }

-type captureBuffer struct {
-	startedAt int
-	captured  []byte
+// SkipAndAppendBytes skips next JSON element and appends its content to
+// buffer, returning the result.
+func (iter *Iterator) SkipAndAppendBytes(buf []byte) []byte {
+	iter.startCaptureTo(buf, iter.head)
+	iter.Skip()
+	return iter.stopCapture()
 }

-func (iter *Iterator) startCapture(captureStartedAt int) {
+func (iter *Iterator) startCaptureTo(buf []byte, captureStartedAt int) {
 	if iter.captured != nil {
 		panic("already in capture mode")
 	}
 	iter.captureStartedAt = captureStartedAt
-	iter.captured = make([]byte, 0, 32)
+	iter.captured = buf
+}
+
+func (iter *Iterator) startCapture(captureStartedAt int) {
+	iter.startCaptureTo(make([]byte, 0, 32), captureStartedAt)
 }

 func (iter *Iterator) stopCapture() []byte {
@ -58,13 +65,7 @@ func (iter *Iterator) stopCapture() []byte {
 	remaining := iter.buf[iter.captureStartedAt:iter.head]
 	iter.captureStartedAt = -1
 	iter.captured = nil
-	if len(captured) == 0 {
-		copied := make([]byte, len(remaining))
-		copy(copied, remaining)
-		return copied
-	}
-	captured = append(captured, remaining...)
-	return captured
+	return append(captured, remaining...)
 }

 // Skip skips a json object and positions to relatively the next json object
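Reviewer note (not part of the diff): SkipAndAppendBytes lets the caller reuse a buffer instead of allocating a fresh one per skipped element, which is the point of the startCaptureTo refactor. A minimal sketch:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"a":1}{"b":2}`)
	buf := make([]byte, 0, 64) // caller-owned scratch buffer
	buf = iter.SkipAndAppendBytes(buf)
	fmt.Println(string(buf)) // {"a":1}
}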
@ -22,6 +22,9 @@ func (iter *Iterator) skipNumber() {

 func (iter *Iterator) skipArray() {
 	level := 1
+	if !iter.incrementDepth() {
+		return
+	}
 	for {
 		for i := iter.head; i < iter.tail; i++ {
 			switch iter.buf[i] {
@ -31,8 +34,14 @@ func (iter *Iterator) skipArray() {
 				i = iter.head - 1 // it will be i++ soon
 			case '[': // If open symbol, increase level
 				level++
+				if !iter.incrementDepth() {
+					return
+				}
 			case ']': // If close symbol, increase level
 				level--
+				if !iter.decrementDepth() {
+					return
+				}

 				// If we have returned to the original level, we're done
 				if level == 0 {
@ -50,6 +59,10 @@ func (iter *Iterator) skipArray() {

 func (iter *Iterator) skipObject() {
 	level := 1
+	if !iter.incrementDepth() {
+		return
+	}
+
 	for {
 		for i := iter.head; i < iter.tail; i++ {
 			switch iter.buf[i] {
@ -59,8 +72,14 @@ func (iter *Iterator) skipObject() {
 				i = iter.head - 1 // it will be i++ soon
 			case '{': // If open symbol, increase level
 				level++
+				if !iter.incrementDepth() {
+					return
+				}
 			case '}': // If close symbol, increase level
 				level--
+				if !iter.decrementDepth() {
+					return
+				}

 				// If we have returned to the original level, we're done
 				if level == 0 {
@ -2,12 +2,22 @@
 package jsoniter

-import "fmt"
+import (
+	"fmt"
+	"io"
+)

 func (iter *Iterator) skipNumber() {
 	if !iter.trySkipNumber() {
 		iter.unreadByte()
-		iter.ReadFloat32()
+		if iter.Error != nil && iter.Error != io.EOF {
+			return
+		}
+		iter.ReadFloat64()
+		if iter.Error != nil && iter.Error != io.EOF {
+			iter.Error = nil
+			iter.ReadBigFloat()
+		}
 	}
 }
|
||||||
|
@ -3,9 +3,10 @@ package misc_tests
|
|||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/json-iterator/go"
|
"github.com/json-iterator/go"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func Test_empty_array(t *testing.T) {
|
func Test_empty_array(t *testing.T) {
|
||||||
@ -157,6 +158,27 @@ func Test_encode_byte_array(t *testing.T) {
|
|||||||
should.Equal(`"AQID"`, string(bytes))
|
should.Equal(`"AQID"`, string(bytes))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func Test_encode_empty_byte_array(t *testing.T) {
|
||||||
|
should := require.New(t)
|
||||||
|
bytes, err := json.Marshal([]byte{})
|
||||||
|
should.Nil(err)
|
||||||
|
should.Equal(`""`, string(bytes))
|
||||||
|
bytes, err = jsoniter.Marshal([]byte{})
|
||||||
|
should.Nil(err)
|
||||||
|
should.Equal(`""`, string(bytes))
|
||||||
|
}
|
||||||
|
|
||||||
|
func Test_encode_nil_byte_array(t *testing.T) {
|
||||||
|
should := require.New(t)
|
||||||
|
var nilSlice []byte
|
||||||
|
bytes, err := json.Marshal(nilSlice)
|
||||||
|
should.Nil(err)
|
||||||
|
should.Equal(`null`, string(bytes))
|
||||||
|
bytes, err = jsoniter.Marshal(nilSlice)
|
||||||
|
should.Nil(err)
|
||||||
|
should.Equal(`null`, string(bytes))
|
||||||
|
}
|
||||||
|
|
||||||
func Test_decode_byte_array_from_base64(t *testing.T) {
|
func Test_decode_byte_array_from_base64(t *testing.T) {
|
||||||
should := require.New(t)
|
should := require.New(t)
|
||||||
data := []byte{}
|
data := []byte{}
|
||||||
@ -168,6 +190,17 @@ func Test_decode_byte_array_from_base64(t *testing.T) {
|
|||||||
should.Equal([]byte{1, 2, 3}, data)
|
should.Equal([]byte{1, 2, 3}, data)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func Test_decode_byte_array_from_base64_with_newlines(t *testing.T) {
|
||||||
|
should := require.New(t)
|
||||||
|
data := []byte{}
|
||||||
|
err := json.Unmarshal([]byte(`"A\rQ\nID"`), &data)
|
||||||
|
should.Nil(err)
|
||||||
|
should.Equal([]byte{1, 2, 3}, data)
|
||||||
|
err = jsoniter.Unmarshal([]byte(`"A\rQ\nID"`), &data)
|
||||||
|
should.Nil(err)
|
||||||
|
should.Equal([]byte{1, 2, 3}, data)
|
||||||
|
}
|
||||||
|
|
||||||
func Test_decode_byte_array_from_array(t *testing.T) {
|
func Test_decode_byte_array_from_array(t *testing.T) {
|
||||||
should := require.New(t)
|
should := require.New(t)
|
||||||
data := []byte{}
|
data := []byte{}
|
||||||
|
@ -4,8 +4,8 @@ import (
 	"bytes"
 	"testing"

-	"github.com/stretchr/testify/require"
 	"github.com/json-iterator/go"
+	"github.com/stretchr/testify/require"
 )

 func Test_true(t *testing.T) {
@ -44,4 +44,4 @@ func Test_write_val_bool(t *testing.T) {
 	should.Equal(stream.Buffered(), 0)
 	should.Nil(stream.Error)
 	should.Equal("true", buf.String())
 }
|
||||||
|
@ -2,10 +2,11 @@ package misc_tests

 import (
 	"encoding/json"
+	"math"
 	"testing"

-	"github.com/stretchr/testify/require"
 	"github.com/json-iterator/go"
+	"github.com/stretchr/testify/require"
 )

 func Test_read_big_float(t *testing.T) {
@ -77,6 +78,26 @@ func Test_read_number(t *testing.T) {
 	should.Equal(`92233720368547758079223372036854775807`, string(val))
 }

+func Test_encode_inf(t *testing.T) {
+	should := require.New(t)
+	_, err := json.Marshal(math.Inf(1))
+	should.Error(err)
+	_, err = jsoniter.Marshal(float32(math.Inf(1)))
+	should.Error(err)
+	_, err = jsoniter.Marshal(math.Inf(-1))
+	should.Error(err)
+}
+
+func Test_encode_nan(t *testing.T) {
+	should := require.New(t)
+	_, err := json.Marshal(math.NaN())
+	should.Error(err)
+	_, err = jsoniter.Marshal(float32(math.NaN()))
+	should.Error(err)
+	_, err = jsoniter.Marshal(math.NaN())
+	should.Error(err)
+}
+
 func Benchmark_jsoniter_float(b *testing.B) {
 	b.ReportAllocs()
 	input := []byte(`1.1123,`)
|
||||||
|
@ -9,8 +9,8 @@ import (
 	"strconv"
 	"testing"

-	"github.com/stretchr/testify/require"
 	"github.com/json-iterator/go"
+	"github.com/stretchr/testify/require"
 )

 func Test_read_uint64_invalid(t *testing.T) {
|
||||||
|
@ -2,10 +2,10 @@ package misc_tests

 import (
 	"encoding/json"
-	"testing"
-	"github.com/stretchr/testify/require"
 	"github.com/json-iterator/go"
+	"github.com/stretchr/testify/require"
 	"io"
+	"testing"
 )

 func Test_nil_non_empty_interface(t *testing.T) {
@ -114,7 +114,7 @@ func Test_overwrite_interface_value_with_nil(t *testing.T) {
 	err := json.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
 	should.NoError(err)
-	should.Equal(42, (*(wrapper.Payload.(*Payload))).Value)
+	should.Equal(42, wrapper.Payload.(*Payload).Value)

 	err = json.Unmarshal([]byte(`{"payload": null}`), &wrapper)
 	should.NoError(err)
@ -128,7 +128,7 @@ func Test_overwrite_interface_value_with_nil(t *testing.T) {
 	err = jsoniter.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
 	should.Equal(nil, err)
-	should.Equal(42, (*(wrapper.Payload.(*Payload))).Value)
+	should.Equal(42, wrapper.Payload.(*Payload).Value)

 	err = jsoniter.Unmarshal([]byte(`{"payload": null}`), &wrapper)
 	should.Equal(nil, err)
|
||||||
|
@ -7,8 +7,8 @@ import (
 	"strconv"
 	"testing"

-	"github.com/stretchr/testify/require"
 	"github.com/json-iterator/go"
+	"github.com/stretchr/testify/require"
 )

 func Test_bad_case(t *testing.T) {
|
||||||
|
@ -5,9 +5,9 @@ import (
 	"math/big"
 	"testing"

+	"github.com/json-iterator/go"
 	"github.com/stretchr/testify/require"
 	"strings"
-	"github.com/json-iterator/go"
 )

 func Test_decode_TextMarshaler_key_map(t *testing.T) {
@ -31,3 +31,22 @@ func Test_read_map_with_reader(t *testing.T) {
 	should.Equal(m2, m1)
 	should.Equal("1.0.76", m1["note"].(map[string]interface{})["CoreServices"].(map[string]interface{})["version_name"])
 }
+
+func Test_map_eface_of_eface(t *testing.T) {
+	should := require.New(t)
+	json := jsoniter.ConfigCompatibleWithStandardLibrary
+	output, err := json.MarshalToString(map[interface{}]interface{}{
+		"1": 2,
+		3:   "4",
+	})
+	should.NoError(err)
+	should.Equal(`{"1":2,"3":"4"}`, output)
+}
+
+func Test_encode_nil_map(t *testing.T) {
+	should := require.New(t)
+	var nilMap map[string]string
+	output, err := jsoniter.MarshalToString(nilMap)
+	should.NoError(err)
+	should.Equal(`null`, output)
+}
|
||||||
|
@ -2,9 +2,10 @@ package misc_tests

 import (
 	"encoding/json"
-	"reflect"
-	"testing"
 	"github.com/json-iterator/go"
+	"reflect"
+	"strings"
+	"testing"
 )

 type Level1 struct {
@ -15,6 +16,243 @@ type Level2 struct {
 	World string
 }

+func Test_deep_nested(t *testing.T) {
+	type unstructured interface{}
+
+	testcases := []struct {
+		name        string
+		data        []byte
+		expectError string
+	}{
+		{
+			name:        "array under maxDepth",
+			data:        []byte(`{"a":` + strings.Repeat(`[`, 10000-1) + strings.Repeat(`]`, 10000-1) + `}`),
+			expectError: "",
+		},
+		{
+			name:        "array over maxDepth",
+			data:        []byte(`{"a":` + strings.Repeat(`[`, 10000) + strings.Repeat(`]`, 10000) + `}`),
+			expectError: "max depth",
+		},
+		{
+			name:        "object under maxDepth",
+			data:        []byte(`{"a":` + strings.Repeat(`{"a":`, 10000-1) + `0` + strings.Repeat(`}`, 10000-1) + `}`),
+			expectError: "",
+		},
+		{
+			name:        "object over maxDepth",
+			data:        []byte(`{"a":` + strings.Repeat(`{"a":`, 10000) + `0` + strings.Repeat(`}`, 10000) + `}`),
+			expectError: "max depth",
+		},
+	}
+
+	targets := []struct {
+		name string
+		new  func() interface{}
+	}{
+		{
+			name: "unstructured",
+			new: func() interface{} {
+				var v interface{}
+				return &v
+			},
+		},
+		{
+			name: "typed named field",
+			new: func() interface{} {
+				v := struct {
+					A interface{} `json:"a"`
+				}{}
+				return &v
+			},
+		},
+		{
+			name: "typed missing field",
+			new: func() interface{} {
+				v := struct {
+					B interface{} `json:"b"`
+				}{}
+				return &v
+			},
+		},
+		{
+			name: "typed 1 field",
+			new: func() interface{} {
+				v := struct {
+					A interface{} `json:"a"`
+				}{}
+				return &v
+			},
+		},
+		{
+			name: "typed 2 field",
+			new: func() interface{} {
+				v := struct {
+					A interface{} `json:"a"`
+					B interface{} `json:"b"`
+				}{}
+				return &v
+			},
+		},
+		{
+			name: "typed 3 field",
+			new: func() interface{} {
+				v := struct {
+					A interface{} `json:"a"`
+					B interface{} `json:"b"`
+					C interface{} `json:"c"`
+				}{}
+				return &v
+			},
+		},
+		{
+			name: "typed 4 field",
+			new: func() interface{} {
+				v := struct {
+					A interface{} `json:"a"`
+					B interface{} `json:"b"`
+					C interface{} `json:"c"`
+					D interface{} `json:"d"`
+				}{}
+				return &v
+			},
+		},
+		{
+			name: "typed 5 field",
+			new: func() interface{} {
+				v := struct {
+					A interface{} `json:"a"`
+					B interface{} `json:"b"`
+					C interface{} `json:"c"`
+					D interface{} `json:"d"`
+					E interface{} `json:"e"`
+				}{}
+				return &v
+			},
+		},
+		{
+			name: "typed 6 field",
+			new: func() interface{} {
+				v := struct {
+					A interface{} `json:"a"`
+					B interface{} `json:"b"`
+					C interface{} `json:"c"`
+					D interface{} `json:"d"`
+					E interface{} `json:"e"`
+					F interface{} `json:"f"`
+				}{}
+				return &v
+			},
+		},
+		{
+			name: "typed 7 field",
+			new: func() interface{} {
+				v := struct {
+					A interface{} `json:"a"`
+					B interface{} `json:"b"`
+					C interface{} `json:"c"`
+					D interface{} `json:"d"`
+					E interface{} `json:"e"`
+					F interface{} `json:"f"`
+					G interface{} `json:"g"`
+				}{}
+				return &v
+			},
+		},
+		{
+			name: "typed 8 field",
+			new: func() interface{} {
+				v := struct {
+					A interface{} `json:"a"`
+					B interface{} `json:"b"`
+					C interface{} `json:"c"`
+					D interface{} `json:"d"`
+					E interface{} `json:"e"`
+					F interface{} `json:"f"`
+					G interface{} `json:"g"`
+					H interface{} `json:"h"`
+				}{}
+				return &v
+			},
+		},
+		{
+			name: "typed 9 field",
+			new: func() interface{} {
+				v := struct {
+					A interface{} `json:"a"`
+					B interface{} `json:"b"`
+					C interface{} `json:"c"`
+					D interface{} `json:"d"`
+					E interface{} `json:"e"`
+					F interface{} `json:"f"`
+					G interface{} `json:"g"`
+					H interface{} `json:"h"`
+					I interface{} `json:"i"`
+				}{}
+				return &v
+			},
+		},
+		{
+			name: "typed 10 field",
+			new: func() interface{} {
+				v := struct {
+					A interface{} `json:"a"`
+					B interface{} `json:"b"`
+					C interface{} `json:"c"`
+					D interface{} `json:"d"`
+					E interface{} `json:"e"`
+					F interface{} `json:"f"`
+					G interface{} `json:"g"`
+					H interface{} `json:"h"`
+					I interface{} `json:"i"`
+					J interface{} `json:"j"`
+				}{}
+				return &v
+			},
+		},
+		{
+			name: "typed 11 field",
+			new: func() interface{} {
+				v := struct {
+					A interface{} `json:"a"`
+					B interface{} `json:"b"`
+					C interface{} `json:"c"`
+					D interface{} `json:"d"`
+					E interface{} `json:"e"`
+					F interface{} `json:"f"`
+					G interface{} `json:"g"`
+					H interface{} `json:"h"`
+					I interface{} `json:"i"`
+					J interface{} `json:"j"`
+					K interface{} `json:"k"`
+				}{}
+				return &v
+			},
+		},
+	}
+
+	for _, tc := range testcases {
+		t.Run(tc.name, func(t *testing.T) {
+			for _, target := range targets {
+				t.Run(target.name, func(t *testing.T) {
+					err := jsoniter.Unmarshal(tc.data, target.new())
+					if len(tc.expectError) == 0 {
+						if err != nil {
+							t.Errorf("unexpected error: %v", err)
+						}
+					} else {
+						if err == nil {
+							t.Errorf("expected error, got none")
+						} else if !strings.Contains(err.Error(), tc.expectError) {
+							t.Errorf("expected error containing '%s', got: %v", tc.expectError, err)
+						}
+					}
+				})
+			}
+		})
+	}
+}
+
 func Test_nested(t *testing.T) {
 	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"hello": [{"world": "value1"}, {"world": "value2"}]}`)
 	l1 := Level1{}
|
||||||
|
@ -5,8 +5,8 @@ import (
 	"io"
 	"testing"

-	"github.com/stretchr/testify/require"
 	"github.com/json-iterator/go"
+	"github.com/stretchr/testify/require"
 )

 func Test_read_null(t *testing.T) {
|
||||||
|
@ -4,10 +4,10 @@ import (
 	"bytes"
 	"testing"

-	"github.com/stretchr/testify/require"
 	"github.com/json-iterator/go"
-	"time"
+	"github.com/stretchr/testify/require"
 	"strings"
+	"time"
 )

 func Test_empty_object(t *testing.T) {
@ -129,4 +129,21 @@ func Test_reader_and_load_more(t *testing.T) {
 	decoder := jsoniter.ConfigCompatibleWithStandardLibrary.NewDecoder(reader)
 	obj := TestObject{}
 	should.Nil(decoder.Decode(&obj))
 }
+
+func Test_unmarshal_into_existing_value(t *testing.T) {
+	should := require.New(t)
+	type TestObject struct {
+		Field1 int
+		Field2 interface{}
+	}
+	var obj TestObject
+	m := map[string]interface{}{}
+	obj.Field2 = &m
+	cfg := jsoniter.Config{UseNumber: true}.Froze()
+	err := cfg.Unmarshal([]byte(`{"Field1":1,"Field2":{"k":"v"}}`), &obj)
+	should.NoError(err)
+	should.Equal(map[string]interface{}{
+		"k": "v",
+	}, m)
+}
|
||||||
|
@ -2,10 +2,10 @@ package misc_tests

 import (
 	"encoding/json"
+	"github.com/json-iterator/go"
 	"github.com/stretchr/testify/require"
 	"strings"
 	"testing"
-	"github.com/json-iterator/go"
 )

 func Test_jsoniter_RawMessage(t *testing.T) {
@ -45,18 +45,18 @@ func Test_marshal_invalid_json_raw_message(t *testing.T) {
 func Test_raw_message_memory_not_copied_issue(t *testing.T) {
 	jsonStream := `{"name":"xxxxx","bundle_id":"com.zonst.majiang","app_platform":"ios","app_category":"100103", "budget_day":1000,"bidding_min":1,"bidding_max":2,"bidding_type":"CPM", "freq":{"open":true,"type":"day","num":100},"speed":1, "targeting":{"vendor":{"open":true,"list":["zonst"]}, "geo_code":{"open":true,"list":["156110100"]},"app_category":{"open":true,"list":["100101"]}, "day_parting":{"open":true,"list":["100409","100410"]},"device_type":{"open":true,"list":["ipad"]}, "os_version":{"open":true,"list":[10]},"carrier":{"open":true,"list":["mobile"]}, "network":{"open":true,"list":["4G"]}},"url":{"tracking_imp_url":"http://www.baidu.com", "tracking_clk_url":"http://www.baidu.com","jump_url":"http://www.baidu.com","deep_link_url":"http://www.baidu.com"}}`
 	type IteratorObject struct {
 		Name        *string              `json:"name"`
 		BundleId    *string              `json:"bundle_id"`
 		AppCategory *string              `json:"app_category"`
 		AppPlatform *string              `json:"app_platform"`
 		BudgetDay   *float32             `json:"budget_day"`
 		BiddingMax  *float32             `json:"bidding_max"`
 		BiddingMin  *float32             `json:"bidding_min"`
 		BiddingType *string              `json:"bidding_type"`
 		Freq        *jsoniter.RawMessage `json:"freq"`
 		Targeting   *jsoniter.RawMessage `json:"targeting"`
 		Url         *jsoniter.RawMessage `json:"url"`
 		Speed       *int                 `json:"speed" db:"speed"`
 	}

 	obj := &IteratorObject{}
|
||||||
|
1 pool.go
@ -23,6 +23,7 @@ func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
 }

 func (cfg *frozenConfig) ReturnStream(stream *Stream) {
+	stream.out = nil
 	stream.Error = nil
 	stream.Attachment = nil
 	cfg.streamPool.Put(stream)
|
||||||
22	reflect.go
@@ -4,7 +4,8 @@ import (
 	"fmt"
 	"reflect"
 	"unsafe"
-	"github.com/v2pro/plz/reflect2"
+
+	"github.com/modern-go/reflect2"
 )
 
 // ValDecoder is an internal type registered to cache as needed.
@@ -40,6 +41,14 @@ type ctx struct {
 	decoders map[reflect2.Type]ValDecoder
 }
 
+func (b *ctx) caseSensitive() bool {
+	if b.frozenConfig == nil {
+		// default is case-insensitive
+		return false
+	}
+	return b.frozenConfig.caseSensitive
+}
+
 func (b *ctx) append(prefix string) *ctx {
 	return &ctx{
 		frozenConfig: b.frozenConfig,
@@ -51,6 +60,7 @@ func (b *ctx) append(prefix string) *ctx {
 
 // ReadVal copy the underlying JSON into go interface, same as json.Unmarshal
 func (iter *Iterator) ReadVal(obj interface{}) {
+	depth := iter.depth
 	cacheKey := reflect2.RTypeOf(obj)
 	decoder := iter.cfg.getDecoderFromCache(cacheKey)
 	if decoder == nil {
@@ -67,6 +77,10 @@ func (iter *Iterator) ReadVal(obj interface{}) {
 		return
 	}
 	decoder.Decode(ptr, iter)
+	if iter.depth != depth {
+		iter.ReportError("ReadVal", "unexpected mismatched nesting")
+		return
+	}
 }
 
 // WriteVal copy the go interface into underlying JSON, same as json.Marshal
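A small sketch of the iterator-level ReadVal path that the depth bookkeeping above protects (uses only the public ParseString/ReadVal API; illustrative):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	type user struct {
		Name string `json:"name"`
	}
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"name":"xxxxx"}`)
	var u user
	// ReadVal decodes into u; if a decoder leaves the iterator at a
	// different nesting depth than it started at, it now reports
	// "unexpected mismatched nesting" instead of silently continuing.
	iter.ReadVal(&u)
	fmt.Println(u.Name, iter.Error)
}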
@ -111,7 +125,8 @@ func decoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
|
|||||||
for _, extension := range extensions {
|
for _, extension := range extensions {
|
||||||
decoder = extension.DecorateDecoder(typ, decoder)
|
decoder = extension.DecorateDecoder(typ, decoder)
|
||||||
}
|
}
|
||||||
for _, extension := range ctx.extensions {
|
decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
|
||||||
|
for _, extension := range ctx.extraExtensions {
|
||||||
decoder = extension.DecorateDecoder(typ, decoder)
|
decoder = extension.DecorateDecoder(typ, decoder)
|
||||||
}
|
}
|
||||||
return decoder
|
return decoder
|
||||||
@ -213,7 +228,8 @@ func encoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
|
|||||||
for _, extension := range extensions {
|
for _, extension := range extensions {
|
||||||
encoder = extension.DecorateEncoder(typ, encoder)
|
encoder = extension.DecorateEncoder(typ, encoder)
|
||||||
}
|
}
|
||||||
for _, extension := range ctx.extensions {
|
encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
|
||||||
|
for _, extension := range ctx.extraExtensions {
|
||||||
encoder = extension.DecorateEncoder(typ, encoder)
|
encoder = extension.DecorateEncoder(typ, encoder)
|
||||||
}
|
}
|
||||||
return encoder
|
return encoder
|
||||||
@@ -2,9 +2,9 @@ package jsoniter
 
 import (
 	"fmt"
+	"github.com/modern-go/reflect2"
 	"io"
 	"unsafe"
-	"github.com/v2pro/plz/reflect2"
 )
 
 func decoderOfArray(ctx *ctx, typ reflect2.Type) ValDecoder {
@@ -1,9 +1,9 @@
 package jsoniter
 
 import (
-	"github.com/v2pro/plz/reflect2"
-	"unsafe"
+	"github.com/modern-go/reflect2"
 	"reflect"
+	"unsafe"
 )
 
 type dynamicEncoder struct {
@@ -2,12 +2,12 @@ package jsoniter
 
 import (
 	"fmt"
+	"github.com/modern-go/reflect2"
 	"reflect"
 	"sort"
 	"strings"
 	"unicode"
 	"unsafe"
-	"github.com/v2pro/plz/reflect2"
 )
 
 var typeDecoders = map[string]ValDecoder{}
@@ -18,8 +18,8 @@ var extensions = []Extension{}
 
 // StructDescriptor describe how should we encode/decode the struct
 type StructDescriptor struct {
 	Type   reflect2.Type
 	Fields []*Binding
 }
 
 // GetField get one field from the descriptor by its name.
@@ -47,6 +47,8 @@ type Binding struct {
 // Can also rename fields by UpdateStructDescriptor.
 type Extension interface {
 	UpdateStructDescriptor(structDescriptor *StructDescriptor)
+	CreateMapKeyDecoder(typ reflect2.Type) ValDecoder
+	CreateMapKeyEncoder(typ reflect2.Type) ValEncoder
 	CreateDecoder(typ reflect2.Type) ValDecoder
 	CreateEncoder(typ reflect2.Type) ValEncoder
 	DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder
@@ -61,6 +63,16 @@ type DummyExtension struct {
 func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
 }
 
+// CreateMapKeyDecoder No-op
+func (extension *DummyExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
+	return nil
+}
+
+// CreateMapKeyEncoder No-op
+func (extension *DummyExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
+	return nil
+}
+
 // CreateDecoder No-op
 func (extension *DummyExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
 	return nil
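A hedged sketch of what the widened Extension interface above means for user code: existing extensions gain the two map-key hooks for free by embedding DummyExtension. The extension name below is made up for illustration; returning nil keeps the built-in behaviour.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
	"github.com/modern-go/reflect2"
)

// keyAwareExtension is a hypothetical extension. Embedding DummyExtension
// supplies no-op implementations of the whole Extension interface, so only
// the hooks we care about need overriding, including the new
// CreateMapKeyDecoder/CreateMapKeyEncoder methods added above.
type keyAwareExtension struct {
	jsoniter.DummyExtension
}

func (e *keyAwareExtension) CreateMapKeyEncoder(typ reflect2.Type) jsoniter.ValEncoder {
	// Returning nil falls back to the built-in map key encoding; a real
	// extension would return a custom ValEncoder for selected key types.
	return nil
}

func main() {
	jsoniter.RegisterExtension(&keyAwareExtension{})
	out, _ := jsoniter.Marshal(map[string]int{"a": 1})
	fmt.Println(string(out)) // {"a":1}
}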
@@ -97,6 +109,16 @@ func (extension EncoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
 	return extension[typ]
 }
 
+// CreateMapKeyDecoder No-op
+func (extension EncoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
+	return nil
+}
+
+// CreateMapKeyEncoder No-op
+func (extension EncoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
+	return nil
+}
+
 // DecorateDecoder No-op
 func (extension EncoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
 	return decoder
@@ -113,6 +135,16 @@ type DecoderExtension map[reflect2.Type]ValDecoder
 func (extension DecoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
 }
 
+// CreateMapKeyDecoder No-op
+func (extension DecoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
+	return nil
+}
+
+// CreateMapKeyEncoder No-op
+func (extension DecoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
+	return nil
+}
+
 // CreateDecoder get decoder from map
 func (extension DecoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
 	return extension[typ]
@@ -214,7 +246,8 @@ func getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
 		for _, extension := range extensions {
 			decoder = extension.DecorateDecoder(typ, decoder)
 		}
-		for _, extension := range ctx.extensions {
+		decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
+		for _, extension := range ctx.extraExtensions {
 			decoder = extension.DecorateDecoder(typ, decoder)
 		}
 	}
@@ -227,14 +260,18 @@ func _getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
 			return decoder
 		}
 	}
-	for _, extension := range ctx.extensions {
+	decoder := ctx.decoderExtension.CreateDecoder(typ)
+	if decoder != nil {
+		return decoder
+	}
+	for _, extension := range ctx.extraExtensions {
 		decoder := extension.CreateDecoder(typ)
 		if decoder != nil {
 			return decoder
 		}
 	}
 	typeName := typ.String()
-	decoder := typeDecoders[typeName]
+	decoder = typeDecoders[typeName]
 	if decoder != nil {
 		return decoder
 	}
@@ -254,7 +291,8 @@ func getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
 		for _, extension := range extensions {
 			encoder = extension.DecorateEncoder(typ, encoder)
 		}
-		for _, extension := range ctx.extensions {
+		encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
+		for _, extension := range ctx.extraExtensions {
 			encoder = extension.DecorateEncoder(typ, encoder)
 		}
 	}
@@ -268,14 +306,18 @@ func _getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
 			return encoder
 		}
 	}
-	for _, extension := range ctx.extensions {
+	encoder := ctx.encoderExtension.CreateEncoder(typ)
+	if encoder != nil {
+		return encoder
+	}
+	for _, extension := range ctx.extraExtensions {
 		encoder := extension.CreateEncoder(typ)
 		if encoder != nil {
 			return encoder
 		}
 	}
 	typeName := typ.String()
-	encoder := typeEncoders[typeName]
+	encoder = typeEncoders[typeName]
 	if encoder != nil {
 		return encoder
 	}
@@ -296,13 +338,13 @@ func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
 	for i := 0; i < structType.NumField(); i++ {
 		field := structType.Field(i)
 		tag, hastag := field.Tag().Lookup(ctx.getTagKey())
-		if ctx.onlyTaggedField && !hastag {
+		if ctx.onlyTaggedField && !hastag && !field.Anonymous() {
+			continue
+		}
+		if tag == "-" || field.Name() == "_" {
 			continue
 		}
 		tagParts := strings.Split(tag, ",")
-		if tag == "-" {
-			continue
-		}
 		if field.Anonymous() && (tag == "" || tagParts[0] == "") {
 			if field.Type().Kind() == reflect.Struct {
 				structDescriptor := describeStruct(ctx, field.Type())
@@ -355,13 +397,15 @@ func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
 }
 func createStructDescriptor(ctx *ctx, typ reflect2.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor {
 	structDescriptor := &StructDescriptor{
 		Type:   typ,
 		Fields: bindings,
 	}
 	for _, extension := range extensions {
 		extension.UpdateStructDescriptor(structDescriptor)
 	}
-	for _, extension := range ctx.extensions {
+	ctx.encoderExtension.UpdateStructDescriptor(structDescriptor)
+	ctx.decoderExtension.UpdateStructDescriptor(structDescriptor)
+	for _, extension := range ctx.extraExtensions {
 		extension.UpdateStructDescriptor(structDescriptor)
 	}
 	processTags(structDescriptor, ctx.frozenConfig)
@@ -2,9 +2,9 @@ package jsoniter
 
 import (
 	"encoding/json"
+	"github.com/modern-go/reflect2"
 	"strconv"
 	"unsafe"
-	"github.com/v2pro/plz/reflect2"
 )
 
 type Number string
@@ -109,4 +109,4 @@ func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
 
 func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
 	return len(*((*Number)(ptr))) == 0
 }
@@ -1,9 +1,9 @@
 package jsoniter
 
 import (
-	"unsafe"
 	"encoding/json"
-	"github.com/v2pro/plz/reflect2"
+	"github.com/modern-go/reflect2"
+	"unsafe"
 )
 
 var jsonRawMessageType = reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()
@@ -57,4 +57,4 @@ func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream)
 
 func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
 	return len(*((*RawMessage)(ptr))) == 0
 }
@@ -1,11 +1,12 @@
 package jsoniter
 
 import (
+	"fmt"
+	"github.com/modern-go/reflect2"
+	"io"
 	"reflect"
 	"sort"
 	"unsafe"
-	"github.com/v2pro/plz/reflect2"
-	"fmt"
 )
 
 func decoderOfMap(ctx *ctx, typ reflect2.Type) ValDecoder {
@@ -38,6 +39,16 @@ func encoderOfMap(ctx *ctx, typ reflect2.Type) ValEncoder {
 }
 
 func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
+	decoder := ctx.decoderExtension.CreateMapKeyDecoder(typ)
+	if decoder != nil {
+		return decoder
+	}
+	for _, extension := range ctx.extraExtensions {
+		decoder := extension.CreateMapKeyDecoder(typ)
+		if decoder != nil {
+			return decoder
+		}
+	}
 	switch typ.Kind() {
 	case reflect.String:
 		return decoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
@@ -53,14 +64,26 @@ func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
 		return &numericMapKeyDecoder{decoderOfType(ctx, typ)}
 	default:
 		ptrType := reflect2.PtrTo(typ)
-		if ptrType.Implements(textMarshalerType) {
+		if ptrType.Implements(unmarshalerType) {
+			return &referenceDecoder{
+				&unmarshalerDecoder{
+					valType: ptrType,
+				},
+			}
+		}
+		if typ.Implements(unmarshalerType) {
+			return &unmarshalerDecoder{
+				valType: typ,
+			}
+		}
+		if ptrType.Implements(textUnmarshalerType) {
 			return &referenceDecoder{
 				&textUnmarshalerDecoder{
 					valType: ptrType,
 				},
 			}
 		}
-		if typ.Implements(textMarshalerType) {
+		if typ.Implements(textUnmarshalerType) {
 			return &textUnmarshalerDecoder{
 				valType: typ,
 			}
 		}
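A hedged sketch of the new map-key decode path: a key type whose pointer implements json.Unmarshaler can now be used as a map key when unmarshaling. The version type and the "major.minor" key format are invented for illustration, and the behaviour intentionally differs from encoding/json, which only honours encoding.TextUnmarshaler for map keys.

package main

import (
	"fmt"
	"strconv"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

// version is a hypothetical comparable map key type.
type version struct {
	Major, Minor int
}

// UnmarshalJSON receives the raw key token (a quoted string) and parses it.
func (v *version) UnmarshalJSON(b []byte) error {
	s := strings.Trim(string(b), `"`)
	parts := strings.SplitN(s, ".", 2)
	v.Major, _ = strconv.Atoi(parts[0])
	if len(parts) > 1 {
		v.Minor, _ = strconv.Atoi(parts[1])
	}
	return nil
}

func main() {
	var m map[version]string
	err := jsoniter.Unmarshal([]byte(`{"1.2":"stable"}`), &m)
	fmt.Println(m, err)
}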
@@ -70,6 +93,16 @@ func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
 }
 
 func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
+	encoder := ctx.encoderExtension.CreateMapKeyEncoder(typ)
+	if encoder != nil {
+		return encoder
+	}
+	for _, extension := range ctx.extraExtensions {
+		encoder := extension.CreateMapKeyEncoder(typ)
+		if encoder != nil {
+			return encoder
+		}
+	}
 	switch typ.Kind() {
 	case reflect.String:
 		return encoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
@@ -95,6 +128,9 @@ func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
 				stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
 			}
 		}
+		if typ.Kind() == reflect.Interface {
+			return &dynamicMapKeyEncoder{ctx, typ}
+		}
 		return &lazyErrorEncoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
 	}
 }
@@ -191,6 +227,21 @@ func (encoder *numericMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
 	return false
 }
 
+type dynamicMapKeyEncoder struct {
+	ctx     *ctx
+	valType reflect2.Type
+}
+
+func (encoder *dynamicMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+	obj := encoder.valType.UnsafeIndirect(ptr)
+	encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).Encode(reflect2.PtrOf(obj), stream)
+}
+
+func (encoder *dynamicMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+	obj := encoder.valType.UnsafeIndirect(ptr)
+	return encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).IsEmpty(reflect2.PtrOf(obj))
+}
+
 type mapEncoder struct {
 	mapType     *reflect2.UnsafeMapType
 	keyEncoder  ValEncoder
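A sketch of what the dynamicMapKeyEncoder above enables: maps keyed by an interface type can be marshaled, with the concrete key type resolved at encode time. Illustrative only; key ordering in the output is not guaranteed unless SortMapKeys is enabled in the config.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// The concrete key type here is string, so the dynamic lookup
	// falls through to the normal string key encoder.
	m := map[interface{}]int{"a": 1, "b": 2}
	out, err := jsoniter.Marshal(m)
	fmt.Println(string(out), err)
}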
@@ -198,6 +249,10 @@ type mapEncoder struct {
 }
 
 func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+	if *(*unsafe.Pointer)(ptr) == nil {
+		stream.WriteNil()
+		return
+	}
 	stream.WriteObjectStart()
 	iter := encoder.mapType.UnsafeIterate(ptr)
 	for i := 0; iter.HasNext(); i++ {
@@ -235,13 +290,17 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
 	stream.WriteObjectStart()
 	mapIter := encoder.mapType.UnsafeIterate(ptr)
 	subStream := stream.cfg.BorrowStream(nil)
+	subStream.Attachment = stream.Attachment
 	subIter := stream.cfg.BorrowIterator(nil)
 	keyValues := encodedKeyValues{}
 	for mapIter.HasNext() {
-		subStream.buf = make([]byte, 0, 64)
 		key, elem := mapIter.UnsafeNext()
+		subStreamIndex := subStream.Buffered()
 		encoder.keyEncoder.Encode(key, subStream)
-		encodedKey := subStream.Buffer()
+		if subStream.Error != nil && subStream.Error != io.EOF && stream.Error == nil {
+			stream.Error = subStream.Error
+		}
+		encodedKey := subStream.Buffer()[subStreamIndex:]
 		subIter.ResetBytes(encodedKey)
 		decodedKey := subIter.ReadString()
 		if stream.indention > 0 {
@@ -252,7 +311,7 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
 		encoder.elemEncoder.Encode(elem, subStream)
 		keyValues = append(keyValues, encodedKV{
 			key:      decodedKey,
-			keyValue: subStream.Buffer(),
+			keyValue: subStream.Buffer()[subStreamIndex:],
 		})
 	}
 	sort.Sort(keyValues)
@@ -262,6 +321,9 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
 		}
 		stream.Write(keyValue.keyValue)
 	}
+	if subStream.Error != nil && stream.Error == nil {
+		stream.Error = subStream.Error
+	}
 	stream.WriteObjectEnd()
 	stream.cfg.ReturnStream(subStream)
 	stream.cfg.ReturnIterator(subIter)
@@ -1,10 +1,11 @@
 package jsoniter
 
 import (
-	"github.com/v2pro/plz/reflect2"
-	"unsafe"
 	"encoding"
 	"encoding/json"
+	"unsafe"
+
+	"github.com/modern-go/reflect2"
 )
 
 var marshalerType = reflect2.TypeOfPtr((*json.Marshaler)(nil)).Elem()
@@ -98,6 +99,12 @@ func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
 	if err != nil {
 		stream.Error = err
 	} else {
+		// html escape was already done by jsoniter
+		// but the extra '\n' should be trimed
+		l := len(bytes)
+		if l > 0 && bytes[l-1] == '\n' {
+			bytes = bytes[:l-1]
+		}
 		stream.Write(bytes)
 	}
 }
@@ -3,9 +3,10 @@ package jsoniter
 import (
 	"encoding/base64"
 	"reflect"
-	"unsafe"
-	"github.com/v2pro/plz/reflect2"
 	"strconv"
+	"unsafe"
+
+	"github.com/modern-go/reflect2"
 )
 
 const ptrSize = 32 << uintptr(^uintptr(0)>>63)
@@ -405,7 +406,7 @@ func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool {
 }
 
 type base64Codec struct {
 	sliceType    *reflect2.UnsafeSliceType
 	sliceDecoder ValDecoder
 }
 
@@ -416,16 +417,11 @@ func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
 	}
 	switch iter.WhatIsNext() {
 	case StringValue:
-		encoding := base64.StdEncoding
-		src := iter.SkipAndReturnBytes()
-		src = src[1 : len(src)-1]
-		decodedLen := encoding.DecodedLen(len(src))
-		dst := make([]byte, decodedLen)
-		len, err := encoding.Decode(dst, src)
+		src := iter.ReadString()
+		dst, err := base64.StdEncoding.DecodeString(src)
 		if err != nil {
 			iter.ReportError("decode base64", err.Error())
 		} else {
-			dst = dst[:len]
 			codec.sliceType.UnsafeSet(ptr, unsafe.Pointer(&dst))
 		}
 	case ArrayValue:
@@ -436,17 +432,19 @@ func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
 }
 
 func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
-	src := *((*[]byte)(ptr))
-	if len(src) == 0 {
+	if codec.sliceType.UnsafeIsNil(ptr) {
 		stream.WriteNil()
 		return
 	}
+	src := *((*[]byte)(ptr))
 	encoding := base64.StdEncoding
 	stream.writeByte('"')
-	size := encoding.EncodedLen(len(src))
-	buf := make([]byte, size)
-	encoding.Encode(buf, src)
-	stream.buf = append(stream.buf, buf...)
+	if len(src) != 0 {
+		size := encoding.EncodedLen(len(src))
+		buf := make([]byte, size)
+		encoding.Encode(buf, src)
+		stream.buf = append(stream.buf, buf...)
+	}
 	stream.writeByte('"')
 }
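A short sketch of the []byte round trip the base64 codec above handles ([]byte fields are encoded as base64 strings; the decode path now goes through ReadString plus base64.StdEncoding.DecodeString):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	type blob struct {
		Data []byte `json:"data"`
	}
	out, _ := jsoniter.Marshal(blob{Data: []byte("hello")})
	fmt.Println(string(out)) // expected something like {"data":"aGVsbG8="}

	var b blob
	_ = jsoniter.Unmarshal(out, &b)
	fmt.Println(string(b.Data)) // hello
}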
@@ -1,9 +1,9 @@
 package jsoniter
 
 import (
+	"github.com/modern-go/reflect2"
 	"reflect"
 	"unsafe"
-	"github.com/v2pro/plz/reflect2"
 )
 
 func decoderOfOptional(ctx *ctx, typ reflect2.Type) ValDecoder {
@@ -2,9 +2,9 @@ package jsoniter
 
 import (
 	"fmt"
+	"github.com/modern-go/reflect2"
 	"io"
 	"unsafe"
-	"github.com/v2pro/plz/reflect2"
 )
 
 func decoderOfSlice(ctx *ctx, typ reflect2.Type) ValDecoder {
@@ -5,7 +5,8 @@ import (
 	"io"
 	"strings"
 	"unsafe"
-	"github.com/v2pro/plz/reflect2"
+
+	"github.com/modern-go/reflect2"
 )
 
 func decoderOfStruct(ctx *ctx, typ reflect2.Type) ValDecoder {
@@ -31,6 +32,15 @@ func decoderOfStruct(ctx *ctx, typ reflect2.Type) ValDecoder {
 	for k, binding := range bindings {
 		fields[k] = binding.Decoder.(*structFieldDecoder)
 	}
+
+	if !ctx.caseSensitive() {
+		for k, binding := range bindings {
+			if _, found := fields[strings.ToLower(k)]; !found {
+				fields[strings.ToLower(k)] = binding.Decoder.(*structFieldDecoder)
+			}
+		}
+	}
+
 	return createStructDecoder(ctx, typ, fields)
 }
 
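A sketch of what the case-sensitivity plumbing above means for callers. Config.CaseSensitive is assumed to be the public knob behind ctx.caseSensitive(); the default remains case-insensitive matching, like encoding/json.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	type user struct {
		UserName string `json:"user_name"`
	}
	doc := []byte(`{"USER_NAME":"xxxxx"}`)

	var a, b user
	// Default behaviour: field names match case-insensitively.
	_ = jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal(doc, &a)
	// With CaseSensitive enabled, the mismatched key is ignored.
	strict := jsoniter.Config{CaseSensitive: true}.Froze()
	_ = strict.Unmarshal(doc, &b)

	fmt.Println(a.UserName) // xxxxx
	fmt.Println(b.UserName) // empty
}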
@@ -41,12 +51,13 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 	knownHash := map[int64]struct{}{
 		0: {},
 	}
+
 	switch len(fields) {
 	case 0:
 		return &skipObjectDecoder{typ}
 	case 1:
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -60,7 +71,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder1 *structFieldDecoder
 		var fieldDecoder2 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -83,7 +94,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder2 *structFieldDecoder
 		var fieldDecoder3 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -114,7 +125,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder3 *structFieldDecoder
 		var fieldDecoder4 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -151,7 +162,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder4 *structFieldDecoder
 		var fieldDecoder5 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -194,7 +205,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder5 *structFieldDecoder
 		var fieldDecoder6 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -243,7 +254,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder6 *structFieldDecoder
 		var fieldDecoder7 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -298,7 +309,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder7 *structFieldDecoder
 		var fieldDecoder8 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -359,7 +370,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder8 *structFieldDecoder
 		var fieldDecoder9 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -426,7 +437,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
 		var fieldDecoder9 *structFieldDecoder
 		var fieldDecoder10 *structFieldDecoder
 		for fieldName, fieldDecoder := range fields {
-			fieldHash := calcHash(fieldName)
+			fieldHash := calcHash(fieldName, ctx.caseSensitive())
 			_, known := knownHash[fieldHash]
 			if known {
 				return &generalStructDecoder{typ, fields, false}
@@ -489,13 +500,20 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
 	if !iter.readObjectStart() {
 		return
 	}
-	decoder.decodeOneField(ptr, iter)
-	for iter.nextToken() == ',' {
+	if !iter.incrementDepth() {
+		return
+	}
+	var c byte
+	for c = ','; c == ','; c = iter.nextToken() {
 		decoder.decodeOneField(ptr, iter)
 	}
 	if iter.Error != nil && iter.Error != io.EOF {
 		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
 	}
+	if c != '}' {
+		iter.ReportError("struct Decode", `expect }, but found `+string([]byte{c}))
+	}
+	iter.decrementDepth()
 }
 
 func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *Iterator) {
@@ -505,19 +523,19 @@ func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *It
 		fieldBytes := iter.ReadStringAsSlice()
 		field = *(*string)(unsafe.Pointer(&fieldBytes))
 		fieldDecoder = decoder.fields[field]
-		if fieldDecoder == nil {
+		if fieldDecoder == nil && !iter.cfg.caseSensitive {
 			fieldDecoder = decoder.fields[strings.ToLower(field)]
 		}
 	} else {
 		field = iter.ReadString()
 		fieldDecoder = decoder.fields[field]
-		if fieldDecoder == nil {
+		if fieldDecoder == nil && !iter.cfg.caseSensitive {
 			fieldDecoder = decoder.fields[strings.ToLower(field)]
 		}
 	}
 	if fieldDecoder == nil {
-		msg := "found unknown field: " + field
 		if decoder.disallowUnknownFields {
+			msg := "found unknown field: " + field
 			iter.ReportError("ReadObject", msg)
 		}
 		c := iter.nextToken()
@@ -557,6 +575,9 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
 	if !iter.readObjectStart() {
 		return
 	}
+	if !iter.incrementDepth() {
+		return
+	}
 	for {
 		if iter.readFieldHash() == decoder.fieldHash {
 			decoder.fieldDecoder.Decode(ptr, iter)
@@ -570,6 +591,7 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
 	if iter.Error != nil && iter.Error != io.EOF {
 		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
 	}
+	iter.decrementDepth()
 }
 
 type twoFieldsStructDecoder struct {
@@ -584,6 +606,9 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
 	if !iter.readObjectStart() {
 		return
 	}
+	if !iter.incrementDepth() {
+		return
+	}
 	for {
 		switch iter.readFieldHash() {
 		case decoder.fieldHash1:
@@ -600,6 +625,7 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
 	if iter.Error != nil && iter.Error != io.EOF {
 		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
 	}
+	iter.decrementDepth()
 }
 
 type threeFieldsStructDecoder struct {
@@ -616,6 +642,9 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
 	if !iter.readObjectStart() {
 		return
 	}
+	if !iter.incrementDepth() {
+		return
+	}
 	for {
 		switch iter.readFieldHash() {
 		case decoder.fieldHash1:
@@ -634,6 +663,7 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
 	if iter.Error != nil && iter.Error != io.EOF {
 		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
 	}
+	iter.decrementDepth()
 }
 
 type fourFieldsStructDecoder struct {
@@ -652,6 +682,9 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
 	if !iter.readObjectStart() {
 		return
 	}
+	if !iter.incrementDepth() {
+		return
+	}
 	for {
 		switch iter.readFieldHash() {
 		case decoder.fieldHash1:
@@ -672,6 +705,7 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
 	if iter.Error != nil && iter.Error != io.EOF {
 		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
 	}
+	iter.decrementDepth()
 }
 
 type fiveFieldsStructDecoder struct {
@@ -692,6 +726,9 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
 	if !iter.readObjectStart() {
 		return
 	}
+	if !iter.incrementDepth() {
+		return
+	}
 	for {
 		switch iter.readFieldHash() {
 		case decoder.fieldHash1:
@@ -714,6 +751,7 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
 	if iter.Error != nil && iter.Error != io.EOF {
 		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
 	}
+	iter.decrementDepth()
 }
 
 type sixFieldsStructDecoder struct {
@@ -736,6 +774,9 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
 	if !iter.readObjectStart() {
 		return
 	}
+	if !iter.incrementDepth() {
+		return
+	}
 	for {
 		switch iter.readFieldHash() {
 		case decoder.fieldHash1:
@@ -760,6 +801,7 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
 	if iter.Error != nil && iter.Error != io.EOF {
 		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
 	}
+	iter.decrementDepth()
 }
 
 type sevenFieldsStructDecoder struct {
@@ -784,6 +826,9 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
 	if !iter.readObjectStart() {
 		return
 	}
+	if !iter.incrementDepth() {
+		return
+	}
 	for {
 		switch iter.readFieldHash() {
 		case decoder.fieldHash1:
@@ -810,6 +855,7 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
 	if iter.Error != nil && iter.Error != io.EOF {
 		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
 	}
+	iter.decrementDepth()
 }
 
 type eightFieldsStructDecoder struct {
@@ -836,6 +882,9 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
 	if !iter.readObjectStart() {
 		return
 	}
+	if !iter.incrementDepth() {
+		return
+	}
 	for {
 		switch iter.readFieldHash() {
 		case decoder.fieldHash1:
@@ -864,6 +913,7 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
 	if iter.Error != nil && iter.Error != io.EOF {
 		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
 	}
+	iter.decrementDepth()
 }
 
 type nineFieldsStructDecoder struct {
@@ -892,6 +942,9 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
 	if !iter.readObjectStart() {
 		return
 	}
+	if !iter.incrementDepth() {
+		return
+	}
 	for {
 		switch iter.readFieldHash() {
 		case decoder.fieldHash1:
@@ -922,6 +975,7 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
 	if iter.Error != nil && iter.Error != io.EOF {
 		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
 	}
+	iter.decrementDepth()
 }
 
 type tenFieldsStructDecoder struct {
@@ -952,6 +1006,9 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
 	if !iter.readObjectStart() {
 		return
 	}
+	if !iter.incrementDepth() {
+		return
+	}
 	for {
 		switch iter.readFieldHash() {
 		case decoder.fieldHash1:
@@ -984,6 +1041,7 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
 	if iter.Error != nil && iter.Error != io.EOF {
 		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
 	}
+	iter.decrementDepth()
 }
 
 type structFieldDecoder struct {
@@ -1031,4 +1089,4 @@ func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
 		iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
 		return
 	}
 }
@@ -2,10 +2,10 @@ package jsoniter
 
 import (
 	"fmt"
+	"github.com/modern-go/reflect2"
 	"io"
 	"reflect"
 	"unsafe"
-	"github.com/v2pro/plz/reflect2"
 )
 
 func encoderOfStruct(ctx *ctx, typ reflect2.Type) ValEncoder {
@@ -200,6 +200,7 @@ type stringModeStringEncoder struct {
 
 func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
 	tempStream := encoder.cfg.BorrowStream(nil)
+	tempStream.Attachment = stream.Attachment
 	defer encoder.cfg.ReturnStream(tempStream)
 	encoder.elemEncoder.Encode(ptr, tempStream)
 	stream.WriteString(string(tempStream.Buffer()))
@@ -5,8 +5,8 @@ import (
 	"encoding/json"
 	"testing"
 
-	"github.com/stretchr/testify/require"
 	"github.com/json-iterator/go"
+	"github.com/stretchr/testify/require"
 )
 
 func Test_skip_number_in_array(t *testing.T) {
@@ -105,6 +105,15 @@ func Test_skip_and_return_bytes_with_reader(t *testing.T) {
 	should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(skipped))
 }
 
+func Test_append_skip_and_return_bytes_with_reader(t *testing.T) {
+	should := require.New(t)
+	iter := jsoniter.Parse(jsoniter.ConfigDefault, bytes.NewBufferString(`[ {"a" : [{"stream": "c"}], "d": 102 }, "stream"]`), 4)
+	iter.ReadArray()
+	buf := make([]byte, 0, 1024)
+	buf = iter.SkipAndAppendBytes(buf)
+	should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(buf))
+}
+
 func Test_skip_empty(t *testing.T) {
 	should := require.New(t)
 	should.NotNil(jsoniter.Get([]byte("")).LastError())
@@ -6,8 +6,8 @@ import (
 	"github.com/json-iterator/go"
 	"github.com/stretchr/testify/require"
 	"io"
-	"testing"
 	"reflect"
+	"testing"
 )
 
 type testCase struct {
@@ -14,4 +14,4 @@ func init() {
 		`"\t"`, // valid
 	},
 	})
 }
@@ -13,7 +13,7 @@ func init() {
 		`{"hello":{}}`, // valid
 		`{"hello":{}}}`, // invalid
 		`{"hello": { "hello": 1}}`, // valid
 		`{abc}`, // invalid
 	},
 	})
 }
@@ -55,6 +55,11 @@ func (stream *Stream) Buffer() []byte {
 	return stream.buf
 }
 
+// SetBuffer allows to append to the internal buffer directly
+func (stream *Stream) SetBuffer(buf []byte) {
+	stream.buf = buf
+}
+
 // Write writes the contents of p into the buffer.
 // It returns the number of bytes written.
 // If nn < len(p), it also returns an error explaining
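A sketch of the new Buffer/SetBuffer pair in use (the snippet relies only on the exported Stream methods shown above and is illustrative):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	stream := jsoniter.ConfigDefault.BorrowStream(nil)
	defer jsoniter.ConfigDefault.ReturnStream(stream)

	stream.WriteObjectStart()
	stream.WriteObjectField("speed")
	stream.WriteInt(1)

	// Grab the internal buffer, append raw bytes to it, and hand it back
	// with SetBuffer instead of copying through an intermediate slice.
	buf := stream.Buffer()
	buf = append(buf, '}')
	stream.SetBuffer(buf)

	fmt.Println(string(stream.Buffer())) // {"speed":1}
}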
@@ -1,6 +1,7 @@
 package jsoniter
 
 import (
+	"fmt"
 	"math"
 	"strconv"
 )
@@ -13,6 +14,10 @@ func init() {
 
 // WriteFloat32 write float32 to stream
 func (stream *Stream) WriteFloat32(val float32) {
+	if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
+		stream.Error = fmt.Errorf("unsupported value: %f", val)
+		return
+	}
 	abs := math.Abs(float64(val))
 	fmt := byte('f')
 	// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
@@ -26,6 +31,10 @@ func (stream *Stream) WriteFloat32(val float32) {
 
 // WriteFloat32Lossy write float32 to stream with ONLY 6 digits precision although much much faster
 func (stream *Stream) WriteFloat32Lossy(val float32) {
+	if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
+		stream.Error = fmt.Errorf("unsupported value: %f", val)
+		return
+	}
 	if val < 0 {
 		stream.writeByte('-')
 		val = -val
@@ -54,6 +63,10 @@ func (stream *Stream) WriteFloat32Lossy(val float32) {
 
 // WriteFloat64 write float64 to stream
 func (stream *Stream) WriteFloat64(val float64) {
+	if math.IsInf(val, 0) || math.IsNaN(val) {
+		stream.Error = fmt.Errorf("unsupported value: %f", val)
+		return
+	}
 	abs := math.Abs(val)
 	fmt := byte('f')
 	// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
@@ -67,6 +80,10 @@ func (stream *Stream) WriteFloat64(val float64) {
 
 // WriteFloat64Lossy write float64 to stream with ONLY 6 digits precision although much much faster
 func (stream *Stream) WriteFloat64Lossy(val float64) {
+	if math.IsInf(val, 0) || math.IsNaN(val) {
+		stream.Error = fmt.Errorf("unsupported value: %f", val)
+		return
+	}
 	if val < 0 {
 		stream.writeByte('-')
 		val = -val
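A sketch of the effect of the NaN/Inf guards above at the Marshal level: the invalid float is rejected with an error instead of being written as malformed JSON. The exact error text is an assumption based on the format string in the diff.

package main

import (
	"fmt"
	"math"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	_, err := jsoniter.Marshal(math.NaN())
	fmt.Println(err) // e.g. "unsupported value: NaN"

	_, err = jsoniter.Marshal(math.Inf(1))
	fmt.Println(err)
}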
@@ -17,16 +17,16 @@ func init() {
 func writeFirstBuf(space []byte, v uint32) []byte {
 	start := v >> 24
 	if start == 0 {
-		space = append(space, byte(v >> 16), byte(v >> 8))
+		space = append(space, byte(v>>16), byte(v>>8))
 	} else if start == 1 {
-		space = append(space, byte(v >> 8))
+		space = append(space, byte(v>>8))
 	}
 	space = append(space, byte(v))
 	return space
 }
 
 func writeBuf(buf []byte, v uint32) []byte {
-	return append(buf, byte(v >> 16), byte(v >> 8), byte(v))
+	return append(buf, byte(v>>16), byte(v>>8), byte(v))
 }
 
 // WriteUint8 write uint8 to stream
@@ -91,7 +91,7 @@ func (stream *Stream) WriteUint32(val uint32) {
 		stream.buf = writeFirstBuf(stream.buf, digits[q2])
 	} else {
 		r3 := q2 - q3*1000
-		stream.buf = append(stream.buf, byte(q3 + '0'))
+		stream.buf = append(stream.buf, byte(q3+'0'))
 		stream.buf = writeBuf(stream.buf, digits[r3])
 	}
 	stream.buf = writeBuf(stream.buf, digits[r2])
@@ -60,4 +60,4 @@ func init() {
 }

 type structEmpty struct{}
 type arrayAlis [4]stringAlias

@@ -1,8 +1,8 @@
 package test

 import (
-	"strings"
 	"encoding"
+	"strings"
 )

 func init() {

@@ -26,7 +26,6 @@ func (k *stringKeyType) UnmarshalText(text []byte) error {
 var _ encoding.TextMarshaler = stringKeyType("")
 var _ encoding.TextUnmarshaler = new(stringKeyType)

-
 type structKeyType struct {
 	X string
 }

@@ -41,4 +40,4 @@ func (k *structKeyType) UnmarshalText(text []byte) error {
 }

 var _ encoding.TextMarshaler = structKeyType{}
 var _ encoding.TextUnmarshaler = &structKeyType{}

@@ -3,8 +3,8 @@ package test
 import (
 	"bytes"
 	"encoding/base64"
-	"strings"
 	"encoding/json"
+	"strings"
 )

 type StringMarshaler string

@@ -49,4 +49,4 @@ var _ json.Unmarshaler = new(StringMarshaler)

 func init() {
 	testCases = append(testCases, (*StringMarshaler)(nil))
 }

@@ -1,10 +1,10 @@
 package test

 import (
-	"strings"
-	"encoding/base64"
 	"bytes"
+	"encoding/base64"
 	"encoding/json"
+	"strings"
 )

 type structMarshaler struct {

@@ -91,7 +91,6 @@ func (p *jsonMarshaler) UnmarshalJSON(input []byte) error {
 	return nil
 }

-
 type jsonMarshalerMap map[int]int

 func (p *jsonMarshalerMap) MarshalJSON() ([]byte, error) {

@@ -117,11 +116,10 @@ func (p *textMarshaler) UnmarshalText(input []byte) error {

 type textMarshalerMap map[int]int

-
 func (p *textMarshalerMap) MarshalText() ([]byte, error) {
 	return []byte(`{}`), nil
 }

 func (p *textMarshalerMap) UnmarshalText(input []byte) error {
 	return nil
 }

@@ -60,6 +60,7 @@ func init() {
 		(*SameLevel2NoTags)(nil),
 		(*SameLevel2Tagged)(nil),
 		(*EmbeddedPtr)(nil),
+		(*UnnamedLiteral)(nil),
 	)
 }

@@ -230,4 +231,8 @@ type EmbeddedPtrOption struct {

 type EmbeddedPtr struct {
 	EmbeddedPtrOption `json:","`
 }
+
+type UnnamedLiteral struct {
+	_ struct{}
+}

@@ -145,6 +145,9 @@ func init() {
 		(*struct {
 			Field bool `json:",omitempty,string"`
 		})(nil),
+		(*struct {
+			Field bool `json:"中文"`
+		})(nil),
 	)
 }

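The new test case exercises a non-ASCII field tag. encoding/json accepts Unicode letters in tag names, and the compatible configuration is expected to behave the same way; a small standalone illustration using only the standard library:

package main

import (
	"encoding/json"
	"fmt"
)

type tagged struct {
	Field bool `json:"中文"`
}

func main() {
	out, _ := json.Marshal(tagged{Field: true})
	fmt.Println(string(out)) // {"中文":true}
}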
@@ -192,12 +195,12 @@ type StringFieldNameE struct {
 }

 type StringFieldName struct {
 	F1 string `json:"F1"`
 	F2 string `json:"f2"`
 	F3 string `json:"-"`
 	F4 string `json:"-,"`
 	F5 string `json:","`
 	F6 string `json:""`
 	StringFieldNameE `json:"e"`
 }

@@ -1,10 +1,10 @@
 package test

 import (
-	"strings"
-	"encoding"
 	"bytes"
+	"encoding"
 	"encoding/base64"
+	"strings"
 )

 func init() {

@@ -2,9 +2,9 @@ package test

 import (
 	"bytes"
+	"encoding"
 	"encoding/base64"
 	"strings"
-	"encoding"
 )

 func init() {

@@ -66,4 +66,4 @@ func (m *structTextMarshaler) UnmarshalText(text []byte) error {
 var _ encoding.TextMarshaler = structTextMarshaler{}
 var _ encoding.TextUnmarshaler = &structTextMarshaler{}

 type structTextMarshalerAlias structTextMarshaler

@@ -1,15 +1,15 @@
 package test

 import (
-	"testing"
-	"reflect"
-	"fmt"
-	"github.com/google/gofuzz"
-	"strings"
-	"github.com/json-iterator/go"
-	"encoding/json"
 	"bytes"
+	"encoding/json"
+	"fmt"
 	"github.com/davecgh/go-spew/spew"
+	"github.com/google/gofuzz"
+	"github.com/json-iterator/go"
+	"reflect"
+	"strings"
+	"testing"
 )

 var testCases []interface{}

@@ -8,13 +8,13 @@ func init() {
 		[2]*float64{},
 	)
 	unmarshalCases = append(unmarshalCases, unmarshalCase{
 		ptr:   (*[0]int)(nil),
 		input: `[1]`,
 	}, unmarshalCase{
 		ptr:   (*[1]int)(nil),
 		input: `[2]`,
 	}, unmarshalCase{
 		ptr:   (*[1]int)(nil),
 		input: `[]`,
 	})
 }

@@ -8,16 +8,16 @@ func init() {
 		return &val
 	}
 	unmarshalCases = append(unmarshalCases, unmarshalCase{
 		ptr:   (**interface{})(nil),
 		input: `"hello"`,
 	}, unmarshalCase{
 		ptr:   (**interface{})(nil),
 		input: `1e1`,
 	}, unmarshalCase{
 		ptr:   (**interface{})(nil),
 		input: `1.0e1`,
 	}, unmarshalCase{
 		ptr:   (*[]interface{})(nil),
 		input: `[1.0e1]`,
 	}, unmarshalCase{
 		ptr: (*struct {

@@ -66,12 +66,12 @@ func init() {
 		}{"hello"},
 		struct {
 			Field interface{}
-		}{struct{
+		}{struct {
 			field chan int
 		}{}},
 		struct {
 			Field interface{}
-		}{struct{
+		}{struct {
 			Field *int
 		}{pInt(100)}},
 	)

value_tests/error_test.go (new file, 36 lines)
@@ -0,0 +1,36 @@
+package test
+
+import (
+	"github.com/json-iterator/go"
+	"github.com/stretchr/testify/require"
+	"reflect"
+	"testing"
+)
+
+func Test_errorInput(t *testing.T) {
+	for _, testCase := range unmarshalCases {
+		if testCase.obj != nil {
+			continue
+		}
+		valType := reflect.TypeOf(testCase.ptr).Elem()
+		t.Run(valType.String(), func(t *testing.T) {
+			for _, data := range []string{
+				`x`,
+				`n`,
+				`nul`,
+				`{x}`,
+				`{"x"}`,
+				`{"x": "y"x}`,
+				`{"x": "y"`,
+				`{"x": "y", "a"}`,
+				`[`,
+				`[{"x": "y"}`,
+			} {
+				ptrVal := reflect.New(valType)
+				ptr := ptrVal.Interface()
+				err := jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(data), ptr)
+				require.Error(t, err, "on input %q", data)
+			}
+		})
+	}
+}
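The new error_test.go relies on a small reflect pattern: each case stores a typed nil pointer such as (*[]string)(nil), and the test derives the element type from it and allocates a fresh destination for every malformed input. A minimal standalone illustration of that pattern (standard library only; variable names are made up):

package main

import (
	"encoding/json"
	"fmt"
	"reflect"
)

func main() {
	var template *[]string                     // stands in for a test case's ptr field, e.g. (*[]string)(nil)
	valType := reflect.TypeOf(template).Elem() // []string
	ptr := reflect.New(valType).Interface()    // *[]string pointing at a fresh, writable value
	err := json.Unmarshal([]byte(`not json`), ptr)
	fmt.Println(valType, err != nil) // []string true
}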
@@ -1,13 +1,13 @@
 package test

 import (
+	"bytes"
+	"encoding/json"
 	"fmt"
-	"testing"
+	"github.com/json-iterator/go"
 	"github.com/stretchr/testify/require"
 	"strconv"
-	"bytes"
-	"github.com/json-iterator/go"
-	"encoding/json"
+	"testing"
 )

 func Test_read_float(t *testing.T) {

@@ -50,7 +50,6 @@ func Test_read_float(t *testing.T) {
 		}
 	}

-
 func Test_write_float32(t *testing.T) {
 	vals := []float32{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff,
 		-0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}

@@ -127,4 +126,4 @@ func Test_write_float64(t *testing.T) {
 	stream = jsoniter.NewStream(jsoniter.ConfigDefault, nil, 0)
 	stream.WriteFloat64(float64(0.0000001))
 	should.Equal("1e-07", string(stream.Buffer()))
 }

@@ -17,7 +17,7 @@ func init() {
 		pCloser2("hello"),
 	)
 	unmarshalCases = append(unmarshalCases, unmarshalCase{
 		ptr:   (*[]io.Closer)(nil),
 		input: "[null]",
 	}, unmarshalCase{
 		obj: func() interface{} {

@@ -1,12 +1,12 @@
 package test

 import (
+	"bytes"
+	"fmt"
+	"github.com/json-iterator/go"
 	"github.com/stretchr/testify/require"
 	"strconv"
-	"fmt"
 	"testing"
-	"bytes"
-	"github.com/json-iterator/go"
 )

 func init() {

@@ -158,7 +158,6 @@ func Test_read_int64(t *testing.T) {
 	}
 }

-
 func Test_write_uint8(t *testing.T) {
 	vals := []uint8{0, 1, 11, 111, 255}
 	for _, val := range vals {

@@ -417,4 +416,4 @@ func Test_write_int64(t *testing.T) {
 	stream.Flush()
 	should.Nil(stream.Error)
 	should.Equal("a4294967295", buf.String())
 }
@@ -3,11 +3,11 @@ package test
 import (
 	"bytes"
 	"encoding/json"
+	"github.com/json-iterator/go"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 	"io"
 	"testing"
-	"github.com/json-iterator/go"
 )

 func Test_missing_object_end(t *testing.T) {

@@ -103,18 +103,44 @@ func Test_invalid_float(t *testing.T) {
 }

 func Test_chan(t *testing.T) {
-	t.Skip("do not support chan")

 	type TestObject struct {
 		MyChan chan bool
 		MyField int
 	}

-	should := require.New(t)
 	obj := TestObject{}
-	str, err := json.Marshal(obj)
-	should.Nil(err)
-	should.Equal(``, str)
+	t.Run("Encode channel", func(t *testing.T) {
+		should := require.New(t)
+		str, err := jsoniter.Marshal(obj)
+		should.NotNil(err)
+		should.Nil(str)
+	})
+
+	t.Run("Encode channel using compatible configuration", func(t *testing.T) {
+		should := require.New(t)
+		str, err := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(obj)
+		should.NotNil(err)
+		should.Nil(str)
+	})
+}
+
+func Test_invalid_in_map(t *testing.T) {
+	testMap := map[string]interface{}{"chan": make(chan interface{})}
+
+	t.Run("Encode map with invalid content", func(t *testing.T) {
+		should := require.New(t)
+		str, err := jsoniter.Marshal(testMap)
+		should.NotNil(err)
+		should.Nil(str)
+	})
+
+	t.Run("Encode map with invalid content using compatible configuration", func(t *testing.T) {
+		should := require.New(t)
+		str, err := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(testMap)
+		should.NotNil(err)
+		should.Nil(str)
+	})
 }

 func Test_invalid_number(t *testing.T) {

@@ -223,4 +249,14 @@ func Test_EmptyInput(t *testing.T) {
 	if err == nil {
 		t.Errorf("Expected error")
 	}
 }
+
+type Foo struct {
+	A jsoniter.Any
+}
+
+func Test_nil_any(t *testing.T) {
+	should := require.New(t)
+	data, _ := jsoniter.Marshal(&Foo{})
+	should.Equal(`{"A":null}`, string(data))
+}
@@ -1,8 +1,10 @@
 package test

 import (
-	"math/big"
 	"encoding/json"
+	"fmt"
+	"math/big"
+	"time"
 )

 func init() {

@@ -26,17 +28,33 @@ func init() {
 		},
 		nilMap,
 		&nilMap,
-		map[string]*json.RawMessage{"hello":pRawMessage(json.RawMessage("[]"))},
+		map[string]*json.RawMessage{"hello": pRawMessage(json.RawMessage("[]"))},
+		map[Date]bool{{}: true},
+		map[Date2]bool{{}: true},
 	)
 	unmarshalCases = append(unmarshalCases, unmarshalCase{
 		ptr:   (*map[string]string)(nil),
 		input: `{"k\"ey": "val"}`,
 	}, unmarshalCase{
 		ptr:   (*map[string]string)(nil),
 		input: `null`,
 	}, unmarshalCase{
 		ptr:   (*map[string]*json.RawMessage)(nil),
 		input: "{\"test\":[{\"key\":\"value\"}]}",
+	}, unmarshalCase{
+		ptr: (*map[Date]bool)(nil),
+		input: `{
+			"2018-12-12": true,
+			"2018-12-13": true,
+			"2018-12-14": true
+		}`,
+	}, unmarshalCase{
+		ptr: (*map[Date2]bool)(nil),
+		input: `{
+			"2018-12-12": true,
+			"2018-12-13": true,
+			"2018-12-14": true
+		}`,
 	})
 }

@@ -49,3 +67,51 @@ type MyString string
 func (ms MyString) Hello() string {
 	return string(ms)
 }
+
+type Date struct {
+	time.Time
+}
+
+func (d *Date) UnmarshalJSON(b []byte) error {
+	dateStr := string(b) // something like `"2017-08-20"`
+
+	if dateStr == "null" {
+		return nil
+	}
+
+	t, err := time.Parse(`"2006-01-02"`, dateStr)
+	if err != nil {
+		return fmt.Errorf("cant parse date: %#v", err)
+	}
+
+	d.Time = t
+	return nil
+}
+
+func (d *Date) MarshalJSON() ([]byte, error) {
+	return []byte(d.Time.Format("2006-01-02")), nil
+}
+
+type Date2 struct {
+	time.Time
+}
+
+func (d Date2) UnmarshalJSON(b []byte) error {
+	dateStr := string(b) // something like `"2017-08-20"`
+
+	if dateStr == "null" {
+		return nil
+	}
+
+	t, err := time.Parse(`"2006-01-02"`, dateStr)
+	if err != nil {
+		return fmt.Errorf("cant parse date: %#v", err)
+	}
+
+	d.Time = t
+	return nil
+}
+
+func (d Date2) MarshalJSON() ([]byte, error) {
+	return []byte(d.Time.Format("2006-01-02")), nil
+}
@@ -1,8 +1,8 @@
 package test

 import (
-	"encoding/json"
 	"encoding"
+	"encoding/json"
 )

 func init() {

@@ -17,20 +17,20 @@ func init() {
 		tmOfStructInt{},
 		&tm2,
 		map[tmOfStruct]int{
-			tmOfStruct{}: 100,
+			{}: 100,
 		},
 		map[*tmOfStruct]int{
-			&tmOfStruct{}: 100,
+			{}: 100,
 		},
 		map[encoding.TextMarshaler]int{
 			tm1: 100,
 		},
 	)
 	unmarshalCases = append(unmarshalCases, unmarshalCase{
 		ptr:   (*tmOfMap)(nil),
 		input: `"{1:2}"`,
 	}, unmarshalCase{
 		ptr:   (*tmOfMapPtr)(nil),
 		input: `"{1:2}"`,
 	})
 }

@@ -47,7 +47,6 @@ func (q *jmOfStruct) UnmarshalJSON(value []byte) error {
 	return nil
 }

-
 type tmOfStruct struct {
 	F2 chan []byte
 }

@@ -1,13 +1,24 @@
 package test

-import "encoding/json"
+import (
+	"encoding/json"
+)

 func init() {
 	marshalCases = append(marshalCases,
 		json.RawMessage("{}"),
+		struct {
+			Env   string          `json:"env"`
+			Extra json.RawMessage `json:"extra,omitempty"`
+		}{
+			Env: "jfdk",
+		},
 	)
 	unmarshalCases = append(unmarshalCases, unmarshalCase{
 		ptr:   (*json.RawMessage)(nil),
 		input: `[1,2,3]`,
+	}, unmarshalCase{
+		ptr:   (*json.RawMessage)(nil),
+		input: `1.122e+250`,
 	})
 }
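The added marshal case combines json.RawMessage with omitempty. Under encoding/json semantics, which the compatible configuration is meant to match, RawMessage is a byte slice, so a nil value counts as empty and the field is dropped:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	v := struct {
		Env   string          `json:"env"`
		Extra json.RawMessage `json:"extra,omitempty"`
	}{
		Env: "jfdk",
	}
	out, _ := json.Marshal(v)
	fmt.Println(string(out)) // {"env":"jfdk"} (the nil RawMessage is omitted)
}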
@@ -6,6 +6,7 @@ func init() {
 		[]interface{}{"hello"},
 		nilSlice,
 		&nilSlice,
+		[]byte{1, 2, 3},
 	)
 	unmarshalCases = append(unmarshalCases, unmarshalCase{
 		ptr: (*[]string)(nil),

@@ -13,5 +14,14 @@ func init() {
 	}, unmarshalCase{
 		ptr:   (*[]string)(nil),
 		input: "[]",
+	}, unmarshalCase{
+		ptr:   (*[]byte)(nil),
+		input: "[1,2,3]",
+	}, unmarshalCase{
+		ptr:   (*[]byte)(nil),
+		input: `"aGVsbG8="`,
+	}, unmarshalCase{
+		ptr:   (*[]byte)(nil),
+		input: `"c3ViamVjdHM\/X2Q9MQ=="`,
 	})
 }
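The new slice cases cover the special treatment of []byte: with encoding/json (and, by intent, the compatible configuration), byte slices marshal to base64 strings, and a base64 string unmarshals back into a []byte:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// []byte values are encoded as base64 strings in JSON.
	out, _ := json.Marshal([]byte{1, 2, 3})
	fmt.Println(string(out)) // "AQID"

	// A base64 string decodes back into a []byte.
	var b []byte
	_ = json.Unmarshal([]byte(`"aGVsbG8="`), &b)
	fmt.Println(string(b)) // hello
}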
@@ -1,9 +1,9 @@
 package test

 import (
-	"testing"
-	"github.com/json-iterator/go"
 	"encoding/json"
+	"github.com/json-iterator/go"
+	"testing"
 	"unicode/utf8"
 )

@@ -1,9 +1,9 @@
 package test

 import (
-	"time"
-	"encoding/json"
 	"bytes"
+	"encoding/json"
+	"time"
 )

 func init() {

@@ -63,6 +63,43 @@ func init() {
 			d *time.Timer
 		})(nil),
 		input: `{"a": 444, "b":"bad", "C":256, "d":{"not":"a timer"}}`,
+	}, unmarshalCase{
+		ptr: (*struct {
+			A string
+			B string
+			C string
+			D string
+			E string
+			F string
+			G string
+			H string
+			I string
+			J string
+			K string
+		})(nil),
+		input: `{"a":"1","b":"2","c":"3","d":"4","e":"5","f":"6","g":"7","h":"8","i":"9","j":"10","k":"11"}`,
+	}, unmarshalCase{
+		ptr: (*struct {
+			T float64 `json:"T"`
+		})(nil),
+		input: `{"t":10.0}`,
+	}, unmarshalCase{
+		ptr: (*struct {
+			T float64 `json:"T"`
+		})(nil),
+		input: `{"T":10.0}`,
+	}, unmarshalCase{
+		ptr: (*struct {
+			T float64 `json:"t"`
+		})(nil),
+		input: `{"T":10.0}`,
+	}, unmarshalCase{
+		ptr: (*struct {
+			KeyString string       `json:"key_string"`
+			Type      string       `json:"type"`
+			Asks      [][2]float64 `json:"asks"`
+		})(nil),
+		input: `{"key_string": "KEYSTRING","type": "TYPE","asks": [[1e+66,1]]}`,
 	})
 	marshalCases = append(marshalCases,
 		struct {
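Several of the added struct cases probe field-name matching, e.g. the input {"T":10.0} against a field tagged json:"t". encoding/json falls back to a case-insensitive match when no exact match exists, and the compatible configuration is expected to mirror that:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	var v struct {
		T float64 `json:"t"`
	}
	// The key "T" still fills the field tagged "t" via case-insensitive matching.
	_ = json.Unmarshal([]byte(`{"T":10.0}`), &v)
	fmt.Println(v.T) // 10
}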
Some files were not shown because too many files have changed in this diff.