Mirror of https://github.com/json-iterator/go.git (synced 2025-06-03 22:27:26 +02:00)
Compare commits
136 Commits
Commits in this comparison (abbreviated SHA1):

71ac16282d, 720cd423d0, 9b00d50ac0, ffc487c633, 10b9ac0971, bf8b920736, a7b8414362, 024077e996,
08b5e5796b, c6661824eb, 3c55efb1a1, e6b9536d36, a3465d79a9, 9b79a3e192, 1779031cda, 6821bec9fa,
9461257643, 5bce16d299, a1ca083078, cd6773e694, 55287ed53a, 8961be9c21, 0f8241d334, 53b9d06ba7,
1f7ee05ef8, b22f393858, bede7b9e40, 58aeb59006, 7acbb404a4, 69f2a91ff4, 11a37a0774, 91f4a6405d,
a54d350455, 6f4c196d95, 8302a17e8c, 3987001e27, 78d9e97b7a, 49c900ee46, 9c0685d8d3, acfec88f7a,
e88512faf8, b681149eae, d1af7639b3, 7c9f8c2d20, f814d6c0f1, aba8654400, a1c9557592, 44a7e7340d,
2834c7e43c, d296277d5c, dc11f49689, 83f7b825b3, 03217c3e97, 908eaed151, eec24895fe, 1ba732a07d,
819acad769, 695ec2b83b, 028e2ef2bd, 976454858b, 27518f6661, 94869abf43, 459f0e30ae, 0039f4ac3d,
fb5614a4ca, f71b9090aa, 08047c174c, 68347ec4d6, 0fd91468bb, 1bc9828b4f, 24c3d57281, 0ff49de124,
5bc9320502, f64ce68b6e, 2d42ff74dd, 3a023a5fbc, 16aef10b2b, ae4c002f78, e4aa2ec063, d05f387f50,
a9403d25cd, 05d041de10, 5916df66b3, 2433035e51, 6dfc0bf2dd, b9be8dd373, b8d78b6aaf, 7109b5e7dd,
4cc76529e8, c5ddac9dc3, f76d712086, 1624edc445, 5d789e5e02, 0260c89b54, 10a568c511, ab8a2e0c74,
2fbdfbb595, 720ab8dc7f, f2b4162afb, 3830516ed0, 7cceb6c2e3, b92cf78708, 8744d7c5c7, 37cc313d18,
2ddf6d7582, 6a6742f0a2, 6c702ce12a, f88871b601, f246f80f14, 51dd70305b, a949c42748, f89479f5c0,
b858ec296c, 885a41a0a6, 9e9a97040e, fb4d53e4cc, b53656d459, 8f27a81d90, 4930b053b8, 06e0f9391e,
ca39e5af3e, 39acec93e0, 25fa392355, d51e841de0, 3353055b2a, 455b3f8bb8, 2a93f9003e, 9472474ffd,
ad83167dc6, fff342fd04, 8d6662b81b, a377e2656b, 0ac74bba4a, ebe943a4a6, 414d0307c9, 86e9fd72bc
28 Gopkg.lock (generated)
@@ -2,32 +2,20 @@

[[projects]]
name = "github.com/davecgh/go-spew"
packages = ["spew"]
revision = "346938d642f2ec3594ed81d874461961cd0faa76"
version = "v1.1.0"

[[projects]]
branch = "master"
name = "github.com/google/gofuzz"
name = "github.com/modern-go/concurrent"
packages = ["."]
revision = "24818f796faf91cd76ec7bddd72458fbced7a6c1"
revision = "e0a39a4cb4216ea8db28e22a69f4ec25610d513a"
version = "1.0.0"

[[projects]]
name = "github.com/pmezard/go-difflib"
packages = ["difflib"]
revision = "792786c7400a136282c1664665ae0a8db921c6c2"
version = "v1.0.0"

[[projects]]
name = "github.com/stretchr/testify"
packages = ["assert","require"]
revision = "69483b4bd14f5845b5a1e55bca19e954e827f1d0"
version = "v1.1.4"
name = "github.com/modern-go/reflect2"
packages = ["."]
revision = "4b7aa43c6742a2c18fdef89dd197aaae7dac7ccd"
version = "1.0.1"

[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "f8b7cf3941d3792cbbd570bb53c093adaf774334d1162c651565c97a58dc9d09"
inputs-digest = "ea54a775e5a354cb015502d2e7aa4b74230fc77e894f34a838b268c25ec8eeb8"
solver-name = "gps-cdcl"
solver-version = 1
13 Gopkg.toml
@@ -19,15 +19,8 @@

# name = "github.com/x/y"
# version = "2.4.0"

ignored = ["github.com/davecgh/go-spew*","github.com/google/gofuzz*","github.com/stretchr/testify*"]

[[constraint]]
name = "github.com/davecgh/go-spew"
version = "1.1.0"

[[constraint]]
branch = "master"
name = "github.com/google/gofuzz"

[[constraint]]
name = "github.com/stretchr/testify"
version = "1.1.4"
name = "github.com/modern-go/reflect2"
version = "1.0.1"
43 README.md
@@ -1,5 +1,5 @@

[](https://sourcegraph.com/github.com/json-iterator/go?badge)
[](http://godoc.org/github.com/json-iterator/go)
[](https://pkg.go.dev/github.com/json-iterator/go)
[](https://travis-ci.org/json-iterator/go)
[](https://codecov.io/gh/json-iterator/go)
[](https://goreportcard.com/report/github.com/json-iterator/go)

@@ -8,12 +8,6 @@

A high-performance 100% compatible drop-in replacement of "encoding/json"

You can also use thrift like JSON using [thrift-iterator](https://github.com/thrift-iterator/go)

```
Go developers, please join us: DiDi Chuxing Platform Technology Department, taowen@didichuxing.com
```

# Benchmark

@@ -22,14 +16,17 @@ Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/githu

Raw Result (easyjson requires static code generation)

| | ns/op | allocation bytes | allocation times |
| --- | --- | --- | --- |
| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
| | ns/op | allocation bytes | allocation times |
| --------------- | ----------- | ---------------- | ---------------- |
| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |

Always benchmark with your own workload.
The result depends heavily on the data input.

# Usage

@@ -42,10 +39,10 @@ import "encoding/json"
json.Marshal(&data)
```

with

```go
import "github.com/json-iterator/go"
import jsoniter "github.com/json-iterator/go"

var json = jsoniter.ConfigCompatibleWithStandardLibrary
json.Marshal(&data)
@@ -61,7 +58,7 @@ json.Unmarshal(input, &data)
with

```go
import "github.com/json-iterator/go"
import jsoniter "github.com/json-iterator/go"

var json = jsoniter.ConfigCompatibleWithStandardLibrary
json.Unmarshal(input, &data)
@@ -79,10 +76,10 @@ go get github.com/json-iterator/go

Contributors

* [thockin](https://github.com/thockin)
* [mattn](https://github.com/mattn)
* [cch123](https://github.com/cch123)
* [Oleg Shaldybin](https://github.com/olegshaldybin)
* [Jason Toffaletti](https://github.com/toffaletti)
- [thockin](https://github.com/thockin)
- [mattn](https://github.com/mattn)
- [cch123](https://github.com/cch123)
- [Oleg Shaldybin](https://github.com/olegshaldybin)
- [Jason Toffaletti](https://github.com/toffaletti)

Report issue or pull request, or email taowen@gmail.com, or [](https://gitter.im/json-iterator/Lobby)
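The README snippets above are fragments; a minimal, self-contained program using the drop-in replacement pattern they describe could look like this (the `user` struct and its values are illustrative, not from the repository):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Behave like the standard library, as the README suggests.
var json = jsoniter.ConfigCompatibleWithStandardLibrary

type user struct {
	Name string `json:"name"`
	Age  int    `json:"age"`
}

func main() {
	out, err := json.Marshal(&user{Name: "jane", Age: 30})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"name":"jane","age":30}

	var u user
	if err := json.Unmarshal(out, &u); err != nil {
		panic(err)
	}
	fmt.Println(u.Name, u.Age) // jane 30
}
```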
21 adapter.go
@@ -16,7 +16,7 @@ func Unmarshal(data []byte, v interface{}) error {
	return ConfigDefault.Unmarshal(data, v)
}

// UnmarshalFromString convenient method to read from string instead of []byte
// UnmarshalFromString is a convenient method to read from string instead of []byte
func UnmarshalFromString(str string, v interface{}) error {
	return ConfigDefault.UnmarshalFromString(str, v)
}
@@ -77,7 +77,16 @@ func (adapter *Decoder) Decode(obj interface{}) error {

// More is there more?
func (adapter *Decoder) More() bool {
	return adapter.iter.head != adapter.iter.tail
	iter := adapter.iter
	if iter.Error != nil {
		return false
	}
	c := iter.nextToken()
	if c == 0 {
		return false
	}
	iter.unreadByte()
	return c != ']' && c != '}'
}

// Buffered remaining buffer
@@ -91,7 +100,7 @@ func (adapter *Decoder) Buffered() io.Reader {
func (adapter *Decoder) UseNumber() {
	cfg := adapter.iter.cfg.configBeforeFrozen
	cfg.UseNumber = true
	adapter.iter.cfg = cfg.frozeWithCacheReuse()
	adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
}

// DisallowUnknownFields causes the Decoder to return an error when the destination
@@ -100,7 +109,7 @@ func (adapter *Decoder) UseNumber() {
func (adapter *Decoder) DisallowUnknownFields() {
	cfg := adapter.iter.cfg.configBeforeFrozen
	cfg.DisallowUnknownFields = true
	adapter.iter.cfg = cfg.frozeWithCacheReuse()
	adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
}

// NewEncoder same as json.NewEncoder
@@ -125,14 +134,14 @@ func (adapter *Encoder) Encode(val interface{}) error {
func (adapter *Encoder) SetIndent(prefix, indent string) {
	config := adapter.stream.cfg.configBeforeFrozen
	config.IndentionStep = len(indent)
	adapter.stream.cfg = config.frozeWithCacheReuse()
	adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
}

// SetEscapeHTML escape html by default, set to false to disable
func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
	config := adapter.stream.cfg.configBeforeFrozen
	config.EscapeHTML = escapeHTML
	adapter.stream.cfg = config.frozeWithCacheReuse()
	adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
}

// Valid reports whether data is a valid JSON encoding.
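The reworked `More` above peeks at the next token instead of comparing buffer offsets, so it returns false after a prior error, at clean end of input, and just before a closing `]` or `}`. A short sketch of the streaming-decode loop this enables (input and struct are illustrative):

```go
package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

type record struct {
	A string
}

func main() {
	// Several whitespace-separated JSON documents on a single reader.
	dec := jsoniter.NewDecoder(strings.NewReader(`{"A":"x"} {"A":"y"} {"A":"z"}`))
	for dec.More() { // false once only whitespace or EOF remains
		var r record
		if err := dec.Decode(&r); err != nil {
			fmt.Println("decode error:", err)
			return
		}
		fmt.Println(r.A)
	}
}
```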
18 any.go
@@ -3,10 +3,11 @@ package jsoniter
import (
	"errors"
	"fmt"
	"github.com/modern-go/reflect2"
	"io"
	"reflect"
	"strconv"
	"unsafe"
	"github.com/v2pro/plz/reflect2"
)

// Any generic object representation.
@@ -101,6 +102,9 @@ func Wrap(val interface{}) Any {
	case reflect.String:
		return WrapString(val.(string))
	case reflect.Int:
		if strconv.IntSize == 32 {
			return WrapInt32(int32(val.(int)))
		}
		return WrapInt64(int64(val.(int)))
	case reflect.Int8:
		return WrapInt32(int32(val.(int8)))
@@ -111,7 +115,15 @@ func Wrap(val interface{}) Any {
	case reflect.Int64:
		return WrapInt64(val.(int64))
	case reflect.Uint:
		if strconv.IntSize == 32 {
			return WrapUint32(uint32(val.(uint)))
		}
		return WrapUint64(uint64(val.(uint)))
	case reflect.Uintptr:
		if ptrSize == 32 {
			return WrapUint32(uint32(val.(uintptr)))
		}
		return WrapUint64(uint64(val.(uintptr)))
	case reflect.Uint8:
		return WrapUint32(uint32(val.(uint8)))
	case reflect.Uint16:
@@ -300,6 +312,10 @@ func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {

func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	any := *(*Any)(ptr)
	if any == nil {
		stream.WriteNil()
		return
	}
	any.WriteTo(stream)
}
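With the change above, `Wrap` also covers plain `int`, `uint`, and `uintptr`, picking the 32- or 64-bit wrapper from `strconv.IntSize`, and the direct codec writes `null` for a nil `Any`. A small sketch of the `Any` API in use (values are illustrative):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Wrapping a bare int now goes through the new reflect.Int case.
	n := jsoniter.Wrap(12345)
	fmt.Println(n.ToInt64(), n.ToString()) // 12345 12345

	// Get builds an Any lazily from raw JSON.
	any := jsoniter.Get([]byte(`{"count": 7}`), "count")
	fmt.Println(any.ToInt()) // 7
}
```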
@ -64,7 +64,6 @@ func (any *stringAny) ToInt64() int64 {
|
||||
|
||||
flag := 1
|
||||
startPos := 0
|
||||
endPos := 0
|
||||
if any.val[0] == '+' || any.val[0] == '-' {
|
||||
startPos = 1
|
||||
}
|
||||
@ -73,6 +72,7 @@ func (any *stringAny) ToInt64() int64 {
|
||||
flag = -1
|
||||
}
|
||||
|
||||
endPos := startPos
|
||||
for i := startPos; i < len(any.val); i++ {
|
||||
if any.val[i] >= '0' && any.val[i] <= '9' {
|
||||
endPos = i + 1
|
||||
@ -98,7 +98,6 @@ func (any *stringAny) ToUint64() uint64 {
|
||||
}
|
||||
|
||||
startPos := 0
|
||||
endPos := 0
|
||||
|
||||
if any.val[0] == '-' {
|
||||
return 0
|
||||
@ -107,6 +106,7 @@ func (any *stringAny) ToUint64() uint64 {
|
||||
startPos = 1
|
||||
}
|
||||
|
||||
endPos := startPos
|
||||
for i := startPos; i < len(any.val); i++ {
|
||||
if any.val[i] >= '0' && any.val[i] <= '9' {
|
||||
endPos = i + 1
|
||||
|
@ -3,8 +3,8 @@ package any_tests
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_read_empty_array_as_any(t *testing.T) {
|
||||
|
@ -4,8 +4,8 @@ import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
var boolConvertMap = map[string]bool{
|
||||
|
@ -3,8 +3,8 @@ package any_tests
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
var floatConvertMap = map[string]float64{
|
||||
@ -82,10 +82,8 @@ func Test_read_float_to_any(t *testing.T) {
|
||||
should := require.New(t)
|
||||
any := jsoniter.WrapFloat64(12.3)
|
||||
anyFloat64 := float64(12.3)
|
||||
//negaAnyFloat64 := float64(-1.1)
|
||||
any2 := jsoniter.WrapFloat64(-1.1)
|
||||
should.Equal(float64(12.3), any.ToFloat64())
|
||||
//should.Equal("12.3", any.ToString())
|
||||
should.True(any.ToBool())
|
||||
should.Equal(float32(anyFloat64), any.ToFloat32())
|
||||
should.Equal(int(anyFloat64), any.ToInt())
|
||||
|
@ -4,8 +4,8 @@ import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
var intConvertMap = map[string]int{
|
||||
|
@ -1,9 +1,9 @@
|
||||
package any_tests
|
||||
|
||||
import (
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
"testing"
|
||||
"github.com/json-iterator/go"
|
||||
)
|
||||
|
||||
func Test_wrap_map(t *testing.T) {
|
||||
|
@ -1,9 +1,9 @@
|
||||
package any_tests
|
||||
|
||||
import (
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
"testing"
|
||||
"github.com/json-iterator/go"
|
||||
)
|
||||
|
||||
func Test_read_null_as_any(t *testing.T) {
|
||||
|
@ -3,8 +3,8 @@ package any_tests
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_read_object_as_any(t *testing.T) {
|
||||
@ -118,6 +118,4 @@ func Test_object_wrapper_any_get_all(t *testing.T) {
|
||||
should.Contains(any.Keys(), "Field1")
|
||||
should.Contains(any.Keys(), "Field2")
|
||||
should.NotContains(any.Keys(), "Field3")
|
||||
|
||||
//should.Contains(any.GetObject()["Field1"].GetArray()[0], 1)
|
||||
}
|
||||
}
|
||||
|
@ -3,29 +3,29 @@ package any_tests
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
var stringConvertMap = map[string]string{
|
||||
"null": "",
|
||||
"321.1": "321.1",
|
||||
`"1.1"`: "1.1",
|
||||
`"-123.1"`: "-123.1",
|
||||
"0.0": "0.0",
|
||||
"0": "0",
|
||||
`"0"`: "0",
|
||||
`"0.0"`: "0.0",
|
||||
`"00.0"`: "00.0",
|
||||
"true": "true",
|
||||
"false": "false",
|
||||
`"true"`: "true",
|
||||
`"false"`: "false",
|
||||
`"true123"`: "true123",
|
||||
`"+1"`: "+1",
|
||||
"[]": "[]",
|
||||
"[1,2]": "[1,2]",
|
||||
"{}": "{}",
|
||||
"null": "",
|
||||
"321.1": "321.1",
|
||||
`"1.1"`: "1.1",
|
||||
`"-123.1"`: "-123.1",
|
||||
"0.0": "0.0",
|
||||
"0": "0",
|
||||
`"0"`: "0",
|
||||
`"0.0"`: "0.0",
|
||||
`"00.0"`: "00.0",
|
||||
"true": "true",
|
||||
"false": "false",
|
||||
`"true"`: "true",
|
||||
`"false"`: "false",
|
||||
`"true123"`: "true123",
|
||||
`"+1"`: "+1",
|
||||
"[]": "[]",
|
||||
"[1,2]": "[1,2]",
|
||||
"{}": "{}",
|
||||
`{"a":1, "stream":true}`: `{"a":1, "stream":true}`,
|
||||
}
|
||||
|
||||
|
@ -3,8 +3,8 @@ package any_tests
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// if must be valid is useless, just drop this test
|
||||
|
@ -3,8 +3,8 @@ package any_tests
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_wrap_and_valuetype_everything(t *testing.T) {
|
||||
|
@ -1,10 +1,11 @@
|
||||
package test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"encoding/json"
|
||||
"testing"
|
||||
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_use_number_for_unmarshal(t *testing.T) {
|
||||
@ -23,7 +24,6 @@ func Test_customize_float_marshal(t *testing.T) {
|
||||
should.Equal("1.234568", str)
|
||||
}
|
||||
|
||||
|
||||
func Test_customize_tag_key(t *testing.T) {
|
||||
|
||||
type TestObject struct {
|
||||
@ -45,4 +45,185 @@ func Test_read_large_number_as_interface(t *testing.T) {
|
||||
output, err := jsoniter.MarshalToString(val)
|
||||
should.Nil(err)
|
||||
should.Equal(`123456789123456789123456789`, output)
|
||||
}
|
||||
}
|
||||
|
||||
type caseSensitiveStruct struct {
|
||||
A string `json:"a"`
|
||||
B string `json:"b,omitempty"`
|
||||
C *C `json:"C,omitempty"`
|
||||
}
|
||||
|
||||
type C struct {
|
||||
D int64 `json:"D,omitempty"`
|
||||
E *E `json:"e,omitempty"`
|
||||
}
|
||||
|
||||
type E struct {
|
||||
F string `json:"F,omitempty"`
|
||||
}
|
||||
|
||||
func Test_CaseSensitive(t *testing.T) {
|
||||
should := require.New(t)
|
||||
|
||||
testCases := []struct {
|
||||
input string
|
||||
expectedOutput string
|
||||
caseSensitive bool
|
||||
}{
|
||||
{
|
||||
input: `{"A":"foo","B":"bar"}`,
|
||||
expectedOutput: `{"a":"foo","b":"bar"}`,
|
||||
caseSensitive: false,
|
||||
},
|
||||
{
|
||||
input: `{"a":"foo","b":"bar"}`,
|
||||
expectedOutput: `{"a":"foo","b":"bar"}`,
|
||||
caseSensitive: true,
|
||||
},
|
||||
{
|
||||
input: `{"a":"foo","b":"bar","C":{"D":10}}`,
|
||||
expectedOutput: `{"a":"foo","b":"bar","C":{"D":10}}`,
|
||||
caseSensitive: true,
|
||||
},
|
||||
{
|
||||
input: `{"a":"foo","B":"bar","c":{"d":10}}`,
|
||||
expectedOutput: `{"a":"foo"}`,
|
||||
caseSensitive: true,
|
||||
},
|
||||
{
|
||||
input: `{"a":"foo","C":{"d":10}}`,
|
||||
expectedOutput: `{"a":"foo","C":{}}`,
|
||||
caseSensitive: true,
|
||||
},
|
||||
{
|
||||
input: `{"a":"foo","C":{"D":10,"e":{"f":"baz"}}}`,
|
||||
expectedOutput: `{"a":"foo","C":{"D":10,"e":{}}}`,
|
||||
caseSensitive: true,
|
||||
},
|
||||
{
|
||||
input: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
|
||||
expectedOutput: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
|
||||
caseSensitive: true,
|
||||
},
|
||||
{
|
||||
input: `{"A":"foo","c":{"d":10,"E":{"f":"baz"}}}`,
|
||||
expectedOutput: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
|
||||
caseSensitive: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
val := caseSensitiveStruct{}
|
||||
err := jsoniter.Config{CaseSensitive: tc.caseSensitive}.Froze().UnmarshalFromString(tc.input, &val)
|
||||
should.Nil(err)
|
||||
|
||||
output, err := jsoniter.MarshalToString(val)
|
||||
should.Nil(err)
|
||||
should.Equal(tc.expectedOutput, output)
|
||||
}
|
||||
}
|
||||
|
||||
type structWithElevenFields struct {
|
||||
A string `json:"A,omitempty"`
|
||||
B string `json:"B,omitempty"`
|
||||
C string `json:"C,omitempty"`
|
||||
D string `json:"d,omitempty"`
|
||||
E string `json:"e,omitempty"`
|
||||
F string `json:"f,omitempty"`
|
||||
G string `json:"g,omitempty"`
|
||||
H string `json:"h,omitempty"`
|
||||
I string `json:"i,omitempty"`
|
||||
J string `json:"j,omitempty"`
|
||||
K string `json:"k,omitempty"`
|
||||
}
|
||||
|
||||
func Test_CaseSensitive_MoreThanTenFields(t *testing.T) {
|
||||
should := require.New(t)
|
||||
|
||||
testCases := []struct {
|
||||
input string
|
||||
expectedOutput string
|
||||
caseSensitive bool
|
||||
}{
|
||||
{
|
||||
input: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6","g":"7","h":"8","i":"9","j":"10","k":"11"}`,
|
||||
expectedOutput: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6","g":"7","h":"8","i":"9","j":"10","k":"11"}`,
|
||||
caseSensitive: true,
|
||||
},
|
||||
{
|
||||
input: `{"a":"1","b":"2","c":"3","D":"4","E":"5","F":"6"}`,
|
||||
expectedOutput: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6"}`,
|
||||
caseSensitive: false,
|
||||
},
|
||||
{
|
||||
input: `{"A":"1","b":"2","d":"4","E":"5"}`,
|
||||
expectedOutput: `{"A":"1","d":"4"}`,
|
||||
caseSensitive: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
val := structWithElevenFields{}
|
||||
err := jsoniter.Config{CaseSensitive: tc.caseSensitive}.Froze().UnmarshalFromString(tc.input, &val)
|
||||
should.Nil(err)
|
||||
|
||||
output, err := jsoniter.MarshalToString(val)
|
||||
should.Nil(err)
|
||||
should.Equal(tc.expectedOutput, output)
|
||||
}
|
||||
}
|
||||
|
||||
type onlyTaggedFieldStruct struct {
|
||||
A string `json:"a"`
|
||||
B string
|
||||
FSimpl F `json:"f_simpl"`
|
||||
ISimpl I
|
||||
FPtr *F `json:"f_ptr"`
|
||||
IPtr *I
|
||||
F
|
||||
*I
|
||||
}
|
||||
|
||||
type F struct {
|
||||
G string `json:"g"`
|
||||
H string
|
||||
}
|
||||
|
||||
type I struct {
|
||||
J string `json:"j"`
|
||||
K string
|
||||
}
|
||||
|
||||
func Test_OnlyTaggedField(t *testing.T) {
|
||||
should := require.New(t)
|
||||
|
||||
obj := onlyTaggedFieldStruct{
|
||||
A: "a",
|
||||
B: "b",
|
||||
FSimpl: F{G: "g", H: "h"},
|
||||
ISimpl: I{J: "j", K: "k"},
|
||||
FPtr: &F{G: "g", H: "h"},
|
||||
IPtr: &I{J: "j", K: "k"},
|
||||
F: F{G: "g", H: "h"},
|
||||
I: &I{J: "j", K: "k"},
|
||||
}
|
||||
|
||||
output, err := jsoniter.Config{OnlyTaggedField: true}.Froze().Marshal(obj)
|
||||
should.Nil(err)
|
||||
|
||||
m := make(map[string]interface{})
|
||||
err = jsoniter.Unmarshal(output, &m)
|
||||
should.Nil(err)
|
||||
|
||||
should.Equal(map[string]interface{}{
|
||||
"a": "a",
|
||||
"f_simpl": map[string]interface{}{
|
||||
"g": "g",
|
||||
},
|
||||
"f_ptr": map[string]interface{}{
|
||||
"g": "g",
|
||||
},
|
||||
"g": "g",
|
||||
"j": "j",
|
||||
}, m)
|
||||
}
|
||||
|
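The table-driven tests above exercise `Config.CaseSensitive` and `Config.OnlyTaggedField` through `Froze()`; outside a test harness the same switches look roughly like this (struct and input are illustrative):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type payload struct {
	Name  string `json:"name"`
	Extra string // untagged: skipped when OnlyTaggedField is set
}

func main() {
	api := jsoniter.Config{
		CaseSensitive:   true, // keys must match the tag exactly
		OnlyTaggedField: true,
	}.Froze()

	var p payload
	if err := api.UnmarshalFromString(`{"name":"a","NAME":"b"}`, &p); err != nil {
		panic(err)
	}
	fmt.Println(p.Name) // a: only the exact "name" key matches

	out, err := api.MarshalToString(p)
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // {"name":"a"}
}
```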
@ -2,11 +2,11 @@ package test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"github.com/stretchr/testify/require"
|
||||
"testing"
|
||||
"github.com/json-iterator/go"
|
||||
"io/ioutil"
|
||||
"encoding/json"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
"io/ioutil"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func Test_disallowUnknownFields(t *testing.T) {
|
||||
@ -18,7 +18,6 @@ func Test_disallowUnknownFields(t *testing.T) {
|
||||
should.Error(decoder.Decode(&obj))
|
||||
}
|
||||
|
||||
|
||||
func Test_new_decoder(t *testing.T) {
|
||||
should := require.New(t)
|
||||
decoder1 := json.NewDecoder(bytes.NewBufferString(`[1][2]`))
|
||||
@ -57,3 +56,9 @@ func Test_use_number(t *testing.T) {
|
||||
should.Nil(decoder2.Decode(&obj2))
|
||||
should.Equal(json.Number("123"), obj2)
|
||||
}
|
||||
|
||||
func Test_decoder_more(t *testing.T) {
|
||||
should := require.New(t)
|
||||
decoder := jsoniter.NewDecoder(bytes.NewBufferString("abcde"))
|
||||
should.True(decoder.More())
|
||||
}
|
||||
|
@ -8,8 +8,8 @@ import (
|
||||
"testing"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_new_encoder(t *testing.T) {
|
||||
|
@ -1,11 +1,11 @@
|
||||
package test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"github.com/stretchr/testify/require"
|
||||
"bytes"
|
||||
"github.com/json-iterator/go"
|
||||
"encoding/json"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// Standard Encoder has trailing newline.
|
||||
@ -17,4 +17,4 @@ func TestEncoderHasTrailingNewline(t *testing.T) {
|
||||
stdenc := json.NewEncoder(&stdbuf)
|
||||
stdenc.Encode(1)
|
||||
should.Equal(stdbuf.Bytes(), buf.Bytes())
|
||||
}
|
||||
}
|
||||
|
@ -2,9 +2,9 @@ package test
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
"testing"
|
||||
"github.com/json-iterator/go"
|
||||
)
|
||||
|
||||
func Test_marshal_indent(t *testing.T) {
|
||||
|
47 api_tests/marshal_json_escape_test.go (new file)
@ -0,0 +1,47 @@
|
||||
package test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"testing"
|
||||
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
var marshalConfig = jsoniter.Config{
|
||||
EscapeHTML: false,
|
||||
SortMapKeys: true,
|
||||
ValidateJsonRawMessage: true,
|
||||
}.Froze()
|
||||
|
||||
type Container struct {
|
||||
Bar interface{}
|
||||
}
|
||||
|
||||
func (c *Container) MarshalJSON() ([]byte, error) {
|
||||
return marshalConfig.Marshal(&c.Bar)
|
||||
}
|
||||
|
||||
func TestEncodeEscape(t *testing.T) {
|
||||
should := require.New(t)
|
||||
|
||||
container := &Container{
|
||||
Bar: []string{"123<ab>", "ooo"},
|
||||
}
|
||||
out, err := marshalConfig.Marshal(container)
|
||||
should.Nil(err)
|
||||
bufout := string(out)
|
||||
|
||||
var stdbuf bytes.Buffer
|
||||
stdenc := json.NewEncoder(&stdbuf)
|
||||
stdenc.SetEscapeHTML(false)
|
||||
err = stdenc.Encode(container)
|
||||
should.Nil(err)
|
||||
stdout := string(stdbuf.Bytes())
|
||||
if stdout[len(stdout)-1:] == "\n" {
|
||||
stdout = stdout[:len(stdout)-1]
|
||||
}
|
||||
|
||||
should.Equal(stdout, bufout)
|
||||
}
|
36 api_tests/marshal_json_test.go (new file)
@ -0,0 +1,36 @@
|
||||
package test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"github.com/json-iterator/go"
|
||||
"testing"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
|
||||
type Foo struct {
|
||||
Bar interface{}
|
||||
}
|
||||
|
||||
func (f Foo) MarshalJSON() ([]byte, error) {
|
||||
var buf bytes.Buffer
|
||||
err := json.NewEncoder(&buf).Encode(f.Bar)
|
||||
return buf.Bytes(), err
|
||||
}
|
||||
|
||||
|
||||
// Standard Encoder has trailing newline.
|
||||
func TestEncodeMarshalJSON(t *testing.T) {
|
||||
|
||||
foo := Foo {
|
||||
Bar: 123,
|
||||
}
|
||||
should := require.New(t)
|
||||
var buf, stdbuf bytes.Buffer
|
||||
enc := jsoniter.ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
|
||||
enc.Encode(foo)
|
||||
stdenc := json.NewEncoder(&stdbuf)
|
||||
stdenc.Encode(foo)
|
||||
should.Equal(stdbuf.Bytes(), buf.Bytes())
|
||||
}
|
@ -2,10 +2,10 @@ package test
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"github.com/json-iterator/go"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"testing"
|
||||
"github.com/json-iterator/go"
|
||||
)
|
||||
|
||||
//func Test_large_file(t *testing.T) {
|
||||
|
128 benchmarks/stream_test.go (new file)
@ -0,0 +1,128 @@
|
||||
package test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
)
|
||||
|
||||
func Benchmark_stream_encode_big_object(b *testing.B) {
|
||||
var buf bytes.Buffer
|
||||
var stream = jsoniter.NewStream(jsoniter.ConfigDefault, &buf, 100)
|
||||
for i := 0; i < b.N; i++ {
|
||||
buf.Reset()
|
||||
stream.Reset(&buf)
|
||||
encodeObject(stream)
|
||||
if stream.Error != nil {
|
||||
b.Errorf("error: %+v", stream.Error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestEncodeObject(t *testing.T) {
|
||||
var stream = jsoniter.NewStream(jsoniter.ConfigDefault, nil, 100)
|
||||
encodeObject(stream)
|
||||
if stream.Error != nil {
|
||||
t.Errorf("error encoding a test object: %+v", stream.Error)
|
||||
return
|
||||
}
|
||||
var m = make(map[string]interface{})
|
||||
if err := jsoniter.Unmarshal(stream.Buffer(), &m); err != nil {
|
||||
t.Errorf("error unmarshaling a test object: %+v", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func encodeObject(stream *jsoniter.Stream) {
|
||||
stream.WriteObjectStart()
|
||||
|
||||
stream.WriteObjectField("objectId")
|
||||
stream.WriteUint64(8838243212)
|
||||
|
||||
stream.WriteMore()
|
||||
stream.WriteObjectField("name")
|
||||
stream.WriteString("Jane Doe")
|
||||
|
||||
stream.WriteMore()
|
||||
stream.WriteObjectField("address")
|
||||
stream.WriteObjectStart()
|
||||
for i, field := range addressFields {
|
||||
if i != 0 {
|
||||
stream.WriteMore()
|
||||
}
|
||||
stream.WriteObjectField(field.key)
|
||||
stream.WriteString(field.val)
|
||||
}
|
||||
|
||||
stream.WriteMore()
|
||||
stream.WriteObjectField("geo")
|
||||
{
|
||||
stream.WriteObjectStart()
|
||||
stream.WriteObjectField("latitude")
|
||||
stream.WriteFloat64(-154.550817)
|
||||
stream.WriteMore()
|
||||
stream.WriteObjectField("longitude")
|
||||
stream.WriteFloat64(-84.176159)
|
||||
stream.WriteObjectEnd()
|
||||
|
||||
}
|
||||
stream.WriteObjectEnd()
|
||||
|
||||
stream.WriteMore()
|
||||
stream.WriteObjectField("specialties")
|
||||
stream.WriteArrayStart()
|
||||
for i, s := range specialties {
|
||||
if i != 0 {
|
||||
stream.WriteMore()
|
||||
}
|
||||
stream.WriteString(s)
|
||||
}
|
||||
stream.WriteArrayEnd()
|
||||
|
||||
stream.WriteMore()
|
||||
for i, text := range longText {
|
||||
if i != 0 {
|
||||
stream.WriteMore()
|
||||
}
|
||||
stream.WriteObjectField("longText" + strconv.Itoa(i))
|
||||
stream.WriteString(text)
|
||||
}
|
||||
|
||||
for i := 0; i < 25; i++ {
|
||||
num := i * 18328
|
||||
stream.WriteMore()
|
||||
stream.WriteObjectField("integerField" + strconv.Itoa(i))
|
||||
stream.WriteInt64(int64(num))
|
||||
}
|
||||
|
||||
stream.WriteObjectEnd()
|
||||
}
|
||||
|
||||
type field struct{ key, val string }
|
||||
|
||||
var (
|
||||
addressFields = []field{
|
||||
{"address1", "123 Example St"},
|
||||
{"address2", "Apartment 5D, Suite 3"},
|
||||
{"city", "Miami"},
|
||||
{"state", "FL"},
|
||||
{"postalCode", "33133"},
|
||||
{"country", "US"},
|
||||
}
|
||||
specialties = []string{
|
||||
"Web Design",
|
||||
"Go Programming",
|
||||
"Tennis",
|
||||
"Cycling",
|
||||
"Mixed martial arts",
|
||||
}
|
||||
longText = []string{
|
||||
`Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.`,
|
||||
`Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?`,
|
||||
`But I must explain to you how all this mistaken idea of denouncing pleasure and praising pain was born and I will give you a complete account of the system, and expound the actual teachings of the great explorer of the truth, the master-builder of human happiness. No one rejects, dislikes, or avoids pleasure itself, because it is pleasure, but because those who do not know how to pursue pleasure rationally encounter consequences that are extremely painful. Nor again is there anyone who loves or pursues or desires to obtain pain of itself, because it is pain, but because occasionally circumstances occur in which toil and pain can procure him some great pleasure. To take a trivial example, which of us ever undertakes laborious physical exercise, except to obtain some advantage from it? But who has any right to find fault with a man who chooses to enjoy a pleasure that has no annoying consequences, or one who avoids a pain that produces no resultant pleasure?`,
|
||||
`At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat.`,
|
||||
`On the other hand, we denounce with righteous indignation and dislike men who are so beguiled and demoralized by the charms of pleasure of the moment, so blinded by desire, that they cannot foresee the pain and trouble that are bound to ensue; and equal blame belongs to those who fail in their duty through weakness of will, which is the same as saying through shrinking from toil and pain. These cases are perfectly simple and easy to distinguish. In a free hour, when our power of choice is untrammelled and when nothing prevents our being able to do what we like best, every pleasure is to be welcomed and every pain avoided. But in certain circumstances and owing to the claims of duty or the obligations of business it will frequently occur that pleasures have to be repudiated and annoyances accepted. The wise man therefore always holds in these matters to this principle of selection: he rejects pleasures to secure other greater pleasures, or else he endures pains to avoid worse pains.`,
|
||||
}
|
||||
)
|
101 config.go
@ -3,9 +3,12 @@ package jsoniter
|
||||
import (
|
||||
"encoding/json"
|
||||
"io"
|
||||
"unsafe"
|
||||
"github.com/v2pro/plz/reflect2"
|
||||
"reflect"
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"github.com/modern-go/concurrent"
|
||||
"github.com/modern-go/reflect2"
|
||||
)
|
||||
|
||||
// Config customize how the API should behave.
|
||||
@ -21,6 +24,7 @@ type Config struct {
|
||||
OnlyTaggedField bool
|
||||
ValidateJsonRawMessage bool
|
||||
ObjectFieldMustBeSimpleString bool
|
||||
CaseSensitive bool
|
||||
}
|
||||
|
||||
// API the public interface of this package.
|
||||
@ -38,6 +42,8 @@ type API interface {
|
||||
NewDecoder(reader io.Reader) *Decoder
|
||||
Valid(data []byte) bool
|
||||
RegisterExtension(extension Extension)
|
||||
DecoderOf(typ reflect2.Type) ValDecoder
|
||||
EncoderOf(typ reflect2.Type) ValEncoder
|
||||
}
|
||||
|
||||
// ConfigDefault the default API
|
||||
@ -59,6 +65,66 @@ var ConfigFastest = Config{
|
||||
ObjectFieldMustBeSimpleString: true, // do not unescape object field
|
||||
}.Froze()
|
||||
|
||||
type frozenConfig struct {
|
||||
configBeforeFrozen Config
|
||||
sortMapKeys bool
|
||||
indentionStep int
|
||||
objectFieldMustBeSimpleString bool
|
||||
onlyTaggedField bool
|
||||
disallowUnknownFields bool
|
||||
decoderCache *concurrent.Map
|
||||
encoderCache *concurrent.Map
|
||||
encoderExtension Extension
|
||||
decoderExtension Extension
|
||||
extraExtensions []Extension
|
||||
streamPool *sync.Pool
|
||||
iteratorPool *sync.Pool
|
||||
caseSensitive bool
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) initCache() {
|
||||
cfg.decoderCache = concurrent.NewMap()
|
||||
cfg.encoderCache = concurrent.NewMap()
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
|
||||
cfg.decoderCache.Store(cacheKey, decoder)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
|
||||
cfg.encoderCache.Store(cacheKey, encoder)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
|
||||
decoder, found := cfg.decoderCache.Load(cacheKey)
|
||||
if found {
|
||||
return decoder.(ValDecoder)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
|
||||
encoder, found := cfg.encoderCache.Load(cacheKey)
|
||||
if found {
|
||||
return encoder.(ValEncoder)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
var cfgCache = concurrent.NewMap()
|
||||
|
||||
func getFrozenConfigFromCache(cfg Config) *frozenConfig {
|
||||
obj, found := cfgCache.Load(cfg)
|
||||
if found {
|
||||
return obj.(*frozenConfig)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
|
||||
cfgCache.Store(cfg, frozenConfig)
|
||||
}
|
||||
|
||||
// Froze forge API from config
|
||||
func (cfg Config) Froze() API {
|
||||
api := &frozenConfig{
|
||||
@ -67,6 +133,7 @@ func (cfg Config) Froze() API {
|
||||
objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
|
||||
onlyTaggedField: cfg.OnlyTaggedField,
|
||||
disallowUnknownFields: cfg.DisallowUnknownFields,
|
||||
caseSensitive: cfg.CaseSensitive,
|
||||
}
|
||||
api.streamPool = &sync.Pool{
|
||||
New: func() interface{} {
|
||||
@ -93,22 +160,21 @@ func (cfg Config) Froze() API {
|
||||
if cfg.ValidateJsonRawMessage {
|
||||
api.validateJsonRawMessage(encoderExtension)
|
||||
}
|
||||
if len(encoderExtension) > 0 {
|
||||
api.extensions = append(api.extensions, encoderExtension)
|
||||
}
|
||||
if len(decoderExtension) > 0 {
|
||||
api.extensions = append(api.extensions, decoderExtension)
|
||||
}
|
||||
api.encoderExtension = encoderExtension
|
||||
api.decoderExtension = decoderExtension
|
||||
api.configBeforeFrozen = cfg
|
||||
return api
|
||||
}
|
||||
|
||||
func (cfg Config) frozeWithCacheReuse() *frozenConfig {
|
||||
func (cfg Config) frozeWithCacheReuse(extraExtensions []Extension) *frozenConfig {
|
||||
api := getFrozenConfigFromCache(cfg)
|
||||
if api != nil {
|
||||
return api
|
||||
}
|
||||
api = cfg.Froze().(*frozenConfig)
|
||||
for _, extension := range extraExtensions {
|
||||
api.RegisterExtension(extension)
|
||||
}
|
||||
addFrozenConfigToCache(cfg, api)
|
||||
return api
|
||||
}
|
||||
@ -117,15 +183,15 @@ func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
|
||||
encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
|
||||
rawMessage := *(*json.RawMessage)(ptr)
|
||||
iter := cfg.BorrowIterator([]byte(rawMessage))
|
||||
defer cfg.ReturnIterator(iter)
|
||||
iter.Read()
|
||||
if iter.Error != nil {
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
stream.WriteRaw("null")
|
||||
} else {
|
||||
cfg.ReturnIterator(iter)
|
||||
stream.WriteRaw(string(rawMessage))
|
||||
}
|
||||
}, func(ptr unsafe.Pointer) bool {
|
||||
return false
|
||||
return len(*((*json.RawMessage)(ptr))) == 0
|
||||
}}
|
||||
extension[reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()] = encoder
|
||||
extension[reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()] = encoder
|
||||
@ -133,6 +199,11 @@ func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
|
||||
|
||||
func (cfg *frozenConfig) useNumber(extension DecoderExtension) {
|
||||
extension[reflect2.TypeOfPtr((*interface{})(nil)).Elem()] = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
|
||||
exitingValue := *((*interface{})(ptr))
|
||||
if exitingValue != nil && reflect.TypeOf(exitingValue).Kind() == reflect.Ptr {
|
||||
iter.ReadVal(exitingValue)
|
||||
return
|
||||
}
|
||||
if iter.WhatIsNext() == NumberValue {
|
||||
*((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
|
||||
} else {
|
||||
@ -149,7 +220,9 @@ func (cfg *frozenConfig) getTagKey() string {
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) RegisterExtension(extension Extension) {
|
||||
cfg.extensions = append(cfg.extensions, extension)
|
||||
cfg.extraExtensions = append(cfg.extraExtensions, extension)
|
||||
copied := cfg.configBeforeFrozen
|
||||
cfg.configBeforeFrozen = copied
|
||||
}
|
||||
|
||||
type lossyFloat32Encoder struct {
|
||||
@ -244,7 +317,7 @@ func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]
|
||||
}
|
||||
newCfg := cfg.configBeforeFrozen
|
||||
newCfg.IndentionStep = len(indent)
|
||||
return newCfg.frozeWithCacheReuse().Marshal(v)
|
||||
return newCfg.frozeWithCacheReuse(cfg.extraExtensions).Marshal(v)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error {
|
||||
|
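With `ValidateJsonRawMessage` enabled, the encoder shown above replaces malformed `json.RawMessage` values with `null` instead of producing broken output. A quick sketch of that switch in use (the map contents are illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	api := jsoniter.Config{ValidateJsonRawMessage: true, SortMapKeys: true}.Froze()

	out, err := api.Marshal(map[string]json.RawMessage{
		"good": json.RawMessage(`{"a":1}`),
		"bad":  json.RawMessage(`{oops`), // invalid JSON is written as null
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"bad":null,"good":{"a":1}}
}
```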
@ -1,64 +0,0 @@
|
||||
//+build go1.9
|
||||
|
||||
package jsoniter
|
||||
|
||||
import (
|
||||
"sync"
|
||||
)
|
||||
|
||||
type frozenConfig struct {
|
||||
configBeforeFrozen Config
|
||||
sortMapKeys bool
|
||||
indentionStep int
|
||||
objectFieldMustBeSimpleString bool
|
||||
onlyTaggedField bool
|
||||
disallowUnknownFields bool
|
||||
decoderCache sync.Map
|
||||
encoderCache sync.Map
|
||||
extensions []Extension
|
||||
streamPool *sync.Pool
|
||||
iteratorPool *sync.Pool
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) initCache() {
|
||||
cfg.decoderCache = sync.Map{}
|
||||
cfg.encoderCache = sync.Map{}
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
|
||||
cfg.decoderCache.Store(cacheKey, decoder)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
|
||||
cfg.encoderCache.Store(cacheKey, encoder)
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
|
||||
decoder, found := cfg.decoderCache.Load(cacheKey)
|
||||
if found {
|
||||
return decoder.(ValDecoder)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
|
||||
encoder, found := cfg.encoderCache.Load(cacheKey)
|
||||
if found {
|
||||
return encoder.(ValEncoder)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
var cfgCache = &sync.Map{}
|
||||
|
||||
func getFrozenConfigFromCache(cfg Config) *frozenConfig {
|
||||
obj, found := cfgCache.Load(cfg)
|
||||
if found {
|
||||
return obj.(*frozenConfig)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
|
||||
cfgCache.Store(cfg, frozenConfig)
|
||||
}
|
@ -1,70 +0,0 @@
|
||||
//+build !go1.9
|
||||
|
||||
package jsoniter
|
||||
|
||||
import (
|
||||
"sync"
|
||||
)
|
||||
|
||||
type frozenConfig struct {
|
||||
configBeforeFrozen Config
|
||||
sortMapKeys bool
|
||||
indentionStep int
|
||||
objectFieldMustBeSimpleString bool
|
||||
onlyTaggedField bool
|
||||
disallowUnknownFields bool
|
||||
cacheLock *sync.RWMutex
|
||||
decoderCache map[uintptr]ValDecoder
|
||||
encoderCache map[uintptr]ValEncoder
|
||||
extensions []Extension
|
||||
streamPool *sync.Pool
|
||||
iteratorPool *sync.Pool
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) initCache() {
|
||||
cfg.cacheLock = &sync.RWMutex{}
|
||||
cfg.decoderCache = map[uintptr]ValDecoder{}
|
||||
cfg.encoderCache = map[uintptr]ValEncoder{}
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
|
||||
cfg.cacheLock.Lock()
|
||||
cfg.decoderCache[cacheKey] = decoder
|
||||
cfg.cacheLock.Unlock()
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
|
||||
cfg.cacheLock.Lock()
|
||||
cfg.encoderCache[cacheKey] = encoder
|
||||
cfg.cacheLock.Unlock()
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
|
||||
cfg.cacheLock.RLock()
|
||||
decoder, _ := cfg.decoderCache[cacheKey].(ValDecoder)
|
||||
cfg.cacheLock.RUnlock()
|
||||
return decoder
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
|
||||
cfg.cacheLock.RLock()
|
||||
encoder, _ := cfg.encoderCache[cacheKey].(ValEncoder)
|
||||
cfg.cacheLock.RUnlock()
|
||||
return encoder
|
||||
}
|
||||
|
||||
var cfgCacheLock = &sync.RWMutex{}
|
||||
var cfgCache = map[Config]*frozenConfig{}
|
||||
|
||||
func getFrozenConfigFromCache(cfg Config) *frozenConfig {
|
||||
cfgCacheLock.RLock()
|
||||
frozenConfig := cfgCache[cfg]
|
||||
cfgCacheLock.RUnlock()
|
||||
return frozenConfig
|
||||
}
|
||||
|
||||
func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
|
||||
cfgCacheLock.Lock()
|
||||
cfgCache[cfg] = frozenConfig
|
||||
cfgCacheLock.Unlock()
|
||||
}
|
@ -3,6 +3,7 @@ package jsoniter
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func ExampleMarshal() {
|
||||
@ -93,3 +94,28 @@ func ExampleGet() {
|
||||
// Output:
|
||||
// Crimson
|
||||
}
|
||||
|
||||
func ExampleMyKey() {
|
||||
hello := MyKey("hello")
|
||||
output, _ := Marshal(map[*MyKey]string{&hello: "world"})
|
||||
fmt.Println(string(output))
|
||||
obj := map[*MyKey]string{}
|
||||
Unmarshal(output, &obj)
|
||||
for k, v := range obj {
|
||||
fmt.Println(*k, v)
|
||||
}
|
||||
// Output:
|
||||
// {"Hello":"world"}
|
||||
// Hel world
|
||||
}
|
||||
|
||||
type MyKey string
|
||||
|
||||
func (m *MyKey) MarshalText() ([]byte, error) {
|
||||
return []byte(strings.Replace(string(*m), "h", "H", -1)), nil
|
||||
}
|
||||
|
||||
func (m *MyKey) UnmarshalText(text []byte) error {
|
||||
*m = MyKey(text[:3])
|
||||
return nil
|
||||
}
|
||||
|
@ -1,12 +1,14 @@
|
||||
package test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"unsafe"
|
||||
"time"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
"strconv"
|
||||
"testing"
|
||||
"time"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
func Test_customize_type_decoder(t *testing.T) {
|
||||
@ -46,6 +48,38 @@ func Test_customize_byte_array_encoder(t *testing.T) {
|
||||
should.Equal(`"abc"`, str)
|
||||
}
|
||||
|
||||
type CustomEncoderAttachmentTestStruct struct {
|
||||
Value int32 `json:"value"`
|
||||
}
|
||||
|
||||
type CustomEncoderAttachmentTestStructEncoder struct {}
|
||||
|
||||
func (c *CustomEncoderAttachmentTestStructEncoder) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
|
||||
attachVal, ok := stream.Attachment.(int)
|
||||
stream.WriteRaw(`"`)
|
||||
stream.WriteRaw(fmt.Sprintf("%t %d", ok, attachVal))
|
||||
stream.WriteRaw(`"`)
|
||||
}
|
||||
|
||||
func (c *CustomEncoderAttachmentTestStructEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func Test_custom_encoder_attachment(t *testing.T) {
|
||||
|
||||
jsoniter.RegisterTypeEncoder("test.CustomEncoderAttachmentTestStruct", &CustomEncoderAttachmentTestStructEncoder{})
|
||||
expectedValue := 17
|
||||
should := require.New(t)
|
||||
buf := &bytes.Buffer{}
|
||||
stream := jsoniter.NewStream(jsoniter.Config{SortMapKeys: true}.Froze(), buf, 4096)
|
||||
stream.Attachment = expectedValue
|
||||
val := map[string]CustomEncoderAttachmentTestStruct{"a": {}}
|
||||
stream.WriteVal(val)
|
||||
stream.Flush()
|
||||
should.Nil(stream.Error)
|
||||
should.Equal("{\"a\":\"true 17\"}", buf.String())
|
||||
}
|
||||
|
||||
func Test_customize_field_decoder(t *testing.T) {
|
||||
type Tom struct {
|
||||
field1 string
|
||||
@ -61,7 +95,6 @@ func Test_customize_field_decoder(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
func Test_recursive_empty_interface_customization(t *testing.T) {
|
||||
t.Skip()
|
||||
var obj interface{}
|
||||
@ -98,4 +131,93 @@ func Test_read_custom_interface(t *testing.T) {
|
||||
err := jsoniter.UnmarshalFromString(`"hello"`, &val)
|
||||
should.Nil(err)
|
||||
should.Equal("hello", val.Hello())
|
||||
}
|
||||
}
|
||||
|
||||
const flow1 = `
|
||||
{"A":"hello"}
|
||||
{"A":"hello"}
|
||||
{"A":"hello"}
|
||||
{"A":"hello"}
|
||||
{"A":"hello"}`
|
||||
|
||||
const flow2 = `
|
||||
{"A":"hello"}
|
||||
{"A":"hello"}
|
||||
{"A":"hello"}
|
||||
{"A":"hello"}
|
||||
{"A":"hello"}
|
||||
`
|
||||
|
||||
type (
|
||||
Type1 struct {
|
||||
A string
|
||||
}
|
||||
|
||||
Type2 struct {
|
||||
A string
|
||||
}
|
||||
)
|
||||
|
||||
func (t *Type2) UnmarshalJSON(data []byte) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (t *Type2) MarshalJSON() ([]byte, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func TestType1NoFinalLF(t *testing.T) {
|
||||
reader := bytes.NewReader([]byte(flow1))
|
||||
dec := jsoniter.NewDecoder(reader)
|
||||
|
||||
i := 0
|
||||
for dec.More() {
|
||||
data := &Type1{}
|
||||
if err := dec.Decode(data); err != nil {
|
||||
t.Errorf("at %v got %v", i, err)
|
||||
}
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
func TestType1FinalLF(t *testing.T) {
|
||||
reader := bytes.NewReader([]byte(flow2))
|
||||
dec := jsoniter.NewDecoder(reader)
|
||||
|
||||
i := 0
|
||||
for dec.More() {
|
||||
data := &Type1{}
|
||||
if err := dec.Decode(data); err != nil {
|
||||
t.Errorf("at %v got %v", i, err)
|
||||
}
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
func TestType2NoFinalLF(t *testing.T) {
|
||||
reader := bytes.NewReader([]byte(flow1))
|
||||
dec := jsoniter.NewDecoder(reader)
|
||||
|
||||
i := 0
|
||||
for dec.More() {
|
||||
data := &Type2{}
|
||||
if err := dec.Decode(data); err != nil {
|
||||
t.Errorf("at %v got %v", i, err)
|
||||
}
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
func TestType2FinalLF(t *testing.T) {
|
||||
reader := bytes.NewReader([]byte(flow2))
|
||||
dec := jsoniter.NewDecoder(reader)
|
||||
|
||||
i := 0
|
||||
for dec.More() {
|
||||
data := &Type2{}
|
||||
if err := dec.Decode(data); err != nil {
|
||||
t.Errorf("at %v got %v", i, err)
|
||||
}
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
@ -1,11 +1,13 @@
|
||||
package test
|
||||
|
||||
import (
|
||||
"unsafe"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/modern-go/reflect2"
|
||||
"github.com/stretchr/testify/require"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"testing"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
type TestObject1 struct {
|
||||
@ -46,6 +48,53 @@ func Test_customize_field_by_extension(t *testing.T) {
|
||||
should.Equal(`{"field-1":100}`, str)
|
||||
}
|
||||
|
||||
func Test_customize_map_key_encoder(t *testing.T) {
|
||||
should := require.New(t)
|
||||
cfg := jsoniter.Config{}.Froze()
|
||||
cfg.RegisterExtension(&testMapKeyExtension{})
|
||||
m := map[int]int{1: 2}
|
||||
output, err := cfg.MarshalToString(m)
|
||||
should.NoError(err)
|
||||
should.Equal(`{"2":2}`, output)
|
||||
m = map[int]int{}
|
||||
should.NoError(cfg.UnmarshalFromString(output, &m))
|
||||
should.Equal(map[int]int{1: 2}, m)
|
||||
}
|
||||
|
||||
type testMapKeyExtension struct {
|
||||
jsoniter.DummyExtension
|
||||
}
|
||||
|
||||
func (extension *testMapKeyExtension) CreateMapKeyEncoder(typ reflect2.Type) jsoniter.ValEncoder {
|
||||
if typ.Kind() == reflect.Int {
|
||||
return &funcEncoder{
|
||||
fun: func(ptr unsafe.Pointer, stream *jsoniter.Stream) {
|
||||
stream.WriteRaw(`"`)
|
||||
stream.WriteInt(*(*int)(ptr) + 1)
|
||||
stream.WriteRaw(`"`)
|
||||
},
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (extension *testMapKeyExtension) CreateMapKeyDecoder(typ reflect2.Type) jsoniter.ValDecoder {
|
||||
if typ.Kind() == reflect.Int {
|
||||
return &funcDecoder{
|
||||
fun: func(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
|
||||
i, err := strconv.Atoi(iter.ReadString())
|
||||
if err != nil {
|
||||
iter.ReportError("read map key", err.Error())
|
||||
return
|
||||
}
|
||||
i--
|
||||
*(*int)(ptr) = i
|
||||
},
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type funcDecoder struct {
|
||||
fun jsoniter.DecoderFunc
|
||||
}
|
||||
|
238 extra/binary_as_string_codec.go (new file)
@ -0,0 +1,238 @@
|
||||
package extra
|
||||
|
||||
import (
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/modern-go/reflect2"
|
||||
"unicode/utf8"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// safeSet holds the value true if the ASCII character with the given array
|
||||
// position can be represented inside a JSON string without any further
|
||||
// escaping.
|
||||
//
|
||||
// All values are true except for the ASCII control characters (0-31), the
|
||||
// double quote ("), and the backslash character ("\").
|
||||
var safeSet = [utf8.RuneSelf]bool{
|
||||
' ': true,
|
||||
'!': true,
|
||||
'"': false,
|
||||
'#': true,
|
||||
'$': true,
|
||||
'%': true,
|
||||
'&': true,
|
||||
'\'': true,
|
||||
'(': true,
|
||||
')': true,
|
||||
'*': true,
|
||||
'+': true,
|
||||
',': true,
|
||||
'-': true,
|
||||
'.': true,
|
||||
'/': true,
|
||||
'0': true,
|
||||
'1': true,
|
||||
'2': true,
|
||||
'3': true,
|
||||
'4': true,
|
||||
'5': true,
|
||||
'6': true,
|
||||
'7': true,
|
||||
'8': true,
|
||||
'9': true,
|
||||
':': true,
|
||||
';': true,
|
||||
'<': true,
|
||||
'=': true,
|
||||
'>': true,
|
||||
'?': true,
|
||||
'@': true,
|
||||
'A': true,
|
||||
'B': true,
|
||||
'C': true,
|
||||
'D': true,
|
||||
'E': true,
|
||||
'F': true,
|
||||
'G': true,
|
||||
'H': true,
|
||||
'I': true,
|
||||
'J': true,
|
||||
'K': true,
|
||||
'L': true,
|
||||
'M': true,
|
||||
'N': true,
|
||||
'O': true,
|
||||
'P': true,
|
||||
'Q': true,
|
||||
'R': true,
|
||||
'S': true,
|
||||
'T': true,
|
||||
'U': true,
|
||||
'V': true,
|
||||
'W': true,
|
||||
'X': true,
|
||||
'Y': true,
|
||||
'Z': true,
|
||||
'[': true,
|
||||
'\\': false,
|
||||
']': true,
|
||||
'^': true,
|
||||
'_': true,
|
||||
'`': true,
|
||||
'a': true,
|
||||
'b': true,
|
||||
'c': true,
|
||||
'd': true,
|
||||
'e': true,
|
||||
'f': true,
|
||||
'g': true,
|
||||
'h': true,
|
||||
'i': true,
|
||||
'j': true,
|
||||
'k': true,
|
||||
'l': true,
|
||||
'm': true,
|
||||
'n': true,
|
||||
'o': true,
|
||||
'p': true,
|
||||
'q': true,
|
||||
'r': true,
|
||||
's': true,
|
||||
't': true,
|
||||
'u': true,
|
||||
'v': true,
|
||||
'w': true,
|
||||
'x': true,
|
||||
'y': true,
|
||||
'z': true,
|
||||
'{': true,
|
||||
'|': true,
|
||||
'}': true,
|
||||
'~': true,
|
||||
'\u007f': true,
|
||||
}
|
||||
|
||||
var binaryType = reflect2.TypeOfPtr((*[]byte)(nil)).Elem()
|
||||
|
||||
type BinaryAsStringExtension struct {
|
||||
jsoniter.DummyExtension
|
||||
}
|
||||
|
||||
func (extension *BinaryAsStringExtension) CreateEncoder(typ reflect2.Type) jsoniter.ValEncoder {
|
||||
if typ == binaryType {
|
||||
return &binaryAsStringCodec{}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (extension *BinaryAsStringExtension) CreateDecoder(typ reflect2.Type) jsoniter.ValDecoder {
|
||||
if typ == binaryType {
|
||||
return &binaryAsStringCodec{}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type binaryAsStringCodec struct {
|
||||
}
|
||||
|
||||
func (codec *binaryAsStringCodec) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
	rawBytes := iter.ReadStringAsSlice()
	bytes := make([]byte, 0, len(rawBytes))
	for i := 0; i < len(rawBytes); i++ {
		b := rawBytes[i]
		if b == '\\' {
			b2 := rawBytes[i+1]
			if b2 != '\\' {
				iter.ReportError("decode binary as string", `\\x is only supported escape`)
				return
			}
			b3 := rawBytes[i+2]
			if b3 != 'x' {
				iter.ReportError("decode binary as string", `\\x is only supported escape`)
				return
			}
			b4 := rawBytes[i+3]
			b5 := rawBytes[i+4]
			i += 4
			b = readHex(iter, b4, b5)
		}
		bytes = append(bytes, b)
	}
	*(*[]byte)(ptr) = bytes
}

func (codec *binaryAsStringCodec) IsEmpty(ptr unsafe.Pointer) bool {
	return len(*((*[]byte)(ptr))) == 0
}

func (codec *binaryAsStringCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
	newBuffer := writeBytes(stream.Buffer(), *(*[]byte)(ptr))
	stream.SetBuffer(newBuffer)
}

func readHex(iter *jsoniter.Iterator, b1, b2 byte) byte {
	var ret byte
	if b1 >= '0' && b1 <= '9' {
		ret = b1 - '0'
	} else if b1 >= 'a' && b1 <= 'f' {
		ret = b1 - 'a' + 10
	} else {
		iter.ReportError("read hex", "expects 0~9 or a~f, but found "+string([]byte{b1}))
		return 0
	}
	ret *= 16
	if b2 >= '0' && b2 <= '9' {
		ret += b2 - '0'
	} else if b2 >= 'a' && b2 <= 'f' {
		ret += b2 - 'a' + 10
	} else {
		iter.ReportError("read hex", "expects 0~9 or a~f, but found "+string([]byte{b2}))
		return 0
	}
	return ret
}

var hex = "0123456789abcdef"

func writeBytes(space []byte, s []byte) []byte {
	space = append(space, '"')
	// write string, the fast path, without utf8 and escape support
	var i int
	var c byte
	for i, c = range s {
		if c < utf8.RuneSelf && safeSet[c] {
			space = append(space, c)
		} else {
			break
		}
	}
	if i == len(s)-1 {
		space = append(space, '"')
		return space
	}
	return writeBytesSlowPath(space, s[i:])
}

func writeBytesSlowPath(space []byte, s []byte) []byte {
	start := 0
	// for the remaining parts, we process them char by char
	var i int
	var b byte
	for i, b = range s {
		if b >= utf8.RuneSelf {
			space = append(space, '\\', '\\', 'x', hex[b>>4], hex[b&0xF])
			start = i + 1
			continue
		}
		if safeSet[b] {
			continue
		}
		if start < i {
			space = append(space, s[start:i]...)
		}
		space = append(space, '\\', '\\', 'x', hex[b>>4], hex[b&0xF])
		start = i + 1
	}
	if start < len(s) {
		space = append(space, s[start:]...)
	}
	return append(space, '"')
}
32
extra/binary_as_string_codec_test.go
Normal file
@ -0,0 +1,32 @@
package extra

import (
	"github.com/json-iterator/go"
	"github.com/stretchr/testify/require"
	"testing"
)

func init() {
	jsoniter.RegisterExtension(&BinaryAsStringExtension{})
}

func TestBinaryAsStringCodec(t *testing.T) {
	t.Run("safe set", func(t *testing.T) {
		should := require.New(t)
		output, err := jsoniter.Marshal([]byte("hello"))
		should.NoError(err)
		should.Equal(`"hello"`, string(output))
		var val []byte
		should.NoError(jsoniter.Unmarshal(output, &val))
		should.Equal(`hello`, string(val))
	})
	t.Run("non safe set", func(t *testing.T) {
		should := require.New(t)
		output, err := jsoniter.Marshal([]byte{1, 2, 3, 23})
		should.NoError(err)
		should.Equal(`"\\x01\\x02\\x03\\x17"`, string(output))
		var val []byte
		should.NoError(jsoniter.Unmarshal(output, &val))
		should.Equal([]byte{1, 2, 3, 23}, val)
	})
}
|
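A minimal usage sketch of the binary-as-string codec above, in addition to the test: the Packet type and its field are hypothetical, and the sketch assumes the extension is registered globally with jsoniter.RegisterExtension, as in the test's init. Bytes in the safe set stay readable; everything else is emitted as a \\xNN escape and decoded back on unmarshal.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
	"github.com/json-iterator/go/extra"
)

// Packet is a hypothetical struct with a binary payload field.
type Packet struct {
	Payload []byte `json:"payload"`
}

func main() {
	jsoniter.RegisterExtension(&extra.BinaryAsStringExtension{})

	// 'h' and 'i' are in the safe set, 0x01 is not and becomes \\x01.
	out, err := jsoniter.Marshal(Packet{Payload: []byte{'h', 'i', 0x01}})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"payload":"hi\\x01"}

	var back Packet
	if err := jsoniter.Unmarshal(out, &back); err != nil {
		panic(err)
	}
	fmt.Printf("%v\n", back.Payload) // [104 105 1]
}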
@ -9,7 +9,7 @@ import (
|
||||
"unsafe"
|
||||
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/v2pro/plz/reflect2"
|
||||
"github.com/modern-go/reflect2"
|
||||
)
|
||||
|
||||
const maxUint = ^uint(0)
|
||||
@ -183,6 +183,9 @@ func (decoder *fuzzyStringDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Ite
|
||||
*((*string)(ptr)) = string(number)
|
||||
case jsoniter.StringValue:
|
||||
*((*string)(ptr)) = iter.ReadString()
|
||||
case jsoniter.NilValue:
|
||||
iter.Skip()
|
||||
*((*string)(ptr)) = ""
|
||||
default:
|
||||
iter.ReportError("fuzzyStringDecoder", "not number or string")
|
||||
}
|
||||
@ -208,9 +211,15 @@ func (decoder *fuzzyIntegerDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.It
|
||||
} else {
|
||||
str = "0"
|
||||
}
|
||||
case jsoniter.NilValue:
|
||||
iter.Skip()
|
||||
str = "0"
|
||||
default:
|
||||
iter.ReportError("fuzzyIntegerDecoder", "not number or string")
|
||||
}
|
||||
if len(str) == 0 {
|
||||
str = "0"
|
||||
}
|
||||
newIter := iter.Pool().BorrowIterator([]byte(str))
|
||||
defer iter.Pool().ReturnIterator(newIter)
|
||||
isFloat := strings.IndexByte(str, '.') != -1
|
||||
@ -244,6 +253,9 @@ func (decoder *fuzzyFloat32Decoder) Decode(ptr unsafe.Pointer, iter *jsoniter.It
|
||||
} else {
|
||||
*((*float32)(ptr)) = 0
|
||||
}
|
||||
case jsoniter.NilValue:
|
||||
iter.Skip()
|
||||
*((*float32)(ptr)) = 0
|
||||
default:
|
||||
iter.ReportError("fuzzyFloat32Decoder", "not number or string")
|
||||
}
|
||||
@ -273,7 +285,10 @@ func (decoder *fuzzyFloat64Decoder) Decode(ptr unsafe.Pointer, iter *jsoniter.It
|
||||
} else {
|
||||
*((*float64)(ptr)) = 0
|
||||
}
|
||||
case jsoniter.NilValue:
|
||||
iter.Skip()
|
||||
*((*float64)(ptr)) = 0
|
||||
default:
|
||||
iter.ReportError("fuzzyFloat32Decoder", "not number or string")
|
||||
iter.ReportError("fuzzyFloat64Decoder", "not number or string")
|
||||
}
|
||||
}
|
||||
|
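A small sketch of the new NilValue branches added to the fuzzy decoders above, assuming the package's exported RegisterFuzzyDecoders helper: after this change a JSON null is skipped and the target is left at its zero value, rather than hitting the decoders' "not number or string" error.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
	"github.com/json-iterator/go/extra"
)

func main() {
	extra.RegisterFuzzyDecoders()

	var s string
	var n int
	var f float64

	// Each null decodes without error and leaves the zero value behind.
	fmt.Println(jsoniter.Unmarshal([]byte(`null`), &s)) // <nil>
	fmt.Println(jsoniter.Unmarshal([]byte(`null`), &n)) // <nil>
	fmt.Println(jsoniter.Unmarshal([]byte(`null`), &f)) // <nil>
	fmt.Printf("%q %d %v\n", s, n, f)                   // "" 0 0
}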
@ -37,6 +37,8 @@ func Test_any_to_int64(t *testing.T) {
|
||||
should.Equal(int64(10), val)
|
||||
should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
|
||||
should.Equal(int64(10), val)
|
||||
should.Nil(jsoniter.UnmarshalFromString(`""`, &val))
|
||||
should.Equal(int64(0), val)
|
||||
|
||||
// bool part
|
||||
should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
|
||||
@ -357,3 +359,35 @@ func Test_bad_case(t *testing.T) {
|
||||
should := require.New(t)
|
||||
should.Nil(err)
|
||||
}
|
||||
|
||||
func Test_null_to_string(t *testing.T) {
|
||||
should := require.New(t)
|
||||
body := []byte(`null`)
|
||||
var message string
|
||||
err := jsoniter.Unmarshal(body, &message)
|
||||
should.NoError(err)
|
||||
}
|
||||
|
||||
func Test_null_to_int(t *testing.T) {
|
||||
should := require.New(t)
|
||||
body := []byte(`null`)
|
||||
var message int
|
||||
err := jsoniter.Unmarshal(body, &message)
|
||||
should.NoError(err)
|
||||
}
|
||||
|
||||
func Test_null_to_float32(t *testing.T) {
|
||||
should := require.New(t)
|
||||
body := []byte(`null`)
|
||||
var message float32
|
||||
err := jsoniter.Unmarshal(body, &message)
|
||||
should.NoError(err)
|
||||
}
|
||||
|
||||
func Test_null_to_float64(t *testing.T) {
|
||||
should := require.New(t)
|
||||
body := []byte(`null`)
|
||||
var message float64
|
||||
err := jsoniter.Unmarshal(body, &message)
|
||||
should.NoError(err)
|
||||
}
|
||||
|
@ -2,6 +2,7 @@ package extra
|
||||
|
||||
import (
|
||||
"github.com/json-iterator/go"
|
||||
"strings"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
@ -17,6 +18,19 @@ type namingStrategyExtension struct {
|
||||
|
||||
func (extension *namingStrategyExtension) UpdateStructDescriptor(structDescriptor *jsoniter.StructDescriptor) {
|
||||
for _, binding := range structDescriptor.Fields {
|
||||
if unicode.IsLower(rune(binding.Field.Name()[0])) || binding.Field.Name()[0] == '_'{
|
||||
continue
|
||||
}
|
||||
tag, hastag := binding.Field.Tag().Lookup("json")
|
||||
if hastag {
|
||||
tagParts := strings.Split(tag, ",")
|
||||
if tagParts[0] == "-" {
|
||||
continue // hidden field
|
||||
}
|
||||
if tagParts[0] != "" {
|
||||
continue // field explicitly named
|
||||
}
|
||||
}
|
||||
binding.ToNames = []string{extension.translate(binding.Field.Name())}
|
||||
binding.FromNames = []string{extension.translate(binding.Field.Name())}
|
||||
}
|
||||
|
@ -21,3 +21,46 @@ func Test_lower_case_with_underscores(t *testing.T) {
|
||||
should.Nil(err)
|
||||
should.Equal(`{"user_name":"taowen","first_language":"Chinese"}`, string(output))
|
||||
}
|
||||
|
||||
func Test_set_naming_strategy_with_overrides(t *testing.T) {
|
||||
should := require.New(t)
|
||||
SetNamingStrategy(LowerCaseWithUnderscores)
|
||||
output, err := jsoniter.Marshal(struct {
|
||||
UserName string `json:"UserName"`
|
||||
FirstLanguage string
|
||||
}{
|
||||
UserName: "taowen",
|
||||
FirstLanguage: "Chinese",
|
||||
})
|
||||
should.Nil(err)
|
||||
should.Equal(`{"UserName":"taowen","first_language":"Chinese"}`, string(output))
|
||||
}
|
||||
|
||||
func Test_set_naming_strategy_with_omitempty(t *testing.T) {
|
||||
should := require.New(t)
|
||||
SetNamingStrategy(LowerCaseWithUnderscores)
|
||||
output, err := jsoniter.Marshal(struct {
|
||||
UserName string
|
||||
FirstLanguage string `json:",omitempty"`
|
||||
}{
|
||||
UserName: "taowen",
|
||||
})
|
||||
should.Nil(err)
|
||||
should.Equal(`{"user_name":"taowen"}`, string(output))
|
||||
}
|
||||
|
||||
func Test_set_naming_strategy_with_private_field(t *testing.T) {
|
||||
should := require.New(t)
|
||||
SetNamingStrategy(LowerCaseWithUnderscores)
|
||||
output, err := jsoniter.Marshal(struct {
|
||||
UserName string
|
||||
userId int
|
||||
_UserAge int
|
||||
}{
|
||||
UserName: "allen",
|
||||
userId: 100,
|
||||
_UserAge: 30,
|
||||
})
|
||||
should.Nil(err)
|
||||
should.Equal(`{"user_name":"allen"}`, string(output))
|
||||
}
|
||||
|
@ -2,6 +2,7 @@ package extra
|
||||
|
||||
import (
|
||||
"github.com/json-iterator/go"
|
||||
"strings"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
@ -18,8 +19,36 @@ func (extension *privateFieldsExtension) UpdateStructDescriptor(structDescriptor
|
||||
for _, binding := range structDescriptor.Fields {
|
||||
isPrivate := unicode.IsLower(rune(binding.Field.Name()[0]))
|
||||
if isPrivate {
|
||||
binding.FromNames = []string{binding.Field.Name()}
|
||||
binding.ToNames = []string{binding.Field.Name()}
|
||||
tag, hastag := binding.Field.Tag().Lookup("json")
|
||||
if !hastag {
|
||||
binding.FromNames = []string{binding.Field.Name()}
|
||||
binding.ToNames = []string{binding.Field.Name()}
|
||||
continue
|
||||
}
|
||||
tagParts := strings.Split(tag, ",")
|
||||
names := calcFieldNames(binding.Field.Name(), tagParts[0], tag)
|
||||
binding.FromNames = names
|
||||
binding.ToNames = names
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string {
|
||||
// ignore?
|
||||
if wholeTag == "-" {
|
||||
return []string{}
|
||||
}
|
||||
// rename?
|
||||
var fieldNames []string
|
||||
if tagProvidedFieldName == "" {
|
||||
fieldNames = []string{originalFieldName}
|
||||
} else {
|
||||
fieldNames = []string{tagProvidedFieldName}
|
||||
}
|
||||
// private?
|
||||
isNotExported := unicode.IsLower(rune(originalFieldName[0]))
|
||||
if isNotExported {
|
||||
fieldNames = []string{}
|
||||
}
|
||||
return fieldNames
|
||||
}
|
||||
|
11
go.mod
Normal file
@ -0,0 +1,11 @@
module github.com/json-iterator/go

go 1.12

require (
	github.com/davecgh/go-spew v1.1.1
	github.com/google/gofuzz v1.0.0
	github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421
	github.com/modern-go/reflect2 v1.0.2
	github.com/stretchr/testify v1.8.0
)
21
go.sum
Normal file
@ -0,0 +1,21 @@
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
27
iter.go
@ -74,6 +74,7 @@ type Iterator struct {
|
||||
buf []byte
|
||||
head int
|
||||
tail int
|
||||
depth int
|
||||
captureStartedAt int
|
||||
captured []byte
|
||||
Error error
|
||||
@ -88,6 +89,7 @@ func NewIterator(cfg API) *Iterator {
|
||||
buf: nil,
|
||||
head: 0,
|
||||
tail: 0,
|
||||
depth: 0,
|
||||
}
|
||||
}
|
||||
|
||||
@ -99,6 +101,7 @@ func Parse(cfg API, reader io.Reader, bufSize int) *Iterator {
|
||||
buf: make([]byte, bufSize),
|
||||
head: 0,
|
||||
tail: 0,
|
||||
depth: 0,
|
||||
}
|
||||
}
|
||||
|
||||
@ -110,6 +113,7 @@ func ParseBytes(cfg API, input []byte) *Iterator {
|
||||
buf: input,
|
||||
head: 0,
|
||||
tail: len(input),
|
||||
depth: 0,
|
||||
}
|
||||
}
|
||||
|
||||
@ -128,6 +132,7 @@ func (iter *Iterator) Reset(reader io.Reader) *Iterator {
|
||||
iter.reader = reader
|
||||
iter.head = 0
|
||||
iter.tail = 0
|
||||
iter.depth = 0
|
||||
return iter
|
||||
}
|
||||
|
||||
@ -137,6 +142,7 @@ func (iter *Iterator) ResetBytes(input []byte) *Iterator {
|
||||
iter.buf = input
|
||||
iter.head = 0
|
||||
iter.tail = len(input)
|
||||
iter.depth = 0
|
||||
return iter
|
||||
}
|
||||
|
||||
@ -320,3 +326,24 @@ func (iter *Iterator) Read() interface{} {
		return nil
	}
}

// limit maximum depth of nesting, as allowed by https://tools.ietf.org/html/rfc7159#section-9
const maxDepth = 10000

func (iter *Iterator) incrementDepth() (success bool) {
	iter.depth++
	if iter.depth <= maxDepth {
		return true
	}
	iter.ReportError("incrementDepth", "exceeded max depth")
	return false
}

func (iter *Iterator) decrementDepth() (success bool) {
	iter.depth--
	if iter.depth >= 0 {
		return true
	}
	iter.ReportError("decrementDepth", "unexpected negative nesting")
	return false
}
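A quick sketch of the effect of the depth guard above, as a hypothetical standalone program using only the exported Unmarshal API (maxDepth itself is unexported): input nested more than 10000 levels deep is rejected with the "exceeded max depth" error instead of being parsed further.

package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// 10001 nested arrays exceed maxDepth (10000) and trigger the guard.
	tooDeep := strings.Repeat("[", 10001) + strings.Repeat("]", 10001)
	var v interface{}
	err := jsoniter.Unmarshal([]byte(tooDeep), &v)
	fmt.Println(err) // expected to mention "exceeded max depth"

	// One level fewer stays within the limit and decodes fine.
	ok := strings.Repeat("[", 10000) + strings.Repeat("]", 10000)
	fmt.Println(jsoniter.Unmarshal([]byte(ok), &v)) // <nil>
}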
@ -28,26 +28,32 @@ func (iter *Iterator) ReadArray() (ret bool) {
|
||||
func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
|
||||
c := iter.nextToken()
|
||||
if c == '[' {
|
||||
if !iter.incrementDepth() {
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c != ']' {
|
||||
iter.unreadByte()
|
||||
if !callback(iter) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
for c == ',' {
|
||||
if !callback(iter) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
}
|
||||
if c != ']' {
|
||||
iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
return true
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
return true
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
if c == 'n' {
|
||||
iter.skipThreeBytes('u', 'l', 'l')
|
||||
|
@ -77,14 +77,12 @@ func (iter *Iterator) ReadFloat32() (ret float32) {
|
||||
}
|
||||
|
||||
func (iter *Iterator) readPositiveFloat32() (ret float32) {
|
||||
value := uint64(0)
|
||||
c := byte(' ')
|
||||
i := iter.head
|
||||
// first char
|
||||
if i == iter.tail {
|
||||
return iter.readFloat32SlowPath()
|
||||
}
|
||||
c = iter.buf[i]
|
||||
c := iter.buf[i]
|
||||
i++
|
||||
ind := floatDigits[c]
|
||||
switch ind {
|
||||
@ -107,7 +105,7 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
|
||||
return
|
||||
}
|
||||
}
|
||||
value = uint64(ind)
|
||||
value := uint64(ind)
|
||||
// chars before dot
|
||||
non_decimal_loop:
|
||||
for ; i < iter.tail; i++ {
|
||||
@ -145,9 +143,7 @@ non_decimal_loop:
|
||||
}
|
||||
// too many decimal places
|
||||
return iter.readFloat32SlowPath()
|
||||
case invalidCharForNumber:
|
||||
fallthrough
|
||||
case dotInNumber:
|
||||
case invalidCharForNumber, dotInNumber:
|
||||
return iter.readFloat32SlowPath()
|
||||
}
|
||||
decimalPlaces++
|
||||
@ -218,14 +214,12 @@ func (iter *Iterator) ReadFloat64() (ret float64) {
|
||||
}
|
||||
|
||||
func (iter *Iterator) readPositiveFloat64() (ret float64) {
|
||||
value := uint64(0)
|
||||
c := byte(' ')
|
||||
i := iter.head
|
||||
// first char
|
||||
if i == iter.tail {
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
c = iter.buf[i]
|
||||
c := iter.buf[i]
|
||||
i++
|
||||
ind := floatDigits[c]
|
||||
switch ind {
|
||||
@ -248,7 +242,7 @@ func (iter *Iterator) readPositiveFloat64() (ret float64) {
|
||||
return
|
||||
}
|
||||
}
|
||||
value = uint64(ind)
|
||||
value := uint64(ind)
|
||||
// chars before dot
|
||||
non_decimal_loop:
|
||||
for ; i < iter.tail; i++ {
|
||||
@ -286,9 +280,7 @@ non_decimal_loop:
|
||||
}
|
||||
// too many decimal places
|
||||
return iter.readFloat64SlowPath()
|
||||
case invalidCharForNumber:
|
||||
fallthrough
|
||||
case dotInNumber:
|
||||
case invalidCharForNumber, dotInNumber:
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
decimalPlaces++
|
||||
@ -296,6 +288,9 @@ non_decimal_loop:
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
value = (value << 3) + (value << 1) + uint64(ind)
|
||||
if value > maxFloat64 {
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
}
|
||||
}
|
||||
return iter.readFloat64SlowPath()
|
||||
|
@ -9,6 +9,7 @@ var intDigits []int8
|
||||
|
||||
const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1
|
||||
const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1
|
||||
const maxFloat64 = 1<<53 - 1
|
||||
|
||||
func init() {
|
||||
intDigits = make([]int8, 256)
|
||||
@ -22,11 +23,17 @@ func init() {
|
||||
|
||||
// ReadUint read uint
|
||||
func (iter *Iterator) ReadUint() uint {
|
||||
if strconv.IntSize == 32 {
|
||||
return uint(iter.ReadUint32())
|
||||
}
|
||||
return uint(iter.ReadUint64())
|
||||
}
|
||||
|
||||
// ReadInt read int
|
||||
func (iter *Iterator) ReadInt() int {
|
||||
if strconv.IntSize == 32 {
|
||||
return int(iter.ReadInt32())
|
||||
}
|
||||
return int(iter.ReadInt64())
|
||||
}
|
||||
|
||||
@ -333,7 +340,7 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
|
||||
}
|
||||
|
||||
func (iter *Iterator) assertInteger() {
|
||||
if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
|
||||
if iter.head < iter.tail && iter.buf[iter.head] == '.' {
|
||||
iter.ReportError("assertInteger", "can not decode float as int")
|
||||
}
|
||||
}
|
||||
|
@ -2,7 +2,7 @@ package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unicode"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ReadObject read one field from object.
|
||||
@ -60,7 +60,7 @@ func (iter *Iterator) readFieldHash() int64 {
|
||||
if b == '\\' {
|
||||
iter.head = i
|
||||
for _, b := range iter.readStringSlowPath() {
|
||||
if 'A' <= b && b <= 'Z' {
|
||||
if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
|
||||
b += 'a' - 'A'
|
||||
}
|
||||
hash ^= int64(b)
|
||||
@ -82,7 +82,7 @@ func (iter *Iterator) readFieldHash() int64 {
|
||||
}
|
||||
return hash
|
||||
}
|
||||
if 'A' <= b && b <= 'Z' {
|
||||
if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
|
||||
b += 'a' - 'A'
|
||||
}
|
||||
hash ^= int64(b)
|
||||
@ -95,10 +95,13 @@ func (iter *Iterator) readFieldHash() int64 {
|
||||
}
|
||||
}
|
||||
|
||||
func calcHash(str string) int64 {
|
||||
func calcHash(str string, caseSensitive bool) int64 {
|
||||
if !caseSensitive {
|
||||
str = strings.ToLower(str)
|
||||
}
|
||||
hash := int64(0x811c9dc5)
|
||||
for _, b := range str {
|
||||
hash ^= int64(unicode.ToLower(b))
|
||||
for _, b := range []byte(str) {
|
||||
hash ^= int64(b)
|
||||
hash *= 0x1000193
|
||||
}
|
||||
return int64(hash)
|
||||
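The rewritten hash above is plain FNV-1a over the field-name bytes, with case folding applied up front only when the config is case-insensitive. A small self-contained sketch of that behaviour; fnv1a here is a local helper mirroring the loop, not part of the package API.

package main

import (
	"fmt"
	"strings"
)

// fnv1a mirrors the hash loop in calcHash: FNV-1a over the raw bytes.
func fnv1a(s string) int64 {
	hash := int64(0x811c9dc5)
	for _, b := range []byte(s) {
		hash ^= int64(b)
		hash *= 0x1000193
	}
	return hash
}

func main() {
	// With caseSensitive == false the key is lowered before hashing,
	// so "UserName" and "username" map to the same hash.
	fmt.Println(fnv1a(strings.ToLower("UserName")) == fnv1a("username")) // true
	// With caseSensitive == true the raw bytes are hashed and the hashes differ.
	fmt.Println(fnv1a("UserName") == fnv1a("username")) // false
}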
@ -109,6 +112,9 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
|
||||
c := iter.nextToken()
|
||||
var field string
|
||||
if c == '{' {
|
||||
if !iter.incrementDepth() {
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c == '"' {
|
||||
iter.unreadByte()
|
||||
@ -118,6 +124,7 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
|
||||
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
|
||||
}
|
||||
if !callback(iter, field) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
@ -128,20 +135,23 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
|
||||
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
|
||||
}
|
||||
if !callback(iter, field) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
}
|
||||
if c != '}' {
|
||||
iter.ReportError("ReadObjectCB", `object not ended with }`)
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
return true
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
if c == '}' {
|
||||
return true
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c}))
|
||||
iter.ReportError("ReadObjectCB", `expect " after {, but found `+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
if c == 'n' {
|
||||
@ -156,15 +166,20 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
|
||||
func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
|
||||
c := iter.nextToken()
|
||||
if c == '{' {
|
||||
if !iter.incrementDepth() {
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c == '"' {
|
||||
iter.unreadByte()
|
||||
field := iter.ReadString()
|
||||
if iter.nextToken() != ':' {
|
||||
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
if !callback(iter, field) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
@ -172,23 +187,27 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
|
||||
field = iter.ReadString()
|
||||
if iter.nextToken() != ':' {
|
||||
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
if !callback(iter, field) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
}
|
||||
if c != '}' {
|
||||
iter.ReportError("ReadMapCB", `object not ended with }`)
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
return true
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
if c == '}' {
|
||||
return true
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
|
||||
iter.ReportError("ReadMapCB", `expect " after {, but found `+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
if c == 'n' {
|
||||
|
25
iter_skip.go
@ -37,17 +37,24 @@ func (iter *Iterator) SkipAndReturnBytes() []byte {
|
||||
return iter.stopCapture()
|
||||
}
|
||||
|
||||
type captureBuffer struct {
|
||||
startedAt int
|
||||
captured []byte
|
||||
// SkipAndAppendBytes skips the next JSON element and appends its content to the
|
||||
// buffer, returning the result.
|
||||
func (iter *Iterator) SkipAndAppendBytes(buf []byte) []byte {
|
||||
iter.startCaptureTo(buf, iter.head)
|
||||
iter.Skip()
|
||||
return iter.stopCapture()
|
||||
}
|
||||
|
||||
func (iter *Iterator) startCapture(captureStartedAt int) {
|
||||
func (iter *Iterator) startCaptureTo(buf []byte, captureStartedAt int) {
|
||||
if iter.captured != nil {
|
||||
panic("already in capture mode")
|
||||
}
|
||||
iter.captureStartedAt = captureStartedAt
|
||||
iter.captured = make([]byte, 0, 32)
|
||||
iter.captured = buf
|
||||
}
|
||||
|
||||
func (iter *Iterator) startCapture(captureStartedAt int) {
|
||||
iter.startCaptureTo(make([]byte, 0, 32), captureStartedAt)
|
||||
}
|
||||
|
||||
func (iter *Iterator) stopCapture() []byte {
|
||||
@ -58,13 +65,7 @@ func (iter *Iterator) stopCapture() []byte {
|
||||
remaining := iter.buf[iter.captureStartedAt:iter.head]
|
||||
iter.captureStartedAt = -1
|
||||
iter.captured = nil
|
||||
if len(captured) == 0 {
|
||||
copied := make([]byte, len(remaining))
|
||||
copy(copied, remaining)
|
||||
return copied
|
||||
}
|
||||
captured = append(captured, remaining...)
|
||||
return captured
|
||||
return append(captured, remaining...)
|
||||
}
|
||||
|
||||
// Skip skips a json value and positions the iterator at the start of the next json value
|
||||
|
@ -22,6 +22,9 @@ func (iter *Iterator) skipNumber() {
|
||||
|
||||
func (iter *Iterator) skipArray() {
|
||||
level := 1
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
for {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
switch iter.buf[i] {
|
||||
@ -31,8 +34,14 @@ func (iter *Iterator) skipArray() {
|
||||
i = iter.head - 1 // it will be i++ soon
|
||||
case '[': // If open symbol, increase level
|
||||
level++
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
case ']': // If close symbol, decrease level
|
||||
level--
|
||||
if !iter.decrementDepth() {
|
||||
return
|
||||
}
|
||||
|
||||
// If we have returned to the original level, we're done
|
||||
if level == 0 {
|
||||
@ -50,6 +59,10 @@ func (iter *Iterator) skipArray() {
|
||||
|
||||
func (iter *Iterator) skipObject() {
|
||||
level := 1
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
|
||||
for {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
switch iter.buf[i] {
|
||||
@ -59,8 +72,14 @@ func (iter *Iterator) skipObject() {
|
||||
i = iter.head - 1 // it will be i++ soon
|
||||
case '{': // If open symbol, increase level
|
||||
level++
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
case '}': // If close symbol, decrease level
|
||||
level--
|
||||
if !iter.decrementDepth() {
|
||||
return
|
||||
}
|
||||
|
||||
// If we have returned to the original level, we're done
|
||||
if level == 0 {
|
||||
|
@ -2,12 +2,22 @@
|
||||
|
||||
package jsoniter
|
||||
|
||||
import "fmt"
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
)
|
||||
|
||||
func (iter *Iterator) skipNumber() {
|
||||
if !iter.trySkipNumber() {
|
||||
iter.unreadByte()
|
||||
iter.ReadFloat32()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
return
|
||||
}
|
||||
iter.ReadFloat64()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
iter.Error = nil
|
||||
iter.ReadBigFloat()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3,9 +3,10 @@ package misc_tests
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"github.com/stretchr/testify/require"
|
||||
"testing"
|
||||
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_empty_array(t *testing.T) {
|
||||
@ -157,6 +158,27 @@ func Test_encode_byte_array(t *testing.T) {
|
||||
should.Equal(`"AQID"`, string(bytes))
|
||||
}
|
||||
|
||||
func Test_encode_empty_byte_array(t *testing.T) {
|
||||
should := require.New(t)
|
||||
bytes, err := json.Marshal([]byte{})
|
||||
should.Nil(err)
|
||||
should.Equal(`""`, string(bytes))
|
||||
bytes, err = jsoniter.Marshal([]byte{})
|
||||
should.Nil(err)
|
||||
should.Equal(`""`, string(bytes))
|
||||
}
|
||||
|
||||
func Test_encode_nil_byte_array(t *testing.T) {
|
||||
should := require.New(t)
|
||||
var nilSlice []byte
|
||||
bytes, err := json.Marshal(nilSlice)
|
||||
should.Nil(err)
|
||||
should.Equal(`null`, string(bytes))
|
||||
bytes, err = jsoniter.Marshal(nilSlice)
|
||||
should.Nil(err)
|
||||
should.Equal(`null`, string(bytes))
|
||||
}
|
||||
|
||||
func Test_decode_byte_array_from_base64(t *testing.T) {
|
||||
should := require.New(t)
|
||||
data := []byte{}
|
||||
@ -168,6 +190,17 @@ func Test_decode_byte_array_from_base64(t *testing.T) {
|
||||
should.Equal([]byte{1, 2, 3}, data)
|
||||
}
|
||||
|
||||
func Test_decode_byte_array_from_base64_with_newlines(t *testing.T) {
|
||||
should := require.New(t)
|
||||
data := []byte{}
|
||||
err := json.Unmarshal([]byte(`"A\rQ\nID"`), &data)
|
||||
should.Nil(err)
|
||||
should.Equal([]byte{1, 2, 3}, data)
|
||||
err = jsoniter.Unmarshal([]byte(`"A\rQ\nID"`), &data)
|
||||
should.Nil(err)
|
||||
should.Equal([]byte{1, 2, 3}, data)
|
||||
}
|
||||
|
||||
func Test_decode_byte_array_from_array(t *testing.T) {
|
||||
should := require.New(t)
|
||||
data := []byte{}
|
||||
|
@ -4,8 +4,8 @@ import (
|
||||
"bytes"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_true(t *testing.T) {
|
||||
@ -44,4 +44,4 @@ func Test_write_val_bool(t *testing.T) {
|
||||
should.Equal(stream.Buffered(), 0)
|
||||
should.Nil(stream.Error)
|
||||
should.Equal("true", buf.String())
|
||||
}
|
||||
}
|
||||
|
@ -2,10 +2,11 @@ package misc_tests
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"math"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_read_big_float(t *testing.T) {
|
||||
@ -77,6 +78,26 @@ func Test_read_number(t *testing.T) {
|
||||
should.Equal(`92233720368547758079223372036854775807`, string(val))
|
||||
}
|
||||
|
||||
func Test_encode_inf(t *testing.T) {
|
||||
should := require.New(t)
|
||||
_, err := json.Marshal(math.Inf(1))
|
||||
should.Error(err)
|
||||
_, err = jsoniter.Marshal(float32(math.Inf(1)))
|
||||
should.Error(err)
|
||||
_, err = jsoniter.Marshal(math.Inf(-1))
|
||||
should.Error(err)
|
||||
}
|
||||
|
||||
func Test_encode_nan(t *testing.T) {
|
||||
should := require.New(t)
|
||||
_, err := json.Marshal(math.NaN())
|
||||
should.Error(err)
|
||||
_, err = jsoniter.Marshal(float32(math.NaN()))
|
||||
should.Error(err)
|
||||
_, err = jsoniter.Marshal(math.NaN())
|
||||
should.Error(err)
|
||||
}
|
||||
|
||||
func Benchmark_jsoniter_float(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
input := []byte(`1.1123,`)
|
||||
|
@ -5,12 +5,14 @@ package misc_tests
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"math/rand"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_read_uint64_invalid(t *testing.T) {
|
||||
@ -70,6 +72,95 @@ func Test_float_as_int(t *testing.T) {
|
||||
should.NotNil(jsoniter.Unmarshal([]byte(`1.1`), &i))
|
||||
}
|
||||
|
||||
// chunkedData is an io.Reader that returns a random amount of data in the range [1, chunkedData.chunkSize].
// It simulates chunked data from an HTTP server, as commonly produced by the net/http package.
|
||||
type chunkedData struct {
|
||||
chunkSize int
|
||||
data []byte
|
||||
head int
|
||||
}
|
||||
|
||||
// Read is an implementation of io.Reader; it returns a random amount of data in the range [1, chunkedData.chunkSize].
|
||||
func (c *chunkedData) Read(p []byte) (n int, err error) {
|
||||
to := c.head + int(rand.Int31n(int32(c.chunkSize))+1)
|
||||
|
||||
// copy does not copy more data than p can consume
|
||||
n = copy(p, c.data[c.head:to])
|
||||
c.head = c.head + n
|
||||
if c.head >= len(c.data) {
|
||||
err = io.EOF
|
||||
}
|
||||
return n, err
|
||||
}
|
||||
|
||||
// TestIterator_ReadInt_chunkedInput validates the behaviour of the Iterator.ReadInt() method in the case where:
|
||||
// - it reads data from io.Reader,
|
||||
// - expected value is 0 (zero)
|
||||
// - Iterator.tail == Iterator.head
|
||||
// - Iterator.tail < len(Iterator.buf)
|
||||
// - the value in the buffer after Iterator.tail is left over from the previous read and contains a '.' character.
|
||||
func TestIterator_ReadInt_chunkedInput(t *testing.T) {
|
||||
should := require.New(t)
|
||||
|
||||
data := &chunkedData{
|
||||
data: jsonFloatIntArray(t, 10),
|
||||
}
|
||||
|
||||
// because this test relies on the randomness of chunkedData, we do multiple iterations to
// be sure that we hit the required case.
|
||||
for data.chunkSize = 3; data.chunkSize <= len(data.data); data.chunkSize++ {
|
||||
data.head = 0
|
||||
|
||||
iter := jsoniter.Parse(jsoniter.ConfigDefault, data, data.chunkSize)
|
||||
i := 0
|
||||
for iter.ReadArray() {
|
||||
// every even item is float, let's just skip it.
|
||||
if i%2 == 0 {
|
||||
iter.Skip()
|
||||
i++
|
||||
continue
|
||||
}
|
||||
|
||||
should.Zero(iter.ReadInt())
|
||||
should.NoError(iter.Error)
|
||||
|
||||
i++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// jsonFloatIntArray generates JSON array where every
|
||||
// - even item is float 0.1
|
||||
// - odd item is integer 0
|
||||
//
|
||||
// [0.1, 0, 0.1, 0]
|
||||
func jsonFloatIntArray(t *testing.T, numberOfItems int) []byte {
|
||||
t.Helper()
|
||||
numbers := make([]jsoniter.Any, numberOfItems)
|
||||
for i := range numbers {
|
||||
switch i % 2 {
|
||||
case 0:
|
||||
numbers[i] = jsoniter.WrapFloat64(0.1)
|
||||
default:
|
||||
numbers[i] = jsoniter.WrapInt64(0)
|
||||
}
|
||||
}
|
||||
|
||||
fixture, err := jsoniter.ConfigFastest.Marshal(numbers)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
b := &bytes.Buffer{}
|
||||
|
||||
require.NoError(
|
||||
t,
|
||||
json.Compact(b, fixture),
|
||||
"json should be compactable",
|
||||
)
|
||||
return b.Bytes()
|
||||
}
|
||||
|
||||
func Benchmark_jsoniter_encode_int(b *testing.B) {
|
||||
stream := jsoniter.NewStream(jsoniter.ConfigDefault, ioutil.Discard, 64)
|
||||
for n := 0; n < b.N; n++ {
|
||||
|
@ -2,10 +2,10 @@ package misc_tests
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
"io"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func Test_nil_non_empty_interface(t *testing.T) {
|
||||
@ -114,7 +114,7 @@ func Test_overwrite_interface_value_with_nil(t *testing.T) {
|
||||
|
||||
err := json.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
|
||||
should.NoError(err)
|
||||
should.Equal(42, (*(wrapper.Payload.(*Payload))).Value)
|
||||
should.Equal(42, wrapper.Payload.(*Payload).Value)
|
||||
|
||||
err = json.Unmarshal([]byte(`{"payload": null}`), &wrapper)
|
||||
should.NoError(err)
|
||||
@ -128,7 +128,7 @@ func Test_overwrite_interface_value_with_nil(t *testing.T) {
|
||||
|
||||
err = jsoniter.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
|
||||
should.Equal(nil, err)
|
||||
should.Equal(42, (*(wrapper.Payload.(*Payload))).Value)
|
||||
should.Equal(42, wrapper.Payload.(*Payload).Value)
|
||||
|
||||
err = jsoniter.Unmarshal([]byte(`{"payload": null}`), &wrapper)
|
||||
should.Equal(nil, err)
|
||||
|
@ -7,8 +7,8 @@ import (
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_bad_case(t *testing.T) {
|
||||
|
@ -5,9 +5,9 @@ import (
|
||||
"math/big"
|
||||
"testing"
|
||||
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
"strings"
|
||||
"github.com/json-iterator/go"
|
||||
)
|
||||
|
||||
func Test_decode_TextMarshaler_key_map(t *testing.T) {
|
||||
@ -31,3 +31,22 @@ func Test_read_map_with_reader(t *testing.T) {
|
||||
should.Equal(m2, m1)
|
||||
should.Equal("1.0.76", m1["note"].(map[string]interface{})["CoreServices"].(map[string]interface{})["version_name"])
|
||||
}
|
||||
|
||||
func Test_map_eface_of_eface(t *testing.T) {
|
||||
should := require.New(t)
|
||||
json := jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
output, err := json.MarshalToString(map[interface{}]interface{}{
|
||||
"1": 2,
|
||||
3: "4",
|
||||
})
|
||||
should.NoError(err)
|
||||
should.Equal(`{"1":2,"3":"4"}`, output)
|
||||
}
|
||||
|
||||
func Test_encode_nil_map(t *testing.T) {
|
||||
should := require.New(t)
|
||||
var nilMap map[string]string
|
||||
output, err := jsoniter.MarshalToString(nilMap)
|
||||
should.NoError(err)
|
||||
should.Equal(`null`, output)
|
||||
}
|
||||
|
@ -2,9 +2,10 @@ package misc_tests
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"reflect"
|
||||
"testing"
|
||||
"github.com/json-iterator/go"
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
type Level1 struct {
|
||||
@ -15,6 +16,243 @@ type Level2 struct {
|
||||
World string
|
||||
}
|
||||
|
||||
func Test_deep_nested(t *testing.T) {
|
||||
type unstructured interface{}
|
||||
|
||||
testcases := []struct {
|
||||
name string
|
||||
data []byte
|
||||
expectError string
|
||||
}{
|
||||
{
|
||||
name: "array under maxDepth",
|
||||
data: []byte(`{"a":` + strings.Repeat(`[`, 10000-1) + strings.Repeat(`]`, 10000-1) + `}`),
|
||||
expectError: "",
|
||||
},
|
||||
{
|
||||
name: "array over maxDepth",
|
||||
data: []byte(`{"a":` + strings.Repeat(`[`, 10000) + strings.Repeat(`]`, 10000) + `}`),
|
||||
expectError: "max depth",
|
||||
},
|
||||
{
|
||||
name: "object under maxDepth",
|
||||
data: []byte(`{"a":` + strings.Repeat(`{"a":`, 10000-1) + `0` + strings.Repeat(`}`, 10000-1) + `}`),
|
||||
expectError: "",
|
||||
},
|
||||
{
|
||||
name: "object over maxDepth",
|
||||
data: []byte(`{"a":` + strings.Repeat(`{"a":`, 10000) + `0` + strings.Repeat(`}`, 10000) + `}`),
|
||||
expectError: "max depth",
|
||||
},
|
||||
}
|
||||
|
||||
targets := []struct {
|
||||
name string
|
||||
new func() interface{}
|
||||
}{
|
||||
{
|
||||
name: "unstructured",
|
||||
new: func() interface{} {
|
||||
var v interface{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed named field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed missing field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
B interface{} `json:"b"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 1 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 2 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 3 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 4 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 5 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
E interface{} `json:"e"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 6 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
E interface{} `json:"e"`
|
||||
F interface{} `json:"f"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 7 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
E interface{} `json:"e"`
|
||||
F interface{} `json:"f"`
|
||||
G interface{} `json:"g"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 8 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
E interface{} `json:"e"`
|
||||
F interface{} `json:"f"`
|
||||
G interface{} `json:"g"`
|
||||
H interface{} `json:"h"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 9 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
E interface{} `json:"e"`
|
||||
F interface{} `json:"f"`
|
||||
G interface{} `json:"g"`
|
||||
H interface{} `json:"h"`
|
||||
I interface{} `json:"i"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 10 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
E interface{} `json:"e"`
|
||||
F interface{} `json:"f"`
|
||||
G interface{} `json:"g"`
|
||||
H interface{} `json:"h"`
|
||||
I interface{} `json:"i"`
|
||||
J interface{} `json:"j"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 11 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
E interface{} `json:"e"`
|
||||
F interface{} `json:"f"`
|
||||
G interface{} `json:"g"`
|
||||
H interface{} `json:"h"`
|
||||
I interface{} `json:"i"`
|
||||
J interface{} `json:"j"`
|
||||
K interface{} `json:"k"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testcases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
for _, target := range targets {
|
||||
t.Run(target.name, func(t *testing.T) {
|
||||
err := jsoniter.Unmarshal(tc.data, target.new())
|
||||
if len(tc.expectError) == 0 {
|
||||
if err != nil {
|
||||
t.Errorf("unexpected error: %v", err)
|
||||
}
|
||||
} else {
|
||||
if err == nil {
|
||||
t.Errorf("expected error, got none")
|
||||
} else if !strings.Contains(err.Error(), tc.expectError) {
|
||||
t.Errorf("expected error containing '%s', got: %v", tc.expectError, err)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_nested(t *testing.T) {
|
||||
iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"hello": [{"world": "value1"}, {"world": "value2"}]}`)
|
||||
l1 := Level1{}
|
||||
|
@ -5,8 +5,8 @@ import (
|
||||
"io"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_read_null(t *testing.T) {
|
||||
|
@ -2,12 +2,13 @@ package misc_tests
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"time"
|
||||
"github.com/stretchr/testify/require"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
func Test_empty_object(t *testing.T) {
|
||||
@ -129,4 +130,243 @@ func Test_reader_and_load_more(t *testing.T) {
|
||||
decoder := jsoniter.ConfigCompatibleWithStandardLibrary.NewDecoder(reader)
|
||||
obj := TestObject{}
|
||||
should.Nil(decoder.Decode(&obj))
|
||||
}
|
||||
}
|
||||
|
||||
func Test_unmarshal_into_existing_value(t *testing.T) {
|
||||
should := require.New(t)
|
||||
type TestObject struct {
|
||||
Field1 int
|
||||
Field2 interface{}
|
||||
}
|
||||
var obj TestObject
|
||||
m := map[string]interface{}{}
|
||||
obj.Field2 = &m
|
||||
cfg := jsoniter.Config{UseNumber: true}.Froze()
|
||||
err := cfg.Unmarshal([]byte(`{"Field1":1,"Field2":{"k":"v"}}`), &obj)
|
||||
should.NoError(err)
|
||||
should.Equal(map[string]interface{}{
|
||||
"k": "v",
|
||||
}, m)
|
||||
}
|
||||
|
||||
// for issue421
|
||||
func Test_unmarshal_anonymous_struct_invalid(t *testing.T) {
|
||||
should := require.New(t)
|
||||
t0 := struct {
|
||||
Field1 string
|
||||
}{}
|
||||
|
||||
cfg := jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
err := cfg.UnmarshalFromString(`{"Field1":`, &t0)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t0).String())
|
||||
|
||||
cfgCaseSensitive := jsoniter.Config{
|
||||
CaseSensitive: true,
|
||||
}.Froze()
|
||||
|
||||
type TestObject1 struct {
|
||||
Field1 struct {
|
||||
InnerField1 string
|
||||
}
|
||||
}
|
||||
t1 := TestObject1{}
|
||||
err = cfgCaseSensitive.UnmarshalFromString(`{"Field1":{"InnerField1"`, &t1)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t1.Field1).String())
|
||||
should.Contains(err.Error(), reflect.TypeOf(t1).String())
|
||||
|
||||
type TestObject2 struct {
|
||||
Field1 int
|
||||
Field2 struct {
|
||||
InnerField1 string
|
||||
InnerField2 string
|
||||
}
|
||||
}
|
||||
t2 := TestObject2{}
|
||||
err = cfgCaseSensitive.UnmarshalFromString(`{"Field2":{"InnerField2"`, &t2)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t2.Field2).String())
|
||||
should.Contains(err.Error(), reflect.TypeOf(t2).String())
|
||||
|
||||
type TestObject3 struct {
|
||||
Field1 int
|
||||
Field2 int
|
||||
Field3 struct {
|
||||
InnerField1 string
|
||||
InnerField2 string
|
||||
InnerField3 string
|
||||
}
|
||||
}
|
||||
t3 := TestObject3{}
|
||||
err = cfgCaseSensitive.UnmarshalFromString(`{"Field3":{"InnerField3"`, &t3)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t3.Field3).String())
|
||||
should.Contains(err.Error(), reflect.TypeOf(t3).String())
|
||||
|
||||
type TestObject4 struct {
|
||||
Field1 int
|
||||
Field2 int
|
||||
Field3 int
|
||||
Field4 struct {
|
||||
InnerField1 string
|
||||
InnerField2 string
|
||||
InnerField3 string
|
||||
InnerField4 string
|
||||
}
|
||||
}
|
||||
t4 := TestObject4{}
|
||||
err = cfgCaseSensitive.UnmarshalFromString(`{"Field4":{"InnerField4"`, &t4)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t4.Field4).String())
|
||||
should.Contains(err.Error(), reflect.TypeOf(t4).String())
|
||||
|
||||
type TestObject5 struct {
|
||||
Field1 int
|
||||
Field2 int
|
||||
Field3 int
|
||||
Field4 int
|
||||
Field5 struct {
|
||||
InnerField1 string
|
||||
InnerField2 string
|
||||
InnerField3 string
|
||||
InnerField4 string
|
||||
InnerField5 string
|
||||
}
|
||||
}
|
||||
t5 := TestObject5{}
|
||||
err = cfgCaseSensitive.UnmarshalFromString(`{"Field5":{"InnerField5"`, &t5)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t5.Field5).String())
|
||||
should.Contains(err.Error(), reflect.TypeOf(t5).String())
|
||||
|
||||
type TestObject6 struct {
|
||||
Field1 int
|
||||
Field2 int
|
||||
Field3 int
|
||||
Field4 int
|
||||
Field5 int
|
||||
Field6 struct {
|
||||
InnerField1 string
|
||||
InnerField2 string
|
||||
InnerField3 string
|
||||
InnerField4 string
|
||||
InnerField5 string
|
||||
InnerField6 string
|
||||
}
|
||||
}
|
||||
t6 := TestObject6{}
|
||||
err = cfgCaseSensitive.UnmarshalFromString(`{"Field6":{"InnerField6"`, &t6)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t6.Field6).String())
|
||||
should.Contains(err.Error(), reflect.TypeOf(t6).String())
|
||||
|
||||
type TestObject7 struct {
|
||||
Field1 int
|
||||
Field2 int
|
||||
Field3 int
|
||||
Field4 int
|
||||
Field5 int
|
||||
Field6 int
|
||||
Field7 struct {
|
||||
InnerField1 string
|
||||
InnerField2 string
|
||||
InnerField3 string
|
||||
InnerField4 string
|
||||
InnerField5 string
|
||||
InnerField6 string
|
||||
InnerField7 string
|
||||
}
|
||||
}
|
||||
t7 := TestObject7{}
|
||||
err = cfgCaseSensitive.UnmarshalFromString(`{"Field7":{"InnerField7"`, &t7)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t7.Field7).String())
|
||||
should.Contains(err.Error(), reflect.TypeOf(t7).String())
|
||||
|
||||
type TestObject8 struct {
|
||||
Field1 int
|
||||
Field2 int
|
||||
Field3 int
|
||||
Field4 int
|
||||
Field5 int
|
||||
Field6 int
|
||||
Field7 int
|
||||
Field8 struct {
|
||||
InnerField1 string
|
||||
InnerField2 string
|
||||
InnerField3 string
|
||||
InnerField4 string
|
||||
InnerField5 string
|
||||
InnerField6 string
|
||||
InnerField7 string
|
||||
InnerField8 string
|
||||
}
|
||||
}
|
||||
t8 := TestObject8{}
|
||||
err = cfgCaseSensitive.UnmarshalFromString(`{"Field8":{"InnerField8"`, &t8)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t8.Field8).String())
|
||||
should.Contains(err.Error(), reflect.TypeOf(t8).String())
|
||||
|
||||
type TestObject9 struct {
|
||||
Field1 int
|
||||
Field2 int
|
||||
Field3 int
|
||||
Field4 int
|
||||
Field5 int
|
||||
Field6 int
|
||||
Field7 int
|
||||
Field8 int
|
||||
Field9 struct {
|
||||
InnerField1 string
|
||||
InnerField2 string
|
||||
InnerField3 string
|
||||
InnerField4 string
|
||||
InnerField5 string
|
||||
InnerField6 string
|
||||
InnerField7 string
|
||||
InnerField8 string
|
||||
InnerField9 string
|
||||
}
|
||||
}
|
||||
t9 := TestObject9{}
|
||||
err = cfgCaseSensitive.UnmarshalFromString(`{"Field9":{"InnerField9"`, &t9)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t9.Field9).String())
|
||||
should.Contains(err.Error(), reflect.TypeOf(t9).String())
|
||||
|
||||
type TestObject10 struct {
|
||||
Field1 int
|
||||
Field2 int
|
||||
Field3 int
|
||||
Field4 int
|
||||
Field5 int
|
||||
Field6 int
|
||||
Field7 int
|
||||
Field8 int
|
||||
Field9 int
|
||||
Field10 struct {
|
||||
InnerField1 string
|
||||
InnerField2 string
|
||||
InnerField3 string
|
||||
InnerField4 string
|
||||
InnerField5 string
|
||||
InnerField6 string
|
||||
InnerField7 string
|
||||
InnerField8 string
|
||||
InnerField9 string
|
||||
InnerField10 string
|
||||
}
|
||||
}
|
||||
t10 := TestObject10{}
|
||||
err = cfgCaseSensitive.UnmarshalFromString(`{"Field10":{"InnerField10"`, &t10)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t10.Field10).String())
|
||||
should.Contains(err.Error(), reflect.TypeOf(t10).String())
|
||||
|
||||
err = cfg.UnmarshalFromString(`{"Field10":{"InnerField10"`, &t10)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t10.Field10).String())
|
||||
should.Contains(err.Error(), reflect.TypeOf(t10).String())
|
||||
}
|
||||
|
@ -2,10 +2,10 @@ package misc_tests
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
"strings"
|
||||
"testing"
|
||||
"github.com/json-iterator/go"
|
||||
)
|
||||
|
||||
func Test_jsoniter_RawMessage(t *testing.T) {
|
||||
@ -42,21 +42,40 @@ func Test_marshal_invalid_json_raw_message(t *testing.T) {
|
||||
should.Nil(aouterr)
|
||||
}
|
||||
|
||||
func Test_marshal_nil_json_raw_message(t *testing.T) {
|
||||
type A struct {
|
||||
Nil1 jsoniter.RawMessage `json:"raw1"`
|
||||
Nil2 json.RawMessage `json:"raw2"`
|
||||
}
|
||||
|
||||
a := A{}
|
||||
should := require.New(t)
|
||||
aout, aouterr := jsoniter.Marshal(&a)
|
||||
should.Equal(`{"raw1":null,"raw2":null}`, string(aout))
|
||||
should.Nil(aouterr)
|
||||
|
||||
a.Nil1 = []byte(`Any`)
|
||||
a.Nil2 = []byte(`Any`)
|
||||
should.Nil(jsoniter.Unmarshal(aout, &a))
|
||||
should.Nil(a.Nil1)
|
||||
should.Nil(a.Nil2)
|
||||
}
|
||||
|
||||
func Test_raw_message_memory_not_copied_issue(t *testing.T) {
|
||||
jsonStream := `{"name":"xxxxx","bundle_id":"com.zonst.majiang","app_platform":"ios","app_category":"100103", "budget_day":1000,"bidding_min":1,"bidding_max":2,"bidding_type":"CPM", "freq":{"open":true,"type":"day","num":100},"speed":1, "targeting":{"vendor":{"open":true,"list":["zonst"]}, "geo_code":{"open":true,"list":["156110100"]},"app_category":{"open":true,"list":["100101"]}, "day_parting":{"open":true,"list":["100409","100410"]},"device_type":{"open":true,"list":["ipad"]}, "os_version":{"open":true,"list":[10]},"carrier":{"open":true,"list":["mobile"]}, "network":{"open":true,"list":["4G"]}},"url":{"tracking_imp_url":"http://www.baidu.com", "tracking_clk_url":"http://www.baidu.com","jump_url":"http://www.baidu.com","deep_link_url":"http://www.baidu.com"}}`
|
||||
type IteratorObject struct {
|
||||
Name *string `json:"name"`
|
||||
BundleId *string `json:"bundle_id"`
|
||||
AppCategory *string `json:"app_category"`
|
||||
AppPlatform *string `json:"app_platform"`
|
||||
BudgetDay *float32 `json:"budget_day"`
|
||||
BiddingMax *float32 `json:"bidding_max"`
|
||||
BiddingMin *float32 `json:"bidding_min"`
|
||||
BiddingType *string `json:"bidding_type"`
|
||||
Name *string `json:"name"`
|
||||
BundleId *string `json:"bundle_id"`
|
||||
AppCategory *string `json:"app_category"`
|
||||
AppPlatform *string `json:"app_platform"`
|
||||
BudgetDay *float32 `json:"budget_day"`
|
||||
BiddingMax *float32 `json:"bidding_max"`
|
||||
BiddingMin *float32 `json:"bidding_min"`
|
||||
BiddingType *string `json:"bidding_type"`
|
||||
Freq *jsoniter.RawMessage `json:"freq"`
|
||||
Targeting *jsoniter.RawMessage `json:"targeting"`
|
||||
Url *jsoniter.RawMessage `json:"url"`
|
||||
Speed *int `json:"speed" db:"speed"`
|
||||
Speed *int `json:"speed" db:"speed"`
|
||||
}
|
||||
|
||||
obj := &IteratorObject{}
|
||||
|
1
pool.go
@ -23,6 +23,7 @@ func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
|
||||
}
|
||||
|
||||
func (cfg *frozenConfig) ReturnStream(stream *Stream) {
|
||||
stream.out = nil
|
||||
stream.Error = nil
|
||||
stream.Attachment = nil
|
||||
cfg.streamPool.Put(stream)
|
||||
|
24
reflect.go
@ -4,7 +4,8 @@ import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
"github.com/v2pro/plz/reflect2"
|
||||
|
||||
"github.com/modern-go/reflect2"
|
||||
)
|
||||
|
||||
// ValDecoder is an internal type registered to cache as needed.
|
||||
@ -40,6 +41,14 @@ type ctx struct {
|
||||
decoders map[reflect2.Type]ValDecoder
|
||||
}
|
||||
|
||||
func (b *ctx) caseSensitive() bool {
|
||||
if b.frozenConfig == nil {
|
||||
// default is case-insensitive
|
||||
return false
|
||||
}
|
||||
return b.frozenConfig.caseSensitive
|
||||
}
|
||||
|
||||
func (b *ctx) append(prefix string) *ctx {
|
||||
return &ctx{
|
||||
frozenConfig: b.frozenConfig,
|
||||
@ -51,11 +60,12 @@ func (b *ctx) append(prefix string) *ctx {
|
||||
|
||||
// ReadVal copy the underlying JSON into go interface, same as json.Unmarshal
|
||||
func (iter *Iterator) ReadVal(obj interface{}) {
|
||||
depth := iter.depth
|
||||
cacheKey := reflect2.RTypeOf(obj)
|
||||
decoder := iter.cfg.getDecoderFromCache(cacheKey)
|
||||
if decoder == nil {
|
||||
typ := reflect2.TypeOf(obj)
|
||||
if typ.Kind() != reflect.Ptr {
|
||||
if typ == nil || typ.Kind() != reflect.Ptr {
|
||||
iter.ReportError("ReadVal", "can only unmarshal into pointer")
|
||||
return
|
||||
}
|
||||
@ -67,6 +77,10 @@ func (iter *Iterator) ReadVal(obj interface{}) {
|
||||
return
|
||||
}
|
||||
decoder.Decode(ptr, iter)
|
||||
if iter.depth != depth {
|
||||
iter.ReportError("ReadVal", "unexpected mismatched nesting")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// WriteVal copy the go interface into underlying JSON, same as json.Marshal
|
||||
@ -111,7 +125,8 @@ func decoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
for _, extension := range extensions {
|
||||
decoder = extension.DecorateDecoder(typ, decoder)
|
||||
}
|
||||
for _, extension := range ctx.extensions {
|
||||
decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
decoder = extension.DecorateDecoder(typ, decoder)
|
||||
}
|
||||
return decoder
|
||||
@ -213,7 +228,8 @@ func encoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
for _, extension := range extensions {
|
||||
encoder = extension.DecorateEncoder(typ, encoder)
|
||||
}
|
||||
for _, extension := range ctx.extensions {
|
||||
encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
|
||||
for _, extension := range ctx.extraExtensions {
|
||||
encoder = extension.DecorateEncoder(typ, encoder)
|
||||
}
|
||||
return encoder
|
||||
|
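The new caseSensitive helper reads a flag exposed on the public Config struct. A small sketch of the difference it makes, with an invented struct and inputs:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type user struct {
	Name string `json:"Name"`
}

func main() {
	var lax, strict user
	// the default configs keep the case-insensitive field matching of encoding/json
	_ = jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(`{"name":"a"}`), &lax)
	// a CaseSensitive config only matches the exact field name / tag
	cfg := jsoniter.Config{CaseSensitive: true}.Froze()
	_ = cfg.Unmarshal([]byte(`{"name":"b"}`), &strict)
	fmt.Println(lax.Name, strict.Name == "") // a true
}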
@@ -2,9 +2,9 @@ package jsoniter

import (
	"fmt"
	"github.com/modern-go/reflect2"
	"io"
	"unsafe"
	"github.com/v2pro/plz/reflect2"
)

func decoderOfArray(ctx *ctx, typ reflect2.Type) ValDecoder {
@@ -1,9 +1,9 @@
package jsoniter

import (
	"github.com/v2pro/plz/reflect2"
	"unsafe"
	"github.com/modern-go/reflect2"
	"reflect"
	"unsafe"
)

type dynamicEncoder struct {
@@ -2,12 +2,12 @@ package jsoniter

import (
	"fmt"
	"github.com/modern-go/reflect2"
	"reflect"
	"sort"
	"strings"
	"unicode"
	"unsafe"
	"github.com/v2pro/plz/reflect2"
)

var typeDecoders = map[string]ValDecoder{}
@@ -18,8 +18,8 @@ var extensions = []Extension{}

// StructDescriptor describe how should we encode/decode the struct
type StructDescriptor struct {
	Type   reflect2.Type
	Fields []*Binding
}

// GetField get one field from the descriptor by its name.
@@ -47,6 +47,8 @@ type Binding struct {
// Can also rename fields by UpdateStructDescriptor.
type Extension interface {
	UpdateStructDescriptor(structDescriptor *StructDescriptor)
	CreateMapKeyDecoder(typ reflect2.Type) ValDecoder
	CreateMapKeyEncoder(typ reflect2.Type) ValEncoder
	CreateDecoder(typ reflect2.Type) ValDecoder
	CreateEncoder(typ reflect2.Type) ValEncoder
	DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder
@@ -61,6 +63,16 @@ type DummyExtension struct {
func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
}

// CreateMapKeyDecoder No-op
func (extension *DummyExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
	return nil
}

// CreateMapKeyEncoder No-op
func (extension *DummyExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
	return nil
}

// CreateDecoder No-op
func (extension *DummyExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
	return nil
@@ -97,6 +109,16 @@ func (extension EncoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
	return extension[typ]
}

// CreateMapKeyDecoder No-op
func (extension EncoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
	return nil
}

// CreateMapKeyEncoder No-op
func (extension EncoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
	return nil
}

// DecorateDecoder No-op
func (extension EncoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
	return decoder
@@ -113,6 +135,16 @@ type DecoderExtension map[reflect2.Type]ValDecoder
func (extension DecoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
}

// CreateMapKeyDecoder No-op
func (extension DecoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
	return nil
}

// CreateMapKeyEncoder No-op
func (extension DecoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
	return nil
}

// CreateDecoder get decoder from map
func (extension DecoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
	return extension[typ]
@@ -214,7 +246,8 @@ func getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
	for _, extension := range extensions {
		decoder = extension.DecorateDecoder(typ, decoder)
	}
	for _, extension := range ctx.extensions {
	decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
	for _, extension := range ctx.extraExtensions {
		decoder = extension.DecorateDecoder(typ, decoder)
	}
}
@@ -227,14 +260,18 @@ func _getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
			return decoder
		}
	}
	for _, extension := range ctx.extensions {
	decoder := ctx.decoderExtension.CreateDecoder(typ)
	if decoder != nil {
		return decoder
	}
	for _, extension := range ctx.extraExtensions {
		decoder := extension.CreateDecoder(typ)
		if decoder != nil {
			return decoder
		}
	}
	typeName := typ.String()
	decoder := typeDecoders[typeName]
	decoder = typeDecoders[typeName]
	if decoder != nil {
		return decoder
	}
@@ -254,7 +291,8 @@ func getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
	for _, extension := range extensions {
		encoder = extension.DecorateEncoder(typ, encoder)
	}
	for _, extension := range ctx.extensions {
	encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
	for _, extension := range ctx.extraExtensions {
		encoder = extension.DecorateEncoder(typ, encoder)
	}
}
@@ -268,14 +306,18 @@ func _getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
			return encoder
		}
	}
	for _, extension := range ctx.extensions {
	encoder := ctx.encoderExtension.CreateEncoder(typ)
	if encoder != nil {
		return encoder
	}
	for _, extension := range ctx.extraExtensions {
		encoder := extension.CreateEncoder(typ)
		if encoder != nil {
			return encoder
		}
	}
	typeName := typ.String()
	encoder := typeEncoders[typeName]
	encoder = typeEncoders[typeName]
	if encoder != nil {
		return encoder
	}
@@ -296,13 +338,13 @@ func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
	for i := 0; i < structType.NumField(); i++ {
		field := structType.Field(i)
		tag, hastag := field.Tag().Lookup(ctx.getTagKey())
		if ctx.onlyTaggedField && !hastag {
		if ctx.onlyTaggedField && !hastag && !field.Anonymous() {
			continue
		}
		if tag == "-" || field.Name() == "_" {
			continue
		}
		tagParts := strings.Split(tag, ",")
		if tag == "-" {
			continue
		}
		if field.Anonymous() && (tag == "" || tagParts[0] == "") {
			if field.Type().Kind() == reflect.Struct {
				structDescriptor := describeStruct(ctx, field.Type())
@@ -355,13 +397,15 @@ func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
}
func createStructDescriptor(ctx *ctx, typ reflect2.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor {
	structDescriptor := &StructDescriptor{
		Type:   typ,
		Fields: bindings,
	}
	for _, extension := range extensions {
		extension.UpdateStructDescriptor(structDescriptor)
	}
	for _, extension := range ctx.extensions {
	ctx.encoderExtension.UpdateStructDescriptor(structDescriptor)
	ctx.decoderExtension.UpdateStructDescriptor(structDescriptor)
	for _, extension := range ctx.extraExtensions {
		extension.UpdateStructDescriptor(structDescriptor)
	}
	processTags(structDescriptor, ctx.frozenConfig)
@@ -431,7 +475,7 @@ func calcFieldNames(originalFieldName string, tagProvidedFieldName string, whole
		fieldNames = []string{tagProvidedFieldName}
	}
	// private?
	isNotExported := unicode.IsLower(rune(originalFieldName[0]))
	isNotExported := unicode.IsLower(rune(originalFieldName[0])) || originalFieldName[0] == '_'
	if isNotExported {
		fieldNames = []string{}
	}
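The Extension interface above now includes the map-key hooks, with DummyExtension supplying no-op defaults so custom extensions only override what they need. A hedged sketch of such an extension registered globally; the renaming rule is invented for illustration:

package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

// upperCaseFields renames every struct field to upper case in the JSON output.
type upperCaseFields struct {
	jsoniter.DummyExtension // no-op defaults for the hooks we do not override
}

func (e *upperCaseFields) UpdateStructDescriptor(sd *jsoniter.StructDescriptor) {
	for _, binding := range sd.Fields {
		binding.ToNames = []string{strings.ToUpper(binding.Field.Name())}
	}
}

func main() {
	jsoniter.RegisterExtension(&upperCaseFields{})
	out, _ := jsoniter.Marshal(struct{ Name string }{"value"})
	fmt.Println(string(out)) // {"NAME":"value"}
}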
@@ -2,9 +2,9 @@ package jsoniter

import (
	"encoding/json"
	"github.com/modern-go/reflect2"
	"strconv"
	"unsafe"
	"github.com/v2pro/plz/reflect2"
)

type Number string
@@ -109,4 +109,4 @@ func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {

func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
	return len(*((*Number)(ptr))) == 0
}
@@ -1,9 +1,9 @@
package jsoniter

import (
	"unsafe"
	"encoding/json"
	"github.com/v2pro/plz/reflect2"
	"github.com/modern-go/reflect2"
	"unsafe"
)

var jsonRawMessageType = reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()
@@ -33,11 +33,19 @@ type jsonRawMessageCodec struct {
}

func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
	*((*json.RawMessage)(ptr)) = json.RawMessage(iter.SkipAndReturnBytes())
	if iter.ReadNil() {
		*((*json.RawMessage)(ptr)) = nil
	} else {
		*((*json.RawMessage)(ptr)) = iter.SkipAndReturnBytes()
	}
}

func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
	if *((*json.RawMessage)(ptr)) == nil {
		stream.WriteNil()
	} else {
		stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
	}
}

func (codec *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
@@ -48,13 +56,21 @@ type jsoniterRawMessageCodec struct {
}

func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
	*((*RawMessage)(ptr)) = RawMessage(iter.SkipAndReturnBytes())
	if iter.ReadNil() {
		*((*RawMessage)(ptr)) = nil
	} else {
		*((*RawMessage)(ptr)) = iter.SkipAndReturnBytes()
	}
}

func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteRaw(string(*((*RawMessage)(ptr))))
	if *((*RawMessage)(ptr)) == nil {
		stream.WriteNil()
	} else {
		stream.WriteRaw(string(*((*RawMessage)(ptr))))
	}
}

func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
	return len(*((*RawMessage)(ptr))) == 0
}
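A short sketch of the behaviour the two codecs above change: an explicit JSON null now decodes to a nil RawMessage, and a nil RawMessage encodes back to null (struct and field names are illustrative):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type payload struct {
	Data jsoniter.RawMessage `json:"data"`
}

func main() {
	var p payload
	_ = jsoniter.Unmarshal([]byte(`{"data":null}`), &p)
	fmt.Println(p.Data == nil) // true; previously the literal bytes "null" were kept

	out, _ := jsoniter.Marshal(payload{})
	fmt.Println(string(out)) // {"data":null}
}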
reflect_map.go
@@ -1,11 +1,13 @@
package jsoniter

import (
	"fmt"
	"io"
	"reflect"
	"sort"
	"unsafe"
	"github.com/v2pro/plz/reflect2"
	"fmt"

	"github.com/modern-go/reflect2"
)

func decoderOfMap(ctx *ctx, typ reflect2.Type) ValDecoder {
@@ -38,6 +40,43 @@ func encoderOfMap(ctx *ctx, typ reflect2.Type) ValEncoder {
}

func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
	decoder := ctx.decoderExtension.CreateMapKeyDecoder(typ)
	if decoder != nil {
		return decoder
	}
	for _, extension := range ctx.extraExtensions {
		decoder := extension.CreateMapKeyDecoder(typ)
		if decoder != nil {
			return decoder
		}
	}

	ptrType := reflect2.PtrTo(typ)
	if ptrType.Implements(unmarshalerType) {
		return &referenceDecoder{
			&unmarshalerDecoder{
				valType: ptrType,
			},
		}
	}
	if typ.Implements(unmarshalerType) {
		return &unmarshalerDecoder{
			valType: typ,
		}
	}
	if ptrType.Implements(textUnmarshalerType) {
		return &referenceDecoder{
			&textUnmarshalerDecoder{
				valType: ptrType,
			},
		}
	}
	if typ.Implements(textUnmarshalerType) {
		return &textUnmarshalerDecoder{
			valType: typ,
		}
	}

	switch typ.Kind() {
	case reflect.String:
		return decoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
@@ -52,24 +91,36 @@ func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
		typ = reflect2.DefaultTypeOfKind(typ.Kind())
		return &numericMapKeyDecoder{decoderOfType(ctx, typ)}
	default:
		ptrType := reflect2.PtrTo(typ)
		if ptrType.Implements(textMarshalerType) {
			return &referenceDecoder{
				&textUnmarshalerDecoder{
					valType: ptrType,
				},
			}
		}
		if typ.Implements(textMarshalerType) {
			return &textUnmarshalerDecoder{
				valType: typ,
			}
		}
		return &lazyErrorDecoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
	}
}

func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
	encoder := ctx.encoderExtension.CreateMapKeyEncoder(typ)
	if encoder != nil {
		return encoder
	}
	for _, extension := range ctx.extraExtensions {
		encoder := extension.CreateMapKeyEncoder(typ)
		if encoder != nil {
			return encoder
		}
	}

	if typ.Kind() != reflect.String {
		if typ == textMarshalerType {
			return &directTextMarshalerEncoder{
				stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
			}
		}
		if typ.Implements(textMarshalerType) {
			return &textMarshalerEncoder{
				valType:       typ,
				stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
			}
		}
	}

	switch typ.Kind() {
	case reflect.String:
		return encoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
@@ -84,16 +135,8 @@ func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
		typ = reflect2.DefaultTypeOfKind(typ.Kind())
		return &numericMapKeyEncoder{encoderOfType(ctx, typ)}
	default:
		if typ == textMarshalerType {
			return &directTextMarshalerEncoder{
				stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
			}
		}
		if typ.Implements(textMarshalerType) {
			return &textMarshalerEncoder{
				valType:       typ,
				stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
			}
		if typ.Kind() == reflect.Interface {
			return &dynamicMapKeyEncoder{ctx, typ}
		}
		return &lazyErrorEncoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
	}
@@ -127,10 +170,6 @@ func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if c == '}' {
		return
	}
	if c != '"' {
		iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
		return
	}
	iter.unreadByte()
	key := decoder.keyType.UnsafeNew()
	decoder.keyDecoder.Decode(key, iter)
@@ -191,6 +230,21 @@ func (encoder *numericMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
	return false
}

type dynamicMapKeyEncoder struct {
	ctx     *ctx
	valType reflect2.Type
}

func (encoder *dynamicMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	obj := encoder.valType.UnsafeIndirect(ptr)
	encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).Encode(reflect2.PtrOf(obj), stream)
}

func (encoder *dynamicMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
	obj := encoder.valType.UnsafeIndirect(ptr)
	return encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).IsEmpty(reflect2.PtrOf(obj))
}

type mapEncoder struct {
	mapType    *reflect2.UnsafeMapType
	keyEncoder ValEncoder
@@ -198,6 +252,10 @@ type mapEncoder struct {
}

func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	if *(*unsafe.Pointer)(ptr) == nil {
		stream.WriteNil()
		return
	}
	stream.WriteObjectStart()
	iter := encoder.mapType.UnsafeIterate(ptr)
	for i := 0; iter.HasNext(); i++ {
@@ -235,13 +293,17 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteObjectStart()
	mapIter := encoder.mapType.UnsafeIterate(ptr)
	subStream := stream.cfg.BorrowStream(nil)
	subStream.Attachment = stream.Attachment
	subIter := stream.cfg.BorrowIterator(nil)
	keyValues := encodedKeyValues{}
	for mapIter.HasNext() {
		subStream.buf = make([]byte, 0, 64)
		key, elem := mapIter.UnsafeNext()
		subStreamIndex := subStream.Buffered()
		encoder.keyEncoder.Encode(key, subStream)
		encodedKey := subStream.Buffer()
		if subStream.Error != nil && subStream.Error != io.EOF && stream.Error == nil {
			stream.Error = subStream.Error
		}
		encodedKey := subStream.Buffer()[subStreamIndex:]
		subIter.ResetBytes(encodedKey)
		decodedKey := subIter.ReadString()
		if stream.indention > 0 {
@@ -252,7 +314,7 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
		encoder.elemEncoder.Encode(elem, subStream)
		keyValues = append(keyValues, encodedKV{
			key:      decodedKey,
			keyValue: subStream.Buffer(),
			keyValue: subStream.Buffer()[subStreamIndex:],
		})
	}
	sort.Sort(keyValues)
@@ -262,6 +324,9 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
		}
		stream.Write(keyValue.keyValue)
	}
	if subStream.Error != nil && stream.Error == nil {
		stream.Error = subStream.Error
	}
	stream.WriteObjectEnd()
	stream.cfg.ReturnStream(subStream)
	stream.cfg.ReturnIterator(subIter)
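The mapEncoder and sortKeysMapEncoder paths touched above are the ones exercised by the SortMapKeys option. A small sketch of the observable behaviour, with illustrative values:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	cfg := jsoniter.Config{SortMapKeys: true}.Froze()
	out, _ := cfg.Marshal(map[string]int{"b": 2, "a": 1, "c": 3})
	fmt.Println(string(out)) // {"a":1,"b":2,"c":3}

	var m map[string]int // nil map
	out, _ = jsoniter.Marshal(m)
	fmt.Println(string(out)) // null, per the nil check added to mapEncoder above
}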
@@ -1,10 +1,11 @@
package jsoniter

import (
	"github.com/v2pro/plz/reflect2"
	"unsafe"
	"encoding"
	"encoding/json"
	"unsafe"

	"github.com/modern-go/reflect2"
)

var marshalerType = reflect2.TypeOfPtr((*json.Marshaler)(nil)).Elem()
@@ -98,6 +99,12 @@ func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	if err != nil {
		stream.Error = err
	} else {
		// html escape was already done by jsoniter
		// but the extra '\n' should be trimed
		l := len(bytes)
		if l > 0 && bytes[l-1] == '\n' {
			bytes = bytes[:l-1]
		}
		stream.Write(bytes)
	}
}
@@ -3,10 +3,14 @@ package jsoniter

import (
	"encoding/base64"
	"reflect"
	"strconv"
	"unsafe"
	"github.com/v2pro/plz/reflect2"

	"github.com/modern-go/reflect2"
)

const ptrSize = 32 << uintptr(^uintptr(0)>>63)

func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder {
	if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 {
		sliceDecoder := decoderOfSlice(ctx, typ)
@@ -24,7 +28,10 @@ func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder {
		if typeName != "int" {
			return encoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
		}
		return &intCodec{}
		if strconv.IntSize == 32 {
			return &int32Codec{}
		}
		return &int64Codec{}
	case reflect.Int8:
		if typeName != "int8" {
			return encoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
@@ -49,7 +56,10 @@ func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder {
		if typeName != "uint" {
			return encoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
		}
		return &uintCodec{}
		if strconv.IntSize == 32 {
			return &uint32Codec{}
		}
		return &uint64Codec{}
	case reflect.Uint8:
		if typeName != "uint8" {
			return encoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
@@ -69,7 +79,10 @@ func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder {
		if typeName != "uintptr" {
			return encoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
		}
		return &uintptrCodec{}
		if ptrSize == 32 {
			return &uint32Codec{}
		}
		return &uint64Codec{}
	case reflect.Uint64:
		if typeName != "uint64" {
			return encoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
@@ -110,7 +123,10 @@ func createDecoderOfNative(ctx *ctx, typ reflect2.Type) ValDecoder {
		if typeName != "int" {
			return decoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
		}
		return &intCodec{}
		if strconv.IntSize == 32 {
			return &int32Codec{}
		}
		return &int64Codec{}
	case reflect.Int8:
		if typeName != "int8" {
			return decoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
@@ -135,7 +151,10 @@ func createDecoderOfNative(ctx *ctx, typ reflect2.Type) ValDecoder {
		if typeName != "uint" {
			return decoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
		}
		return &uintCodec{}
		if strconv.IntSize == 32 {
			return &uint32Codec{}
		}
		return &uint64Codec{}
	case reflect.Uint8:
		if typeName != "uint8" {
			return decoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
@@ -155,7 +174,10 @@ func createDecoderOfNative(ctx *ctx, typ reflect2.Type) ValDecoder {
		if typeName != "uintptr" {
			return decoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
		}
		return &uintptrCodec{}
		if ptrSize == 32 {
			return &uint32Codec{}
		}
		return &uint64Codec{}
	case reflect.Uint64:
		if typeName != "uint64" {
			return decoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
@@ -196,40 +218,6 @@ func (codec *stringCodec) IsEmpty(ptr unsafe.Pointer) bool {
	return *((*string)(ptr)) == ""
}

type intCodec struct {
}

func (codec *intCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if !iter.ReadNil() {
		*((*int)(ptr)) = iter.ReadInt()
	}
}

func (codec *intCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteInt(*((*int)(ptr)))
}

func (codec *intCodec) IsEmpty(ptr unsafe.Pointer) bool {
	return *((*int)(ptr)) == 0
}

type uintptrCodec struct {
}

func (codec *uintptrCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if !iter.ReadNil() {
		*((*uintptr)(ptr)) = uintptr(iter.ReadUint64())
	}
}

func (codec *uintptrCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteUint64(uint64(*((*uintptr)(ptr))))
}

func (codec *uintptrCodec) IsEmpty(ptr unsafe.Pointer) bool {
	return *((*uintptr)(ptr)) == 0
}

type int8Codec struct {
}

@@ -298,24 +286,6 @@ func (codec *int64Codec) IsEmpty(ptr unsafe.Pointer) bool {
	return *((*int64)(ptr)) == 0
}

type uintCodec struct {
}

func (codec *uintCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if !iter.ReadNil() {
		*((*uint)(ptr)) = iter.ReadUint()
		return
	}
}

func (codec *uintCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteUint(*((*uint)(ptr)))
}

func (codec *uintCodec) IsEmpty(ptr unsafe.Pointer) bool {
	return *((*uint)(ptr)) == 0
}

type uint8Codec struct {
}

@@ -436,7 +406,7 @@ func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool {
}

type base64Codec struct {
	sliceType    *reflect2.UnsafeSliceType
	sliceDecoder ValDecoder
}

@@ -447,16 +417,11 @@ func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
	}
	switch iter.WhatIsNext() {
	case StringValue:
		encoding := base64.StdEncoding
		src := iter.SkipAndReturnBytes()
		src = src[1 : len(src)-1]
		decodedLen := encoding.DecodedLen(len(src))
		dst := make([]byte, decodedLen)
		len, err := encoding.Decode(dst, src)
		src := iter.ReadString()
		dst, err := base64.StdEncoding.DecodeString(src)
		if err != nil {
			iter.ReportError("decode base64", err.Error())
		} else {
			dst = dst[:len]
			codec.sliceType.UnsafeSet(ptr, unsafe.Pointer(&dst))
		}
	case ArrayValue:
@@ -467,17 +432,19 @@ func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
}

func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
	src := *((*[]byte)(ptr))
	if len(src) == 0 {
	if codec.sliceType.UnsafeIsNil(ptr) {
		stream.WriteNil()
		return
	}
	src := *((*[]byte)(ptr))
	encoding := base64.StdEncoding
	stream.writeByte('"')
	size := encoding.EncodedLen(len(src))
	buf := make([]byte, size)
	encoding.Encode(buf, src)
	stream.buf = append(stream.buf, buf...)
	if len(src) != 0 {
		size := encoding.EncodedLen(len(src))
		buf := make([]byte, size)
		encoding.Encode(buf, src)
		stream.buf = append(stream.buf, buf...)
	}
	stream.writeByte('"')
}
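A short sketch of the []byte behaviour the base64Codec hunks above establish: nil slices become null, empty but non-nil slices become an empty string, and data round-trips through standard base64:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	out, _ := jsoniter.Marshal([]byte(nil))
	fmt.Println(string(out)) // null
	out, _ = jsoniter.Marshal([]byte{})
	fmt.Println(string(out)) // ""
	out, _ = jsoniter.Marshal([]byte("hi"))
	fmt.Println(string(out)) // "aGk="

	var data []byte
	_ = jsoniter.Unmarshal([]byte(`"aGk="`), &data)
	fmt.Println(string(data)) // hi
}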
@@ -1,18 +1,14 @@
package jsoniter

import (
	"reflect"
	"github.com/modern-go/reflect2"
	"unsafe"
	"github.com/v2pro/plz/reflect2"
)

func decoderOfOptional(ctx *ctx, typ reflect2.Type) ValDecoder {
	ptrType := typ.(*reflect2.UnsafePtrType)
	elemType := ptrType.Elem()
	decoder := decoderOfType(ctx, elemType)
	if ctx.prefix == "" && elemType.Kind() == reflect.Ptr {
		return &dereferenceDecoder{elemType, decoder}
	}
	return &OptionalDecoder{elemType, decoder}
}
@@ -2,9 +2,9 @@ package jsoniter

import (
	"fmt"
	"github.com/modern-go/reflect2"
	"io"
	"unsafe"
	"github.com/v2pro/plz/reflect2"
)

func decoderOfSlice(ctx *ctx, typ reflect2.Type) ValDecoder {
@ -5,7 +5,8 @@ import (
|
||||
"io"
|
||||
"strings"
|
||||
"unsafe"
|
||||
"github.com/v2pro/plz/reflect2"
|
||||
|
||||
"github.com/modern-go/reflect2"
|
||||
)
|
||||
|
||||
func decoderOfStruct(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
@ -31,6 +32,15 @@ func decoderOfStruct(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
for k, binding := range bindings {
|
||||
fields[k] = binding.Decoder.(*structFieldDecoder)
|
||||
}
|
||||
|
||||
if !ctx.caseSensitive() {
|
||||
for k, binding := range bindings {
|
||||
if _, found := fields[strings.ToLower(k)]; !found {
|
||||
fields[strings.ToLower(k)] = binding.Decoder.(*structFieldDecoder)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return createStructDecoder(ctx, typ, fields)
|
||||
}
|
||||
|
||||
@ -41,12 +51,13 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
|
||||
knownHash := map[int64]struct{}{
|
||||
0: {},
|
||||
}
|
||||
|
||||
switch len(fields) {
|
||||
case 0:
|
||||
return &skipObjectDecoder{typ}
|
||||
case 1:
|
||||
for fieldName, fieldDecoder := range fields {
|
||||
fieldHash := calcHash(fieldName)
|
||||
fieldHash := calcHash(fieldName, ctx.caseSensitive())
|
||||
_, known := knownHash[fieldHash]
|
||||
if known {
|
||||
return &generalStructDecoder{typ, fields, false}
|
||||
@ -60,7 +71,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
|
||||
var fieldDecoder1 *structFieldDecoder
|
||||
var fieldDecoder2 *structFieldDecoder
|
||||
for fieldName, fieldDecoder := range fields {
|
||||
fieldHash := calcHash(fieldName)
|
||||
fieldHash := calcHash(fieldName, ctx.caseSensitive())
|
||||
_, known := knownHash[fieldHash]
|
||||
if known {
|
||||
return &generalStructDecoder{typ, fields, false}
|
||||
@ -83,7 +94,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
|
||||
var fieldDecoder2 *structFieldDecoder
|
||||
var fieldDecoder3 *structFieldDecoder
|
||||
for fieldName, fieldDecoder := range fields {
|
||||
fieldHash := calcHash(fieldName)
|
||||
fieldHash := calcHash(fieldName, ctx.caseSensitive())
|
||||
_, known := knownHash[fieldHash]
|
||||
if known {
|
||||
return &generalStructDecoder{typ, fields, false}
|
||||
@ -114,7 +125,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
|
||||
var fieldDecoder3 *structFieldDecoder
|
||||
var fieldDecoder4 *structFieldDecoder
|
||||
for fieldName, fieldDecoder := range fields {
|
||||
fieldHash := calcHash(fieldName)
|
||||
fieldHash := calcHash(fieldName, ctx.caseSensitive())
|
||||
_, known := knownHash[fieldHash]
|
||||
if known {
|
||||
return &generalStructDecoder{typ, fields, false}
|
||||
@ -151,7 +162,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
|
||||
var fieldDecoder4 *structFieldDecoder
|
||||
var fieldDecoder5 *structFieldDecoder
|
||||
for fieldName, fieldDecoder := range fields {
|
||||
fieldHash := calcHash(fieldName)
|
||||
fieldHash := calcHash(fieldName, ctx.caseSensitive())
|
||||
_, known := knownHash[fieldHash]
|
||||
if known {
|
||||
return &generalStructDecoder{typ, fields, false}
|
||||
@ -194,7 +205,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
|
||||
var fieldDecoder5 *structFieldDecoder
|
||||
var fieldDecoder6 *structFieldDecoder
|
||||
for fieldName, fieldDecoder := range fields {
|
||||
fieldHash := calcHash(fieldName)
|
||||
fieldHash := calcHash(fieldName, ctx.caseSensitive())
|
||||
_, known := knownHash[fieldHash]
|
||||
if known {
|
||||
return &generalStructDecoder{typ, fields, false}
|
||||
@ -243,7 +254,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
|
||||
var fieldDecoder6 *structFieldDecoder
|
||||
var fieldDecoder7 *structFieldDecoder
|
||||
for fieldName, fieldDecoder := range fields {
|
||||
fieldHash := calcHash(fieldName)
|
||||
fieldHash := calcHash(fieldName, ctx.caseSensitive())
|
||||
_, known := knownHash[fieldHash]
|
||||
if known {
|
||||
return &generalStructDecoder{typ, fields, false}
|
||||
@ -298,7 +309,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
|
||||
var fieldDecoder7 *structFieldDecoder
|
||||
var fieldDecoder8 *structFieldDecoder
|
||||
for fieldName, fieldDecoder := range fields {
|
||||
fieldHash := calcHash(fieldName)
|
||||
fieldHash := calcHash(fieldName, ctx.caseSensitive())
|
||||
_, known := knownHash[fieldHash]
|
||||
if known {
|
||||
return &generalStructDecoder{typ, fields, false}
|
||||
@ -359,7 +370,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
|
||||
var fieldDecoder8 *structFieldDecoder
|
||||
var fieldDecoder9 *structFieldDecoder
|
||||
for fieldName, fieldDecoder := range fields {
|
||||
fieldHash := calcHash(fieldName)
|
||||
fieldHash := calcHash(fieldName, ctx.caseSensitive())
|
||||
_, known := knownHash[fieldHash]
|
||||
if known {
|
||||
return &generalStructDecoder{typ, fields, false}
|
||||
@ -426,7 +437,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
|
||||
var fieldDecoder9 *structFieldDecoder
|
||||
var fieldDecoder10 *structFieldDecoder
|
||||
for fieldName, fieldDecoder := range fields {
|
||||
fieldHash := calcHash(fieldName)
|
||||
fieldHash := calcHash(fieldName, ctx.caseSensitive())
|
||||
_, known := knownHash[fieldHash]
|
||||
if known {
|
||||
return &generalStructDecoder{typ, fields, false}
|
||||
@ -489,13 +500,20 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
|
||||
if !iter.readObjectStart() {
|
||||
return
|
||||
}
|
||||
decoder.decodeOneField(ptr, iter)
|
||||
for iter.nextToken() == ',' {
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
var c byte
|
||||
for c = ','; c == ','; c = iter.nextToken() {
|
||||
decoder.decodeOneField(ptr, iter)
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
|
||||
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
|
||||
}
|
||||
if c != '}' {
|
||||
iter.ReportError("struct Decode", `expect }, but found `+string([]byte{c}))
|
||||
}
|
||||
iter.decrementDepth()
|
||||
}
|
||||
|
||||
func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *Iterator) {
|
||||
@ -505,19 +523,19 @@ func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *It
|
||||
fieldBytes := iter.ReadStringAsSlice()
|
||||
field = *(*string)(unsafe.Pointer(&fieldBytes))
|
||||
fieldDecoder = decoder.fields[field]
|
||||
if fieldDecoder == nil {
|
||||
if fieldDecoder == nil && !iter.cfg.caseSensitive {
|
||||
fieldDecoder = decoder.fields[strings.ToLower(field)]
|
||||
}
|
||||
} else {
|
||||
field = iter.ReadString()
|
||||
fieldDecoder = decoder.fields[field]
|
||||
if fieldDecoder == nil {
|
||||
if fieldDecoder == nil && !iter.cfg.caseSensitive {
|
||||
fieldDecoder = decoder.fields[strings.ToLower(field)]
|
||||
}
|
||||
}
|
||||
if fieldDecoder == nil {
|
||||
msg := "found unknown field: " + field
|
||||
if decoder.disallowUnknownFields {
|
||||
msg := "found unknown field: " + field
|
||||
iter.ReportError("ReadObject", msg)
|
||||
}
|
||||
c := iter.nextToken()
|
||||
@ -557,6 +575,9 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
|
||||
if !iter.readObjectStart() {
|
||||
return
|
||||
}
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
for {
|
||||
if iter.readFieldHash() == decoder.fieldHash {
|
||||
decoder.fieldDecoder.Decode(ptr, iter)
|
||||
@ -567,9 +588,10 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
|
||||
break
|
||||
}
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
|
||||
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
|
||||
}
|
||||
iter.decrementDepth()
|
||||
}
|
||||
|
||||
type twoFieldsStructDecoder struct {
|
||||
@ -584,6 +606,9 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
|
||||
if !iter.readObjectStart() {
|
||||
return
|
||||
}
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
for {
|
||||
switch iter.readFieldHash() {
|
||||
case decoder.fieldHash1:
|
||||
@ -597,9 +622,10 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
|
||||
break
|
||||
}
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
|
||||
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
|
||||
}
|
||||
iter.decrementDepth()
|
||||
}
|
||||
|
||||
type threeFieldsStructDecoder struct {
|
||||
@ -616,6 +642,9 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
|
||||
if !iter.readObjectStart() {
|
||||
return
|
||||
}
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
for {
|
||||
switch iter.readFieldHash() {
|
||||
case decoder.fieldHash1:
|
||||
@ -631,9 +660,10 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
|
||||
break
|
||||
}
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
|
||||
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
|
||||
}
|
||||
iter.decrementDepth()
|
||||
}
|
||||
|
||||
type fourFieldsStructDecoder struct {
|
||||
@ -652,6 +682,9 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
|
||||
if !iter.readObjectStart() {
|
||||
return
|
||||
}
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
for {
|
||||
switch iter.readFieldHash() {
|
||||
case decoder.fieldHash1:
|
||||
@ -669,9 +702,10 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
|
||||
break
|
||||
}
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
|
||||
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
|
||||
}
|
||||
iter.decrementDepth()
|
||||
}
|
||||
|
||||
type fiveFieldsStructDecoder struct {
|
||||
@ -692,6 +726,9 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
|
||||
if !iter.readObjectStart() {
|
||||
return
|
||||
}
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
for {
|
||||
switch iter.readFieldHash() {
|
||||
case decoder.fieldHash1:
|
||||
@ -711,9 +748,10 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
|
||||
break
|
||||
}
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
|
||||
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
|
||||
}
|
||||
iter.decrementDepth()
|
||||
}
|
||||
|
||||
type sixFieldsStructDecoder struct {
|
||||
@ -736,6 +774,9 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
|
||||
if !iter.readObjectStart() {
|
||||
return
|
||||
}
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
for {
|
||||
switch iter.readFieldHash() {
|
||||
case decoder.fieldHash1:
|
||||
@ -757,9 +798,10 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
|
||||
break
|
||||
}
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
|
||||
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
|
||||
}
|
||||
iter.decrementDepth()
|
||||
}
|
||||
|
||||
type sevenFieldsStructDecoder struct {
|
||||
@ -784,6 +826,9 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
|
||||
if !iter.readObjectStart() {
|
||||
return
|
||||
}
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
for {
|
||||
switch iter.readFieldHash() {
|
||||
case decoder.fieldHash1:
|
||||
@ -807,9 +852,10 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
|
||||
break
|
||||
}
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
|
||||
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
|
||||
}
|
||||
iter.decrementDepth()
|
||||
}
|
||||
|
||||
type eightFieldsStructDecoder struct {
|
||||
@ -836,6 +882,9 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
|
||||
if !iter.readObjectStart() {
|
||||
return
|
||||
}
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
for {
|
||||
switch iter.readFieldHash() {
|
||||
case decoder.fieldHash1:
|
||||
@ -861,9 +910,10 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
|
||||
break
|
||||
}
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
|
||||
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
|
||||
}
|
||||
iter.decrementDepth()
|
||||
}
|
||||
|
||||
type nineFieldsStructDecoder struct {
|
||||
@ -892,6 +942,9 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
|
||||
if !iter.readObjectStart() {
|
||||
return
|
||||
}
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
for {
|
||||
switch iter.readFieldHash() {
|
||||
case decoder.fieldHash1:
|
||||
@ -919,9 +972,10 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
|
||||
break
|
||||
}
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
|
||||
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
|
||||
}
|
||||
iter.decrementDepth()
|
||||
}
|
||||
|
||||
type tenFieldsStructDecoder struct {
|
||||
@ -952,6 +1006,9 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
|
||||
if !iter.readObjectStart() {
|
||||
return
|
||||
}
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
for {
|
||||
switch iter.readFieldHash() {
|
||||
case decoder.fieldHash1:
|
||||
@ -981,9 +1038,10 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
|
||||
break
|
||||
}
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
|
||||
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
|
||||
}
|
||||
iter.decrementDepth()
|
||||
}
|
||||
|
||||
type structFieldDecoder struct {
|
||||
@ -995,7 +1053,7 @@ func (decoder *structFieldDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
fieldPtr := decoder.field.UnsafeGet(ptr)
|
||||
decoder.fieldDecoder.Decode(fieldPtr, iter)
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
iter.Error = fmt.Errorf("%s: %s", decoder.field.Name, iter.Error.Error())
|
||||
iter.Error = fmt.Errorf("%s: %s", decoder.field.Name(), iter.Error.Error())
|
||||
}
|
||||
}
|
||||
|
||||
@ -1017,6 +1075,11 @@ type stringModeNumberDecoder struct {
|
||||
}
|
||||
|
||||
func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
if iter.WhatIsNext() == NilValue {
|
||||
decoder.elemDecoder.Decode(ptr, iter)
|
||||
return
|
||||
}
|
||||
|
||||
c := iter.nextToken()
|
||||
if c != '"' {
|
||||
iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
|
||||
@ -1031,4 +1094,4 @@ func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
|
||||
iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2,10 +2,10 @@ package jsoniter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/modern-go/reflect2"
|
||||
"io"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
"github.com/v2pro/plz/reflect2"
|
||||
)
|
||||
|
||||
func encoderOfStruct(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
@ -47,38 +47,12 @@ func encoderOfStruct(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
}
|
||||
|
||||
func createCheckIsEmpty(ctx *ctx, typ reflect2.Type) checkIsEmpty {
|
||||
encoder := createEncoderOfNative(ctx, typ)
|
||||
if encoder != nil {
|
||||
return encoder
|
||||
}
|
||||
kind := typ.Kind()
|
||||
switch kind {
|
||||
case reflect.String:
|
||||
return &stringCodec{}
|
||||
case reflect.Int:
|
||||
return &intCodec{}
|
||||
case reflect.Int8:
|
||||
return &int8Codec{}
|
||||
case reflect.Int16:
|
||||
return &int16Codec{}
|
||||
case reflect.Int32:
|
||||
return &int32Codec{}
|
||||
case reflect.Int64:
|
||||
return &int64Codec{}
|
||||
case reflect.Uint:
|
||||
return &uintCodec{}
|
||||
case reflect.Uint8:
|
||||
return &uint8Codec{}
|
||||
case reflect.Uint16:
|
||||
return &uint16Codec{}
|
||||
case reflect.Uint32:
|
||||
return &uint32Codec{}
|
||||
case reflect.Uintptr:
|
||||
return &uintptrCodec{}
|
||||
case reflect.Uint64:
|
||||
return &uint64Codec{}
|
||||
case reflect.Float32:
|
||||
return &float32Codec{}
|
||||
case reflect.Float64:
|
||||
return &float64Codec{}
|
||||
case reflect.Bool:
|
||||
return &boolCodec{}
|
||||
case reflect.Interface:
|
||||
return &dynamicEncoder{typ}
|
||||
case reflect.Struct:
|
||||
@ -226,6 +200,7 @@ type stringModeStringEncoder struct {
|
||||
|
||||
func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
tempStream := encoder.cfg.BorrowStream(nil)
|
||||
tempStream.Attachment = stream.Attachment
|
||||
defer encoder.cfg.ReturnStream(tempStream)
|
||||
encoder.elemEncoder.Encode(ptr, tempStream)
|
||||
stream.WriteString(string(tempStream.Buffer()))
|
||||
|
@ -5,8 +5,8 @@ import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_skip_number_in_array(t *testing.T) {
|
||||
@ -105,6 +105,15 @@ func Test_skip_and_return_bytes_with_reader(t *testing.T) {
|
||||
should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(skipped))
|
||||
}
|
||||
|
||||
func Test_append_skip_and_return_bytes_with_reader(t *testing.T) {
|
||||
should := require.New(t)
|
||||
iter := jsoniter.Parse(jsoniter.ConfigDefault, bytes.NewBufferString(`[ {"a" : [{"stream": "c"}], "d": 102 }, "stream"]`), 4)
|
||||
iter.ReadArray()
|
||||
buf := make([]byte, 0, 1024)
|
||||
buf = iter.SkipAndAppendBytes(buf)
|
||||
should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(buf))
|
||||
}
|
||||
|
||||
func Test_skip_empty(t *testing.T) {
|
||||
should := require.New(t)
|
||||
should.NotNil(jsoniter.Get([]byte("")).LastError())
|
||||
|
@ -6,8 +6,8 @@ import (
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
"io"
|
||||
"testing"
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
type testCase struct {
|
||||
|
@ -14,4 +14,4 @@ func init() {
|
||||
`"\t"`, // valid
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -13,7 +13,7 @@ func init() {
|
||||
`{"hello":{}}`, // valid
|
||||
`{"hello":{}}}`, // invalid
|
||||
`{"hello": { "hello": 1}}`, // valid
|
||||
`{abc}`, // invalid
|
||||
`{abc}`, // invalid
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
stream.go
@@ -55,6 +55,11 @@ func (stream *Stream) Buffer() []byte {
	return stream.buf
}

// SetBuffer allows to append to the internal buffer directly
func (stream *Stream) SetBuffer(buf []byte) {
	stream.buf = buf
}

// Write writes the contents of p into the buffer.
// It returns the number of bytes written.
// If nn < len(p), it also returns an error explaining
@@ -98,14 +103,14 @@ func (stream *Stream) Flush() error {
	if stream.Error != nil {
		return stream.Error
	}
	n, err := stream.out.Write(stream.buf)
	_, err := stream.out.Write(stream.buf)
	if err != nil {
		if stream.Error == nil {
			stream.Error = err
		}
		return err
	}
	stream.buf = stream.buf[n:]
	stream.buf = stream.buf[:0]
	return nil
}

@@ -172,7 +177,6 @@ func (stream *Stream) WriteEmptyObject() {
func (stream *Stream) WriteMore() {
	stream.writeByte(',')
	stream.writeIndention(0)
	stream.Flush()
}

// WriteArrayStart write [ with possible indention
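A hedged sketch of driving a Stream against an io.Writer, the path the Flush and WriteMore hunks above affect (the writer and buffer size are illustrative):

package main

import (
	"bytes"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var buf bytes.Buffer
	stream := jsoniter.NewStream(jsoniter.ConfigDefault, &buf, 512)
	stream.WriteArrayStart()
	stream.WriteInt(1)
	stream.WriteMore() // after this change, WriteMore no longer flushes implicitly
	stream.WriteInt(2)
	stream.WriteArrayEnd()
	if err := stream.Flush(); err != nil { // Flush now truncates the buffer instead of re-slicing it
		panic(err)
	}
	fmt.Println(buf.String()) // [1,2]
}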
@@ -1,6 +1,7 @@
package jsoniter

import (
	"fmt"
	"math"
	"strconv"
)
@@ -13,6 +14,10 @@ func init() {

// WriteFloat32 write float32 to stream
func (stream *Stream) WriteFloat32(val float32) {
	if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
		stream.Error = fmt.Errorf("unsupported value: %f", val)
		return
	}
	abs := math.Abs(float64(val))
	fmt := byte('f')
	// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
@@ -22,10 +27,22 @@ func (stream *Stream) WriteFloat32(val float32) {
		}
	}
	stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 32)
	if fmt == 'e' {
		// clean up e-09 to e-9
		n := len(stream.buf)
		if n >= 4 && stream.buf[n-4] == 'e' && stream.buf[n-3] == '-' && stream.buf[n-2] == '0' {
			stream.buf[n-2] = stream.buf[n-1]
			stream.buf = stream.buf[:n-1]
		}
	}
}

// WriteFloat32Lossy write float32 to stream with ONLY 6 digits precision although much much faster
func (stream *Stream) WriteFloat32Lossy(val float32) {
	if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
		stream.Error = fmt.Errorf("unsupported value: %f", val)
		return
	}
	if val < 0 {
		stream.writeByte('-')
		val = -val
@@ -54,6 +71,10 @@ func (stream *Stream) WriteFloat32Lossy(val float32) {

// WriteFloat64 write float64 to stream
func (stream *Stream) WriteFloat64(val float64) {
	if math.IsInf(val, 0) || math.IsNaN(val) {
		stream.Error = fmt.Errorf("unsupported value: %f", val)
		return
	}
	abs := math.Abs(val)
	fmt := byte('f')
	// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
@@ -63,10 +84,22 @@ func (stream *Stream) WriteFloat64(val float64) {
		}
	}
	stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 64)
	if fmt == 'e' {
		// clean up e-09 to e-9
		n := len(stream.buf)
		if n >= 4 && stream.buf[n-4] == 'e' && stream.buf[n-3] == '-' && stream.buf[n-2] == '0' {
			stream.buf[n-2] = stream.buf[n-1]
			stream.buf = stream.buf[:n-1]
		}
	}
}

// WriteFloat64Lossy write float64 to stream with ONLY 6 digits precision although much much faster
func (stream *Stream) WriteFloat64Lossy(val float64) {
	if math.IsInf(val, 0) || math.IsNaN(val) {
		stream.Error = fmt.Errorf("unsupported value: %f", val)
		return
	}
	if val < 0 {
		stream.writeByte('-')
		val = -val
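A short sketch of the guards added above: NaN and infinities are rejected instead of being written as invalid JSON, and the exponent clean-up trims e-09 to e-9 (values are illustrative):

package main

import (
	"fmt"
	"math"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	_, err := jsoniter.Marshal(math.Inf(1))
	fmt.Println(err != nil) // true: +Inf is not representable in JSON, as in encoding/json

	out, _ := jsoniter.Marshal(1e-9)
	fmt.Println(string(out)) // 1e-9
}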
@@ -17,16 +17,16 @@ func init() {
func writeFirstBuf(space []byte, v uint32) []byte {
	start := v >> 24
	if start == 0 {
		space = append(space, byte(v >> 16), byte(v >> 8))
		space = append(space, byte(v>>16), byte(v>>8))
	} else if start == 1 {
		space = append(space, byte(v >> 8))
		space = append(space, byte(v>>8))
	}
	space = append(space, byte(v))
	return space
}

func writeBuf(buf []byte, v uint32) []byte {
	return append(buf, byte(v >> 16), byte(v >> 8), byte(v))
	return append(buf, byte(v>>16), byte(v>>8), byte(v))
}

// WriteUint8 write uint8 to stream
@@ -91,7 +91,7 @@ func (stream *Stream) WriteUint32(val uint32) {
		stream.buf = writeFirstBuf(stream.buf, digits[q2])
	} else {
		r3 := q2 - q3*1000
		stream.buf = append(stream.buf, byte(q3 + '0'))
		stream.buf = append(stream.buf, byte(q3+'0'))
		stream.buf = writeBuf(stream.buf, digits[r3])
	}
	stream.buf = writeBuf(stream.buf, digits[r2])
@@ -1,8 +1,9 @@
package jsoniter

import (
	"github.com/stretchr/testify/require"
	"testing"

	"github.com/stretchr/testify/require"
)

func Test_writeByte_should_grow_buffer(t *testing.T) {
@@ -62,8 +63,24 @@ func (w *NopWriter) Write(p []byte) (n int, err error) {
}

func Test_flush_buffer_should_stop_grow_buffer(t *testing.T) {
	// Stream an array of a zillion zeros.
	writer := new(NopWriter)
	NewEncoder(writer).Encode(make([]int, 10000000))
	stream := NewStream(ConfigDefault, writer, 512)
	stream.WriteArrayStart()
	for i := 0; i < 10000000; i++ {
		stream.WriteInt(0)
		stream.WriteMore()
		stream.Flush()
	}
	stream.WriteInt(0)
	stream.WriteArrayEnd()

	// Confirm that the buffer didn't have to grow.
	should := require.New(t)
	should.Equal(8, writer.bufferSize)

	// 512 is the internal buffer size set in NewEncoder
	//
	// Flush is called after each array element, so only the first 8 bytes of it
	// is ever used, and it is never extended. Capacity remains 512.
	should.Equal(512, writer.bufferSize)
}
@ -60,4 +60,4 @@ func init() {
|
||||
}
|
||||
|
||||
type structEmpty struct{}
|
||||
type arrayAlis [4]stringAlias
|
||||
type arrayAlis [4]stringAlias
|
||||
|
@ -1,8 +1,12 @@
|
||||
// +build go1.15
|
||||
// remove these tests temporarily until https://github.com/golang/go/issues/38105 and
|
||||
// https://github.com/golang/go/issues/38940 is fixed
|
||||
|
||||
package test
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"encoding"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func init() {
|
||||
@ -26,7 +30,6 @@ func (k *stringKeyType) UnmarshalText(text []byte) error {
|
||||
var _ encoding.TextMarshaler = stringKeyType("")
|
||||
var _ encoding.TextUnmarshaler = new(stringKeyType)
|
||||
|
||||
|
||||
type structKeyType struct {
|
||||
X string
|
||||
}
|
||||
@ -41,4 +44,4 @@ func (k *structKeyType) UnmarshalText(text []byte) error {
|
||||
}
|
||||
|
||||
var _ encoding.TextMarshaler = structKeyType{}
|
||||
var _ encoding.TextUnmarshaler = &structKeyType{}
|
||||
var _ encoding.TextUnmarshaler = &structKeyType{}
|
||||
|
@ -3,8 +3,8 @@ package test
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"strings"
|
||||
"encoding/json"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type StringMarshaler string
|
||||
@ -49,4 +49,4 @@ var _ json.Unmarshaler = new(StringMarshaler)
|
||||
|
||||
func init() {
|
||||
testCases = append(testCases, (*StringMarshaler)(nil))
|
||||
}
|
||||
}
|
||||
|
@ -1,10 +1,10 @@
|
||||
package test
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"encoding/base64"
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type structMarshaler struct {
|
||||
|
@ -91,7 +91,6 @@ func (p *jsonMarshaler) UnmarshalJSON(input []byte) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
type jsonMarshalerMap map[int]int
|
||||
|
||||
func (p *jsonMarshalerMap) MarshalJSON() ([]byte, error) {
|
||||
@ -117,11 +116,10 @@ func (p *textMarshaler) UnmarshalText(input []byte) error {
|
||||
|
||||
type textMarshalerMap map[int]int
|
||||
|
||||
|
||||
func (p *textMarshalerMap) MarshalText() ([]byte, error) {
|
||||
return []byte(`{}`), nil
|
||||
}
|
||||
|
||||
func (p *textMarshalerMap) UnmarshalText(input []byte) error {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
@ -60,6 +60,7 @@ func init() {
|
||||
(*SameLevel2NoTags)(nil),
|
||||
(*SameLevel2Tagged)(nil),
|
||||
(*EmbeddedPtr)(nil),
|
||||
(*UnnamedLiteral)(nil),
|
||||
)
|
||||
}
|
||||
|
||||
@ -230,4 +231,8 @@ type EmbeddedPtrOption struct {
|
||||
|
||||
type EmbeddedPtr struct {
|
||||
EmbeddedPtrOption `json:","`
|
||||
}
|
||||
}
|
||||
|
||||
type UnnamedLiteral struct {
|
||||
_ struct{}
|
||||
}
|
||||
|
@ -121,10 +121,11 @@ func init() {
|
||||
F1 int32 `json:"F1"`
|
||||
F2 int32 `json:"F2,string"`
|
||||
})(nil),
|
||||
(*struct {
|
||||
F1 string `json:"F1"`
|
||||
F2 string `json:"F2,string"`
|
||||
})(nil),
|
||||
// remove temporarily until https://github.com/golang/go/issues/38126 is fixed
|
||||
// (*struct {
|
||||
// F1 string `json:"F1"`
|
||||
// F2 string `json:"F2,string"`
|
||||
// })(nil),
|
||||
(*struct {
|
||||
F1 uint8 `json:"F1"`
|
||||
F2 uint8 `json:"F2,string"`
|
||||
@ -145,6 +146,9 @@ func init() {
|
||||
(*struct {
|
||||
Field bool `json:",omitempty,string"`
|
||||
})(nil),
|
||||
(*struct {
|
||||
Field bool `json:"中文"`
|
||||
})(nil),
|
||||
)
|
||||
}
|
||||
|
||||
@ -192,12 +196,12 @@ type StringFieldNameE struct {
|
||||
}
|
||||
|
||||
type StringFieldName struct {
|
||||
F1 string `json:"F1"`
|
||||
F2 string `json:"f2"`
|
||||
F3 string `json:"-"`
|
||||
F4 string `json:"-,"`
|
||||
F5 string `json:","`
|
||||
F6 string `json:""`
|
||||
F1 string `json:"F1"`
|
||||
F2 string `json:"f2"`
|
||||
F3 string `json:"-"`
|
||||
F4 string `json:"-,"`
|
||||
F5 string `json:","`
|
||||
F6 string `json:""`
|
||||
StringFieldNameE `json:"e"`
|
||||
}
|
||||
|
||||
|
@@ -1,10 +1,10 @@
package test

import (
    "strings"
    "encoding"
    "bytes"
    "encoding"
    "encoding/base64"
    "strings"
)

func init() {

@@ -2,9 +2,9 @@ package test

import (
    "bytes"
    "encoding"
    "encoding/base64"
    "strings"
    "encoding"
)

func init() {
@@ -66,4 +66,4 @@ func (m *structTextMarshaler) UnmarshalText(text []byte) error {
var _ encoding.TextMarshaler = structTextMarshaler{}
var _ encoding.TextUnmarshaler = &structTextMarshaler{}

type structTextMarshalerAlias structTextMarshaler
type structTextMarshalerAlias structTextMarshaler

@@ -1,15 +1,15 @@
package test

import (
    "testing"
    "reflect"
    "fmt"
    "github.com/google/gofuzz"
    "strings"
    "github.com/json-iterator/go"
    "encoding/json"
    "bytes"
    "encoding/json"
    "fmt"
    "github.com/davecgh/go-spew/spew"
    "github.com/google/gofuzz"
    "github.com/json-iterator/go"
    "reflect"
    "strings"
    "testing"
)

var testCases []interface{}

@@ -8,13 +8,13 @@ func init() {
        [2]*float64{},
    )
    unmarshalCases = append(unmarshalCases, unmarshalCase{
        ptr: (*[0]int)(nil),
        ptr: (*[0]int)(nil),
        input: `[1]`,
    }, unmarshalCase{
        ptr: (*[1]int)(nil),
        ptr: (*[1]int)(nil),
        input: `[2]`,
    }, unmarshalCase{
        ptr: (*[1]int)(nil),
        ptr: (*[1]int)(nil),
        input: `[]`,
    })
}

@@ -8,16 +8,16 @@ func init() {
        return &val
    }
    unmarshalCases = append(unmarshalCases, unmarshalCase{
        ptr: (**interface{})(nil),
        ptr: (**interface{})(nil),
        input: `"hello"`,
    }, unmarshalCase{
        ptr: (**interface{})(nil),
        ptr: (**interface{})(nil),
        input: `1e1`,
    }, unmarshalCase{
        ptr: (**interface{})(nil),
        ptr: (**interface{})(nil),
        input: `1.0e1`,
    }, unmarshalCase{
        ptr: (*[]interface{})(nil),
        ptr: (*[]interface{})(nil),
        input: `[1.0e1]`,
    }, unmarshalCase{
        ptr: (*struct {
@@ -66,12 +66,12 @@ func init() {
        }{"hello"},
        struct {
            Field interface{}
        }{struct{
        }{struct {
            field chan int
        }{}},
        struct {
            Field interface{}
        }{struct{
        }{struct {
            Field *int
        }{pInt(100)}},
    )

value_tests/error_test.go (new file, 36 lines)
@@ -0,0 +1,36 @@
package test

import (
    "github.com/json-iterator/go"
    "github.com/stretchr/testify/require"
    "reflect"
    "testing"
)

func Test_errorInput(t *testing.T) {
    for _, testCase := range unmarshalCases {
        if testCase.obj != nil {
            continue
        }
        valType := reflect.TypeOf(testCase.ptr).Elem()
        t.Run(valType.String(), func(t *testing.T) {
            for _, data := range []string{
                `x`,
                `n`,
                `nul`,
                `{x}`,
                `{"x"}`,
                `{"x": "y"x}`,
                `{"x": "y"`,
                `{"x": "y", "a"}`,
                `[`,
                `[{"x": "y"}`,
            } {
                ptrVal := reflect.New(valType)
                ptr := ptrVal.Interface()
                err := jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(data), ptr)
                require.Error(t, err, "on input %q", data)
            }
        })
    }
}

@@ -1,13 +1,14 @@
package test

import (
    "fmt"
    "testing"
    "github.com/stretchr/testify/require"
    "strconv"
    "bytes"
    "github.com/json-iterator/go"
    "encoding/json"
    "fmt"
    "strconv"
    "testing"

    jsoniter "github.com/json-iterator/go"
    "github.com/stretchr/testify/require"
)

func Test_read_float(t *testing.T) {
@@ -50,7 +51,6 @@ func Test_read_float(t *testing.T) {
    }
}

func Test_write_float32(t *testing.T) {
    vals := []float32{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff,
        -0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
@@ -89,7 +89,7 @@ func Test_write_float32(t *testing.T) {

    stream = jsoniter.NewStream(jsoniter.ConfigDefault, nil, 0)
    stream.WriteFloat32(float32(0.0000001))
    should.Equal("1e-07", string(stream.Buffer()))
    should.Equal("1e-7", string(stream.Buffer()))
}

func Test_write_float64(t *testing.T) {
@@ -126,5 +126,5 @@ func Test_write_float64(t *testing.T) {

    stream = jsoniter.NewStream(jsoniter.ConfigDefault, nil, 0)
    stream.WriteFloat64(float64(0.0000001))
    should.Equal("1e-07", string(stream.Buffer()))
}
    should.Equal("1e-7", string(stream.Buffer()))
}

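Side note, not part of the diff: the two expectations in the hunks above differ only in whether the exponent is zero-padded. Go's strconv (and encoding/json) always keep at least two exponent digits, producing "1e-07", while the unpadded "1e-7" form comes from the stream's own float writer. A minimal check, assuming only the standard library:

package main

import (
    "fmt"
    "strconv"
)

func main() {
    // strconv, like encoding/json, keeps at least two exponent digits.
    fmt.Println(strconv.FormatFloat(0.0000001, 'e', -1, 32)) // prints 1e-07
}
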
@@ -17,7 +17,7 @@ func init() {
        pCloser2("hello"),
    )
    unmarshalCases = append(unmarshalCases, unmarshalCase{
        ptr: (*[]io.Closer)(nil),
        ptr: (*[]io.Closer)(nil),
        input: "[null]",
    }, unmarshalCase{
        obj: func() interface{} {

@@ -1,12 +1,12 @@
package test

import (
    "bytes"
    "fmt"
    "github.com/json-iterator/go"
    "github.com/stretchr/testify/require"
    "strconv"
    "fmt"
    "testing"
    "bytes"
    "github.com/json-iterator/go"
)

func init() {
@@ -158,7 +158,6 @@ func Test_read_int64(t *testing.T) {
    }
}

func Test_write_uint8(t *testing.T) {
    vals := []uint8{0, 1, 11, 111, 255}
    for _, val := range vals {
@@ -417,4 +416,4 @@ func Test_write_int64(t *testing.T) {
    stream.Flush()
    should.Nil(stream.Error)
    should.Equal("a4294967295", buf.String())
}
}

@@ -3,11 +3,11 @@ package test
import (
    "bytes"
    "encoding/json"
    "github.com/json-iterator/go"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
    "io"
    "testing"
    "github.com/json-iterator/go"
)

func Test_missing_object_end(t *testing.T) {
@@ -103,18 +103,44 @@ func Test_invalid_float(t *testing.T) {
}

func Test_chan(t *testing.T) {
    t.Skip("do not support chan")

    type TestObject struct {
        MyChan  chan bool
        MyField int
    }

    should := require.New(t)
    obj := TestObject{}
    str, err := json.Marshal(obj)
    should.Nil(err)
    should.Equal(``, str)

    t.Run("Encode channel", func(t *testing.T) {
        should := require.New(t)
        str, err := jsoniter.Marshal(obj)
        should.NotNil(err)
        should.Nil(str)
    })

    t.Run("Encode channel using compatible configuration", func(t *testing.T) {
        should := require.New(t)
        str, err := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(obj)
        should.NotNil(err)
        should.Nil(str)
    })
}

func Test_invalid_in_map(t *testing.T) {
    testMap := map[string]interface{}{"chan": make(chan interface{})}

    t.Run("Encode map with invalid content", func(t *testing.T) {
        should := require.New(t)
        str, err := jsoniter.Marshal(testMap)
        should.NotNil(err)
        should.Nil(str)
    })

    t.Run("Encode map with invalid content using compatible configuration", func(t *testing.T) {
        should := require.New(t)
        str, err := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(testMap)
        should.NotNil(err)
        should.Nil(str)
    })
}

func Test_invalid_number(t *testing.T) {
@@ -223,4 +249,14 @@ func Test_EmptyInput(t *testing.T) {
    if err == nil {
        t.Errorf("Expected error")
    }
}
}

type Foo struct {
    A jsoniter.Any
}

func Test_nil_any(t *testing.T) {
    should := require.New(t)
    data, _ := jsoniter.Marshal(&Foo{})
    should.Equal(`{"A":null}`, string(data))
}

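For orientation, not part of the diff: both encoders report an error when asked to marshal a value containing a channel, which is what the subtests above assert. A minimal sketch, assuming only the public json.Marshal and jsoniter.Marshal APIs:

package main

import (
    "encoding/json"
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    obj := struct{ MyChan chan bool }{}

    _, stdErr := json.Marshal(obj)      // encoding/json: *json.UnsupportedTypeError
    _, iterErr := jsoniter.Marshal(obj) // jsoniter: non-nil error as well

    fmt.Println(stdErr != nil, iterErr != nil) // true true
}
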
@@ -1,8 +1,10 @@
package test

import (
    "math/big"
    "encoding/json"
    "fmt"
    "math/big"
    "time"
)

func init() {
@@ -26,17 +28,37 @@ func init() {
        },
        nilMap,
        &nilMap,
        map[string]*json.RawMessage{"hello":pRawMessage(json.RawMessage("[]"))},
        map[string]*json.RawMessage{"hello": pRawMessage(json.RawMessage("[]"))},
        map[Date]bool{{}: true},
        map[Date2]bool{{}: true},
        map[customKey]string{customKey(1): "bar"},
    )
    unmarshalCases = append(unmarshalCases, unmarshalCase{
        ptr: (*map[string]string)(nil),
        ptr: (*map[string]string)(nil),
        input: `{"k\"ey": "val"}`,
    }, unmarshalCase{
        ptr: (*map[string]string)(nil),
        ptr: (*map[string]string)(nil),
        input: `null`,
    }, unmarshalCase{
        ptr: (*map[string]*json.RawMessage)(nil),
        ptr: (*map[string]*json.RawMessage)(nil),
        input: "{\"test\":[{\"key\":\"value\"}]}",
    }, unmarshalCase{
        ptr: (*map[Date]bool)(nil),
        input: `{
            "2018-12-12": true,
            "2018-12-13": true,
            "2018-12-14": true
        }`,
    }, unmarshalCase{
        ptr: (*map[Date2]bool)(nil),
        input: `{
            "2018-12-12": true,
            "2018-12-13": true,
            "2018-12-14": true
        }`,
    }, unmarshalCase{
        ptr: (*map[customKey]string)(nil),
        input: `{"foo": "bar"}`,
    })
}
@@ -49,3 +71,62 @@ type MyString string
func (ms MyString) Hello() string {
    return string(ms)
}

type Date struct {
    time.Time
}

func (d *Date) UnmarshalJSON(b []byte) error {
    dateStr := string(b) // something like `"2017-08-20"`

    if dateStr == "null" {
        return nil
    }

    t, err := time.Parse(`"2006-01-02"`, dateStr)
    if err != nil {
        return fmt.Errorf("cant parse date: %#v", err)
    }

    d.Time = t
    return nil
}

func (d *Date) MarshalJSON() ([]byte, error) {
    return []byte(d.Time.Format("2006-01-02")), nil
}

type Date2 struct {
    time.Time
}

func (d Date2) UnmarshalJSON(b []byte) error {
    dateStr := string(b) // something like `"2017-08-20"`

    if dateStr == "null" {
        return nil
    }

    t, err := time.Parse(`"2006-01-02"`, dateStr)
    if err != nil {
        return fmt.Errorf("cant parse date: %#v", err)
    }

    d.Time = t
    return nil
}

func (d Date2) MarshalJSON() ([]byte, error) {
    return []byte(d.Time.Format("2006-01-02")), nil
}

type customKey int32

func (c customKey) MarshalText() ([]byte, error) {
    return []byte("foo"), nil
}

func (c *customKey) UnmarshalText(value []byte) error {
    *c = 1
    return nil
}

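For context, not part of the diff: customKey works as a map key because encoding.TextMarshaler and encoding.TextUnmarshaler are honored for map keys, which is what the map[customKey]string cases above exercise. A minimal standalone sketch, assuming only the standard library; colorKey is a hypothetical stand-in for customKey:

package main

import (
    "encoding/json"
    "fmt"
)

// colorKey mirrors the customKey pattern: a non-string key type that
// marshals to, and unmarshals from, a fixed text representation.
type colorKey int32

func (c colorKey) MarshalText() ([]byte, error) { return []byte("foo"), nil }

func (c *colorKey) UnmarshalText(text []byte) error {
    *c = 1
    return nil
}

func main() {
    out, _ := json.Marshal(map[colorKey]string{colorKey(1): "bar"})
    fmt.Println(string(out)) // {"foo":"bar"}

    var in map[colorKey]string
    _ = json.Unmarshal([]byte(`{"foo": "bar"}`), &in)
    fmt.Println(in[colorKey(1)]) // bar
}
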
@@ -1,8 +1,8 @@
package test

import (
    "encoding/json"
    "encoding"
    "encoding/json"
)

func init() {
@@ -17,20 +17,20 @@ func init() {
        tmOfStructInt{},
        &tm2,
        map[tmOfStruct]int{
            tmOfStruct{}: 100,
            {}: 100,
        },
        map[*tmOfStruct]int{
            &tmOfStruct{}: 100,
            {}: 100,
        },
        map[encoding.TextMarshaler]int{
            tm1: 100,
        },
    )
    unmarshalCases = append(unmarshalCases, unmarshalCase{
        ptr: (*tmOfMap)(nil),
        ptr: (*tmOfMap)(nil),
        input: `"{1:2}"`,
    }, unmarshalCase{
        ptr: (*tmOfMapPtr)(nil),
        ptr: (*tmOfMapPtr)(nil),
        input: `"{1:2}"`,
    })
}
@@ -47,7 +47,6 @@ func (q *jmOfStruct) UnmarshalJSON(value []byte) error {
    return nil
}

type tmOfStruct struct {
    F2 chan []byte
}

Some files were not shown because too many files have changed in this diff.