You've already forked json-iterator
mirror of
https://github.com/json-iterator/go.git
synced 2025-06-15 22:50:24 +02:00
Compare commits
80 Commits
Author | SHA1 | Date | |
---|---|---|---|
e6b9536d36 | |||
a3465d79a9 | |||
9b79a3e192 | |||
1779031cda | |||
6821bec9fa | |||
9461257643 | |||
5bce16d299 | |||
a1ca083078 | |||
cd6773e694 | |||
55287ed53a | |||
8961be9c21 | |||
0f8241d334 | |||
53b9d06ba7 | |||
1f7ee05ef8 | |||
b22f393858 | |||
bede7b9e40 | |||
58aeb59006 | |||
7acbb404a4 | |||
69f2a91ff4 | |||
11a37a0774 | |||
91f4a6405d | |||
a54d350455 | |||
6f4c196d95 | |||
8302a17e8c | |||
3987001e27 | |||
78d9e97b7a | |||
49c900ee46 | |||
9c0685d8d3 | |||
acfec88f7a | |||
e88512faf8 | |||
b681149eae | |||
d1af7639b3 | |||
7c9f8c2d20 | |||
f814d6c0f1 | |||
aba8654400 | |||
a1c9557592 | |||
44a7e7340d | |||
2834c7e43c | |||
d296277d5c | |||
dc11f49689 | |||
83f7b825b3 | |||
03217c3e97 | |||
908eaed151 | |||
eec24895fe | |||
1ba732a07d | |||
819acad769 | |||
695ec2b83b | |||
028e2ef2bd | |||
976454858b | |||
27518f6661 | |||
94869abf43 | |||
459f0e30ae | |||
0039f4ac3d | |||
fb5614a4ca | |||
f71b9090aa | |||
08047c174c | |||
68347ec4d6 | |||
0fd91468bb | |||
1bc9828b4f | |||
24c3d57281 | |||
0ff49de124 | |||
5bc9320502 | |||
f64ce68b6e | |||
2d42ff74dd | |||
3a023a5fbc | |||
16aef10b2b | |||
ae4c002f78 | |||
e4aa2ec063 | |||
d05f387f50 | |||
a9403d25cd | |||
05d041de10 | |||
5916df66b3 | |||
2433035e51 | |||
6dfc0bf2dd | |||
b9be8dd373 | |||
b8d78b6aaf | |||
7109b5e7dd | |||
4cc76529e8 | |||
c5ddac9dc3 | |||
f76d712086 |
40
README.md
40
README.md
@ -1,5 +1,5 @@
|
||||
[](https://sourcegraph.com/github.com/json-iterator/go?badge)
|
||||
[](http://godoc.org/github.com/json-iterator/go)
|
||||
[](https://pkg.go.dev/github.com/json-iterator/go)
|
||||
[](https://travis-ci.org/json-iterator/go)
|
||||
[](https://codecov.io/gh/json-iterator/go)
|
||||
[](https://goreportcard.com/report/github.com/json-iterator/go)
|
||||
@ -10,10 +10,6 @@ A high-performance 100% compatible drop-in replacement of "encoding/json"
|
||||
|
||||
You can also use thrift like JSON using [thrift-iterator](https://github.com/thrift-iterator/go)
|
||||
|
||||
```
|
||||
Go开发者们请加入我们,滴滴出行平台技术部 taowen@didichuxing.com
|
||||
```
|
||||
|
||||
# Benchmark
|
||||
|
||||

|
||||
@ -22,16 +18,16 @@ Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/githu
|
||||
|
||||
Raw Result (easyjson requires static code generation)
|
||||
|
||||
| | ns/op | allocation bytes | allocation times |
|
||||
| --- | --- | --- | --- |
|
||||
| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
|
||||
| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
|
||||
| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
|
||||
| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
|
||||
| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
|
||||
| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
|
||||
| | ns/op | allocation bytes | allocation times |
|
||||
| --------------- | ----------- | ---------------- | ---------------- |
|
||||
| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
|
||||
| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
|
||||
| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
|
||||
| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
|
||||
| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
|
||||
| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
|
||||
|
||||
Always benchmark with your own workload.
|
||||
Always benchmark with your own workload.
|
||||
The result depends heavily on the data input.
|
||||
|
||||
# Usage
|
||||
@ -45,10 +41,10 @@ import "encoding/json"
|
||||
json.Marshal(&data)
|
||||
```
|
||||
|
||||
with
|
||||
with
|
||||
|
||||
```go
|
||||
import "github.com/json-iterator/go"
|
||||
import jsoniter "github.com/json-iterator/go"
|
||||
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
json.Marshal(&data)
|
||||
@ -64,7 +60,7 @@ json.Unmarshal(input, &data)
|
||||
with
|
||||
|
||||
```go
|
||||
import "github.com/json-iterator/go"
|
||||
import jsoniter "github.com/json-iterator/go"
|
||||
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
json.Unmarshal(input, &data)
|
||||
@ -82,10 +78,10 @@ go get github.com/json-iterator/go
|
||||
|
||||
Contributors
|
||||
|
||||
* [thockin](https://github.com/thockin)
|
||||
* [mattn](https://github.com/mattn)
|
||||
* [cch123](https://github.com/cch123)
|
||||
* [Oleg Shaldybin](https://github.com/olegshaldybin)
|
||||
* [Jason Toffaletti](https://github.com/toffaletti)
|
||||
- [thockin](https://github.com/thockin)
|
||||
- [mattn](https://github.com/mattn)
|
||||
- [cch123](https://github.com/cch123)
|
||||
- [Oleg Shaldybin](https://github.com/olegshaldybin)
|
||||
- [Jason Toffaletti](https://github.com/toffaletti)
|
||||
|
||||
Report issue or pull request, or email taowen@gmail.com, or [](https://gitter.im/json-iterator/Lobby)
|
||||
|
@ -16,7 +16,7 @@ func Unmarshal(data []byte, v interface{}) error {
|
||||
return ConfigDefault.Unmarshal(data, v)
|
||||
}
|
||||
|
||||
// UnmarshalFromString convenient method to read from string instead of []byte
|
||||
// UnmarshalFromString is a convenient method to read from string instead of []byte
|
||||
func UnmarshalFromString(str string, v interface{}) error {
|
||||
return ConfigDefault.UnmarshalFromString(str, v)
|
||||
}
|
||||
|
4
any.go
4
any.go
@ -312,6 +312,10 @@ func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
|
||||
func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
any := *(*Any)(ptr)
|
||||
if any == nil {
|
||||
stream.WriteNil()
|
||||
return
|
||||
}
|
||||
any.WriteTo(stream)
|
||||
}
|
||||
|
||||
|
@ -64,7 +64,6 @@ func (any *stringAny) ToInt64() int64 {
|
||||
|
||||
flag := 1
|
||||
startPos := 0
|
||||
endPos := 0
|
||||
if any.val[0] == '+' || any.val[0] == '-' {
|
||||
startPos = 1
|
||||
}
|
||||
@ -73,6 +72,7 @@ func (any *stringAny) ToInt64() int64 {
|
||||
flag = -1
|
||||
}
|
||||
|
||||
endPos := startPos
|
||||
for i := startPos; i < len(any.val); i++ {
|
||||
if any.val[i] >= '0' && any.val[i] <= '9' {
|
||||
endPos = i + 1
|
||||
@ -98,7 +98,6 @@ func (any *stringAny) ToUint64() uint64 {
|
||||
}
|
||||
|
||||
startPos := 0
|
||||
endPos := 0
|
||||
|
||||
if any.val[0] == '-' {
|
||||
return 0
|
||||
@ -107,6 +106,7 @@ func (any *stringAny) ToUint64() uint64 {
|
||||
startPos = 1
|
||||
}
|
||||
|
||||
endPos := startPos
|
||||
for i := startPos; i < len(any.val); i++ {
|
||||
if any.val[i] >= '0' && any.val[i] <= '9' {
|
||||
endPos = i + 1
|
||||
|
@ -82,10 +82,8 @@ func Test_read_float_to_any(t *testing.T) {
|
||||
should := require.New(t)
|
||||
any := jsoniter.WrapFloat64(12.3)
|
||||
anyFloat64 := float64(12.3)
|
||||
//negaAnyFloat64 := float64(-1.1)
|
||||
any2 := jsoniter.WrapFloat64(-1.1)
|
||||
should.Equal(float64(12.3), any.ToFloat64())
|
||||
//should.Equal("12.3", any.ToString())
|
||||
should.True(any.ToBool())
|
||||
should.Equal(float32(anyFloat64), any.ToFloat32())
|
||||
should.Equal(int(anyFloat64), any.ToInt())
|
||||
|
@ -118,6 +118,4 @@ func Test_object_wrapper_any_get_all(t *testing.T) {
|
||||
should.Contains(any.Keys(), "Field1")
|
||||
should.Contains(any.Keys(), "Field2")
|
||||
should.NotContains(any.Keys(), "Field3")
|
||||
|
||||
//should.Contains(any.GetObject()["Field1"].GetArray()[0], 1)
|
||||
}
|
||||
|
@ -8,24 +8,24 @@ import (
|
||||
)
|
||||
|
||||
var stringConvertMap = map[string]string{
|
||||
"null": "",
|
||||
"321.1": "321.1",
|
||||
`"1.1"`: "1.1",
|
||||
`"-123.1"`: "-123.1",
|
||||
"0.0": "0.0",
|
||||
"0": "0",
|
||||
`"0"`: "0",
|
||||
`"0.0"`: "0.0",
|
||||
`"00.0"`: "00.0",
|
||||
"true": "true",
|
||||
"false": "false",
|
||||
`"true"`: "true",
|
||||
`"false"`: "false",
|
||||
`"true123"`: "true123",
|
||||
`"+1"`: "+1",
|
||||
"[]": "[]",
|
||||
"[1,2]": "[1,2]",
|
||||
"{}": "{}",
|
||||
"null": "",
|
||||
"321.1": "321.1",
|
||||
`"1.1"`: "1.1",
|
||||
`"-123.1"`: "-123.1",
|
||||
"0.0": "0.0",
|
||||
"0": "0",
|
||||
`"0"`: "0",
|
||||
`"0.0"`: "0.0",
|
||||
`"00.0"`: "00.0",
|
||||
"true": "true",
|
||||
"false": "false",
|
||||
`"true"`: "true",
|
||||
`"false"`: "false",
|
||||
`"true123"`: "true123",
|
||||
`"+1"`: "+1",
|
||||
"[]": "[]",
|
||||
"[1,2]": "[1,2]",
|
||||
"{}": "{}",
|
||||
`{"a":1, "stream":true}`: `{"a":1, "stream":true}`,
|
||||
}
|
||||
|
||||
|
@ -172,3 +172,58 @@ func Test_CaseSensitive_MoreThanTenFields(t *testing.T) {
|
||||
should.Equal(tc.expectedOutput, output)
|
||||
}
|
||||
}
|
||||
|
||||
type onlyTaggedFieldStruct struct {
|
||||
A string `json:"a"`
|
||||
B string
|
||||
FSimpl F `json:"f_simpl"`
|
||||
ISimpl I
|
||||
FPtr *F `json:"f_ptr"`
|
||||
IPtr *I
|
||||
F
|
||||
*I
|
||||
}
|
||||
|
||||
type F struct {
|
||||
G string `json:"g"`
|
||||
H string
|
||||
}
|
||||
|
||||
type I struct {
|
||||
J string `json:"j"`
|
||||
K string
|
||||
}
|
||||
|
||||
func Test_OnlyTaggedField(t *testing.T) {
|
||||
should := require.New(t)
|
||||
|
||||
obj := onlyTaggedFieldStruct{
|
||||
A: "a",
|
||||
B: "b",
|
||||
FSimpl: F{G: "g", H: "h"},
|
||||
ISimpl: I{J: "j", K: "k"},
|
||||
FPtr: &F{G: "g", H: "h"},
|
||||
IPtr: &I{J: "j", K: "k"},
|
||||
F: F{G: "g", H: "h"},
|
||||
I: &I{J: "j", K: "k"},
|
||||
}
|
||||
|
||||
output, err := jsoniter.Config{OnlyTaggedField: true}.Froze().Marshal(obj)
|
||||
should.Nil(err)
|
||||
|
||||
m := make(map[string]interface{})
|
||||
err = jsoniter.Unmarshal(output, &m)
|
||||
should.Nil(err)
|
||||
|
||||
should.Equal(map[string]interface{}{
|
||||
"a": "a",
|
||||
"f_simpl": map[string]interface{}{
|
||||
"g": "g",
|
||||
},
|
||||
"f_ptr": map[string]interface{}{
|
||||
"g": "g",
|
||||
},
|
||||
"g": "g",
|
||||
"j": "j",
|
||||
}, m)
|
||||
}
|
||||
|
47
api_tests/marshal_json_escape_test.go
Normal file
47
api_tests/marshal_json_escape_test.go
Normal file
@ -0,0 +1,47 @@
|
||||
package test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"testing"
|
||||
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
var marshalConfig = jsoniter.Config{
|
||||
EscapeHTML: false,
|
||||
SortMapKeys: true,
|
||||
ValidateJsonRawMessage: true,
|
||||
}.Froze()
|
||||
|
||||
type Container struct {
|
||||
Bar interface{}
|
||||
}
|
||||
|
||||
func (c *Container) MarshalJSON() ([]byte, error) {
|
||||
return marshalConfig.Marshal(&c.Bar)
|
||||
}
|
||||
|
||||
func TestEncodeEscape(t *testing.T) {
|
||||
should := require.New(t)
|
||||
|
||||
container := &Container{
|
||||
Bar: []string{"123<ab>", "ooo"},
|
||||
}
|
||||
out, err := marshalConfig.Marshal(container)
|
||||
should.Nil(err)
|
||||
bufout := string(out)
|
||||
|
||||
var stdbuf bytes.Buffer
|
||||
stdenc := json.NewEncoder(&stdbuf)
|
||||
stdenc.SetEscapeHTML(false)
|
||||
err = stdenc.Encode(container)
|
||||
should.Nil(err)
|
||||
stdout := string(stdbuf.Bytes())
|
||||
if stdout[len(stdout)-1:] == "\n" {
|
||||
stdout = stdout[:len(stdout)-1]
|
||||
}
|
||||
|
||||
should.Equal(stdout, bufout)
|
||||
}
|
36
api_tests/marshal_json_test.go
Normal file
36
api_tests/marshal_json_test.go
Normal file
@ -0,0 +1,36 @@
|
||||
package test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"github.com/json-iterator/go"
|
||||
"testing"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
|
||||
type Foo struct {
|
||||
Bar interface{}
|
||||
}
|
||||
|
||||
func (f Foo) MarshalJSON() ([]byte, error) {
|
||||
var buf bytes.Buffer
|
||||
err := json.NewEncoder(&buf).Encode(f.Bar)
|
||||
return buf.Bytes(), err
|
||||
}
|
||||
|
||||
|
||||
// Standard Encoder has trailing newline.
|
||||
func TestEncodeMarshalJSON(t *testing.T) {
|
||||
|
||||
foo := Foo {
|
||||
Bar: 123,
|
||||
}
|
||||
should := require.New(t)
|
||||
var buf, stdbuf bytes.Buffer
|
||||
enc := jsoniter.ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
|
||||
enc.Encode(foo)
|
||||
stdenc := json.NewEncoder(&stdbuf)
|
||||
stdenc.Encode(foo)
|
||||
should.Equal(stdbuf.Bytes(), buf.Bytes())
|
||||
}
|
128
benchmarks/stream_test.go
Normal file
128
benchmarks/stream_test.go
Normal file
@ -0,0 +1,128 @@
|
||||
package test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
)
|
||||
|
||||
func Benchmark_stream_encode_big_object(b *testing.B) {
|
||||
var buf bytes.Buffer
|
||||
var stream = jsoniter.NewStream(jsoniter.ConfigDefault, &buf, 100)
|
||||
for i := 0; i < b.N; i++ {
|
||||
buf.Reset()
|
||||
stream.Reset(&buf)
|
||||
encodeObject(stream)
|
||||
if stream.Error != nil {
|
||||
b.Errorf("error: %+v", stream.Error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestEncodeObject(t *testing.T) {
|
||||
var stream = jsoniter.NewStream(jsoniter.ConfigDefault, nil, 100)
|
||||
encodeObject(stream)
|
||||
if stream.Error != nil {
|
||||
t.Errorf("error encoding a test object: %+v", stream.Error)
|
||||
return
|
||||
}
|
||||
var m = make(map[string]interface{})
|
||||
if err := jsoniter.Unmarshal(stream.Buffer(), &m); err != nil {
|
||||
t.Errorf("error unmarshaling a test object: %+v", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func encodeObject(stream *jsoniter.Stream) {
|
||||
stream.WriteObjectStart()
|
||||
|
||||
stream.WriteObjectField("objectId")
|
||||
stream.WriteUint64(8838243212)
|
||||
|
||||
stream.WriteMore()
|
||||
stream.WriteObjectField("name")
|
||||
stream.WriteString("Jane Doe")
|
||||
|
||||
stream.WriteMore()
|
||||
stream.WriteObjectField("address")
|
||||
stream.WriteObjectStart()
|
||||
for i, field := range addressFields {
|
||||
if i != 0 {
|
||||
stream.WriteMore()
|
||||
}
|
||||
stream.WriteObjectField(field.key)
|
||||
stream.WriteString(field.val)
|
||||
}
|
||||
|
||||
stream.WriteMore()
|
||||
stream.WriteObjectField("geo")
|
||||
{
|
||||
stream.WriteObjectStart()
|
||||
stream.WriteObjectField("latitude")
|
||||
stream.WriteFloat64(-154.550817)
|
||||
stream.WriteMore()
|
||||
stream.WriteObjectField("longitude")
|
||||
stream.WriteFloat64(-84.176159)
|
||||
stream.WriteObjectEnd()
|
||||
|
||||
}
|
||||
stream.WriteObjectEnd()
|
||||
|
||||
stream.WriteMore()
|
||||
stream.WriteObjectField("specialties")
|
||||
stream.WriteArrayStart()
|
||||
for i, s := range specialties {
|
||||
if i != 0 {
|
||||
stream.WriteMore()
|
||||
}
|
||||
stream.WriteString(s)
|
||||
}
|
||||
stream.WriteArrayEnd()
|
||||
|
||||
stream.WriteMore()
|
||||
for i, text := range longText {
|
||||
if i != 0 {
|
||||
stream.WriteMore()
|
||||
}
|
||||
stream.WriteObjectField("longText" + strconv.Itoa(i))
|
||||
stream.WriteString(text)
|
||||
}
|
||||
|
||||
for i := 0; i < 25; i++ {
|
||||
num := i * 18328
|
||||
stream.WriteMore()
|
||||
stream.WriteObjectField("integerField" + strconv.Itoa(i))
|
||||
stream.WriteInt64(int64(num))
|
||||
}
|
||||
|
||||
stream.WriteObjectEnd()
|
||||
}
|
||||
|
||||
type field struct{ key, val string }
|
||||
|
||||
var (
|
||||
addressFields = []field{
|
||||
{"address1", "123 Example St"},
|
||||
{"address2", "Apartment 5D, Suite 3"},
|
||||
{"city", "Miami"},
|
||||
{"state", "FL"},
|
||||
{"postalCode", "33133"},
|
||||
{"country", "US"},
|
||||
}
|
||||
specialties = []string{
|
||||
"Web Design",
|
||||
"Go Programming",
|
||||
"Tennis",
|
||||
"Cycling",
|
||||
"Mixed martial arts",
|
||||
}
|
||||
longText = []string{
|
||||
`Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.`,
|
||||
`Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?`,
|
||||
`But I must explain to you how all this mistaken idea of denouncing pleasure and praising pain was born and I will give you a complete account of the system, and expound the actual teachings of the great explorer of the truth, the master-builder of human happiness. No one rejects, dislikes, or avoids pleasure itself, because it is pleasure, but because those who do not know how to pursue pleasure rationally encounter consequences that are extremely painful. Nor again is there anyone who loves or pursues or desires to obtain pain of itself, because it is pain, but because occasionally circumstances occur in which toil and pain can procure him some great pleasure. To take a trivial example, which of us ever undertakes laborious physical exercise, except to obtain some advantage from it? But who has any right to find fault with a man who chooses to enjoy a pleasure that has no annoying consequences, or one who avoids a pain that produces no resultant pleasure?`,
|
||||
`At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat.`,
|
||||
`On the other hand, we denounce with righteous indignation and dislike men who are so beguiled and demoralized by the charms of pleasure of the moment, so blinded by desire, that they cannot foresee the pain and trouble that are bound to ensue; and equal blame belongs to those who fail in their duty through weakness of will, which is the same as saying through shrinking from toil and pain. These cases are perfectly simple and easy to distinguish. In a free hour, when our power of choice is untrammelled and when nothing prevents our being able to do what we like best, every pleasure is to be welcomed and every pain avoided. But in certain circumstances and owing to the claims of duty or the obligations of business it will frequently occur that pleasures have to be repudiated and annoyances accepted. The wise man therefore always holds in these matters to this principle of selection: he rejects pleasures to secure other greater pleasures, or else he endures pains to avoid worse pains.`,
|
||||
}
|
||||
)
|
@ -183,11 +183,11 @@ func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
|
||||
encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
|
||||
rawMessage := *(*json.RawMessage)(ptr)
|
||||
iter := cfg.BorrowIterator([]byte(rawMessage))
|
||||
defer cfg.ReturnIterator(iter)
|
||||
iter.Read()
|
||||
if iter.Error != nil {
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
stream.WriteRaw("null")
|
||||
} else {
|
||||
cfg.ReturnIterator(iter)
|
||||
stream.WriteRaw(string(rawMessage))
|
||||
}
|
||||
}, func(ptr unsafe.Pointer) bool {
|
||||
|
@ -95,7 +95,7 @@ func ExampleGet() {
|
||||
// Crimson
|
||||
}
|
||||
|
||||
func ExampleMapKey() {
|
||||
func ExampleMyKey() {
|
||||
hello := MyKey("hello")
|
||||
output, _ := Marshal(map[*MyKey]string{&hello: "world"})
|
||||
fmt.Println(string(output))
|
||||
|
@ -1,13 +1,14 @@
|
||||
package test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/json-iterator/go"
|
||||
"github.com/stretchr/testify/require"
|
||||
"strconv"
|
||||
"testing"
|
||||
"time"
|
||||
"unsafe"
|
||||
"bytes"
|
||||
)
|
||||
|
||||
func Test_customize_type_decoder(t *testing.T) {
|
||||
@ -47,6 +48,38 @@ func Test_customize_byte_array_encoder(t *testing.T) {
|
||||
should.Equal(`"abc"`, str)
|
||||
}
|
||||
|
||||
type CustomEncoderAttachmentTestStruct struct {
|
||||
Value int32 `json:"value"`
|
||||
}
|
||||
|
||||
type CustomEncoderAttachmentTestStructEncoder struct {}
|
||||
|
||||
func (c *CustomEncoderAttachmentTestStructEncoder) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
|
||||
attachVal, ok := stream.Attachment.(int)
|
||||
stream.WriteRaw(`"`)
|
||||
stream.WriteRaw(fmt.Sprintf("%t %d", ok, attachVal))
|
||||
stream.WriteRaw(`"`)
|
||||
}
|
||||
|
||||
func (c *CustomEncoderAttachmentTestStructEncoder) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func Test_custom_encoder_attachment(t *testing.T) {
|
||||
|
||||
jsoniter.RegisterTypeEncoder("test.CustomEncoderAttachmentTestStruct", &CustomEncoderAttachmentTestStructEncoder{})
|
||||
expectedValue := 17
|
||||
should := require.New(t)
|
||||
buf := &bytes.Buffer{}
|
||||
stream := jsoniter.NewStream(jsoniter.Config{SortMapKeys: true}.Froze(), buf, 4096)
|
||||
stream.Attachment = expectedValue
|
||||
val := map[string]CustomEncoderAttachmentTestStruct{"a": {}}
|
||||
stream.WriteVal(val)
|
||||
stream.Flush()
|
||||
should.Nil(stream.Error)
|
||||
should.Equal("{\"a\":\"true 17\"}", buf.String())
|
||||
}
|
||||
|
||||
func Test_customize_field_decoder(t *testing.T) {
|
||||
type Tom struct {
|
||||
field1 string
|
||||
|
@ -153,7 +153,7 @@ func (codec *binaryAsStringCodec) Decode(ptr unsafe.Pointer, iter *jsoniter.Iter
|
||||
}
|
||||
b4 := rawBytes[i+3]
|
||||
b5 := rawBytes[i+4]
|
||||
i = i + 4
|
||||
i += 4
|
||||
b = readHex(iter, b4, b5)
|
||||
}
|
||||
bytes = append(bytes, b)
|
||||
@ -178,7 +178,7 @@ func readHex(iter *jsoniter.Iterator, b1, b2 byte) byte {
|
||||
iter.ReportError("read hex", "expects 0~9 or a~f, but found "+string([]byte{b1}))
|
||||
return 0
|
||||
}
|
||||
ret = ret * 16
|
||||
ret *= 16
|
||||
if b2 >= '0' && b2 <= '9' {
|
||||
ret = b2 - '0'
|
||||
} else if b2 >= 'a' && b2 <= 'f' {
|
||||
|
@ -18,6 +18,9 @@ type namingStrategyExtension struct {
|
||||
|
||||
func (extension *namingStrategyExtension) UpdateStructDescriptor(structDescriptor *jsoniter.StructDescriptor) {
|
||||
for _, binding := range structDescriptor.Fields {
|
||||
if unicode.IsLower(rune(binding.Field.Name()[0])) || binding.Field.Name()[0] == '_'{
|
||||
continue
|
||||
}
|
||||
tag, hastag := binding.Field.Tag().Lookup("json")
|
||||
if hastag {
|
||||
tagParts := strings.Split(tag, ",")
|
||||
|
@ -48,3 +48,19 @@ func Test_set_naming_strategy_with_omitempty(t *testing.T) {
|
||||
should.Nil(err)
|
||||
should.Equal(`{"user_name":"taowen"}`, string(output))
|
||||
}
|
||||
|
||||
func Test_set_naming_strategy_with_private_field(t *testing.T) {
|
||||
should := require.New(t)
|
||||
SetNamingStrategy(LowerCaseWithUnderscores)
|
||||
output, err := jsoniter.Marshal(struct {
|
||||
UserName string
|
||||
userId int
|
||||
_UserAge int
|
||||
}{
|
||||
UserName: "allen",
|
||||
userId: 100,
|
||||
_UserAge: 30,
|
||||
})
|
||||
should.Nil(err)
|
||||
should.Equal(`{"user_name":"allen"}`, string(output))
|
||||
}
|
||||
|
11
go.mod
Normal file
11
go.mod
Normal file
@ -0,0 +1,11 @@
|
||||
module github.com/json-iterator/go
|
||||
|
||||
go 1.12
|
||||
|
||||
require (
|
||||
github.com/davecgh/go-spew v1.1.1
|
||||
github.com/google/gofuzz v1.0.0
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421
|
||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742
|
||||
github.com/stretchr/testify v1.3.0
|
||||
)
|
15
go.sum
Normal file
15
go.sum
Normal file
@ -0,0 +1,15 @@
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
|
||||
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
27
iter.go
27
iter.go
@ -74,6 +74,7 @@ type Iterator struct {
|
||||
buf []byte
|
||||
head int
|
||||
tail int
|
||||
depth int
|
||||
captureStartedAt int
|
||||
captured []byte
|
||||
Error error
|
||||
@ -88,6 +89,7 @@ func NewIterator(cfg API) *Iterator {
|
||||
buf: nil,
|
||||
head: 0,
|
||||
tail: 0,
|
||||
depth: 0,
|
||||
}
|
||||
}
|
||||
|
||||
@ -99,6 +101,7 @@ func Parse(cfg API, reader io.Reader, bufSize int) *Iterator {
|
||||
buf: make([]byte, bufSize),
|
||||
head: 0,
|
||||
tail: 0,
|
||||
depth: 0,
|
||||
}
|
||||
}
|
||||
|
||||
@ -110,6 +113,7 @@ func ParseBytes(cfg API, input []byte) *Iterator {
|
||||
buf: input,
|
||||
head: 0,
|
||||
tail: len(input),
|
||||
depth: 0,
|
||||
}
|
||||
}
|
||||
|
||||
@ -128,6 +132,7 @@ func (iter *Iterator) Reset(reader io.Reader) *Iterator {
|
||||
iter.reader = reader
|
||||
iter.head = 0
|
||||
iter.tail = 0
|
||||
iter.depth = 0
|
||||
return iter
|
||||
}
|
||||
|
||||
@ -137,6 +142,7 @@ func (iter *Iterator) ResetBytes(input []byte) *Iterator {
|
||||
iter.buf = input
|
||||
iter.head = 0
|
||||
iter.tail = len(input)
|
||||
iter.depth = 0
|
||||
return iter
|
||||
}
|
||||
|
||||
@ -320,3 +326,24 @@ func (iter *Iterator) Read() interface{} {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// limit maximum depth of nesting, as allowed by https://tools.ietf.org/html/rfc7159#section-9
|
||||
const maxDepth = 10000
|
||||
|
||||
func (iter *Iterator) incrementDepth() (success bool) {
|
||||
iter.depth++
|
||||
if iter.depth <= maxDepth {
|
||||
return true
|
||||
}
|
||||
iter.ReportError("incrementDepth", "exceeded max depth")
|
||||
return false
|
||||
}
|
||||
|
||||
func (iter *Iterator) decrementDepth() (success bool) {
|
||||
iter.depth--
|
||||
if iter.depth >= 0 {
|
||||
return true
|
||||
}
|
||||
iter.ReportError("decrementDepth", "unexpected negative nesting")
|
||||
return false
|
||||
}
|
||||
|
@ -28,26 +28,32 @@ func (iter *Iterator) ReadArray() (ret bool) {
|
||||
func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
|
||||
c := iter.nextToken()
|
||||
if c == '[' {
|
||||
if !iter.incrementDepth() {
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c != ']' {
|
||||
iter.unreadByte()
|
||||
if !callback(iter) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
for c == ',' {
|
||||
if !callback(iter) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
}
|
||||
if c != ']' {
|
||||
iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
return true
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
return true
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
if c == 'n' {
|
||||
iter.skipThreeBytes('u', 'l', 'l')
|
||||
|
@ -77,14 +77,12 @@ func (iter *Iterator) ReadFloat32() (ret float32) {
|
||||
}
|
||||
|
||||
func (iter *Iterator) readPositiveFloat32() (ret float32) {
|
||||
value := uint64(0)
|
||||
c := byte(' ')
|
||||
i := iter.head
|
||||
// first char
|
||||
if i == iter.tail {
|
||||
return iter.readFloat32SlowPath()
|
||||
}
|
||||
c = iter.buf[i]
|
||||
c := iter.buf[i]
|
||||
i++
|
||||
ind := floatDigits[c]
|
||||
switch ind {
|
||||
@ -107,7 +105,7 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
|
||||
return
|
||||
}
|
||||
}
|
||||
value = uint64(ind)
|
||||
value := uint64(ind)
|
||||
// chars before dot
|
||||
non_decimal_loop:
|
||||
for ; i < iter.tail; i++ {
|
||||
@ -145,9 +143,7 @@ non_decimal_loop:
|
||||
}
|
||||
// too many decimal places
|
||||
return iter.readFloat32SlowPath()
|
||||
case invalidCharForNumber:
|
||||
fallthrough
|
||||
case dotInNumber:
|
||||
case invalidCharForNumber, dotInNumber:
|
||||
return iter.readFloat32SlowPath()
|
||||
}
|
||||
decimalPlaces++
|
||||
@ -218,14 +214,12 @@ func (iter *Iterator) ReadFloat64() (ret float64) {
|
||||
}
|
||||
|
||||
func (iter *Iterator) readPositiveFloat64() (ret float64) {
|
||||
value := uint64(0)
|
||||
c := byte(' ')
|
||||
i := iter.head
|
||||
// first char
|
||||
if i == iter.tail {
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
c = iter.buf[i]
|
||||
c := iter.buf[i]
|
||||
i++
|
||||
ind := floatDigits[c]
|
||||
switch ind {
|
||||
@ -248,7 +242,7 @@ func (iter *Iterator) readPositiveFloat64() (ret float64) {
|
||||
return
|
||||
}
|
||||
}
|
||||
value = uint64(ind)
|
||||
value := uint64(ind)
|
||||
// chars before dot
|
||||
non_decimal_loop:
|
||||
for ; i < iter.tail; i++ {
|
||||
@ -286,9 +280,7 @@ non_decimal_loop:
|
||||
}
|
||||
// too many decimal places
|
||||
return iter.readFloat64SlowPath()
|
||||
case invalidCharForNumber:
|
||||
fallthrough
|
||||
case dotInNumber:
|
||||
case invalidCharForNumber, dotInNumber:
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
decimalPlaces++
|
||||
@ -296,6 +288,9 @@ non_decimal_loop:
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
value = (value << 3) + (value << 1) + uint64(ind)
|
||||
if value > maxFloat64 {
|
||||
return iter.readFloat64SlowPath()
|
||||
}
|
||||
}
|
||||
}
|
||||
return iter.readFloat64SlowPath()
|
||||
|
@ -9,6 +9,7 @@ var intDigits []int8
|
||||
|
||||
const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1
|
||||
const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1
|
||||
const maxFloat64 = 1<<53 - 1
|
||||
|
||||
func init() {
|
||||
intDigits = make([]int8, 256)
|
||||
@ -339,7 +340,7 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
|
||||
}
|
||||
|
||||
func (iter *Iterator) assertInteger() {
|
||||
if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
|
||||
if iter.head < iter.tail && iter.buf[iter.head] == '.' {
|
||||
iter.ReportError("assertInteger", "can not decode float as int")
|
||||
}
|
||||
}
|
||||
|
@ -112,6 +112,9 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
|
||||
c := iter.nextToken()
|
||||
var field string
|
||||
if c == '{' {
|
||||
if !iter.incrementDepth() {
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c == '"' {
|
||||
iter.unreadByte()
|
||||
@ -121,6 +124,7 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
|
||||
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
|
||||
}
|
||||
if !callback(iter, field) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
@ -131,20 +135,23 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
|
||||
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
|
||||
}
|
||||
if !callback(iter, field) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
}
|
||||
if c != '}' {
|
||||
iter.ReportError("ReadObjectCB", `object not ended with }`)
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
return true
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
if c == '}' {
|
||||
return true
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c}))
|
||||
iter.ReportError("ReadObjectCB", `expect " after {, but found `+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
if c == 'n' {
|
||||
@ -159,15 +166,20 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
|
||||
func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
|
||||
c := iter.nextToken()
|
||||
if c == '{' {
|
||||
if !iter.incrementDepth() {
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
if c == '"' {
|
||||
iter.unreadByte()
|
||||
field := iter.ReadString()
|
||||
if iter.nextToken() != ':' {
|
||||
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
if !callback(iter, field) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
@ -175,23 +187,27 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
|
||||
field = iter.ReadString()
|
||||
if iter.nextToken() != ':' {
|
||||
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
if !callback(iter, field) {
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
c = iter.nextToken()
|
||||
}
|
||||
if c != '}' {
|
||||
iter.ReportError("ReadMapCB", `object not ended with }`)
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
return true
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
if c == '}' {
|
||||
return true
|
||||
return iter.decrementDepth()
|
||||
}
|
||||
iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
|
||||
iter.ReportError("ReadMapCB", `expect " after {, but found `+string([]byte{c}))
|
||||
iter.decrementDepth()
|
||||
return false
|
||||
}
|
||||
if c == 'n' {
|
||||
|
25
iter_skip.go
25
iter_skip.go
@ -37,17 +37,24 @@ func (iter *Iterator) SkipAndReturnBytes() []byte {
|
||||
return iter.stopCapture()
|
||||
}
|
||||
|
||||
type captureBuffer struct {
|
||||
startedAt int
|
||||
captured []byte
|
||||
// SkipAndAppendBytes skips next JSON element and appends its content to
|
||||
// buffer, returning the result.
|
||||
func (iter *Iterator) SkipAndAppendBytes(buf []byte) []byte {
|
||||
iter.startCaptureTo(buf, iter.head)
|
||||
iter.Skip()
|
||||
return iter.stopCapture()
|
||||
}
|
||||
|
||||
func (iter *Iterator) startCapture(captureStartedAt int) {
|
||||
func (iter *Iterator) startCaptureTo(buf []byte, captureStartedAt int) {
|
||||
if iter.captured != nil {
|
||||
panic("already in capture mode")
|
||||
}
|
||||
iter.captureStartedAt = captureStartedAt
|
||||
iter.captured = make([]byte, 0, 32)
|
||||
iter.captured = buf
|
||||
}
|
||||
|
||||
func (iter *Iterator) startCapture(captureStartedAt int) {
|
||||
iter.startCaptureTo(make([]byte, 0, 32), captureStartedAt)
|
||||
}
|
||||
|
||||
func (iter *Iterator) stopCapture() []byte {
|
||||
@ -58,13 +65,7 @@ func (iter *Iterator) stopCapture() []byte {
|
||||
remaining := iter.buf[iter.captureStartedAt:iter.head]
|
||||
iter.captureStartedAt = -1
|
||||
iter.captured = nil
|
||||
if len(captured) == 0 {
|
||||
copied := make([]byte, len(remaining))
|
||||
copy(copied, remaining)
|
||||
return copied
|
||||
}
|
||||
captured = append(captured, remaining...)
|
||||
return captured
|
||||
return append(captured, remaining...)
|
||||
}
|
||||
|
||||
// Skip skips a json object and positions to relatively the next json object
|
||||
|
@ -22,6 +22,9 @@ func (iter *Iterator) skipNumber() {
|
||||
|
||||
func (iter *Iterator) skipArray() {
|
||||
level := 1
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
for {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
switch iter.buf[i] {
|
||||
@ -31,8 +34,14 @@ func (iter *Iterator) skipArray() {
|
||||
i = iter.head - 1 // it will be i++ soon
|
||||
case '[': // If open symbol, increase level
|
||||
level++
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
case ']': // If close symbol, increase level
|
||||
level--
|
||||
if !iter.decrementDepth() {
|
||||
return
|
||||
}
|
||||
|
||||
// If we have returned to the original level, we're done
|
||||
if level == 0 {
|
||||
@ -50,6 +59,10 @@ func (iter *Iterator) skipArray() {
|
||||
|
||||
func (iter *Iterator) skipObject() {
|
||||
level := 1
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
|
||||
for {
|
||||
for i := iter.head; i < iter.tail; i++ {
|
||||
switch iter.buf[i] {
|
||||
@ -59,8 +72,14 @@ func (iter *Iterator) skipObject() {
|
||||
i = iter.head - 1 // it will be i++ soon
|
||||
case '{': // If open symbol, increase level
|
||||
level++
|
||||
if !iter.incrementDepth() {
|
||||
return
|
||||
}
|
||||
case '}': // If close symbol, increase level
|
||||
level--
|
||||
if !iter.decrementDepth() {
|
||||
return
|
||||
}
|
||||
|
||||
// If we have returned to the original level, we're done
|
||||
if level == 0 {
|
||||
|
@ -2,12 +2,22 @@
|
||||
|
||||
package jsoniter
|
||||
|
||||
import "fmt"
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
)
|
||||
|
||||
func (iter *Iterator) skipNumber() {
|
||||
if !iter.trySkipNumber() {
|
||||
iter.unreadByte()
|
||||
iter.ReadFloat32()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
return
|
||||
}
|
||||
iter.ReadFloat64()
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
iter.Error = nil
|
||||
iter.ReadBigFloat()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -158,6 +158,27 @@ func Test_encode_byte_array(t *testing.T) {
|
||||
should.Equal(`"AQID"`, string(bytes))
|
||||
}
|
||||
|
||||
func Test_encode_empty_byte_array(t *testing.T) {
|
||||
should := require.New(t)
|
||||
bytes, err := json.Marshal([]byte{})
|
||||
should.Nil(err)
|
||||
should.Equal(`""`, string(bytes))
|
||||
bytes, err = jsoniter.Marshal([]byte{})
|
||||
should.Nil(err)
|
||||
should.Equal(`""`, string(bytes))
|
||||
}
|
||||
|
||||
func Test_encode_nil_byte_array(t *testing.T) {
|
||||
should := require.New(t)
|
||||
var nilSlice []byte
|
||||
bytes, err := json.Marshal(nilSlice)
|
||||
should.Nil(err)
|
||||
should.Equal(`null`, string(bytes))
|
||||
bytes, err = jsoniter.Marshal(nilSlice)
|
||||
should.Nil(err)
|
||||
should.Equal(`null`, string(bytes))
|
||||
}
|
||||
|
||||
func Test_decode_byte_array_from_base64(t *testing.T) {
|
||||
should := require.New(t)
|
||||
data := []byte{}
|
||||
|
@ -2,6 +2,7 @@ package misc_tests
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"math"
|
||||
"testing"
|
||||
|
||||
"github.com/json-iterator/go"
|
||||
@ -77,6 +78,26 @@ func Test_read_number(t *testing.T) {
|
||||
should.Equal(`92233720368547758079223372036854775807`, string(val))
|
||||
}
|
||||
|
||||
func Test_encode_inf(t *testing.T) {
|
||||
should := require.New(t)
|
||||
_, err := json.Marshal(math.Inf(1))
|
||||
should.Error(err)
|
||||
_, err = jsoniter.Marshal(float32(math.Inf(1)))
|
||||
should.Error(err)
|
||||
_, err = jsoniter.Marshal(math.Inf(-1))
|
||||
should.Error(err)
|
||||
}
|
||||
|
||||
func Test_encode_nan(t *testing.T) {
|
||||
should := require.New(t)
|
||||
_, err := json.Marshal(math.NaN())
|
||||
should.Error(err)
|
||||
_, err = jsoniter.Marshal(float32(math.NaN()))
|
||||
should.Error(err)
|
||||
_, err = jsoniter.Marshal(math.NaN())
|
||||
should.Error(err)
|
||||
}
|
||||
|
||||
func Benchmark_jsoniter_float(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
input := []byte(`1.1123,`)
|
||||
|
@ -5,7 +5,9 @@ package misc_tests
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"math/rand"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
@ -70,6 +72,95 @@ func Test_float_as_int(t *testing.T) {
|
||||
should.NotNil(jsoniter.Unmarshal([]byte(`1.1`), &i))
|
||||
}
|
||||
|
||||
// chunkedData is io.Reader which returns random amount of data in range [1, chunkedData.chunkSize].
|
||||
// It simulates chunked data on from HTTP server, which is commonly used by net/http package.
|
||||
type chunkedData struct {
|
||||
chunkSize int
|
||||
data []byte
|
||||
head int
|
||||
}
|
||||
|
||||
// Read is implementation of the io.Reader which returns random amount of data in range [1, chunkedData.chunkSize].
|
||||
func (c *chunkedData) Read(p []byte) (n int, err error) {
|
||||
to := c.head + int(rand.Int31n(int32(c.chunkSize))+1)
|
||||
|
||||
// copy does not copy more data then p can consume
|
||||
n = copy(p, c.data[c.head:to])
|
||||
c.head = c.head + n
|
||||
if c.head >= len(c.data) {
|
||||
err = io.EOF
|
||||
}
|
||||
return n, err
|
||||
}
|
||||
|
||||
// TestIterator_ReadInt_chunkedInput validates the behaviour of Iterator.ReadInt() method in where:
|
||||
// - it reads data from io.Reader,
|
||||
// - expected value is 0 (zero)
|
||||
// - Iterator.tail == Iterator.head
|
||||
// - Iterator.tail < len(Iterator.buf)
|
||||
// - value in buffer after Iterator.tail is presented from previous read and has '.' character.
|
||||
func TestIterator_ReadInt_chunkedInput(t *testing.T) {
|
||||
should := require.New(t)
|
||||
|
||||
data := &chunkedData{
|
||||
data: jsonFloatIntArray(t, 10),
|
||||
}
|
||||
|
||||
// because this test is rely on randomness of chunkedData, we are doing multiple iterations to
|
||||
// be sure, that we can hit a required case.
|
||||
for data.chunkSize = 3; data.chunkSize <= len(data.data); data.chunkSize++ {
|
||||
data.head = 0
|
||||
|
||||
iter := jsoniter.Parse(jsoniter.ConfigDefault, data, data.chunkSize)
|
||||
i := 0
|
||||
for iter.ReadArray() {
|
||||
// every even item is float, let's just skip it.
|
||||
if i%2 == 0 {
|
||||
iter.Skip()
|
||||
i++
|
||||
continue
|
||||
}
|
||||
|
||||
should.Zero(iter.ReadInt())
|
||||
should.NoError(iter.Error)
|
||||
|
||||
i++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// jsonFloatIntArray generates JSON array where every
|
||||
// - even item is float 0.1
|
||||
// - odd item is integer 0
|
||||
//
|
||||
// [0.1, 0, 0.1, 0]
|
||||
func jsonFloatIntArray(t *testing.T, numberOfItems int) []byte {
|
||||
t.Helper()
|
||||
numbers := make([]jsoniter.Any, numberOfItems)
|
||||
for i := range numbers {
|
||||
switch i % 2 {
|
||||
case 0:
|
||||
numbers[i] = jsoniter.WrapFloat64(0.1)
|
||||
default:
|
||||
numbers[i] = jsoniter.WrapInt64(0)
|
||||
}
|
||||
}
|
||||
|
||||
fixture, err := jsoniter.ConfigFastest.Marshal(numbers)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
b := &bytes.Buffer{}
|
||||
|
||||
require.NoError(
|
||||
t,
|
||||
json.Compact(b, fixture),
|
||||
"json should be compactable",
|
||||
)
|
||||
return b.Bytes()
|
||||
}
|
||||
|
||||
func Benchmark_jsoniter_encode_int(b *testing.B) {
|
||||
stream := jsoniter.NewStream(jsoniter.ConfigDefault, ioutil.Discard, 64)
|
||||
for n := 0; n < b.N; n++ {
|
||||
|
@ -114,7 +114,7 @@ func Test_overwrite_interface_value_with_nil(t *testing.T) {
|
||||
|
||||
err := json.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
|
||||
should.NoError(err)
|
||||
should.Equal(42, (*(wrapper.Payload.(*Payload))).Value)
|
||||
should.Equal(42, wrapper.Payload.(*Payload).Value)
|
||||
|
||||
err = json.Unmarshal([]byte(`{"payload": null}`), &wrapper)
|
||||
should.NoError(err)
|
||||
@ -128,7 +128,7 @@ func Test_overwrite_interface_value_with_nil(t *testing.T) {
|
||||
|
||||
err = jsoniter.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
|
||||
should.Equal(nil, err)
|
||||
should.Equal(42, (*(wrapper.Payload.(*Payload))).Value)
|
||||
should.Equal(42, wrapper.Payload.(*Payload).Value)
|
||||
|
||||
err = jsoniter.Unmarshal([]byte(`{"payload": null}`), &wrapper)
|
||||
should.Equal(nil, err)
|
||||
|
@ -42,3 +42,11 @@ func Test_map_eface_of_eface(t *testing.T) {
|
||||
should.NoError(err)
|
||||
should.Equal(`{"1":2,"3":"4"}`, output)
|
||||
}
|
||||
|
||||
func Test_encode_nil_map(t *testing.T) {
|
||||
should := require.New(t)
|
||||
var nilMap map[string]string
|
||||
output, err := jsoniter.MarshalToString(nilMap)
|
||||
should.NoError(err)
|
||||
should.Equal(`null`, output)
|
||||
}
|
||||
|
@ -4,6 +4,7 @@ import (
|
||||
"encoding/json"
|
||||
"github.com/json-iterator/go"
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
@ -15,6 +16,243 @@ type Level2 struct {
|
||||
World string
|
||||
}
|
||||
|
||||
func Test_deep_nested(t *testing.T) {
|
||||
type unstructured interface{}
|
||||
|
||||
testcases := []struct {
|
||||
name string
|
||||
data []byte
|
||||
expectError string
|
||||
}{
|
||||
{
|
||||
name: "array under maxDepth",
|
||||
data: []byte(`{"a":` + strings.Repeat(`[`, 10000-1) + strings.Repeat(`]`, 10000-1) + `}`),
|
||||
expectError: "",
|
||||
},
|
||||
{
|
||||
name: "array over maxDepth",
|
||||
data: []byte(`{"a":` + strings.Repeat(`[`, 10000) + strings.Repeat(`]`, 10000) + `}`),
|
||||
expectError: "max depth",
|
||||
},
|
||||
{
|
||||
name: "object under maxDepth",
|
||||
data: []byte(`{"a":` + strings.Repeat(`{"a":`, 10000-1) + `0` + strings.Repeat(`}`, 10000-1) + `}`),
|
||||
expectError: "",
|
||||
},
|
||||
{
|
||||
name: "object over maxDepth",
|
||||
data: []byte(`{"a":` + strings.Repeat(`{"a":`, 10000) + `0` + strings.Repeat(`}`, 10000) + `}`),
|
||||
expectError: "max depth",
|
||||
},
|
||||
}
|
||||
|
||||
targets := []struct {
|
||||
name string
|
||||
new func() interface{}
|
||||
}{
|
||||
{
|
||||
name: "unstructured",
|
||||
new: func() interface{} {
|
||||
var v interface{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed named field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed missing field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
B interface{} `json:"b"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 1 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 2 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 3 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 4 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 5 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
E interface{} `json:"e"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 6 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
E interface{} `json:"e"`
|
||||
F interface{} `json:"f"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 7 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
E interface{} `json:"e"`
|
||||
F interface{} `json:"f"`
|
||||
G interface{} `json:"g"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 8 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
E interface{} `json:"e"`
|
||||
F interface{} `json:"f"`
|
||||
G interface{} `json:"g"`
|
||||
H interface{} `json:"h"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 9 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
E interface{} `json:"e"`
|
||||
F interface{} `json:"f"`
|
||||
G interface{} `json:"g"`
|
||||
H interface{} `json:"h"`
|
||||
I interface{} `json:"i"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 10 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
E interface{} `json:"e"`
|
||||
F interface{} `json:"f"`
|
||||
G interface{} `json:"g"`
|
||||
H interface{} `json:"h"`
|
||||
I interface{} `json:"i"`
|
||||
J interface{} `json:"j"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "typed 11 field",
|
||||
new: func() interface{} {
|
||||
v := struct {
|
||||
A interface{} `json:"a"`
|
||||
B interface{} `json:"b"`
|
||||
C interface{} `json:"c"`
|
||||
D interface{} `json:"d"`
|
||||
E interface{} `json:"e"`
|
||||
F interface{} `json:"f"`
|
||||
G interface{} `json:"g"`
|
||||
H interface{} `json:"h"`
|
||||
I interface{} `json:"i"`
|
||||
J interface{} `json:"j"`
|
||||
K interface{} `json:"k"`
|
||||
}{}
|
||||
return &v
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testcases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
for _, target := range targets {
|
||||
t.Run(target.name, func(t *testing.T) {
|
||||
err := jsoniter.Unmarshal(tc.data, target.new())
|
||||
if len(tc.expectError) == 0 {
|
||||
if err != nil {
|
||||
t.Errorf("unexpected error: %v", err)
|
||||
}
|
||||
} else {
|
||||
if err == nil {
|
||||
t.Errorf("expected error, got none")
|
||||
} else if !strings.Contains(err.Error(), tc.expectError) {
|
||||
t.Errorf("expected error containing '%s', got: %v", tc.expectError, err)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_nested(t *testing.T) {
|
||||
iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"hello": [{"world": "value1"}, {"world": "value2"}]}`)
|
||||
l1 := Level1{}
|
||||
|
@ -2,6 +2,7 @@ package misc_tests
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/json-iterator/go"
|
||||
@ -147,3 +148,225 @@ func Test_unmarshal_into_existing_value(t *testing.T) {
|
||||
"k": "v",
|
||||
}, m)
|
||||
}
|
||||
|
||||
// for issue421
|
||||
func Test_unmarshal_anonymous_struct_invalid(t *testing.T) {
|
||||
should := require.New(t)
|
||||
t0 := struct {
|
||||
Field1 string
|
||||
}{}
|
||||
|
||||
cfg := jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
err := cfg.UnmarshalFromString(`{"Field1":`, &t0)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t0).String())
|
||||
|
||||
cfgCaseSensitive := jsoniter.Config{
|
||||
CaseSensitive: true,
|
||||
}.Froze()
|
||||
|
||||
type TestObject1 struct {
|
||||
Field1 struct {
|
||||
InnerField1 string
|
||||
}
|
||||
}
|
||||
t1 := TestObject1{}
|
||||
err = cfgCaseSensitive.UnmarshalFromString(`{"Field1":{"InnerField1"`, &t1)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t1.Field1).String())
|
||||
should.Contains(err.Error(), reflect.TypeOf(t1).String())
|
||||
|
||||
type TestObject2 struct {
|
||||
Field1 int
|
||||
Field2 struct {
|
||||
InnerField1 string
|
||||
InnerField2 string
|
||||
}
|
||||
}
|
||||
t2 := TestObject2{}
|
||||
err = cfgCaseSensitive.UnmarshalFromString(`{"Field2":{"InnerField2"`, &t2)
|
||||
should.NotNil(err)
|
||||
should.NotContains(err.Error(), reflect.TypeOf(t2.Field2).String())
|
||||
should.Contains(err.Error(), reflect.TypeOf(t2).String())
|
||||
|
||||
type TestObject3 struct {
|
||||
Field1 int
|
||||
		Field2 int
		Field3 struct {
			InnerField1 string
			InnerField2 string
			InnerField3 string
		}
	}
	t3 := TestObject3{}
	err = cfgCaseSensitive.UnmarshalFromString(`{"Field3":{"InnerField3"`, &t3)
	should.NotNil(err)
	should.NotContains(err.Error(), reflect.TypeOf(t3.Field3).String())
	should.Contains(err.Error(), reflect.TypeOf(t3).String())

	type TestObject4 struct {
		Field1 int
		Field2 int
		Field3 int
		Field4 struct {
			InnerField1 string
			InnerField2 string
			InnerField3 string
			InnerField4 string
		}
	}
	t4 := TestObject4{}
	err = cfgCaseSensitive.UnmarshalFromString(`{"Field4":{"InnerField4"`, &t4)
	should.NotNil(err)
	should.NotContains(err.Error(), reflect.TypeOf(t4.Field4).String())
	should.Contains(err.Error(), reflect.TypeOf(t4).String())

	type TestObject5 struct {
		Field1 int
		Field2 int
		Field3 int
		Field4 int
		Field5 struct {
			InnerField1 string
			InnerField2 string
			InnerField3 string
			InnerField4 string
			InnerField5 string
		}
	}
	t5 := TestObject5{}
	err = cfgCaseSensitive.UnmarshalFromString(`{"Field5":{"InnerField5"`, &t5)
	should.NotNil(err)
	should.NotContains(err.Error(), reflect.TypeOf(t5.Field5).String())
	should.Contains(err.Error(), reflect.TypeOf(t5).String())

	type TestObject6 struct {
		Field1 int
		Field2 int
		Field3 int
		Field4 int
		Field5 int
		Field6 struct {
			InnerField1 string
			InnerField2 string
			InnerField3 string
			InnerField4 string
			InnerField5 string
			InnerField6 string
		}
	}
	t6 := TestObject6{}
	err = cfgCaseSensitive.UnmarshalFromString(`{"Field6":{"InnerField6"`, &t6)
	should.NotNil(err)
	should.NotContains(err.Error(), reflect.TypeOf(t6.Field6).String())
	should.Contains(err.Error(), reflect.TypeOf(t6).String())

	type TestObject7 struct {
		Field1 int
		Field2 int
		Field3 int
		Field4 int
		Field5 int
		Field6 int
		Field7 struct {
			InnerField1 string
			InnerField2 string
			InnerField3 string
			InnerField4 string
			InnerField5 string
			InnerField6 string
			InnerField7 string
		}
	}
	t7 := TestObject7{}
	err = cfgCaseSensitive.UnmarshalFromString(`{"Field7":{"InnerField7"`, &t7)
	should.NotNil(err)
	should.NotContains(err.Error(), reflect.TypeOf(t7.Field7).String())
	should.Contains(err.Error(), reflect.TypeOf(t7).String())

	type TestObject8 struct {
		Field1 int
		Field2 int
		Field3 int
		Field4 int
		Field5 int
		Field6 int
		Field7 int
		Field8 struct {
			InnerField1 string
			InnerField2 string
			InnerField3 string
			InnerField4 string
			InnerField5 string
			InnerField6 string
			InnerField7 string
			InnerField8 string
		}
	}
	t8 := TestObject8{}
	err = cfgCaseSensitive.UnmarshalFromString(`{"Field8":{"InnerField8"`, &t8)
	should.NotNil(err)
	should.NotContains(err.Error(), reflect.TypeOf(t8.Field8).String())
	should.Contains(err.Error(), reflect.TypeOf(t8).String())

	type TestObject9 struct {
		Field1 int
		Field2 int
		Field3 int
		Field4 int
		Field5 int
		Field6 int
		Field7 int
		Field8 int
		Field9 struct {
			InnerField1 string
			InnerField2 string
			InnerField3 string
			InnerField4 string
			InnerField5 string
			InnerField6 string
			InnerField7 string
			InnerField8 string
			InnerField9 string
		}
	}
	t9 := TestObject9{}
	err = cfgCaseSensitive.UnmarshalFromString(`{"Field9":{"InnerField9"`, &t9)
	should.NotNil(err)
	should.NotContains(err.Error(), reflect.TypeOf(t9.Field9).String())
	should.Contains(err.Error(), reflect.TypeOf(t9).String())

	type TestObject10 struct {
		Field1 int
		Field2 int
		Field3 int
		Field4 int
		Field5 int
		Field6 int
		Field7 int
		Field8 int
		Field9 int
		Field10 struct {
			InnerField1 string
			InnerField2 string
			InnerField3 string
			InnerField4 string
			InnerField5 string
			InnerField6 string
			InnerField7 string
			InnerField8 string
			InnerField9 string
			InnerField10 string
		}
	}
	t10 := TestObject10{}
	err = cfgCaseSensitive.UnmarshalFromString(`{"Field10":{"InnerField10"`, &t10)
	should.NotNil(err)
	should.NotContains(err.Error(), reflect.TypeOf(t10.Field10).String())
	should.Contains(err.Error(), reflect.TypeOf(t10).String())

	err = cfg.UnmarshalFromString(`{"Field10":{"InnerField10"`, &t10)
	should.NotNil(err)
	should.NotContains(err.Error(), reflect.TypeOf(t10.Field10).String())
	should.Contains(err.Error(), reflect.TypeOf(t10).String())
}
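For context, a minimal sketch (not part of the diff; `Outer` is a hypothetical stand-in for the `TestObjectN` types above) of the behaviour these assertions pin down: when the input runs out inside a nested anonymous struct, the reported error names the enclosing named type rather than the anonymous inner type.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Outer is an illustrative type, not taken from the test file.
	type Outer struct {
		Inner struct{ A string }
	}
	var o Outer
	// Truncated input fails while decoding the anonymous inner struct.
	err := jsoniter.UnmarshalFromString(`{"Inner":{"A"`, &o)
	fmt.Println(err) // the error text mentions the Outer type, not the anonymous struct
}
```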
@@ -42,6 +42,25 @@ func Test_marshal_invalid_json_raw_message(t *testing.T) {
	should.Nil(aouterr)
}

func Test_marshal_nil_json_raw_message(t *testing.T) {
	type A struct {
		Nil1 jsoniter.RawMessage `json:"raw1"`
		Nil2 json.RawMessage `json:"raw2"`
	}

	a := A{}
	should := require.New(t)
	aout, aouterr := jsoniter.Marshal(&a)
	should.Equal(`{"raw1":null,"raw2":null}`, string(aout))
	should.Nil(aouterr)

	a.Nil1 = []byte(`Any`)
	a.Nil2 = []byte(`Any`)
	should.Nil(jsoniter.Unmarshal(aout, &a))
	should.Nil(a.Nil1)
	should.Nil(a.Nil2)
}

func Test_raw_message_memory_not_copied_issue(t *testing.T) {
	jsonStream := `{"name":"xxxxx","bundle_id":"com.zonst.majiang","app_platform":"ios","app_category":"100103", "budget_day":1000,"bidding_min":1,"bidding_max":2,"bidding_type":"CPM", "freq":{"open":true,"type":"day","num":100},"speed":1, "targeting":{"vendor":{"open":true,"list":["zonst"]}, "geo_code":{"open":true,"list":["156110100"]},"app_category":{"open":true,"list":["100101"]}, "day_parting":{"open":true,"list":["100409","100410"]},"device_type":{"open":true,"list":["ipad"]}, "os_version":{"open":true,"list":[10]},"carrier":{"open":true,"list":["mobile"]}, "network":{"open":true,"list":["4G"]}},"url":{"tracking_imp_url":"http://www.baidu.com", "tracking_clk_url":"http://www.baidu.com","jump_url":"http://www.baidu.com","deep_link_url":"http://www.baidu.com"}}`
	type IteratorObject struct {

@@ -60,11 +60,12 @@ func (b *ctx) append(prefix string) *ctx {

// ReadVal copy the underlying JSON into go interface, same as json.Unmarshal
func (iter *Iterator) ReadVal(obj interface{}) {
	depth := iter.depth
	cacheKey := reflect2.RTypeOf(obj)
	decoder := iter.cfg.getDecoderFromCache(cacheKey)
	if decoder == nil {
		typ := reflect2.TypeOf(obj)
		if typ.Kind() != reflect.Ptr {
		if typ == nil || typ.Kind() != reflect.Ptr {
			iter.ReportError("ReadVal", "can only unmarshal into pointer")
			return
		}
@@ -76,6 +77,10 @@ func (iter *Iterator) ReadVal(obj interface{}) {
		return
	}
	decoder.Decode(ptr, iter)
	if iter.depth != depth {
		iter.ReportError("ReadVal", "unexpected mismatched nesting")
		return
	}
}

// WriteVal copy the go interface into underlying JSON, same as json.Marshal
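A short usage sketch (my own illustration, not from the diff) of what the added nil-type check in `ReadVal` means for callers: a nil or non-pointer target is reported as an error instead of crashing.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var m map[string]int
	// Passing a non-pointer target: jsoniter reports "can only unmarshal into pointer".
	fmt.Println(jsoniter.Unmarshal([]byte(`{"a":1}`), m))
	// Passing an untyped nil: also reported as an error rather than panicking.
	fmt.Println(jsoniter.Unmarshal([]byte(`{"a":1}`), nil))
}
```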
@@ -338,13 +338,13 @@ func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
	for i := 0; i < structType.NumField(); i++ {
		field := structType.Field(i)
		tag, hastag := field.Tag().Lookup(ctx.getTagKey())
		if ctx.onlyTaggedField && !hastag {
		if ctx.onlyTaggedField && !hastag && !field.Anonymous() {
			continue
		}
		if tag == "-" || field.Name() == "_" {
			continue
		}
		tagParts := strings.Split(tag, ",")
		if tag == "-" {
			continue
		}
		if field.Anonymous() && (tag == "" || tagParts[0] == "") {
			if field.Type().Kind() == reflect.Struct {
				structDescriptor := describeStruct(ctx, field.Type())
@@ -475,7 +475,7 @@ func calcFieldNames(originalFieldName string, tagProvidedFieldName string, whole
		fieldNames = []string{tagProvidedFieldName}
	}
	// private?
	isNotExported := unicode.IsLower(rune(originalFieldName[0]))
	isNotExported := unicode.IsLower(rune(originalFieldName[0])) || originalFieldName[0] == '_'
	if isNotExported {
		fieldNames = []string{}
	}
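A small illustration (my own; it assumes the behaviour pinned down by the `_UnderscoreField` and `UnnamedLiteral` test cases further down) of the effect of treating `_` names as unexported in `describeStruct`/`calcFieldNames`:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	v := struct {
		Visible int
		_       struct{} // blank field: skipped by the encoder
	}{Visible: 1}
	out, _ := jsoniter.Marshal(v)
	fmt.Println(string(out)) // {"Visible":1}
}
```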
@@ -33,11 +33,19 @@ type jsonRawMessageCodec struct {
}

func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
	*((*json.RawMessage)(ptr)) = json.RawMessage(iter.SkipAndReturnBytes())
	if iter.ReadNil() {
		*((*json.RawMessage)(ptr)) = nil
	} else {
		*((*json.RawMessage)(ptr)) = iter.SkipAndReturnBytes()
	}
}

func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
	if *((*json.RawMessage)(ptr)) == nil {
		stream.WriteNil()
	} else {
		stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
	}
}

func (codec *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
@@ -48,11 +56,19 @@ type jsoniterRawMessageCodec struct {
}

func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
	*((*RawMessage)(ptr)) = RawMessage(iter.SkipAndReturnBytes())
	if iter.ReadNil() {
		*((*RawMessage)(ptr)) = nil
	} else {
		*((*RawMessage)(ptr)) = iter.SkipAndReturnBytes()
	}
}

func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteRaw(string(*((*RawMessage)(ptr))))
	if *((*RawMessage)(ptr)) == nil {
		stream.WriteNil()
	} else {
		stream.WriteRaw(string(*((*RawMessage)(ptr))))
	}
}

func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
@@ -49,6 +49,33 @@ func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
			return decoder
		}
	}

	ptrType := reflect2.PtrTo(typ)
	if ptrType.Implements(unmarshalerType) {
		return &referenceDecoder{
			&unmarshalerDecoder{
				valType: ptrType,
			},
		}
	}
	if typ.Implements(unmarshalerType) {
		return &unmarshalerDecoder{
			valType: typ,
		}
	}
	if ptrType.Implements(textUnmarshalerType) {
		return &referenceDecoder{
			&textUnmarshalerDecoder{
				valType: ptrType,
			},
		}
	}
	if typ.Implements(textUnmarshalerType) {
		return &textUnmarshalerDecoder{
			valType: typ,
		}
	}

	switch typ.Kind() {
	case reflect.String:
		return decoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
@@ -63,19 +90,6 @@ func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
		typ = reflect2.DefaultTypeOfKind(typ.Kind())
		return &numericMapKeyDecoder{decoderOfType(ctx, typ)}
	default:
		ptrType := reflect2.PtrTo(typ)
		if ptrType.Implements(textMarshalerType) {
			return &referenceDecoder{
				&textUnmarshalerDecoder{
					valType: ptrType,
				},
			}
		}
		if typ.Implements(textMarshalerType) {
			return &textUnmarshalerDecoder{
				valType: typ,
			}
		}
		return &lazyErrorDecoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
	}
}
@@ -91,6 +105,19 @@ func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
			return encoder
		}
	}

	if typ == textMarshalerType {
		return &directTextMarshalerEncoder{
			stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
		}
	}
	if typ.Implements(textMarshalerType) {
		return &textMarshalerEncoder{
			valType: typ,
			stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
		}
	}

	switch typ.Kind() {
	case reflect.String:
		return encoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
@@ -105,17 +132,6 @@ func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
		typ = reflect2.DefaultTypeOfKind(typ.Kind())
		return &numericMapKeyEncoder{encoderOfType(ctx, typ)}
	default:
		if typ == textMarshalerType {
			return &directTextMarshalerEncoder{
				stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
			}
		}
		if typ.Implements(textMarshalerType) {
			return &textMarshalerEncoder{
				valType: typ,
				stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
			}
		}
		if typ.Kind() == reflect.Interface {
			return &dynamicMapKeyEncoder{ctx, typ}
		}
@@ -151,10 +167,6 @@ func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if c == '}' {
		return
	}
	if c != '"' {
		iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
		return
	}
	iter.unreadByte()
	key := decoder.keyType.UnsafeNew()
	decoder.keyDecoder.Decode(key, iter)
@@ -237,6 +249,10 @@ type mapEncoder struct {
}

func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	if *(*unsafe.Pointer)(ptr) == nil {
		stream.WriteNil()
		return
	}
	stream.WriteObjectStart()
	iter := encoder.mapType.UnsafeIterate(ptr)
	for i := 0; iter.HasNext(); i++ {
@@ -274,16 +290,17 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteObjectStart()
	mapIter := encoder.mapType.UnsafeIterate(ptr)
	subStream := stream.cfg.BorrowStream(nil)
	subStream.Attachment = stream.Attachment
	subIter := stream.cfg.BorrowIterator(nil)
	keyValues := encodedKeyValues{}
	for mapIter.HasNext() {
		subStream.buf = make([]byte, 0, 64)
		key, elem := mapIter.UnsafeNext()
		subStreamIndex := subStream.Buffered()
		encoder.keyEncoder.Encode(key, subStream)
		if subStream.Error != nil && subStream.Error != io.EOF && stream.Error == nil {
			stream.Error = subStream.Error
		}
		encodedKey := subStream.Buffer()
		encodedKey := subStream.Buffer()[subStreamIndex:]
		subIter.ResetBytes(encodedKey)
		decodedKey := subIter.ReadString()
		if stream.indention > 0 {
@@ -294,7 +311,7 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
		encoder.elemEncoder.Encode(elem, subStream)
		keyValues = append(keyValues, encodedKV{
			key: decodedKey,
			keyValue: subStream.Buffer(),
			keyValue: subStream.Buffer()[subStreamIndex:],
		})
	}
	sort.Sort(keyValues)
@@ -304,6 +321,9 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
		}
		stream.Write(keyValue.keyValue)
	}
	if subStream.Error != nil && stream.Error == nil {
		stream.Error = subStream.Error
	}
	stream.WriteObjectEnd()
	stream.cfg.ReturnStream(subStream)
	stream.cfg.ReturnIterator(subIter)
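A sketch of what the added map-key branches in `decoderOfMapKey`/`encoderOfMapKey` enable (my own example; `CelsiusKey` is a hypothetical type): map keys that implement `encoding.TextMarshaler` and `encoding.TextUnmarshaler` round-trip through jsoniter.

```go
package main

import (
	"fmt"
	"strconv"

	jsoniter "github.com/json-iterator/go"
)

// CelsiusKey is an illustrative map key type implementing the text interfaces.
type CelsiusKey int

func (c CelsiusKey) MarshalText() ([]byte, error) {
	return []byte(strconv.Itoa(int(c)) + "C"), nil
}

func (c *CelsiusKey) UnmarshalText(b []byte) error {
	// The key arrives without quotes, e.g. "20C"; strip the unit suffix.
	n, err := strconv.Atoi(string(b[:len(b)-1]))
	*c = CelsiusKey(n)
	return err
}

func main() {
	out, _ := jsoniter.Marshal(map[CelsiusKey]string{20: "mild"})
	fmt.Println(string(out)) // {"20C":"mild"}

	var back map[CelsiusKey]string
	_ = jsoniter.Unmarshal(out, &back)
	fmt.Println(back[20]) // mild
}
```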
@@ -3,8 +3,9 @@ package jsoniter

import (
	"encoding"
	"encoding/json"
	"github.com/modern-go/reflect2"
	"unsafe"

	"github.com/modern-go/reflect2"
)

var marshalerType = reflect2.TypeOfPtr((*json.Marshaler)(nil)).Elem()
@@ -98,6 +99,12 @@ func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	if err != nil {
		stream.Error = err
	} else {
		// html escape was already done by jsoniter
		// but the extra '\n' should be trimed
		l := len(bytes)
		if l > 0 && bytes[l-1] == '\n' {
			bytes = bytes[:l-1]
		}
		stream.Write(bytes)
	}
}
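An illustration (my own; `Metric` is a hypothetical type) of the `marshalerEncoder` path above: output produced by a custom `json.Marshaler` is written through jsoniter, and the trailing newline added by the internal encoder is trimmed, so the caller sees clean output.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Metric carries a hand-written MarshalJSON implementation.
type Metric struct{ Name string }

func (m Metric) MarshalJSON() ([]byte, error) {
	return []byte(`{"name":"` + m.Name + `"}`), nil
}

func main() {
	out, err := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(Metric{Name: "rps"})
	fmt.Printf("%q %v\n", string(out), err) // "{\"name\":\"rps\"}" <nil> -- no trailing newline
}
```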
@@ -432,17 +432,19 @@ func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
}

func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
	src := *((*[]byte)(ptr))
	if len(src) == 0 {
	if codec.sliceType.UnsafeIsNil(ptr) {
		stream.WriteNil()
		return
	}
	src := *((*[]byte)(ptr))
	encoding := base64.StdEncoding
	stream.writeByte('"')
	size := encoding.EncodedLen(len(src))
	buf := make([]byte, size)
	encoding.Encode(buf, src)
	stream.buf = append(stream.buf, buf...)
	if len(src) != 0 {
		size := encoding.EncodedLen(len(src))
		buf := make([]byte, size)
		encoding.Encode(buf, src)
		stream.buf = append(stream.buf, buf...)
	}
	stream.writeByte('"')
}
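In practice the `base64Codec` change distinguishes nil from empty byte slices, matching encoding/json; a minimal sketch (my own example):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	type Payload struct {
		Data []byte `json:"data"`
	}
	a, _ := jsoniter.Marshal(Payload{Data: nil})
	b, _ := jsoniter.Marshal(Payload{Data: []byte{}})
	fmt.Println(string(a)) // {"data":null}
	fmt.Println(string(b)) // {"data":""}
}
```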
@@ -2,7 +2,6 @@ package jsoniter

import (
	"github.com/modern-go/reflect2"
	"reflect"
	"unsafe"
)

@@ -10,9 +9,6 @@ func decoderOfOptional(ctx *ctx, typ reflect2.Type) ValDecoder {
	ptrType := typ.(*reflect2.UnsafePtrType)
	elemType := ptrType.Elem()
	decoder := decoderOfType(ctx, elemType)
	if ctx.prefix == "" && elemType.Kind() == reflect.Ptr {
		return &dereferenceDecoder{elemType, decoder}
	}
	return &OptionalDecoder{elemType, decoder}
}
@@ -500,16 +500,20 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	var c byte
	for c = ','; c == ','; c = iter.nextToken() {
		decoder.decodeOneField(ptr, iter)
	}
	if iter.Error != nil && iter.Error != io.EOF {
	if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	if c != '}' {
		iter.ReportError("struct Decode", `expect }, but found `+string([]byte{c}))
	}
	iter.decrementDepth()
}
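A rough sketch (my own; it relies on the library's default nesting limit) of what the `incrementDepth`/`decrementDepth` bookkeeping buys: pathologically nested input is rejected with an error instead of recursing without bound.

```go
package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Absurdly deep nesting; the exact limit is an internal detail.
	deep := strings.Repeat("[", 20000) + strings.Repeat("]", 20000)
	var v interface{}
	err := jsoniter.Unmarshal([]byte(deep), &v)
	fmt.Println(err) // non-nil: the nesting limit was exceeded
}
```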

func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *Iterator) {
@@ -530,8 +534,8 @@ func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *It
		}
	}
	if fieldDecoder == nil {
		msg := "found unknown field: " + field
		if decoder.disallowUnknownFields {
			msg := "found unknown field: " + field
			iter.ReportError("ReadObject", msg)
		}
		c := iter.nextToken()
@@ -571,6 +575,9 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		if iter.readFieldHash() == decoder.fieldHash {
			decoder.fieldDecoder.Decode(ptr, iter)
@@ -581,9 +588,10 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
			break
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
	if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type twoFieldsStructDecoder struct {
@@ -598,6 +606,9 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -611,9 +622,10 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
			break
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
	if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type threeFieldsStructDecoder struct {
@@ -630,6 +642,9 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -645,9 +660,10 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
			break
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
	if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type fourFieldsStructDecoder struct {
@@ -666,6 +682,9 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -683,9 +702,10 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
			break
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
	if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type fiveFieldsStructDecoder struct {
@@ -706,6 +726,9 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -725,9 +748,10 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
			break
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
	if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type sixFieldsStructDecoder struct {
@@ -750,6 +774,9 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -771,9 +798,10 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
			break
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
	if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type sevenFieldsStructDecoder struct {
@@ -798,6 +826,9 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -821,9 +852,10 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
			break
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
	if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type eightFieldsStructDecoder struct {
@@ -850,6 +882,9 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -875,9 +910,10 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
			break
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
	if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type nineFieldsStructDecoder struct {
@@ -906,6 +942,9 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -933,9 +972,10 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
			break
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
	if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type tenFieldsStructDecoder struct {
@@ -966,6 +1006,9 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -995,9 +1038,10 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
			break
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
	if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type structFieldDecoder struct {
@@ -1031,6 +1075,11 @@ type stringModeNumberDecoder struct {
}

func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if iter.WhatIsNext() == NilValue {
		decoder.elemDecoder.Decode(ptr, iter)
		return
	}

	c := iter.nextToken()
	if c != '"' {
		iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
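The `stringModeNumberDecoder` change above makes `null` acceptable for numeric fields tagged with `,string`; a minimal sketch (my own example, mirroring the case added in the value tests further down):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var v struct {
		Field int `json:"field,string"`
	}
	err := jsoniter.Unmarshal([]byte(`{"field": null}`), &v)
	fmt.Println(v.Field, err) // 0 <nil> -- null leaves the field at its zero value
}
```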
@@ -200,6 +200,7 @@ type stringModeStringEncoder struct {

func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	tempStream := encoder.cfg.BorrowStream(nil)
	tempStream.Attachment = stream.Attachment
	defer encoder.cfg.ReturnStream(tempStream)
	encoder.elemEncoder.Encode(ptr, tempStream)
	stream.WriteString(string(tempStream.Buffer()))
@@ -105,6 +105,15 @@ func Test_skip_and_return_bytes_with_reader(t *testing.T) {
	should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(skipped))
}

func Test_append_skip_and_return_bytes_with_reader(t *testing.T) {
	should := require.New(t)
	iter := jsoniter.Parse(jsoniter.ConfigDefault, bytes.NewBufferString(`[ {"a" : [{"stream": "c"}], "d": 102 }, "stream"]`), 4)
	iter.ReadArray()
	buf := make([]byte, 0, 1024)
	buf = iter.SkipAndAppendBytes(buf)
	should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(buf))
}

func Test_skip_empty(t *testing.T) {
	should := require.New(t)
	should.NotNil(jsoniter.Get([]byte("")).LastError())
@@ -13,7 +13,7 @@ func init() {
			`{"hello":{}}`, // valid
			`{"hello":{}}}`, // invalid
			`{"hello": { "hello": 1}}`, // valid
			`{abc}`, // invalid
			`{abc}`, // invalid
		},
	})
}
@@ -103,14 +103,14 @@ func (stream *Stream) Flush() error {
	if stream.Error != nil {
		return stream.Error
	}
	n, err := stream.out.Write(stream.buf)
	_, err := stream.out.Write(stream.buf)
	if err != nil {
		if stream.Error == nil {
			stream.Error = err
		}
		return err
	}
	stream.buf = stream.buf[n:]
	stream.buf = stream.buf[:0]
	return nil
}
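A small usage sketch (mine, not from the diff) of the `Stream` API after this change: `Flush` now resets the buffer to zero length instead of re-slicing it, and `WriteMore` no longer flushes implicitly, so the caller decides when bytes reach the writer.

```go
package main

import (
	"os"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	stream := jsoniter.NewStream(jsoniter.ConfigDefault, os.Stdout, 64)
	stream.WriteArrayStart()
	stream.WriteInt(1)
	stream.WriteMore() // only writes the comma; no implicit flush
	stream.WriteInt(2)
	stream.WriteArrayEnd()
	stream.Flush() // emits [1,2] and resets the internal buffer
}
```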

@@ -177,7 +177,6 @@ func (stream *Stream) WriteEmptyObject() {
func (stream *Stream) WriteMore() {
	stream.writeByte(',')
	stream.writeIndention(0)
	stream.Flush()
}

// WriteArrayStart write [ with possible indention
@@ -1,6 +1,7 @@
package jsoniter

import (
	"fmt"
	"math"
	"strconv"
)
@@ -13,6 +14,10 @@ func init() {

// WriteFloat32 write float32 to stream
func (stream *Stream) WriteFloat32(val float32) {
	if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
		stream.Error = fmt.Errorf("unsupported value: %f", val)
		return
	}
	abs := math.Abs(float64(val))
	fmt := byte('f')
	// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
@@ -26,6 +31,10 @@ func (stream *Stream) WriteFloat32(val float32) {

// WriteFloat32Lossy write float32 to stream with ONLY 6 digits precision although much much faster
func (stream *Stream) WriteFloat32Lossy(val float32) {
	if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
		stream.Error = fmt.Errorf("unsupported value: %f", val)
		return
	}
	if val < 0 {
		stream.writeByte('-')
		val = -val
@@ -54,6 +63,10 @@ func (stream *Stream) WriteFloat32Lossy(val float32) {

// WriteFloat64 write float64 to stream
func (stream *Stream) WriteFloat64(val float64) {
	if math.IsInf(val, 0) || math.IsNaN(val) {
		stream.Error = fmt.Errorf("unsupported value: %f", val)
		return
	}
	abs := math.Abs(val)
	fmt := byte('f')
	// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
@@ -67,6 +80,10 @@ func (stream *Stream) WriteFloat64(val float64) {

// WriteFloat64Lossy write float64 to stream with ONLY 6 digits precision although much much faster
func (stream *Stream) WriteFloat64Lossy(val float64) {
	if math.IsInf(val, 0) || math.IsNaN(val) {
		stream.Error = fmt.Errorf("unsupported value: %f", val)
		return
	}
	if val < 0 {
		stream.writeByte('-')
		val = -val
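My own illustration of the new guards above: infinities and NaN now surface as errors from `Marshal`, as they do with encoding/json, instead of producing invalid JSON.

```go
package main

import (
	"fmt"
	"math"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	_, err := jsoniter.Marshal(math.Inf(1))
	fmt.Println(err) // non-nil: +Inf is not a valid JSON number

	_, err = jsoniter.Marshal(map[string]float64{"x": math.NaN()})
	fmt.Println(err) // non-nil as well
}
```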

@@ -1,8 +1,9 @@
package jsoniter

import (
	"github.com/stretchr/testify/require"
	"testing"

	"github.com/stretchr/testify/require"
)

func Test_writeByte_should_grow_buffer(t *testing.T) {
@@ -62,8 +63,24 @@ func (w *NopWriter) Write(p []byte) (n int, err error) {
}

func Test_flush_buffer_should_stop_grow_buffer(t *testing.T) {
	// Stream an array of a zillion zeros.
	writer := new(NopWriter)
	NewEncoder(writer).Encode(make([]int, 10000000))
	stream := NewStream(ConfigDefault, writer, 512)
	stream.WriteArrayStart()
	for i := 0; i < 10000000; i++ {
		stream.WriteInt(0)
		stream.WriteMore()
		stream.Flush()
	}
	stream.WriteInt(0)
	stream.WriteArrayEnd()

	// Confirm that the buffer didn't have to grow.
	should := require.New(t)
	should.Equal(8, writer.bufferSize)

	// 512 is the internal buffer size set in NewEncoder
	//
	// Flush is called after each array element, so only the first 8 bytes of it
	// is ever used, and it is never extended. Capacity remains 512.
	should.Equal(512, writer.bufferSize)
}
@@ -1,3 +1,7 @@
// +build go1.15
// remove these tests temporarily until https://github.com/golang/go/issues/38105 and
// https://github.com/golang/go/issues/38940 is fixed

package test

import (

@@ -60,6 +60,7 @@ func init() {
		(*SameLevel2NoTags)(nil),
		(*SameLevel2Tagged)(nil),
		(*EmbeddedPtr)(nil),
		(*UnnamedLiteral)(nil),
	)
}

@@ -231,3 +232,7 @@ type EmbeddedPtrOption struct {
type EmbeddedPtr struct {
	EmbeddedPtrOption `json:","`
}

type UnnamedLiteral struct {
	_ struct{}
}
@@ -121,10 +121,11 @@ func init() {
			F1 int32 `json:"F1"`
			F2 int32 `json:"F2,string"`
		})(nil),
		(*struct {
			F1 string `json:"F1"`
			F2 string `json:"F2,string"`
		})(nil),
		// remove temporarily until https://github.com/golang/go/issues/38126 is fixed
		// (*struct {
		// 	F1 string `json:"F1"`
		// 	F2 string `json:"F2,string"`
		// })(nil),
		(*struct {
			F1 uint8 `json:"F1"`
			F2 uint8 `json:"F2,string"`
@@ -103,18 +103,44 @@ func Test_invalid_float(t *testing.T) {
}

func Test_chan(t *testing.T) {
	t.Skip("do not support chan")

	type TestObject struct {
		MyChan chan bool
		MyField int
	}

	should := require.New(t)
	obj := TestObject{}
	str, err := json.Marshal(obj)
	should.Nil(err)
	should.Equal(``, str)

	t.Run("Encode channel", func(t *testing.T) {
		should := require.New(t)
		str, err := jsoniter.Marshal(obj)
		should.NotNil(err)
		should.Nil(str)
	})

	t.Run("Encode channel using compatible configuration", func(t *testing.T) {
		should := require.New(t)
		str, err := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(obj)
		should.NotNil(err)
		should.Nil(str)
	})
}

func Test_invalid_in_map(t *testing.T) {
	testMap := map[string]interface{}{"chan": make(chan interface{})}

	t.Run("Encode map with invalid content", func(t *testing.T) {
		should := require.New(t)
		str, err := jsoniter.Marshal(testMap)
		should.NotNil(err)
		should.Nil(str)
	})

	t.Run("Encode map with invalid content using compatible configuration", func(t *testing.T) {
		should := require.New(t)
		str, err := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(testMap)
		should.NotNil(err)
		should.Nil(str)
	})
}

func Test_invalid_number(t *testing.T) {
@@ -224,3 +250,13 @@ func Test_EmptyInput(t *testing.T) {
		t.Errorf("Expected error")
	}
}

type Foo struct {
	A jsoniter.Any
}

func Test_nil_any(t *testing.T) {
	should := require.New(t)
	data, _ := jsoniter.Marshal(&Foo{})
	should.Equal(`{"A":null}`, string(data))
}
@@ -2,7 +2,9 @@ package test

import (
	"encoding/json"
	"fmt"
	"math/big"
	"time"
)

func init() {
@@ -27,6 +29,9 @@ func init() {
		nilMap,
		&nilMap,
		map[string]*json.RawMessage{"hello": pRawMessage(json.RawMessage("[]"))},
		map[Date]bool{{}: true},
		map[Date2]bool{{}: true},
		map[customKey]string{customKey(1): "bar"},
	)
	unmarshalCases = append(unmarshalCases, unmarshalCase{
		ptr: (*map[string]string)(nil),
@@ -37,6 +42,23 @@ func init() {
	}, unmarshalCase{
		ptr: (*map[string]*json.RawMessage)(nil),
		input: "{\"test\":[{\"key\":\"value\"}]}",
	}, unmarshalCase{
		ptr: (*map[Date]bool)(nil),
		input: `{
			"2018-12-12": true,
			"2018-12-13": true,
			"2018-12-14": true
		}`,
	}, unmarshalCase{
		ptr: (*map[Date2]bool)(nil),
		input: `{
			"2018-12-12": true,
			"2018-12-13": true,
			"2018-12-14": true
		}`,
	}, unmarshalCase{
		ptr: (*map[customKey]string)(nil),
		input: `{"foo": "bar"}`,
	})
}

@@ -49,3 +71,62 @@ type MyString string
func (ms MyString) Hello() string {
	return string(ms)
}

type Date struct {
	time.Time
}

func (d *Date) UnmarshalJSON(b []byte) error {
	dateStr := string(b) // something like `"2017-08-20"`

	if dateStr == "null" {
		return nil
	}

	t, err := time.Parse(`"2006-01-02"`, dateStr)
	if err != nil {
		return fmt.Errorf("cant parse date: %#v", err)
	}

	d.Time = t
	return nil
}

func (d *Date) MarshalJSON() ([]byte, error) {
	return []byte(d.Time.Format("2006-01-02")), nil
}

type Date2 struct {
	time.Time
}

func (d Date2) UnmarshalJSON(b []byte) error {
	dateStr := string(b) // something like `"2017-08-20"`

	if dateStr == "null" {
		return nil
	}

	t, err := time.Parse(`"2006-01-02"`, dateStr)
	if err != nil {
		return fmt.Errorf("cant parse date: %#v", err)
	}

	d.Time = t
	return nil
}

func (d Date2) MarshalJSON() ([]byte, error) {
	return []byte(d.Time.Format("2006-01-02")), nil
}

type customKey int32

func (c customKey) MarshalText() ([]byte, error) {
	return []byte("foo"), nil
}

func (c *customKey) UnmarshalText(value []byte) error {
	*c = 1
	return nil
}
value_tests/ptr_114_test.go (new file)
@@ -0,0 +1,15 @@
// +build go1.14

package test

func init() {
	unmarshalCases = append(unmarshalCases, unmarshalCase{
		obj: func() interface{} {
			var i int
			pi := &i
			ppi := &pi
			return &ppi
		},
		input: "null",
	})
}

@@ -27,13 +27,5 @@ func init() {
			return &pi
		},
		input: "null",
	}, unmarshalCase{
		obj: func() interface{} {
			var i int
			pi := &i
			ppi := &pi
			return &ppi
		},
		input: "null",
	})
}
@@ -7,15 +7,21 @@ import (
func init() {
	marshalCases = append(marshalCases,
		json.RawMessage("{}"),
		selectedMarshalCase{struct {
		json.RawMessage("12345"),
		json.RawMessage("3.14"),
		json.RawMessage("-0.5e10"),
		struct {
			Env string `json:"env"`
			Extra json.RawMessage `json:"extra,omitempty"`
		}{
			Env: "jfdk",
		}},
		},
	)
	unmarshalCases = append(unmarshalCases, unmarshalCase{
		ptr: (*json.RawMessage)(nil),
		input: `[1,2,3]`,
	}, unmarshalCase{
		ptr: (*json.RawMessage)(nil),
		input: `1.122e+250`,
	})
}
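A short sketch (my own) of the cases added above: a `json.RawMessage` holding a bare number is written verbatim, and a numeric document can be captured as raw bytes on the way in.

```go
package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	out, _ := jsoniter.Marshal(json.RawMessage("-0.5e10"))
	fmt.Println(string(out)) // -0.5e10

	var raw json.RawMessage
	_ = jsoniter.Unmarshal([]byte(`1.122e+250`), &raw)
	fmt.Println(string(raw)) // 1.122e+250
}
```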
@@ -26,6 +26,11 @@ func init() {
			Field int `json:"field"`
		})(nil),
		input: `{"field": null}`,
	}, unmarshalCase{
		ptr: (*struct {
			Field int `json:"field,string"`
		})(nil),
		input: `{"field": null}`,
	}, unmarshalCase{
		ptr: (*struct {
			ID int `json:"id"`
@@ -194,6 +199,11 @@ func init() {
			C: 21,
			d: time.NewTimer(10 * time.Second),
		},
		struct {
			_UnderscoreField string
		}{
			"should not marshal",
		},
	)
}