mirror of https://github.com/json-iterator/go.git
Compare commits
19 Commits
03217c3e97
908eaed151
eec24895fe
1ba732a07d
819acad769
695ec2b83b
028e2ef2bd
976454858b
27518f6661
94869abf43
459f0e30ae
0039f4ac3d
fb5614a4ca
f71b9090aa
08047c174c
68347ec4d6
0fd91468bb
1bc9828b4f
24c3d57281
@@ -16,7 +16,7 @@ func Unmarshal(data []byte, v interface{}) error {
	return ConfigDefault.Unmarshal(data, v)
}

-// UnmarshalFromString convenient method to read from string instead of []byte
+// UnmarshalFromString is a convenient method to read from string instead of []byte
func UnmarshalFromString(str string, v interface{}) error {
	return ConfigDefault.UnmarshalFromString(str, v)
}
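As a quick orientation, a minimal sketch of the documented helper (not part of the diff; the struct and input literal are illustrative):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var v struct {
		Hello string `json:"hello"`
	}
	// UnmarshalFromString saves the caller an explicit []byte(...) conversion.
	if err := jsoniter.UnmarshalFromString(`{"hello":"world"}`, &v); err != nil {
		panic(err)
	}
	fmt.Println(v.Hello) // world
}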
api_tests/marshal_json_escape_test.go (new file, 47 lines)
@@ -0,0 +1,47 @@
package test

import (
	"bytes"
	"encoding/json"
	"testing"

	jsoniter "github.com/json-iterator/go"
	"github.com/stretchr/testify/require"
)

var marshalConfig = jsoniter.Config{
	EscapeHTML:             false,
	SortMapKeys:            true,
	ValidateJsonRawMessage: true,
}.Froze()

type Container struct {
	Bar interface{}
}

func (c *Container) MarshalJSON() ([]byte, error) {
	return marshalConfig.Marshal(&c.Bar)
}

func TestEncodeEscape(t *testing.T) {
	should := require.New(t)

	container := &Container{
		Bar: []string{"123<ab>", "ooo"},
	}
	out, err := marshalConfig.Marshal(container)
	should.Nil(err)
	bufout := string(out)

	var stdbuf bytes.Buffer
	stdenc := json.NewEncoder(&stdbuf)
	stdenc.SetEscapeHTML(false)
	err = stdenc.Encode(container)
	should.Nil(err)
	stdout := string(stdbuf.Bytes())
	if stdout[len(stdout)-1:] == "\n" {
		stdout = stdout[:len(stdout)-1]
	}

	should.Equal(stdout, bufout)
}
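The test above pins down the escaping behavior. A hedged sketch of the same point (the input string is illustrative): the default config escapes HTML-significant characters, while EscapeHTML: false emits them verbatim, matching encoding/json with SetEscapeHTML(false).

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	noEscape := jsoniter.Config{EscapeHTML: false}.Froze()

	s, _ := jsoniter.MarshalToString("123<ab>") // default config escapes HTML characters
	fmt.Println(s) // "123\u003cab\u003e"

	s, _ = noEscape.MarshalToString("123<ab>")
	fmt.Println(s) // "123<ab>"
}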
go.mod (new file, 11 lines)
@@ -0,0 +1,11 @@
module github.com/json-iterator/go

go 1.12

require (
	github.com/davecgh/go-spew v1.1.1
	github.com/google/gofuzz v1.0.0
	github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421
	github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742
	github.com/stretchr/testify v1.3.0
)
go.sum (new file, 14 lines)
@@ -0,0 +1,14 @@
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
iter.go (27 lines changed)
@@ -74,6 +74,7 @@ type Iterator struct {
	buf              []byte
	head             int
	tail             int
	depth            int
	captureStartedAt int
	captured         []byte
	Error            error
@@ -88,6 +89,7 @@ func NewIterator(cfg API) *Iterator {
		buf:   nil,
		head:  0,
		tail:  0,
		depth: 0,
	}
}

@@ -99,6 +101,7 @@ func Parse(cfg API, reader io.Reader, bufSize int) *Iterator {
		buf:   make([]byte, bufSize),
		head:  0,
		tail:  0,
		depth: 0,
	}
}

@@ -110,6 +113,7 @@ func ParseBytes(cfg API, input []byte) *Iterator {
		buf:   input,
		head:  0,
		tail:  len(input),
		depth: 0,
	}
}

@@ -128,6 +132,7 @@ func (iter *Iterator) Reset(reader io.Reader) *Iterator {
	iter.reader = reader
	iter.head = 0
	iter.tail = 0
	iter.depth = 0
	return iter
}

@@ -137,6 +142,7 @@ func (iter *Iterator) ResetBytes(input []byte) *Iterator {
	iter.buf = input
	iter.head = 0
	iter.tail = len(input)
	iter.depth = 0
	return iter
}

@@ -320,3 +326,24 @@ func (iter *Iterator) Read() interface{} {
		return nil
	}
}

// limit maximum depth of nesting, as allowed by https://tools.ietf.org/html/rfc7159#section-9
const maxDepth = 10000

func (iter *Iterator) incrementDepth() (success bool) {
	iter.depth++
	if iter.depth <= maxDepth {
		return true
	}
	iter.ReportError("incrementDepth", "exceeded max depth")
	return false
}

func (iter *Iterator) decrementDepth() (success bool) {
	iter.depth--
	if iter.depth >= 0 {
		return true
	}
	iter.ReportError("decrementDepth", "unexpected negative nesting")
	return false
}
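For context, a minimal sketch of what the new limit means for callers (the depth of 10001 is chosen only to exceed maxDepth; the exact error text comes from ReportError above):

package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// One level deeper than maxDepth (10000): decoding now fails instead of recursing without bound.
	doc := strings.Repeat("[", 10001) + strings.Repeat("]", 10001)

	var v interface{}
	err := jsoniter.Unmarshal([]byte(doc), &v)
	fmt.Println(err) // an error mentioning the exceeded max depth
}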
@@ -28,26 +28,32 @@ func (iter *Iterator) ReadArray() (ret bool) {
func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
	c := iter.nextToken()
	if c == '[' {
		if !iter.incrementDepth() {
			return false
		}
		c = iter.nextToken()
		if c != ']' {
			iter.unreadByte()
			if !callback(iter) {
				iter.decrementDepth()
				return false
			}
			c = iter.nextToken()
			for c == ',' {
				if !callback(iter) {
					iter.decrementDepth()
					return false
				}
				c = iter.nextToken()
			}
			if c != ']' {
				iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
				iter.decrementDepth()
				return false
			}
-			return true
+			return iter.decrementDepth()
		}
-		return true
+		return iter.decrementDepth()
	}
	if c == 'n' {
		iter.skipThreeBytes('u', 'l', 'l')
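A hedged usage sketch of the callback API that these depth checks guard (the element values are illustrative):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[1, 2, 3]`)
	sum := 0
	iter.ReadArrayCB(func(it *jsoniter.Iterator) bool {
		sum += it.ReadInt() // one element per callback invocation
		return true         // returning false stops the iteration early
	})
	fmt.Println(sum) // 6
}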
@@ -112,6 +112,9 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
	c := iter.nextToken()
	var field string
	if c == '{' {
		if !iter.incrementDepth() {
			return false
		}
		c = iter.nextToken()
		if c == '"' {
			iter.unreadByte()
@@ -121,6 +124,7 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
				iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
			}
			if !callback(iter, field) {
				iter.decrementDepth()
				return false
			}
			c = iter.nextToken()
@@ -131,20 +135,23 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
					iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
				}
				if !callback(iter, field) {
					iter.decrementDepth()
					return false
				}
				c = iter.nextToken()
			}
			if c != '}' {
				iter.ReportError("ReadObjectCB", `object not ended with }`)
				iter.decrementDepth()
				return false
			}
-			return true
+			return iter.decrementDepth()
		}
		if c == '}' {
-			return true
+			return iter.decrementDepth()
		}
		iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c}))
		iter.decrementDepth()
		return false
	}
	if c == 'n' {
@@ -159,15 +166,20 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
	c := iter.nextToken()
	if c == '{' {
		if !iter.incrementDepth() {
			return false
		}
		c = iter.nextToken()
		if c == '"' {
			iter.unreadByte()
			field := iter.ReadString()
			if iter.nextToken() != ':' {
				iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
				iter.decrementDepth()
				return false
			}
			if !callback(iter, field) {
				iter.decrementDepth()
				return false
			}
			c = iter.nextToken()
@@ -175,23 +187,27 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
				field = iter.ReadString()
				if iter.nextToken() != ':' {
					iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
					iter.decrementDepth()
					return false
				}
				if !callback(iter, field) {
					iter.decrementDepth()
					return false
				}
				c = iter.nextToken()
			}
			if c != '}' {
				iter.ReportError("ReadMapCB", `object not ended with }`)
				iter.decrementDepth()
				return false
			}
-			return true
+			return iter.decrementDepth()
		}
		if c == '}' {
-			return true
+			return iter.decrementDepth()
		}
		iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
		iter.decrementDepth()
		return false
	}
	if c == 'n' {
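ReadObjectCB and ReadMapCB get the same treatment; a hedged sketch of their callback shape (field names and values are illustrative):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"a": 1, "b": 2}`)
	total := 0
	iter.ReadObjectCB(func(it *jsoniter.Iterator, field string) bool {
		total += it.ReadInt() // the callback receives one field name per invocation
		return true
	})
	fmt.Println(total) // 3
}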
iter_skip.go (25 lines changed)
@@ -37,17 +37,24 @@ func (iter *Iterator) SkipAndReturnBytes() []byte {
	return iter.stopCapture()
}

-type captureBuffer struct {
-	startedAt int
-	captured  []byte
-}
+// SkipAndAppendBytes skips next JSON element and appends its content to
+// buffer, returning the result.
+func (iter *Iterator) SkipAndAppendBytes(buf []byte) []byte {
+	iter.startCaptureTo(buf, iter.head)
+	iter.Skip()
+	return iter.stopCapture()
+}

-func (iter *Iterator) startCapture(captureStartedAt int) {
+func (iter *Iterator) startCaptureTo(buf []byte, captureStartedAt int) {
	if iter.captured != nil {
		panic("already in capture mode")
	}
	iter.captureStartedAt = captureStartedAt
-	iter.captured = make([]byte, 0, 32)
+	iter.captured = buf
}

+func (iter *Iterator) startCapture(captureStartedAt int) {
+	iter.startCaptureTo(make([]byte, 0, 32), captureStartedAt)
+}
+
func (iter *Iterator) stopCapture() []byte {
@@ -58,13 +65,7 @@ func (iter *Iterator) stopCapture() []byte {
	remaining := iter.buf[iter.captureStartedAt:iter.head]
	iter.captureStartedAt = -1
	iter.captured = nil
-	if len(captured) == 0 {
-		copied := make([]byte, len(remaining))
-		copy(copied, remaining)
-		return copied
-	}
-	captured = append(captured, remaining...)
-	return captured
+	return append(captured, remaining...)
}

// Skip skips a json object and positions to relatively the next json object
@@ -22,6 +22,9 @@ func (iter *Iterator) skipNumber() {

func (iter *Iterator) skipArray() {
	level := 1
	if !iter.incrementDepth() {
		return
	}
	for {
		for i := iter.head; i < iter.tail; i++ {
			switch iter.buf[i] {
@@ -31,8 +34,14 @@ func (iter *Iterator) skipArray() {
				i = iter.head - 1 // it will be i++ soon
			case '[': // If open symbol, increase level
				level++
				if !iter.incrementDepth() {
					return
				}
			case ']': // If close symbol, increase level
				level--
				if !iter.decrementDepth() {
					return
				}

				// If we have returned to the original level, we're done
				if level == 0 {
@@ -50,6 +59,10 @@ func (iter *Iterator) skipArray() {

func (iter *Iterator) skipObject() {
	level := 1
	if !iter.incrementDepth() {
		return
	}

	for {
		for i := iter.head; i < iter.tail; i++ {
			switch iter.buf[i] {
@@ -59,8 +72,14 @@ func (iter *Iterator) skipObject() {
				i = iter.head - 1 // it will be i++ soon
			case '{': // If open symbol, increase level
				level++
				if !iter.incrementDepth() {
					return
				}
			case '}': // If close symbol, increase level
				level--
				if !iter.decrementDepth() {
					return
				}

				// If we have returned to the original level, we're done
				if level == 0 {
@@ -158,6 +158,27 @@ func Test_encode_byte_array(t *testing.T) {
	should.Equal(`"AQID"`, string(bytes))
}

func Test_encode_empty_byte_array(t *testing.T) {
	should := require.New(t)
	bytes, err := json.Marshal([]byte{})
	should.Nil(err)
	should.Equal(`""`, string(bytes))
	bytes, err = jsoniter.Marshal([]byte{})
	should.Nil(err)
	should.Equal(`""`, string(bytes))
}

func Test_encode_nil_byte_array(t *testing.T) {
	should := require.New(t)
	var nilSlice []byte
	bytes, err := json.Marshal(nilSlice)
	should.Nil(err)
	should.Equal(`null`, string(bytes))
	bytes, err = jsoniter.Marshal(nilSlice)
	should.Nil(err)
	should.Equal(`null`, string(bytes))
}

func Test_decode_byte_array_from_base64(t *testing.T) {
	should := require.New(t)
	data := []byte{}
@@ -2,6 +2,7 @@ package misc_tests

import (
	"encoding/json"
	"math"
	"testing"

	"github.com/json-iterator/go"
@@ -77,6 +78,26 @@ func Test_read_number(t *testing.T) {
	should.Equal(`92233720368547758079223372036854775807`, string(val))
}

func Test_encode_inf(t *testing.T) {
	should := require.New(t)
	_, err := json.Marshal(math.Inf(1))
	should.Error(err)
	_, err = jsoniter.Marshal(float32(math.Inf(1)))
	should.Error(err)
	_, err = jsoniter.Marshal(math.Inf(-1))
	should.Error(err)
}

func Test_encode_nan(t *testing.T) {
	should := require.New(t)
	_, err := json.Marshal(math.NaN())
	should.Error(err)
	_, err = jsoniter.Marshal(float32(math.NaN()))
	should.Error(err)
	_, err = jsoniter.Marshal(math.NaN())
	should.Error(err)
}

func Benchmark_jsoniter_float(b *testing.B) {
	b.ReportAllocs()
	input := []byte(`1.1123,`)
@@ -42,3 +42,11 @@ func Test_map_eface_of_eface(t *testing.T) {
	should.NoError(err)
	should.Equal(`{"1":2,"3":"4"}`, output)
}

func Test_encode_nil_map(t *testing.T) {
	should := require.New(t)
	var nilMap map[string]string
	output, err := jsoniter.MarshalToString(nilMap)
	should.NoError(err)
	should.Equal(`null`, output)
}
@@ -4,6 +4,7 @@ import (
	"encoding/json"
	"github.com/json-iterator/go"
	"reflect"
	"strings"
	"testing"
)

@@ -15,6 +16,243 @@ type Level2 struct {
	World string
}

func Test_deep_nested(t *testing.T) {
	type unstructured interface{}

	testcases := []struct {
		name        string
		data        []byte
		expectError string
	}{
		{
			name:        "array under maxDepth",
			data:        []byte(`{"a":` + strings.Repeat(`[`, 10000-1) + strings.Repeat(`]`, 10000-1) + `}`),
			expectError: "",
		},
		{
			name:        "array over maxDepth",
			data:        []byte(`{"a":` + strings.Repeat(`[`, 10000) + strings.Repeat(`]`, 10000) + `}`),
			expectError: "max depth",
		},
		{
			name:        "object under maxDepth",
			data:        []byte(`{"a":` + strings.Repeat(`{"a":`, 10000-1) + `0` + strings.Repeat(`}`, 10000-1) + `}`),
			expectError: "",
		},
		{
			name:        "object over maxDepth",
			data:        []byte(`{"a":` + strings.Repeat(`{"a":`, 10000) + `0` + strings.Repeat(`}`, 10000) + `}`),
			expectError: "max depth",
		},
	}

	targets := []struct {
		name string
		new  func() interface{}
	}{
		{
			name: "unstructured",
			new: func() interface{} {
				var v interface{}
				return &v
			},
		},
		{
			name: "typed named field",
			new: func() interface{} {
				v := struct {
					A interface{} `json:"a"`
				}{}
				return &v
			},
		},
		{
			name: "typed missing field",
			new: func() interface{} {
				v := struct {
					B interface{} `json:"b"`
				}{}
				return &v
			},
		},
		{
			name: "typed 1 field",
			new: func() interface{} {
				v := struct {
					A interface{} `json:"a"`
				}{}
				return &v
			},
		},
		{
			name: "typed 2 field",
			new: func() interface{} {
				v := struct {
					A interface{} `json:"a"`
					B interface{} `json:"b"`
				}{}
				return &v
			},
		},
		{
			name: "typed 3 field",
			new: func() interface{} {
				v := struct {
					A interface{} `json:"a"`
					B interface{} `json:"b"`
					C interface{} `json:"c"`
				}{}
				return &v
			},
		},
		{
			name: "typed 4 field",
			new: func() interface{} {
				v := struct {
					A interface{} `json:"a"`
					B interface{} `json:"b"`
					C interface{} `json:"c"`
					D interface{} `json:"d"`
				}{}
				return &v
			},
		},
		{
			name: "typed 5 field",
			new: func() interface{} {
				v := struct {
					A interface{} `json:"a"`
					B interface{} `json:"b"`
					C interface{} `json:"c"`
					D interface{} `json:"d"`
					E interface{} `json:"e"`
				}{}
				return &v
			},
		},
		{
			name: "typed 6 field",
			new: func() interface{} {
				v := struct {
					A interface{} `json:"a"`
					B interface{} `json:"b"`
					C interface{} `json:"c"`
					D interface{} `json:"d"`
					E interface{} `json:"e"`
					F interface{} `json:"f"`
				}{}
				return &v
			},
		},
		{
			name: "typed 7 field",
			new: func() interface{} {
				v := struct {
					A interface{} `json:"a"`
					B interface{} `json:"b"`
					C interface{} `json:"c"`
					D interface{} `json:"d"`
					E interface{} `json:"e"`
					F interface{} `json:"f"`
					G interface{} `json:"g"`
				}{}
				return &v
			},
		},
		{
			name: "typed 8 field",
			new: func() interface{} {
				v := struct {
					A interface{} `json:"a"`
					B interface{} `json:"b"`
					C interface{} `json:"c"`
					D interface{} `json:"d"`
					E interface{} `json:"e"`
					F interface{} `json:"f"`
					G interface{} `json:"g"`
					H interface{} `json:"h"`
				}{}
				return &v
			},
		},
		{
			name: "typed 9 field",
			new: func() interface{} {
				v := struct {
					A interface{} `json:"a"`
					B interface{} `json:"b"`
					C interface{} `json:"c"`
					D interface{} `json:"d"`
					E interface{} `json:"e"`
					F interface{} `json:"f"`
					G interface{} `json:"g"`
					H interface{} `json:"h"`
					I interface{} `json:"i"`
				}{}
				return &v
			},
		},
		{
			name: "typed 10 field",
			new: func() interface{} {
				v := struct {
					A interface{} `json:"a"`
					B interface{} `json:"b"`
					C interface{} `json:"c"`
					D interface{} `json:"d"`
					E interface{} `json:"e"`
					F interface{} `json:"f"`
					G interface{} `json:"g"`
					H interface{} `json:"h"`
					I interface{} `json:"i"`
					J interface{} `json:"j"`
				}{}
				return &v
			},
		},
		{
			name: "typed 11 field",
			new: func() interface{} {
				v := struct {
					A interface{} `json:"a"`
					B interface{} `json:"b"`
					C interface{} `json:"c"`
					D interface{} `json:"d"`
					E interface{} `json:"e"`
					F interface{} `json:"f"`
					G interface{} `json:"g"`
					H interface{} `json:"h"`
					I interface{} `json:"i"`
					J interface{} `json:"j"`
					K interface{} `json:"k"`
				}{}
				return &v
			},
		},
	}

	for _, tc := range testcases {
		t.Run(tc.name, func(t *testing.T) {
			for _, target := range targets {
				t.Run(target.name, func(t *testing.T) {
					err := jsoniter.Unmarshal(tc.data, target.new())
					if len(tc.expectError) == 0 {
						if err != nil {
							t.Errorf("unexpected error: %v", err)
						}
					} else {
						if err == nil {
							t.Errorf("expected error, got none")
						} else if !strings.Contains(err.Error(), tc.expectError) {
							t.Errorf("expected error containing '%s', got: %v", tc.expectError, err)
						}
					}
				})
			}
		})
	}
}

func Test_nested(t *testing.T) {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"hello": [{"world": "value1"}, {"world": "value2"}]}`)
	l1 := Level1{}
@@ -60,6 +60,7 @@ func (b *ctx) append(prefix string) *ctx {

// ReadVal copy the underlying JSON into go interface, same as json.Unmarshal
func (iter *Iterator) ReadVal(obj interface{}) {
	depth := iter.depth
	cacheKey := reflect2.RTypeOf(obj)
	decoder := iter.cfg.getDecoderFromCache(cacheKey)
	if decoder == nil {
@@ -76,6 +77,10 @@ func (iter *Iterator) ReadVal(obj interface{}) {
		return
	}
	decoder.Decode(ptr, iter)
	if iter.depth != depth {
		iter.ReportError("ReadVal", "unexpected mismatched nesting")
		return
	}
}

// WriteVal copy the go interface into underlying JSON, same as json.Marshal
@@ -341,10 +341,10 @@ func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
		if ctx.onlyTaggedField && !hastag && !field.Anonymous() {
			continue
		}
-		tagParts := strings.Split(tag, ",")
		if tag == "-" {
			continue
		}
+		tagParts := strings.Split(tag, ",")
		if field.Anonymous() && (tag == "" || tagParts[0] == "") {
			if field.Type().Kind() == reflect.Struct {
				structDescriptor := describeStruct(ctx, field.Type())
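To illustrate the branch being reordered above (a hedged sketch; the User struct is illustrative, not from the repository): fields tagged json:"-" are skipped before the tag is split into its parts.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type User struct {
	Name     string `json:"name"`
	Password string `json:"-"` // skipped by describeStruct, never serialized
}

func main() {
	out, _ := jsoniter.MarshalToString(User{Name: "ann", Password: "secret"})
	fmt.Println(out) // {"name":"ann"}
}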
@@ -249,6 +249,10 @@ type mapEncoder struct {
}

func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	if *(*unsafe.Pointer)(ptr) == nil {
		stream.WriteNil()
		return
	}
	stream.WriteObjectStart()
	iter := encoder.mapType.UnsafeIterate(ptr)
	for i := 0; iter.HasNext(); i++ {
@@ -3,8 +3,9 @@ package jsoniter
import (
	"encoding"
	"encoding/json"
-	"github.com/modern-go/reflect2"
	"unsafe"
+
+	"github.com/modern-go/reflect2"
)

var marshalerType = reflect2.TypeOfPtr((*json.Marshaler)(nil)).Elem()
@@ -93,10 +94,17 @@ func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
		stream.WriteNil()
		return
	}
-	bytes, err := json.Marshal(obj)
+	marshaler := obj.(json.Marshaler)
+	bytes, err := marshaler.MarshalJSON()
	if err != nil {
		stream.Error = err
	} else {
		// html escape was already done by jsoniter
		// but the extra '\n' should be trimed
		l := len(bytes)
		if l > 0 && bytes[l-1] == '\n' {
			bytes = bytes[:l-1]
		}
		stream.Write(bytes)
	}
}
@@ -432,17 +432,19 @@ func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
}

func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
-	src := *((*[]byte)(ptr))
-	if len(src) == 0 {
+	if codec.sliceType.UnsafeIsNil(ptr) {
		stream.WriteNil()
		return
	}
+	src := *((*[]byte)(ptr))
	encoding := base64.StdEncoding
	stream.writeByte('"')
-	size := encoding.EncodedLen(len(src))
-	buf := make([]byte, size)
-	encoding.Encode(buf, src)
-	stream.buf = append(stream.buf, buf...)
+	if len(src) != 0 {
+		size := encoding.EncodedLen(len(src))
+		buf := make([]byte, size)
+		encoding.Encode(buf, src)
+		stream.buf = append(stream.buf, buf...)
+	}
	stream.writeByte('"')
}
@@ -500,6 +500,9 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	var c byte
	for c = ','; c == ','; c = iter.nextToken() {
		decoder.decodeOneField(ptr, iter)
@@ -510,6 +513,7 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if c != '}' {
		iter.ReportError("struct Decode", `expect }, but found `+string([]byte{c}))
	}
	iter.decrementDepth()
}

func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *Iterator) {
@@ -530,8 +534,8 @@ func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *Iterator) {
		}
	}
	if fieldDecoder == nil {
-		msg := "found unknown field: " + field
		if decoder.disallowUnknownFields {
+			msg := "found unknown field: " + field
			iter.ReportError("ReadObject", msg)
		}
		c := iter.nextToken()
@@ -571,6 +575,9 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		if iter.readFieldHash() == decoder.fieldHash {
			decoder.fieldDecoder.Decode(ptr, iter)
@@ -584,6 +591,7 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if iter.Error != nil && iter.Error != io.EOF {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type twoFieldsStructDecoder struct {
@@ -598,6 +606,9 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -614,6 +625,7 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if iter.Error != nil && iter.Error != io.EOF {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type threeFieldsStructDecoder struct {
@@ -630,6 +642,9 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -648,6 +663,7 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if iter.Error != nil && iter.Error != io.EOF {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type fourFieldsStructDecoder struct {
@@ -666,6 +682,9 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -686,6 +705,7 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if iter.Error != nil && iter.Error != io.EOF {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type fiveFieldsStructDecoder struct {
@@ -706,6 +726,9 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -728,6 +751,7 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if iter.Error != nil && iter.Error != io.EOF {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type sixFieldsStructDecoder struct {
@@ -750,6 +774,9 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -774,6 +801,7 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if iter.Error != nil && iter.Error != io.EOF {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type sevenFieldsStructDecoder struct {
@@ -798,6 +826,9 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -824,6 +855,7 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if iter.Error != nil && iter.Error != io.EOF {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type eightFieldsStructDecoder struct {
@@ -850,6 +882,9 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -878,6 +913,7 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if iter.Error != nil && iter.Error != io.EOF {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type nineFieldsStructDecoder struct {
@@ -906,6 +942,9 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -936,6 +975,7 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if iter.Error != nil && iter.Error != io.EOF {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type tenFieldsStructDecoder struct {
@@ -966,6 +1006,9 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if !iter.readObjectStart() {
		return
	}
	if !iter.incrementDepth() {
		return
	}
	for {
		switch iter.readFieldHash() {
		case decoder.fieldHash1:
@@ -998,6 +1041,7 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if iter.Error != nil && iter.Error != io.EOF {
		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
	iter.decrementDepth()
}

type structFieldDecoder struct {
@@ -105,6 +105,15 @@ func Test_skip_and_return_bytes_with_reader(t *testing.T) {
	should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(skipped))
}

func Test_append_skip_and_return_bytes_with_reader(t *testing.T) {
	should := require.New(t)
	iter := jsoniter.Parse(jsoniter.ConfigDefault, bytes.NewBufferString(`[ {"a" : [{"stream": "c"}], "d": 102 }, "stream"]`), 4)
	iter.ReadArray()
	buf := make([]byte, 0, 1024)
	buf = iter.SkipAndAppendBytes(buf)
	should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(buf))
}

func Test_skip_empty(t *testing.T) {
	should := require.New(t)
	should.NotNil(jsoniter.Get([]byte("")).LastError())
@@ -1,6 +1,7 @@
package jsoniter

import (
	"fmt"
	"math"
	"strconv"
)
@@ -13,6 +14,10 @@ func init() {

// WriteFloat32 write float32 to stream
func (stream *Stream) WriteFloat32(val float32) {
	if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
		stream.Error = fmt.Errorf("unsupported value: %f", val)
		return
	}
	abs := math.Abs(float64(val))
	fmt := byte('f')
	// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
@@ -26,6 +31,10 @@ func (stream *Stream) WriteFloat32(val float32) {

// WriteFloat32Lossy write float32 to stream with ONLY 6 digits precision although much much faster
func (stream *Stream) WriteFloat32Lossy(val float32) {
	if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
		stream.Error = fmt.Errorf("unsupported value: %f", val)
		return
	}
	if val < 0 {
		stream.writeByte('-')
		val = -val
@@ -54,6 +63,10 @@ func (stream *Stream) WriteFloat32Lossy(val float32) {

// WriteFloat64 write float64 to stream
func (stream *Stream) WriteFloat64(val float64) {
	if math.IsInf(val, 0) || math.IsNaN(val) {
		stream.Error = fmt.Errorf("unsupported value: %f", val)
		return
	}
	abs := math.Abs(val)
	fmt := byte('f')
	// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
@@ -67,6 +80,10 @@ func (stream *Stream) WriteFloat64(val float64) {

// WriteFloat64Lossy write float64 to stream with ONLY 6 digits precision although much much faster
func (stream *Stream) WriteFloat64Lossy(val float64) {
	if math.IsInf(val, 0) || math.IsNaN(val) {
		stream.Error = fmt.Errorf("unsupported value: %f", val)
		return
	}
	if val < 0 {
		stream.writeByte('-')
		val = -val