mirror of https://github.com/json-iterator/go.git synced 2025-06-15 22:50:24 +02:00

119 Commits

Author SHA1 Message Date
caaa04195e #69 fix unicode support 2017-06-21 18:25:37 +08:00
ff3c624fa9 fix anonymous fields 2017-06-21 00:26:18 +08:00
3333ec11a0 support private fields 2017-06-20 23:48:41 +08:00
29a928e1d2 support naming strategy 2017-06-20 23:09:53 +08:00
83fa27ff9a #67 time as int64 with decoder 2017-06-20 17:52:41 +08:00
cefb2972fd #67 time as int64 with specified precision 2017-06-20 17:46:29 +08:00
486534c67c #67 time as int64 2017-06-20 17:43:47 +08:00
ed79b1726e fix encoder/decoder cast issue 2017-06-20 17:01:21 +08:00
85be06b145 #68 empty array to object/map 2017-06-20 16:36:22 +08:00
086001225d #68 string to float64 2017-06-20 16:20:56 +08:00
2ea4d48e1f #68 string to float32 2017-06-20 16:17:00 +08:00
417011b497 #68 remove redundant math max constants 2017-06-20 16:10:29 +08:00
ae6ce2fc3f #68 fuzzy all kinds of integer 2017-06-20 16:07:30 +08:00
8ef0c22f25 #68 handle float to int safely 2017-06-20 15:46:22 +08:00
a5ae3a2649 #68 float to int 2017-06-20 15:20:56 +08:00
306b2896cf #68 string to int 2017-06-20 15:18:24 +08:00
818ae1331a #68 number to string 2017-06-20 15:11:01 +08:00
8f6a840c63 fix anonymous struct 2017-06-20 13:33:40 +08:00
be221df432 #66 Make extension api like the java version 2017-06-20 10:41:54 +08:00
499412ec4c #66 extract out feacture_reflect_extension 2017-06-20 08:42:36 +08:00
c36a7ed7cd #66 extract out feacture_reflect_extension 2017-06-20 08:42:25 +08:00
14588726a1 expose ValEncoder & ValDecoder 2017-06-20 08:08:59 +08:00
aa01f57b7f rename AdaptedDecoder => Decoder and AdaptedEncoder => Encoder 2017-06-20 08:00:43 +08:00
b3170a8cef rename Encoder => ValEncoder and Decoder => ValDecoder 2017-06-20 07:59:45 +08:00
43a832beee add isEmptyFunc 2017-06-20 07:57:23 +08:00
39c9bb226a fix lossy float marshal and omit empty 2017-06-20 07:51:38 +08:00
945fe53724 fix html escape test and omit empty 2017-06-20 07:46:13 +08:00
8367a97ad8 gofmt 2017-06-20 07:39:54 +08:00
365d399192 #65 make placeholder thread safe 2017-06-20 07:39:38 +08:00
839247df05 #63 fix Marshaler and Unmarshaler on struct 2017-06-20 07:23:22 +08:00
f5edf564c8 gofmt 2017-06-19 23:43:53 +08:00
c3f5a2c536 #64 support fixed array 2017-06-19 23:43:28 +08:00
c6a598e292 # add jsoniter.RawMessage 2017-06-19 23:10:20 +08:00
eecb062c32 #63 support decode anonymous struct 2017-06-19 23:02:57 +08:00
50583f6bae #63 support *json.RawMessage 2017-06-19 22:57:43 +08:00
3b883aeffc #63 add more tests for json.RawMessage 2017-06-19 21:24:59 +08:00
baca358b53 add MustBeValid to Any 2017-06-19 21:21:20 +08:00
514db10f97 add Any.ToVal 2017-06-19 15:40:00 +08:00
31afe6450e add Api interface to allow save the frozen config 2017-06-19 13:43:22 +08:00
7e9017caa2 remove unused files 2017-06-18 23:43:01 +08:00
b6dfbbd6bc add document 2017-06-18 23:42:23 +08:00
3ffa5af7ec #61 remove internal buffer from mapAny 2017-06-18 23:18:32 +08:00
02cf6a73cc #61 remove internal buffer from objectAny 2017-06-18 23:09:30 +08:00
15c92d48df #61 remove internal buffer from numberLazyAny 2017-06-18 22:48:28 +08:00
a84cdaa694 #61 remove internal buffer from arrayAny 2017-06-18 22:40:18 +08:00
9f9ca4c9fc #61 remove stringLazyAny 2017-06-18 22:24:11 +08:00
985e263300 #61 removed internal buffer from lazy array and object; jsoniter.Get replaced jsoniter.UnmarshalAny 2017-06-18 22:22:13 +08:00
1ec246d16b #61 read any reuse skip impl 2017-06-18 17:00:28 +08:00
54dbcda64d #62 SkipAndReturnBytes should support reader 2017-06-18 16:28:43 +08:00
7a049ec79c #60 support read interface{} as json.Number 2017-06-18 15:22:37 +08:00
77dcffe77d tweak performance 2017-06-17 22:42:11 +08:00
8ab46965bd extract out any tests 2017-06-17 21:32:48 +08:00
2503ef17eb marshal lazy array/object using the config 2017-06-17 21:13:17 +08:00
0195110b5b gofmt 2017-06-17 21:11:23 +08:00
55fc498d27 use iterator from cache for any 2017-06-17 21:10:08 +08:00
50e4910c63 document how to get best performance 2017-06-17 17:14:34 +08:00
f29fe7407e downgrade to lower golang version 2017-06-17 16:27:19 +08:00
3c8bd9ef54 #57 copy bytes 2017-06-17 14:36:38 +08:00
952a42af6c #57 copy bytes 2017-06-17 14:36:05 +08:00
17bd91fd71 #57 reuse stream and iterator 2017-06-17 14:23:02 +08:00
3d5f6d3a4a #58 string mode support both encoding and decoding 2017-06-17 11:38:09 +08:00
b31b1301e2 #59 add ConfigFastest 2017-06-17 10:21:37 +08:00
69bc64b6d8 #54 support sort map keys 2017-06-16 16:46:30 +08:00
e0e2423e9a #53 test compatibility without html escape 2017-06-16 16:03:02 +08:00
a6ea770365 #53 implement SetEscapeHtml 2017-06-16 00:10:05 +08:00
5f22e50c89 #53 support escapeHtml 2017-06-15 23:55:04 +08:00
d867c8ba5c #53 split config into Config and frozenConfig 2017-06-13 18:49:35 +08:00
d0418857ce #53 move current config EnableXXX 2017-06-13 17:47:40 +08:00
48e9f6ec84 move IndentionStep to config 2017-06-13 17:03:27 +08:00
acddcf5bbf #53 extract out config 2017-06-13 16:58:53 +08:00
788918b85d #56 nil map or array should be null not [] or {} 2017-06-13 09:14:19 +08:00
6e5817b773 Merge pull request #55 from thockin/output_tests
WIP: Tests to compare against stdlib
2017-06-12 01:47:50 -05:00
7480e41836 Add output tests for maps of builtins
This tests for exact stdlib compatibility.
2017-06-11 21:09:56 -07:00
9215b3c508 Add output tests for builtin types
This fuzzes a type, marshals it with stdlib and json-iterator, compares,
then unmarshals with stdlib and json-iterator and compares.  This is
checking for literal, byte-for-byte compatibility.

In every case the test is exactly the same.

It also include benchmark functions to compare stdlib vs json-iterator.

This depends on a couple PRs to be merged in gofuzz.
2017-06-11 21:04:59 -07:00
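As a hedged illustration of the approach described in this commit (the package and test names below are hypothetical, and a fixed sample value stands in for the gofuzz-generated one), a byte-for-byte round-trip comparison against the standard library could look like:
```go
package output_tests

import (
	"encoding/json"
	"reflect"
	"testing"

	"github.com/json-iterator/go"
)

// TestRoundTripMatchesStdlib marshals the same value with encoding/json and
// with json-iterator, requires identical bytes, then unmarshals both outputs
// and requires identical values. The real output tests additionally fuzz the
// input value with gofuzz before each round trip.
func TestRoundTripMatchesStdlib(t *testing.T) {
	before := map[string]int32{"a": 1} // single key avoids map ordering differences

	stdBytes, stdErr := json.Marshal(before)
	iterBytes, iterErr := jsoniter.Marshal(before)
	if (stdErr == nil) != (iterErr == nil) {
		t.Fatalf("marshal error mismatch: std=%v jsoniter=%v", stdErr, iterErr)
	}
	if string(stdBytes) != string(iterBytes) {
		t.Fatalf("marshal output mismatch:\nstd:      %s\njsoniter: %s", stdBytes, iterBytes)
	}

	var stdAfter, iterAfter map[string]int32
	if err := json.Unmarshal(stdBytes, &stdAfter); err != nil {
		t.Fatal(err)
	}
	if err := jsoniter.Unmarshal(iterBytes, &iterAfter); err != nil {
		t.Fatal(err)
	}
	if !reflect.DeepEqual(stdAfter, iterAfter) {
		t.Fatalf("round trip mismatch: %v vs %v", stdAfter, iterAfter)
	}
}
```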
64e500f3c8 Merge branch 'master' of https://github.com/json-iterator/go 2017-06-12 10:13:22 +08:00
3307ce3ba2 #50 map key unlike object field, can contain escaped char 2017-06-12 10:13:13 +08:00
6f50f15678 decoder/encoder;float precision doc 2017-06-11 16:30:31 +08:00
cee09816e3 decoder/encoder;float precision doc 2017-06-11 16:28:31 +08:00
cdbad22d22 test more package description 2017-06-11 15:35:45 +08:00
b0c9f047e2 test more than one package description 2017-06-11 15:32:58 +08:00
6bd13c2948 Merge branch 'master' of https://github.com/json-iterator/go 2017-06-09 17:06:38 +08:00
84ad508437 #48 should return error if concrete type unknown 2017-06-09 17:06:27 +08:00
4f909776cf Merge pull request #49 from zhaitianduo/master
Use jsoniter instead of json in example
2017-06-09 03:32:31 -05:00
962c470806 fix import not use 2017-06-09 16:28:20 +08:00
46d443fbad use jsoniter for example 2017-06-09 16:25:58 +08:00
2608d40f2a example unmarshal 2017-06-08 12:08:47 +08:00
3cf822853f example unmarshal 2017-06-08 12:07:03 +08:00
26708bccc9 report error when string end not found 2017-06-08 09:46:19 +08:00
d75b539bad add test for scientific float 2017-06-07 21:34:56 +08:00
cfffa29c8a gofmt 2017-06-06 23:27:00 +08:00
925df245d3 good enough indent implementation 2017-06-06 23:18:37 +08:00
962a8cd303 #40 support UseNumber 2017-06-06 23:15:15 +08:00
6509ba05df Merge pull request #41 from 1046102779/master
When parsing, if the output parameter is not a pointer type, return an error directly to avoid crashing the program
2017-06-06 10:03:06 -05:00
579dbf3c1d Merge pull request #42 from 1046102779/patch-1
Change floatDigits to intDigits
2017-06-06 10:02:26 -05:00
aa5181db67 Change floatDigits to intDigits 2017-06-06 21:08:04 +08:00
67be6df2b1 Update feature_adapter.go 2017-06-06 20:01:43 +08:00
0f5379494a unmarshal failed return non-pointer error 2017-06-06 19:36:33 +08:00
d09e2419ba update benchmark 2017-06-06 16:55:32 +08:00
e1a71f6ba1 update benchmark 2017-06-06 16:54:26 +08:00
dcb78991c4 flush when buffer is large enough 2017-06-06 14:16:54 +08:00
9e8238cdc6 remove unused file 2017-06-06 12:41:13 +08:00
a4e5abf492 support []byte; marshal without copy 2017-06-06 09:44:56 +08:00
3979955e69 support TextMarshaler as map key 2017-06-06 00:09:33 +08:00
5fd09f0e02 remove mapInterfaceEncoder 2017-06-05 23:56:37 +08:00
af4982b22c support decode int key map 2017-06-05 23:53:48 +08:00
29dc1d407d write map with int key 2017-06-05 23:01:00 +08:00
5b27aaa62c update test 2017-06-05 22:10:01 +08:00
106636a191 update test 2017-06-05 22:08:28 +08:00
f50c4cfbbe Merge branch 'master' of https://github.com/json-iterator/go 2017-06-05 22:05:02 +08:00
87149ae489 add simple marshal benchmark 2017-06-05 22:04:52 +08:00
c0a4ad72e1 example test 2017-06-05 20:37:08 +08:00
404c0ee44b Decoder doc 2017-06-05 19:57:20 +08:00
10c1506f87 link test 2017-06-05 19:38:34 +08:00
9a43fe6468 adapter api comment 2017-06-05 19:31:30 +08:00
95e03f2937 Marshal comment 2017-06-05 19:19:46 +08:00
4406ed9e62 Marshal comment 2017-06-05 19:18:12 +08:00
ff027701f5 Marshal comment 2017-06-05 19:15:56 +08:00
c69b61f879 Marshal comment 2017-06-05 19:14:40 +08:00
d97f5db769 Marshal comment 2017-06-05 19:11:16 +08:00
224 changed files with 15710 additions and 3711 deletions

View File

@ -2,6 +2,23 @@
jsoniter (json-iterator) is a fast and flexible JSON parser available in [Java](https://github.com/json-iterator/java) and [Go](https://github.com/json-iterator/go)
# Benchmark
![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png)
Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go
Raw Result (easyjson requires static code generation)
| | time (ns/op) | allocated bytes (B/op) | allocations (allocs/op) |
| --- | --- | --- | --- |
| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
# Usage
100% compatibility with standard lib
@ -34,6 +51,8 @@ import "github.com/json-iterator/go"
jsoniter.Unmarshal(input, &data)
```
[More documentation](http://jsoniter.com/migrate-from-go-std.html)
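A minimal sketch of the drop-in claim above, assuming only the import path changes (the alias keeps existing call sites untouched):
```go
package main

import (
	"fmt"

	// was: "encoding/json"
	json "github.com/json-iterator/go"
)

type ColorGroup struct {
	ID     int
	Name   string
	Colors []string
}

func main() {
	// Call sites stay exactly as they were with encoding/json.
	out, err := json.Marshal(ColorGroup{ID: 1, Name: "Reds"})
	if err != nil {
		fmt.Println("error:", err)
	}
	fmt.Println(string(out))
}
```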
# How to get
```

View File

@ -1014,4 +1014,4 @@ func typeAndKind(v interface{}) (reflect.Type, reflect.Kind) {
k = t.Kind()
}
return t, k
}
}

97
example_test.go Normal file
View File

@ -0,0 +1,97 @@
package jsoniter_test
import (
"fmt"
"os"
"github.com/json-iterator/go"
)
func ExampleMarshal() {
type ColorGroup struct {
ID int
Name string
Colors []string
}
group := ColorGroup{
ID: 1,
Name: "Reds",
Colors: []string{"Crimson", "Red", "Ruby", "Maroon"},
}
b, err := jsoniter.Marshal(group)
if err != nil {
fmt.Println("error:", err)
}
os.Stdout.Write(b)
// Output:
// {"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]}
}
func ExampleUnmarshal() {
var jsonBlob = []byte(`[
{"Name": "Platypus", "Order": "Monotremata"},
{"Name": "Quoll", "Order": "Dasyuromorphia"}
]`)
type Animal struct {
Name string
Order string
}
var animals []Animal
err := jsoniter.Unmarshal(jsonBlob, &animals)
if err != nil {
fmt.Println("error:", err)
}
fmt.Printf("%+v", animals)
// Output:
// [{Name:Platypus Order:Monotremata} {Name:Quoll Order:Dasyuromorphia}]
}
func ExampleMarshalWithBestPerformance() {
type ColorGroup struct {
ID int
Name string
Colors []string
}
group := ColorGroup{
ID: 1,
Name: "Reds",
Colors: []string{"Crimson", "Red", "Ruby", "Maroon"},
}
stream := jsoniter.ConfigFastest.BorrowStream(nil)
defer jsoniter.ConfigFastest.ReturnStream(stream)
stream.WriteVal(group)
if stream.Error != nil {
fmt.Println("error:", stream.Error)
}
os.Stdout.Write(stream.Buffer())
// Output:
// {"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]}
}
func ExampleUnmarshalWithBestPerformance() {
var jsonBlob = []byte(`[
{"Name": "Platypus", "Order": "Monotremata"},
{"Name": "Quoll", "Order": "Dasyuromorphia"}
]`)
type Animal struct {
Name string
Order string
}
var animals []Animal
iter := jsoniter.ConfigFastest.BorrowIterator(jsonBlob)
defer jsoniter.ConfigFastest.ReturnIterator(iter)
iter.ReadVal(&animals)
if iter.Error != nil {
fmt.Println("error:", iter.Error)
}
fmt.Printf("%+v", animals)
// Output:
// [{Name:Platypus Order:Monotremata} {Name:Quoll Order:Dasyuromorphia}]
}
func ExampleOneLine() {
val := []byte(`{"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]}`)
fmt.Printf(jsoniter.Get(val, "Colors", 0).ToString())
// Output:
// Crimson
}

254
extra/fuzzy_decoder.go Normal file
View File

@ -0,0 +1,254 @@
package extra
import (
"encoding/json"
"github.com/json-iterator/go"
"math"
"reflect"
"strings"
"unsafe"
)
const MaxUint = ^uint(0)
const MaxInt = int(MaxUint >> 1)
const MinInt = -MaxInt - 1
func RegisterFuzzyDecoders() {
jsoniter.RegisterExtension(&tolerateEmptyArrayExtension{})
jsoniter.RegisterTypeDecoder("string", &FuzzyStringDecoder{})
jsoniter.RegisterTypeDecoder("float32", &FuzzyFloat32Decoder{})
jsoniter.RegisterTypeDecoder("float64", &FuzzyFloat64Decoder{})
jsoniter.RegisterTypeDecoder("int", &FuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(MaxInt) || val < float64(MinInt) {
iter.ReportError("fuzzy decode int", "exceed range")
return
}
*((*int)(ptr)) = int(val)
} else {
*((*int)(ptr)) = iter.ReadInt()
}
}})
jsoniter.RegisterTypeDecoder("uint", &FuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(MaxUint) || val < 0 {
iter.ReportError("fuzzy decode uint", "exceed range")
return
}
*((*uint)(ptr)) = uint(val)
} else {
*((*uint)(ptr)) = iter.ReadUint()
}
}})
jsoniter.RegisterTypeDecoder("int8", &FuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxInt8) || val < float64(math.MinInt8) {
iter.ReportError("fuzzy decode int8", "exceed range")
return
}
*((*int8)(ptr)) = int8(val)
} else {
*((*int8)(ptr)) = iter.ReadInt8()
}
}})
jsoniter.RegisterTypeDecoder("uint8", &FuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxUint8) || val < 0 {
iter.ReportError("fuzzy decode uint8", "exceed range")
return
}
*((*uint8)(ptr)) = uint8(val)
} else {
*((*uint8)(ptr)) = iter.ReadUint8()
}
}})
jsoniter.RegisterTypeDecoder("int16", &FuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxInt16) || val < float64(math.MinInt16) {
iter.ReportError("fuzzy decode int16", "exceed range")
return
}
*((*int16)(ptr)) = int16(val)
} else {
*((*int16)(ptr)) = iter.ReadInt16()
}
}})
jsoniter.RegisterTypeDecoder("uint16", &FuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxUint16) || val < 0 {
iter.ReportError("fuzzy decode uint16", "exceed range")
return
}
*((*uint16)(ptr)) = uint16(val)
} else {
*((*uint16)(ptr)) = iter.ReadUint16()
}
}})
jsoniter.RegisterTypeDecoder("int32", &FuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxInt32) || val < float64(math.MinInt32) {
iter.ReportError("fuzzy decode int32", "exceed range")
return
}
*((*int32)(ptr)) = int32(val)
} else {
*((*int32)(ptr)) = iter.ReadInt32()
}
}})
jsoniter.RegisterTypeDecoder("uint32", &FuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxUint32) || val < 0 {
iter.ReportError("fuzzy decode uint32", "exceed range")
return
}
*((*uint32)(ptr)) = uint32(val)
} else {
*((*uint32)(ptr)) = iter.ReadUint32()
}
}})
jsoniter.RegisterTypeDecoder("int64", &FuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxInt64) || val < float64(math.MinInt64) {
iter.ReportError("fuzzy decode int64", "exceed range")
return
}
*((*int64)(ptr)) = int64(val)
} else {
*((*int64)(ptr)) = iter.ReadInt64()
}
}})
jsoniter.RegisterTypeDecoder("uint64", &FuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxUint64) || val < 0 {
iter.ReportError("fuzzy decode uint64", "exceed range")
return
}
*((*uint64)(ptr)) = uint64(val)
} else {
*((*uint64)(ptr)) = iter.ReadUint64()
}
}})
}
type tolerateEmptyArrayExtension struct {
jsoniter.DummyExtension
}
func (extension *tolerateEmptyArrayExtension) DecorateDecoder(typ reflect.Type, decoder jsoniter.ValDecoder) jsoniter.ValDecoder {
if typ.Kind() == reflect.Struct || typ.Kind() == reflect.Map {
return &tolerateEmptyArrayDecoder{decoder}
}
return decoder
}
type tolerateEmptyArrayDecoder struct {
valDecoder jsoniter.ValDecoder
}
func (decoder *tolerateEmptyArrayDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if iter.WhatIsNext() == jsoniter.Array {
iter.Skip()
newIter := iter.Config().BorrowIterator([]byte("{}"))
defer iter.Config().ReturnIterator(newIter)
decoder.valDecoder.Decode(ptr, newIter)
} else {
decoder.valDecoder.Decode(ptr, iter)
}
}
type FuzzyStringDecoder struct {
}
func (decoder *FuzzyStringDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
valueType := iter.WhatIsNext()
switch valueType {
case jsoniter.Number:
var number json.Number
iter.ReadVal(&number)
*((*string)(ptr)) = string(number)
case jsoniter.String:
*((*string)(ptr)) = iter.ReadString()
default:
iter.ReportError("FuzzyStringDecoder", "not number or string")
}
}
type FuzzyIntegerDecoder struct {
fun func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator)
}
func (decoder *FuzzyIntegerDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
valueType := iter.WhatIsNext()
var str string
switch valueType {
case jsoniter.Number:
var number json.Number
iter.ReadVal(&number)
str = string(number)
case jsoniter.String:
str = iter.ReadString()
default:
iter.ReportError("FuzzyIntegerDecoder", "not number or string")
}
newIter := iter.Config().BorrowIterator([]byte(str))
defer iter.Config().ReturnIterator(newIter)
isFloat := strings.IndexByte(str, '.') != -1
decoder.fun(isFloat, ptr, newIter)
if newIter.Error != nil {
iter.Error = newIter.Error
}
}
type FuzzyFloat32Decoder struct {
}
func (decoder *FuzzyFloat32Decoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
valueType := iter.WhatIsNext()
var str string
switch valueType {
case jsoniter.Number:
*((*float32)(ptr)) = iter.ReadFloat32()
case jsoniter.String:
str = iter.ReadString()
newIter := iter.Config().BorrowIterator([]byte(str))
defer iter.Config().ReturnIterator(newIter)
*((*float32)(ptr)) = newIter.ReadFloat32()
if newIter.Error != nil {
iter.Error = newIter.Error
}
default:
iter.ReportError("FuzzyFloat32Decoder", "not number or string")
}
}
type FuzzyFloat64Decoder struct {
}
func (decoder *FuzzyFloat64Decoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
valueType := iter.WhatIsNext()
var str string
switch valueType {
case jsoniter.Number:
*((*float64)(ptr)) = iter.ReadFloat64()
case jsoniter.String:
str = iter.ReadString()
newIter := iter.Config().BorrowIterator([]byte(str))
defer iter.Config().ReturnIterator(newIter)
*((*float64)(ptr)) = newIter.ReadFloat64()
if newIter.Error != nil {
iter.Error = newIter.Error
}
default:
iter.ReportError("FuzzyFloat32Decoder", "not number or string")
}
}

101
extra/fuzzy_decoder_test.go Normal file
View File

@ -0,0 +1,101 @@
package extra
import (
"github.com/json-iterator/go"
"github.com/json-iterator/go/require"
"testing"
)
func init() {
RegisterFuzzyDecoders()
}
func Test_string_to_string(t *testing.T) {
should := require.New(t)
var val string
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal("100", val)
}
func Test_int_to_string(t *testing.T) {
should := require.New(t)
var val string
should.Nil(jsoniter.UnmarshalFromString(`100`, &val))
should.Equal("100", val)
}
func Test_float_to_string(t *testing.T) {
should := require.New(t)
var val string
should.Nil(jsoniter.UnmarshalFromString(`12.0`, &val))
should.Equal("12.0", val)
}
func Test_string_to_int(t *testing.T) {
should := require.New(t)
var val int
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(100, val)
}
func Test_int_to_int(t *testing.T) {
should := require.New(t)
var val int
should.Nil(jsoniter.UnmarshalFromString(`100`, &val))
should.Equal(100, val)
}
func Test_float_to_int(t *testing.T) {
should := require.New(t)
var val int
should.Nil(jsoniter.UnmarshalFromString(`1.23`, &val))
should.Equal(1, val)
}
func Test_large_float_to_int(t *testing.T) {
should := require.New(t)
var val int
should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val))
}
func Test_string_to_float32(t *testing.T) {
should := require.New(t)
var val float32
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(float32(100), val)
}
func Test_float_to_float32(t *testing.T) {
should := require.New(t)
var val float32
should.Nil(jsoniter.UnmarshalFromString(`1.23`, &val))
should.Equal(float32(1.23), val)
}
func Test_string_to_float64(t *testing.T) {
should := require.New(t)
var val float64
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(float64(100), val)
}
func Test_float_to_float64(t *testing.T) {
should := require.New(t)
var val float64
should.Nil(jsoniter.UnmarshalFromString(`1.23`, &val))
should.Equal(float64(1.23), val)
}
func Test_empty_array_as_map(t *testing.T) {
should := require.New(t)
var val map[string]interface{}
should.Nil(jsoniter.UnmarshalFromString(`[]`, &val))
should.Equal(map[string]interface{}{}, val)
}
func Test_empty_array_as_object(t *testing.T) {
should := require.New(t)
var val struct{}
should.Nil(jsoniter.UnmarshalFromString(`[]`, &val))
should.Equal(struct{}{}, val)
}

39
extra/naming_strategy.go Normal file
View File

@ -0,0 +1,39 @@
package extra
import (
"github.com/json-iterator/go"
"unicode"
)
func SetNamingStrategy(translate func(string) string) {
jsoniter.RegisterExtension(&namingStrategyExtension{jsoniter.DummyExtension{}, translate})
}
type namingStrategyExtension struct {
jsoniter.DummyExtension
translate func(string) string
}
func (extension *namingStrategyExtension) UpdateStructDescriptor(structDescriptor *jsoniter.StructDescriptor) {
for _, binding := range structDescriptor.Fields {
binding.ToNames = []string{extension.translate(binding.Field.Name)}
binding.FromNames = []string{extension.translate(binding.Field.Name)}
}
}
func LowerCaseWithUnderscores(name string) string {
newName := []rune{}
for i, c := range name {
if i == 0 {
newName = append(newName, unicode.ToLower(c))
} else {
if unicode.IsUpper(c) {
newName = append(newName, '_')
newName = append(newName, unicode.ToLower(c))
} else {
newName = append(newName, c)
}
}
}
return string(newName)
}

View File

@ -0,0 +1,23 @@
package extra
import (
"github.com/json-iterator/go"
"github.com/json-iterator/go/require"
"testing"
)
func Test_lower_case_with_underscores(t *testing.T) {
should := require.New(t)
should.Equal("hello_world", LowerCaseWithUnderscores("helloWorld"))
should.Equal("hello_world", LowerCaseWithUnderscores("HelloWorld"))
SetNamingStrategy(LowerCaseWithUnderscores)
output, err := jsoniter.Marshal(struct {
UserName string
FirstLanguage string
}{
UserName: "taowen",
FirstLanguage: "Chinese",
})
should.Nil(err)
should.Equal(`{"user_name":"taowen","first_language":"Chinese"}`, string(output))
}

24
extra/privat_fields.go Normal file
View File

@ -0,0 +1,24 @@
package extra
import (
"github.com/json-iterator/go"
"unicode"
)
func SupportPrivateFields() {
jsoniter.RegisterExtension(&privateFieldsExtension{})
}
type privateFieldsExtension struct {
jsoniter.DummyExtension
}
func (extension *privateFieldsExtension) UpdateStructDescriptor(structDescriptor *jsoniter.StructDescriptor) {
for _, binding := range structDescriptor.Fields {
isPrivate := unicode.IsLower(rune(binding.Field.Name[0]))
if isPrivate {
binding.FromNames = []string{binding.Field.Name}
binding.ToNames = []string{binding.Field.Name}
}
}
}

View File

@ -0,0 +1,18 @@
package extra
import (
"github.com/json-iterator/go"
"github.com/json-iterator/go/require"
"testing"
)
func Test_private_fields(t *testing.T) {
type TestObject struct {
field1 string
}
SupportPrivateFields()
should := require.New(t)
obj := TestObject{}
should.Nil(jsoniter.UnmarshalFromString(`{"field1":"Hello"}`, &obj))
should.Equal("Hello", obj.field1)
}

View File

@ -0,0 +1,34 @@
package extra
import (
"github.com/json-iterator/go"
"time"
"unsafe"
)
// RegisterTimeAsInt64Codec encodes/decodes time.Time as an integer epoch count in the given precision (e.g. time.Millisecond keeps epoch milliseconds)
func RegisterTimeAsInt64Codec(precision time.Duration) {
jsoniter.RegisterTypeEncoder("time.Time", &timeAsInt64Codec{precision})
jsoniter.RegisterTypeDecoder("time.Time", &timeAsInt64Codec{precision})
}
type timeAsInt64Codec struct {
precision time.Duration
}
func (codec *timeAsInt64Codec) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
nanoseconds := iter.ReadInt64() * codec.precision.Nanoseconds()
*((*time.Time)(ptr)) = time.Unix(0, nanoseconds)
}
func (codec *timeAsInt64Codec) IsEmpty(ptr unsafe.Pointer) bool {
ts := *((*time.Time)(ptr))
return ts.UnixNano() == 0
}
func (codec *timeAsInt64Codec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
ts := *((*time.Time)(ptr))
stream.WriteInt64(ts.UnixNano() / codec.precision.Nanoseconds())
}
func (codec *timeAsInt64Codec) EncodeInterface(val interface{}, stream *jsoniter.Stream) {
jsoniter.WriteToStream(val, stream, codec)
}

View File

@ -0,0 +1,30 @@
package extra
import (
"github.com/json-iterator/go"
"github.com/json-iterator/go/require"
"testing"
"time"
)
func Test_time_as_int64(t *testing.T) {
should := require.New(t)
RegisterTimeAsInt64Codec(time.Nanosecond)
output, err := jsoniter.Marshal(time.Unix(1497952257, 1002))
should.Nil(err)
should.Equal("1497952257000001002", string(output))
var val time.Time
should.Nil(jsoniter.Unmarshal(output, &val))
should.Equal(int64(1497952257000001002), val.UnixNano())
}
func Test_time_as_int64_keep_microsecond(t *testing.T) {
should := require.New(t)
RegisterTimeAsInt64Codec(time.Microsecond)
output, err := jsoniter.Marshal(time.Unix(1, 1002))
should.Nil(err)
should.Equal("1000001", string(output))
var val time.Time
should.Nil(jsoniter.Unmarshal(output, &val))
should.Equal(int64(1000001000), val.UnixNano())
}

View File

@ -1,41 +1,29 @@
// Package jsoniter implements encoding and decoding of JSON as defined in
// RFC 4627 and provides interfaces with identical syntax of standard lib encoding/json.
// Converting from encoding/json to jsoniter is no more than replacing the package with jsoniter
// and variable type declarations (if any).
// jsoniter interfaces gives 100% compatibility with code using standard lib.
//
// "JSON and Go"
// (https://golang.org/doc/articles/json_and_go.html)
// gives a description of how Marshal/Unmarshal operate
// between arbitrary or predefined json objects and bytes,
// and it applies to jsoniter.Marshal/Unmarshal as well.
package jsoniter
import (
"io"
"bytes"
"io"
)
// Unmarshal adapts to encoding/json APIs
func Unmarshal(data []byte, v interface{}) error {
data = data[:lastNotSpacePos(data)]
iter := ParseBytes(data)
iter.ReadVal(v)
if iter.head == iter.tail {
iter.loadMore()
}
if iter.Error == io.EOF {
return nil
}
if iter.Error == nil {
iter.reportError("Unmarshal", "there are bytes left after unmarshal")
}
return iter.Error
}
type RawMessage []byte
func UnmarshalAny(data []byte) (Any, error) {
data = data[:lastNotSpacePos(data)]
iter := ParseBytes(data)
any := iter.ReadAny()
if iter.head == iter.tail {
iter.loadMore()
}
if iter.Error == io.EOF {
return any, nil
}
if iter.Error == nil {
iter.reportError("UnmarshalAny", "there are bytes left after unmarshal")
}
return any, iter.Error
// Unmarshal adapts to the encoding/json Unmarshal API
//
// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v.
// Refer to https://godoc.org/encoding/json#Unmarshal for more information
func Unmarshal(data []byte, v interface{}) error {
return ConfigDefault.Unmarshal(data, v)
}
func lastNotSpacePos(data []byte) int {
@ -48,68 +36,42 @@ func lastNotSpacePos(data []byte) int {
}
func UnmarshalFromString(str string, v interface{}) error {
data := []byte(str)
data = data[:lastNotSpacePos(data)]
iter := ParseBytes(data)
iter.ReadVal(v)
if iter.head == iter.tail {
iter.loadMore()
}
if iter.Error == io.EOF {
return nil
}
if iter.Error == nil {
iter.reportError("UnmarshalFromString", "there are bytes left after unmarshal")
}
return iter.Error
return ConfigDefault.UnmarshalFromString(str, v)
}
func UnmarshalAnyFromString(str string) (Any, error) {
data := []byte(str)
data = data[:lastNotSpacePos(data)]
iter := ParseBytes(data)
any := iter.ReadAny()
if iter.head == iter.tail {
iter.loadMore()
}
if iter.Error == io.EOF {
return any, nil
}
if iter.Error == nil {
iter.reportError("UnmarshalAnyFromString", "there are bytes left after unmarshal")
}
return nil, iter.Error
func Get(data []byte, path ...interface{}) Any {
return ConfigDefault.Get(data, path...)
}
// Marshal adapts to the encoding/json Marshal API
//
// Marshal returns the JSON encoding of v, adapting to the encoding/json Marshal API
// Refer to https://godoc.org/encoding/json#Marshal for more information
func Marshal(v interface{}) ([]byte, error) {
buf := &bytes.Buffer{}
stream := NewStream(buf, 512)
stream.WriteVal(v)
stream.Flush()
if stream.Error != nil {
return nil, stream.Error
}
return buf.Bytes(), nil
return ConfigDefault.Marshal(v)
}
func MarshalToString(v interface{}) (string, error) {
buf, err := Marshal(v)
if err != nil {
return "", err
}
return string(buf), nil
return ConfigDefault.MarshalToString(v)
}
func NewDecoder(reader io.Reader) *AdaptedDecoder {
iter := Parse(reader, 512)
return &AdaptedDecoder{iter}
// NewDecoder adapts to the encoding/json NewDecoder API.
//
// NewDecoder returns a new decoder that reads from r.
//
// Instead of an encoding/json Decoder, a jsoniter Decoder is returned
// Refer to https://godoc.org/encoding/json#NewDecoder for more information
func NewDecoder(reader io.Reader) *Decoder {
return ConfigDefault.NewDecoder(reader)
}
type AdaptedDecoder struct {
// Decoder reads and decodes JSON values from an input stream.
// Decoder provides APIs identical to the encoding/json Decoder (Token() and UseNumber() are in progress)
type Decoder struct {
iter *Iterator
}
func (adapter *AdaptedDecoder) Decode(obj interface{}) error {
func (adapter *Decoder) Decode(obj interface{}) error {
adapter.iter.ReadVal(obj)
err := adapter.iter.Error
if err == io.EOF {
@ -118,30 +80,41 @@ func (adapter *AdaptedDecoder) Decode(obj interface{}) error {
return adapter.iter.Error
}
func (adapter *AdaptedDecoder) More() bool {
func (adapter *Decoder) More() bool {
return adapter.iter.head != adapter.iter.tail
}
func (adapter *AdaptedDecoder) Buffered() io.Reader {
func (adapter *Decoder) Buffered() io.Reader {
remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail]
return bytes.NewReader(remaining)
}
func NewEncoder(writer io.Writer) *AdaptedEncoder {
stream := NewStream(writer, 512)
return &AdaptedEncoder{stream}
func (decoder *Decoder) UseNumber() {
origCfg := decoder.iter.cfg.configBeforeFrozen
origCfg.UseNumber = true
decoder.iter.cfg = origCfg.Froze()
}
type AdaptedEncoder struct {
func NewEncoder(writer io.Writer) *Encoder {
return ConfigDefault.NewEncoder(writer)
}
type Encoder struct {
stream *Stream
}
func (adapter *AdaptedEncoder) Encode(val interface{}) error {
func (adapter *Encoder) Encode(val interface{}) error {
adapter.stream.WriteVal(val)
adapter.stream.Flush()
return adapter.stream.Error
}
func (adapter *AdaptedEncoder) SetIndent(prefix, indent string) {
// not implemented yet
}
func (adapter *Encoder) SetIndent(prefix, indent string) {
adapter.stream.cfg.indentionStep = len(indent)
}
func (adapter *Encoder) SetEscapeHTML(escapeHtml bool) {
config := adapter.stream.cfg.configBeforeFrozen
config.EscapeHtml = escapeHtml
adapter.stream.cfg = config.Froze()
}
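Below is a small hedged sketch of how the Decoder.UseNumber and Encoder.SetEscapeHTML adapters added above might be used; the sample values are illustrative and the behavior is intended to mirror encoding/json:
```go
package main

import (
	"bytes"
	"fmt"
	"strings"

	"github.com/json-iterator/go"
)

func main() {
	// Decode numbers as json.Number instead of float64, mirroring
	// encoding/json's Decoder.UseNumber.
	dec := jsoniter.NewDecoder(strings.NewReader(`{"price": 10.5}`))
	dec.UseNumber()
	var data map[string]interface{}
	if err := dec.Decode(&data); err != nil {
		fmt.Println("error:", err)
	}
	fmt.Printf("%T\n", data["price"]) // json.Number

	// Disable HTML escaping, mirroring encoding/json's Encoder.SetEscapeHTML(false).
	var buf bytes.Buffer
	enc := jsoniter.NewEncoder(&buf)
	enc.SetEscapeHTML(false)
	if err := enc.Encode("<b>hi</b>"); err != nil {
		fmt.Println("error:", err)
	}
	fmt.Print(buf.String()) // "<b>hi</b>" rather than "\u003cb\u003ehi\u003c/b\u003e"
}
```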

View File

@ -2,12 +2,14 @@ package jsoniter
import (
"fmt"
"io"
"reflect"
)
type Any interface {
LastError() error
ValueType() ValueType
MustBeValid() Any
ToBool() bool
ToInt() int
ToInt32() int32
@ -18,18 +20,17 @@ type Any interface {
ToFloat32() float32
ToFloat64() float64
ToString() string
ToVal(val interface{})
Get(path ...interface{}) Any
// TODO: add Set
Size() int
Keys() []string
IterateObject() (func() (string, Any, bool), bool)
IterateArray() (func() (Any, bool), bool)
// TODO: remove me
GetArray() []Any
SetArray(newList []Any) bool
// TODO: remove me
GetObject() map[string]Any
SetObject(map[string]Any) bool
GetInterface() interface{}
WriteTo(stream *Stream)
Parse() *Iterator
}
type baseAny struct{}
@ -46,28 +47,16 @@ func (any *baseAny) Keys() []string {
return []string{}
}
func (any *baseAny) IterateObject() (func() (string, Any, bool), bool) {
return nil, false
}
func (any *baseAny) IterateArray() (func() (Any, bool), bool) {
return nil, false
}
func (any *baseAny) GetArray() []Any {
return []Any{}
}
func (any *baseAny) SetArray(newList []Any) bool {
return false
}
func (any *baseAny) GetObject() map[string]Any {
return map[string]Any{}
}
func (any *baseAny) SetObject(map[string]Any) bool {
return false
func (any *baseAny) ToVal(obj interface{}) {
panic("not implemented")
}
func WrapInt32(val int32) Any {
@ -91,7 +80,7 @@ func WrapFloat64(val float64) Any {
}
func WrapString(val string) Any {
return &stringAny{baseAny{}, nil, val}
return &stringAny{baseAny{}, val}
}
func Wrap(val interface{}) Any {
@ -147,14 +136,15 @@ func Wrap(val interface{}) Any {
}
func (iter *Iterator) ReadAny() Any {
return iter.readAny(nil)
return iter.readAny()
}
func (iter *Iterator) readAny(reusableIter *Iterator) Any {
func (iter *Iterator) readAny() Any {
c := iter.nextToken()
switch c {
case '"':
return iter.readStringAny(reusableIter)
iter.unreadByte()
return &stringAny{baseAny{}, iter.ReadString()}
case 'n':
iter.skipFixedBytes(3) // null
return &nilAny{}
@ -165,140 +155,92 @@ func (iter *Iterator) readAny(reusableIter *Iterator) Any {
iter.skipFixedBytes(4) // false
return &falseAny{}
case '{':
return iter.readObjectAny(reusableIter)
return iter.readObjectAny()
case '[':
return iter.readArrayAny(reusableIter)
return iter.readArrayAny()
case '-':
return iter.readNumberAny(false)
default:
return iter.readNumberAny(reusableIter, c)
return iter.readNumberAny(true)
}
}
func (iter *Iterator) readNumberAny(reusableIter *Iterator, firstByte byte) Any {
dotFound := false
lazyBuf := make([]byte, 1, 8)
lazyBuf[0] = firstByte
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
if c == '.' {
dotFound = true
continue
}
switch c {
case ' ', '\n', '\r', '\t', ',', '}', ']':
lazyBuf = append(lazyBuf, iter.buf[iter.head:i]...)
iter.head = i
if dotFound {
return &float64LazyAny{baseAny{}, lazyBuf, reusableIter, nil, 0}
} else {
if firstByte == '-' {
return &int64LazyAny{baseAny{}, lazyBuf, reusableIter, nil, 0}
} else {
return &uint64LazyAny{baseAny{}, lazyBuf, reusableIter, nil, 0}
}
}
}
func (iter *Iterator) readNumberAny(positive bool) Any {
iter.startCapture(iter.head - 1)
iter.skipNumber()
lazyBuf := iter.stopCapture()
return &numberLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
}
func (iter *Iterator) readObjectAny() Any {
iter.startCapture(iter.head - 1)
iter.skipObject()
lazyBuf := iter.stopCapture()
return &objectLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
}
func (iter *Iterator) readArrayAny() Any {
iter.startCapture(iter.head - 1)
iter.skipArray()
lazyBuf := iter.stopCapture()
return &arrayLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
}
func locateObjectField(iter *Iterator, target string) []byte {
var found []byte
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
if field == target {
found = iter.SkipAndReturnBytes()
return false
}
lazyBuf = append(lazyBuf, iter.buf[iter.head:iter.tail]...)
if !iter.loadMore() {
iter.head = iter.tail
if dotFound {
return &float64LazyAny{baseAny{}, lazyBuf, reusableIter, nil, 0}
iter.Skip()
return true
})
return found
}
func locateArrayElement(iter *Iterator, target int) []byte {
var found []byte
n := 0
iter.ReadArrayCB(func(iter *Iterator) bool {
if n == target {
found = iter.SkipAndReturnBytes()
return false
}
iter.Skip()
n++
return true
})
return found
}
func locatePath(iter *Iterator, path []interface{}) Any {
for i, pathKeyObj := range path {
switch pathKey := pathKeyObj.(type) {
case string:
valueBytes := locateObjectField(iter, pathKey)
if valueBytes == nil {
return newInvalidAny(path[i:])
}
iter.ResetBytes(valueBytes)
case int:
valueBytes := locateArrayElement(iter, pathKey)
if valueBytes == nil {
return newInvalidAny(path[i:])
}
iter.ResetBytes(valueBytes)
case int32:
if '*' == pathKey {
return iter.readAny().Get(path[i:]...)
} else {
if firstByte == '-' {
return &int64LazyAny{baseAny{}, lazyBuf, reusableIter, nil, 0}
} else {
return &uint64LazyAny{baseAny{}, lazyBuf, reusableIter, nil, 0}
}
return newInvalidAny(path[i:])
}
default:
return newInvalidAny(path[i:])
}
}
}
func (iter *Iterator) readStringAny(reusableIter *Iterator) Any {
lazyBuf := make([]byte, 1, 8)
lazyBuf[0] = '"'
for {
end, escaped := iter.findStringEnd()
if end == -1 {
lazyBuf = append(lazyBuf, iter.buf[iter.head:iter.tail]...)
if !iter.loadMore() {
iter.reportError("readStringAny", "incomplete string")
return &invalidAny{}
}
if escaped {
iter.head = 1 // skip the first char as last char read is \
}
} else {
lazyBuf = append(lazyBuf, iter.buf[iter.head:end]...)
iter.head = end
return &stringLazyAny{baseAny{}, lazyBuf, reusableIter, nil, ""}
}
}
}
func (iter *Iterator) readObjectAny(reusableIter *Iterator) Any {
level := 1
lazyBuf := make([]byte, 1, 32)
lazyBuf[0] = '{'
for {
start := iter.head
for i := iter.head; i < iter.tail; i++ {
switch iter.buf[i] {
case '"': // If inside string, skip it
iter.head = i + 1
iter.skipString()
i = iter.head - 1 // it will be i++ soon
case '{': // If open symbol, increase level
level++
case '}': // If close symbol, decrease level
level--
// If we have returned to the original level, we're done
if level == 0 {
iter.head = i + 1
lazyBuf = append(lazyBuf, iter.buf[start:iter.head]...)
return &objectLazyAny{baseAny{}, lazyBuf, reusableIter, nil, nil, lazyBuf}
}
}
}
lazyBuf = append(lazyBuf, iter.buf[iter.head:iter.tail]...)
if !iter.loadMore() {
iter.reportError("skipObject", "incomplete object")
return &invalidAny{}
}
}
}
func (iter *Iterator) readArrayAny(reusableIter *Iterator) Any {
level := 1
lazyBuf := make([]byte, 1, 32)
lazyBuf[0] = '['
for {
start := iter.head
for i := iter.head; i < iter.tail; i++ {
switch iter.buf[i] {
case '"': // If inside string, skip it
iter.head = i + 1
iter.skipString()
i = iter.head - 1 // it will be i++ soon
case '[': // If open symbol, increase level
level++
case ']': // If close symbol, decrease level
level--
// If we have returned to the original level, we're done
if level == 0 {
iter.head = i + 1
lazyBuf = append(lazyBuf, iter.buf[start:iter.head]...)
return &arrayLazyAny{baseAny{}, lazyBuf, reusableIter, nil, nil, lazyBuf}
}
}
}
lazyBuf = append(lazyBuf, iter.buf[iter.head:iter.tail]...)
if !iter.loadMore() {
iter.reportError("skipArray", "incomplete array")
return &invalidAny{}
}
if iter.Error != nil && iter.Error != io.EOF {
return &invalidAny{baseAny{}, iter.Error}
}
return iter.readAny()
}

View File

@ -1,108 +1,23 @@
package jsoniter
import (
"unsafe"
"fmt"
"reflect"
"unsafe"
)
type arrayLazyAny struct {
baseAny
buf []byte
iter *Iterator
err error
cache []Any
remaining []byte
cfg *frozenConfig
buf []byte
err error
}
func (any *arrayLazyAny) ValueType() ValueType {
return Array
}
func (any *arrayLazyAny) Parse() *Iterator {
iter := any.iter
if iter == nil {
iter = NewIterator()
any.iter = iter
}
iter.ResetBytes(any.remaining)
return iter
}
func (any *arrayLazyAny) fillCacheUntil(target int) Any {
if any.remaining == nil {
if target >= len(any.cache) {
return nil
}
return any.cache[target]
}
if any.cache == nil {
any.cache = make([]Any, 0, 8)
}
i := len(any.cache)
if target < i {
return any.cache[target]
}
iter := any.Parse()
if (len(any.remaining) == len(any.buf)) {
iter.head++
c := iter.nextToken()
if c != ']' {
iter.unreadByte()
element := iter.readAny(iter)
any.cache = append(any.cache, element)
if target == 0 {
any.remaining = iter.buf[iter.head:]
any.err = iter.Error
return element
}
i = 1
} else {
any.remaining = nil
any.err = iter.Error
return nil
}
}
for iter.nextToken() == ',' {
element := iter.readAny(iter)
any.cache = append(any.cache, element)
if i == target {
any.remaining = iter.buf[iter.head:]
any.err = iter.Error
return element
}
i++
}
any.remaining = nil
any.err = iter.Error
return nil
}
func (any *arrayLazyAny) fillCache() {
if any.remaining == nil {
return
}
if any.cache == nil {
any.cache = make([]Any, 0, 8)
}
iter := any.Parse()
if len(any.remaining) == len(any.buf) {
iter.head++
c := iter.nextToken()
if c != ']' {
iter.unreadByte()
any.cache = append(any.cache, iter.readAny(iter))
} else {
any.remaining = nil
any.err = iter.Error
return
}
}
for iter.nextToken() == ',' {
any.cache = append(any.cache, iter.readAny(iter))
}
any.remaining = nil
any.err = iter.Error
func (any *arrayLazyAny) MustBeValid() Any {
return any
}
func (any *arrayLazyAny) LastError() error {
@ -110,252 +25,173 @@ func (any *arrayLazyAny) LastError() error {
}
func (any *arrayLazyAny) ToBool() bool {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
return len(any.cache) != 0
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
return iter.ReadArray()
}
func (any *arrayLazyAny) ToInt() int {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *arrayLazyAny) ToInt32() int32 {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *arrayLazyAny) ToInt64() int64 {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *arrayLazyAny) ToUint() uint {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *arrayLazyAny) ToUint32() uint32 {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *arrayLazyAny) ToUint64() uint64 {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *arrayLazyAny) ToFloat32() float32 {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *arrayLazyAny) ToFloat64() float64 {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *arrayLazyAny) ToString() string {
if len(any.remaining) == len(any.buf) {
// nothing has been parsed yet
return *(*string)(unsafe.Pointer(&any.buf))
} else {
any.fillCache()
str, err := MarshalToString(any.cache)
any.err = err
return str
}
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *arrayLazyAny) ToVal(val interface{}) {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadVal(val)
}
func (any *arrayLazyAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
var element Any
switch firstPath := path[0].(type) {
case int:
element = any.fillCacheUntil(firstPath)
if element == nil {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
valueBytes := locateArrayElement(iter, firstPath)
if valueBytes == nil {
return newInvalidAny(path)
} else {
iter.ResetBytes(valueBytes)
return locatePath(iter, path[1:])
}
case int32:
if '*' == firstPath {
any.fillCache()
arr := make([]Any, 0, len(any.cache))
for _, element := range any.cache {
found := element.Get(path[1:]...)
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
arr := make([]Any, 0)
iter.ReadArrayCB(func(iter *Iterator) bool {
found := iter.readAny().Get(path[1:]...)
if found.ValueType() != Invalid {
arr = append(arr, found)
}
}
return true
})
return wrapArray(arr)
} else {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", path[0], any.cache)}
return newInvalidAny(path)
}
default:
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", path[0], any.cache)}
}
if len(path) == 1 {
return element
} else {
return element.Get(path[1:]...)
return newInvalidAny(path)
}
}
func (any *arrayLazyAny) Size() int {
any.fillCache()
return len(any.cache)
}
func (any *arrayLazyAny) IterateArray() (func() (Any, bool), bool) {
if any.cache == nil {
any.cache = make([]Any, 0, 8)
}
remaining := any.remaining
if len(remaining) == len(any.buf) {
iter := any.Parse()
iter.head++
c := iter.nextToken()
if c != ']' {
iter.unreadByte()
v := iter.readAny(iter)
any.cache = append(any.cache, v)
remaining = iter.buf[iter.head:]
any.remaining = remaining
} else {
remaining = nil
any.remaining = nil
any.err = iter.Error
return nil, false
}
}
if len(any.cache) == 0 {
return nil, false
}
arr := any.cache
nextValue := arr[0]
i := 1
return func() (Any, bool) {
value := nextValue
if i < len(arr) {
// read from cache
nextValue = arr[i]
i++
return value, true
} else {
// read from buffer
iter := any.iter
if iter == nil {
iter = NewIterator()
any.iter = iter
}
iter.ResetBytes(remaining)
c := iter.nextToken()
if c == ',' {
nextValue = iter.readAny(iter)
any.cache = append(any.cache, nextValue)
remaining = iter.buf[iter.head:]
any.remaining = remaining
any.err = iter.Error
return value, true
} else {
remaining = nil
any.remaining = nil
any.err = iter.Error
return value, false
}
}
}, true
size := 0
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadArrayCB(func(iter *Iterator) bool {
size++
iter.Skip()
return true
})
return size
}
func (any *arrayLazyAny) GetArray() []Any {
any.fillCache()
return any.cache
}
func (any *arrayLazyAny) SetArray(newList []Any) bool {
any.fillCache()
any.cache = newList
return true
elements := make([]Any, 0)
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadArrayCB(func(iter *Iterator) bool {
elements = append(elements, iter.ReadAny())
return true
})
return elements
}
func (any *arrayLazyAny) WriteTo(stream *Stream) {
if len(any.remaining) == len(any.buf) {
// nothing has been parsed yet
stream.Write(any.buf)
} else {
any.fillCache()
stream.WriteVal(any.cache)
}
stream.Write(any.buf)
}
func (any *arrayLazyAny) GetInterface() interface{} {
any.fillCache()
return any.cache
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
return iter.Read()
}
type arrayAny struct {
baseAny
err error
cache []Any
val reflect.Value
val reflect.Value
}
func wrapArray(val interface{}) *arrayAny {
return &arrayAny{baseAny{}, nil, nil, reflect.ValueOf(val)}
return &arrayAny{baseAny{}, reflect.ValueOf(val)}
}
func (any *arrayAny) ValueType() ValueType {
return Array
}
func (any *arrayAny) Parse() *Iterator {
return nil
func (any *arrayAny) MustBeValid() Any {
return any
}
func (any *arrayAny) LastError() error {
return any.err
return nil
}
func (any *arrayAny) ToBool() bool {
@ -419,121 +255,54 @@ func (any *arrayAny) ToFloat64() float64 {
}
func (any *arrayAny) ToString() string {
if len(any.cache) == 0 {
// nothing has been parsed yet
str, err := MarshalToString(any.val.Interface())
any.err = err
return str
} else {
any.fillCache()
str, err := MarshalToString(any.cache)
any.err = err
return str
}
}
func (any *arrayAny) fillCacheUntil(idx int) Any {
if idx < len(any.cache) {
return any.cache[idx]
} else {
for i := len(any.cache); i < any.val.Len(); i++ {
element := Wrap(any.val.Index(i).Interface())
any.cache = append(any.cache, element)
if idx == i {
return element
}
}
return nil
}
}
func (any *arrayAny) fillCache() {
any.cache = make([]Any, any.val.Len())
for i := 0; i < any.val.Len(); i++ {
any.cache[i] = Wrap(any.val.Index(i).Interface())
}
str, _ := MarshalToString(any.val.Interface())
return str
}
func (any *arrayAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
var element Any
switch firstPath := path[0].(type) {
case int:
element = any.fillCacheUntil(firstPath)
if element == nil {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
if firstPath < 0 || firstPath >= any.val.Len() {
return newInvalidAny(path)
}
return Wrap(any.val.Index(firstPath).Interface())
case int32:
if '*' == firstPath {
any.fillCache()
mappedAll := make([]Any, 0, len(any.cache))
for _, element := range any.cache {
mapped := element.Get(path[1:]...)
mappedAll := make([]Any, 0)
for i := 0; i < any.val.Len(); i++ {
mapped := Wrap(any.val.Index(i).Interface()).Get(path[1:]...)
if mapped.ValueType() != Invalid {
mappedAll = append(mappedAll, mapped)
}
}
return wrapArray(mappedAll)
} else {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", path[0], any.cache)}
return newInvalidAny(path)
}
default:
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", path[0], any.cache)}
}
if len(path) == 1 {
return element
} else {
return element.Get(path[1:]...)
return newInvalidAny(path)
}
}
func (any *arrayAny) Size() int {
any.fillCache()
return len(any.cache)
}
func (any *arrayAny) IterateArray() (func() (Any, bool), bool) {
if any.val.Len() == 0 {
return nil, false
}
i := 0
return func() (Any, bool) {
if i == any.val.Len() {
return nil, false
}
if i == len(any.cache) {
any.cache = append(any.cache, Wrap(any.val.Index(i).Interface()))
}
val := any.cache[i]
i++
return val, i != any.val.Len()
}, true
return any.val.Len()
}
func (any *arrayAny) GetArray() []Any {
any.fillCache()
return any.cache
}
func (any *arrayAny) SetArray(newList []Any) bool {
any.fillCache()
any.cache = newList
return true
elements := make([]Any, any.val.Len())
for i := 0; i < any.val.Len(); i++ {
elements[i] = Wrap(any.val.Index(i).Interface())
}
return elements
}
func (any *arrayAny) WriteTo(stream *Stream) {
if len(any.cache) == 0 {
// nothing has been parsed yet
stream.WriteVal(any.val)
} else {
any.fillCache()
stream.WriteVal(any.cache)
}
stream.WriteVal(any.val)
}
func (any *arrayAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}
return any.val.Interface()
}

View File

@ -64,6 +64,10 @@ func (any *trueAny) ValueType() ValueType {
return Bool
}
func (any *trueAny) MustBeValid() Any {
return any
}
type falseAny struct {
baseAny
}
@ -127,3 +131,7 @@ func (any *falseAny) GetInterface() interface{} {
func (any *falseAny) ValueType() ValueType {
return Bool
}
func (any *falseAny) MustBeValid() Any {
return any
}

View File

@ -1,105 +1,9 @@
package jsoniter
import (
"io"
"unsafe"
"strconv"
)
type float64LazyAny struct {
baseAny
buf []byte
iter *Iterator
err error
cache float64
}
func (any *float64LazyAny) Parse() *Iterator {
iter := any.iter
if iter == nil {
iter = NewIterator()
}
iter.ResetBytes(any.buf)
return iter
}
func (any *float64LazyAny) ValueType() ValueType {
return Number
}
func (any *float64LazyAny) fillCache() {
if any.err != nil {
return
}
iter := any.Parse()
any.cache = iter.ReadFloat64()
if iter.Error != io.EOF {
iter.reportError("floatLazyAny", "there are bytes left")
}
any.err = iter.Error
}
func (any *float64LazyAny) LastError() error {
return any.err
}
func (any *float64LazyAny) ToBool() bool {
return any.ToFloat64() != 0
}
func (any *float64LazyAny) ToInt() int {
any.fillCache()
return int(any.cache)
}
func (any *float64LazyAny) ToInt32() int32 {
any.fillCache()
return int32(any.cache)
}
func (any *float64LazyAny) ToInt64() int64 {
any.fillCache()
return int64(any.cache)
}
func (any *float64LazyAny) ToUint() uint {
any.fillCache()
return uint(any.cache)
}
func (any *float64LazyAny) ToUint32() uint32 {
any.fillCache()
return uint32(any.cache)
}
func (any *float64LazyAny) ToUint64() uint64 {
any.fillCache()
return uint64(any.cache)
}
func (any *float64LazyAny) ToFloat32() float32 {
any.fillCache()
return float32(any.cache)
}
func (any *float64LazyAny) ToFloat64() float64 {
any.fillCache()
return any.cache
}
func (any *float64LazyAny) ToString() string {
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *float64LazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *float64LazyAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}
type floatAny struct {
baseAny
val float64
@ -113,6 +17,10 @@ func (any *floatAny) ValueType() ValueType {
return Number
}
func (any *floatAny) MustBeValid() Any {
return any
}
func (any *floatAny) LastError() error {
return nil
}
@ -163,4 +71,4 @@ func (any *floatAny) WriteTo(stream *Stream) {
func (any *floatAny) GetInterface() interface{} {
return any.val
}
}

View File

@ -17,6 +17,10 @@ func (any *int32Any) ValueType() ValueType {
return Number
}
func (any *int32Any) MustBeValid() Any {
return any
}
func (any *int32Any) ToBool() bool {
return any.val != 0
}
@ -67,4 +71,4 @@ func (any *int32Any) Parse() *Iterator {
func (any *int32Any) GetInterface() interface{} {
return any.val
}
}

View File

@ -1,105 +1,9 @@
package jsoniter
import (
"io"
"unsafe"
"strconv"
)
type int64LazyAny struct {
baseAny
buf []byte
iter *Iterator
err error
cache int64
}
func (any *int64LazyAny) ValueType() ValueType {
return Number
}
func (any *int64LazyAny) Parse() *Iterator {
iter := any.iter
if iter == nil {
iter = NewIterator()
}
iter.ResetBytes(any.buf)
return iter
}
func (any *int64LazyAny) fillCache() {
if any.err != nil {
return
}
iter := any.Parse()
any.cache = iter.ReadInt64()
if iter.Error != io.EOF {
iter.reportError("intLazyAny", "there are bytes left")
}
any.err = iter.Error
}
func (any *int64LazyAny) LastError() error {
return any.err
}
func (any *int64LazyAny) ToBool() bool {
return any.ToInt64() != 0
}
func (any *int64LazyAny) ToInt() int {
any.fillCache()
return int(any.cache)
}
func (any *int64LazyAny) ToInt32() int32 {
any.fillCache()
return int32(any.cache)
}
func (any *int64LazyAny) ToInt64() int64 {
any.fillCache()
return any.cache
}
func (any *int64LazyAny) ToUint() uint {
any.fillCache()
return uint(any.cache)
}
func (any *int64LazyAny) ToUint32() uint32 {
any.fillCache()
return uint32(any.cache)
}
func (any *int64LazyAny) ToUint64() uint64 {
any.fillCache()
return uint64(any.cache)
}
func (any *int64LazyAny) ToFloat32() float32 {
any.fillCache()
return float32(any.cache)
}
func (any *int64LazyAny) ToFloat64() float64 {
any.fillCache()
return float64(any.cache)
}
func (any *int64LazyAny) ToString() string {
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *int64LazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *int64LazyAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}
type int64Any struct {
baseAny
val int64
@ -113,6 +17,10 @@ func (any *int64Any) ValueType() ValueType {
return Number
}
func (any *int64Any) MustBeValid() Any {
return any
}
func (any *int64Any) ToBool() bool {
return any.val != 0
}
@ -163,4 +71,4 @@ func (any *int64Any) Parse() *Iterator {
func (any *int64Any) GetInterface() interface{} {
return any.val
}
}

View File

@ -7,6 +7,10 @@ type invalidAny struct {
err error
}
func newInvalidAny(path []interface{}) *invalidAny {
return &invalidAny{baseAny{}, fmt.Errorf("%v not found", path)}
}
func (any *invalidAny) LastError() error {
return any.err
}
@ -15,6 +19,11 @@ func (any *invalidAny) ValueType() ValueType {
return Invalid
}
func (any *invalidAny) MustBeValid() Any {
panic(any.err)
return any
}
func (any *invalidAny) ToBool() bool {
return false
}

View File

@ -12,6 +12,10 @@ func (any *nilAny) ValueType() ValueType {
return Nil
}
func (any *nilAny) MustBeValid() Any {
return any
}
func (any *nilAny) ToBool() bool {
return false
}
@ -62,4 +66,4 @@ func (any *nilAny) Parse() *Iterator {
func (any *nilAny) GetInterface() interface{} {
return nil
}
}

106
feature_any_number.go Normal file
View File

@ -0,0 +1,106 @@
package jsoniter
import (
"unsafe"
)
type numberLazyAny struct {
baseAny
cfg *frozenConfig
buf []byte
err error
}
func (any *numberLazyAny) ValueType() ValueType {
return Number
}
func (any *numberLazyAny) MustBeValid() Any {
return any
}
func (any *numberLazyAny) LastError() error {
return any.err
}
func (any *numberLazyAny) ToBool() bool {
return any.ToFloat64() != 0
}
func (any *numberLazyAny) ToInt() int {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt()
any.err = iter.Error
return val
}
func (any *numberLazyAny) ToInt32() int32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt32()
any.err = iter.Error
return val
}
func (any *numberLazyAny) ToInt64() int64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt64()
any.err = iter.Error
return val
}
func (any *numberLazyAny) ToUint() uint {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint()
any.err = iter.Error
return val
}
func (any *numberLazyAny) ToUint32() uint32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint32()
any.err = iter.Error
return val
}
func (any *numberLazyAny) ToUint64() uint64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint64()
any.err = iter.Error
return val
}
func (any *numberLazyAny) ToFloat32() float32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadFloat32()
any.err = iter.Error
return val
}
func (any *numberLazyAny) ToFloat64() float64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadFloat64()
any.err = iter.Error
return val
}
func (any *numberLazyAny) ToString() string {
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *numberLazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *numberLazyAny) GetInterface() interface{} {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
return iter.Read()
}
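
The rewritten numberLazyAny keeps only the raw number bytes plus its frozenConfig and borrows a pooled iterator for each conversion instead of caching a parsed value. A minimal, hedged usage sketch (assumes import "github.com/json-iterator/go" plus fmt, and that ConfigDefault.Get resolves the path down to this lazy number as the Api interface later in this change set suggests):

func exampleLazyNumber() {
	// "price" is expected to resolve to a numberLazyAny holding the bytes `12.5`.
	price := jsoniter.ConfigDefault.Get([]byte(`{"price": 12.5}`), "price")
	fmt.Println(price.ToFloat64()) // 12.5
	fmt.Println(price.ToInt())     // 12: ReadInt stops at the decimal point
	fmt.Println(price.LastError()) // <nil> when the bytes parsed cleanly
}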


@ -1,110 +1,23 @@
package jsoniter
import (
"unsafe"
"fmt"
"reflect"
"unsafe"
)
type objectLazyAny struct {
baseAny
buf []byte
iter *Iterator
err error
cache map[string]Any
remaining []byte
cfg *frozenConfig
buf []byte
err error
}
func (any *objectLazyAny) ValueType() ValueType {
return Object
}
func (any *objectLazyAny) Parse() *Iterator {
iter := any.iter
if iter == nil {
iter = NewIterator()
any.iter = iter
}
iter.ResetBytes(any.remaining)
return iter
}
func (any *objectLazyAny) fillCacheUntil(target string) Any {
if any.remaining == nil {
return any.cache[target]
}
if any.cache == nil {
any.cache = map[string]Any{}
}
val := any.cache[target]
if val != nil {
return val
}
iter := any.Parse()
if len(any.remaining) == len(any.buf) {
iter.head++
c := iter.nextToken()
if c != '}' {
iter.unreadByte()
k := string(iter.readObjectFieldAsBytes())
v := iter.readAny(iter)
any.cache[k] = v
if target == k {
any.remaining = iter.buf[iter.head:]
any.err = iter.Error
return v
}
} else {
any.remaining = nil
any.err = iter.Error
return nil
}
}
for iter.nextToken() == ',' {
k := string(iter.readObjectFieldAsBytes())
v := iter.readAny(iter)
any.cache[k] = v
if target == k {
any.remaining = iter.buf[iter.head:]
any.err = iter.Error
return v
}
}
any.remaining = nil
any.err = iter.Error
return nil
}
func (any *objectLazyAny) fillCache() {
if any.remaining == nil {
return
}
if any.cache == nil {
any.cache = map[string]Any{}
}
iter := any.Parse()
if len(any.remaining) == len(any.buf) {
iter.head++
c := iter.nextToken()
if c != '}' {
iter.unreadByte()
k := string(iter.readObjectFieldAsBytes())
v := iter.readAny(iter)
any.cache[k] = v
} else {
any.remaining = nil
any.err = iter.Error
return
}
}
for iter.nextToken() == ',' {
k := string(iter.readObjectFieldAsBytes())
v := iter.readAny(iter)
any.cache[k] = v
}
any.remaining = nil
any.err = iter.Error
return
func (any *objectLazyAny) MustBeValid() Any {
return any
}
func (any *objectLazyAny) LastError() error {
@ -112,316 +25,188 @@ func (any *objectLazyAny) LastError() error {
}
func (any *objectLazyAny) ToBool() bool {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
return len(any.cache) != 0
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
return iter.ReadObject() != ""
}
func (any *objectLazyAny) ToInt() int {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *objectLazyAny) ToInt32() int32 {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *objectLazyAny) ToInt64() int64 {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *objectLazyAny) ToUint() uint {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *objectLazyAny) ToUint32() uint32 {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *objectLazyAny) ToUint64() uint64 {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *objectLazyAny) ToFloat32() float32 {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *objectLazyAny) ToFloat64() float64 {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
if any.ToBool() {
return 1
} else {
return 0
}
return 1
}
func (any *objectLazyAny) ToString() string {
if len(any.remaining) == len(any.buf) {
// nothing has been parsed yet
return *(*string)(unsafe.Pointer(&any.buf))
} else {
any.fillCache()
str, err := MarshalToString(any.cache)
any.err = err
return str
}
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *objectLazyAny) ToVal(obj interface{}) {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadVal(obj)
}
func (any *objectLazyAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
var element Any
switch firstPath := path[0].(type) {
case string:
element = any.fillCacheUntil(firstPath)
if element == nil {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
valueBytes := locateObjectField(iter, firstPath)
if valueBytes == nil {
return newInvalidAny(path)
} else {
iter.ResetBytes(valueBytes)
return locatePath(iter, path[1:])
}
case int32:
if '*' == firstPath {
any.fillCache()
mappedAll := map[string]Any{}
for key, value := range any.cache {
mapped := value.Get(path[1:]...)
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
mapped := locatePath(iter, path[1:])
if mapped.ValueType() != Invalid {
mappedAll[key] = mapped
mappedAll[field] = mapped
}
}
return true
})
return wrapMap(mappedAll)
} else {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
return newInvalidAny(path)
}
default:
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
if len(path) == 1 {
return element
} else {
return element.Get(path[1:]...)
return newInvalidAny(path)
}
}
func (any *objectLazyAny) Keys() []string {
any.fillCache()
keys := make([]string, 0, len(any.cache))
for key := range any.cache {
keys = append(keys, key)
}
keys := []string{}
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
iter.Skip()
keys = append(keys, field)
return true
})
return keys
}
func (any *objectLazyAny) Size() int {
any.fillCache()
return len(any.cache)
}
func (any *objectLazyAny) IterateObject() (func() (string, Any, bool), bool) {
if any.cache == nil {
any.cache = map[string]Any{}
}
remaining := any.remaining
if len(remaining) == len(any.buf) {
iter := any.Parse()
iter.head++
c := iter.nextToken()
if c != '}' {
iter.unreadByte()
k := string(iter.readObjectFieldAsBytes())
v := iter.readAny(iter)
any.cache[k] = v
remaining = iter.buf[iter.head:]
any.remaining = remaining
} else {
remaining = nil
any.remaining = nil
any.err = iter.Error
return nil, false
}
}
if len(any.cache) == 0 {
return nil, false
}
keys := make([]string, 0, len(any.cache))
values := make([]Any, 0, len(any.cache))
for key, value := range any.cache {
keys = append(keys, key)
values = append(values, value)
}
nextKey := keys[0]
nextValue := values[0]
i := 1
return func() (string, Any, bool) {
key := nextKey
value := nextValue
if i < len(keys) {
// read from cache
nextKey = keys[i]
nextValue = values[i]
i++
return key, value, true
} else {
// read from buffer
iter := any.iter
if iter == nil {
iter = NewIterator()
any.iter = iter
}
iter.ResetBytes(remaining)
c := iter.nextToken()
if c == ',' {
nextKey = string(iter.readObjectFieldAsBytes())
nextValue = iter.readAny(iter)
any.cache[nextKey] = nextValue
remaining = iter.buf[iter.head:]
any.remaining = remaining
any.err = iter.Error
return key, value, true
} else {
nextKey = ""
remaining = nil
any.remaining = nil
any.err = iter.Error
return key, value, false
}
}
}, true
size := 0
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
iter.Skip()
size++
return true
})
return size
}
func (any *objectLazyAny) GetObject() map[string]Any {
any.fillCache()
return any.cache
}
func (any *objectLazyAny) SetObject(val map[string]Any) bool {
any.fillCache()
any.cache = val
return true
asMap := map[string]Any{}
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
asMap[field] = iter.ReadAny()
return true
})
return asMap
}
func (any *objectLazyAny) WriteTo(stream *Stream) {
if len(any.remaining) == len(any.buf) {
// nothing has been parsed yet
stream.Write(any.buf)
} else {
any.fillCache()
stream.WriteVal(any.cache)
}
stream.Write(any.buf)
}
func (any *objectLazyAny) GetInterface() interface{} {
any.fillCache()
return any.cache
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
return iter.Read()
}
type objectAny struct {
baseAny
err error
cache map[string]Any
val reflect.Value
err error
val reflect.Value
}
func wrapStruct(val interface{}) *objectAny {
return &objectAny{baseAny{}, nil, nil, reflect.ValueOf(val)}
return &objectAny{baseAny{}, nil, reflect.ValueOf(val)}
}
func (any *objectAny) ValueType() ValueType {
return Object
}
func (any *objectAny) MustBeValid() Any {
return any
}
func (any *objectAny) Parse() *Iterator {
return nil
}
func (any *objectAny) fillCacheUntil(target string) Any {
if any.cache == nil {
any.cache = map[string]Any{}
}
element, found := any.cache[target]
if found {
return element
}
for i := len(any.cache); i < any.val.NumField(); i++ {
field := any.val.Field(i)
fieldName := any.val.Type().Field(i).Name
var element Any
if field.CanInterface() {
element = Wrap(field.Interface())
} else {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", fieldName, any.cache)}
}
any.cache[fieldName] = element
if fieldName == target {
return element
}
}
return nil
}
func (any *objectAny) fillCache() {
if any.cache == nil {
any.cache = map[string]Any{}
}
if len(any.cache) == any.val.NumField() {
return
}
for i := 0; i < any.val.NumField(); i++ {
field := any.val.Field(i)
fieldName := any.val.Type().Field(i).Name
var element Any
if field.CanInterface() {
element = Wrap(field.Interface())
} else {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", fieldName, any.cache)}
}
any.cache[fieldName] = element
}
}
func (any *objectAny) LastError() error {
return any.err
}
@ -487,189 +272,96 @@ func (any *objectAny) ToFloat64() float64 {
}
func (any *objectAny) ToString() string {
if len(any.cache) == 0 {
str, err := MarshalToString(any.val.Interface())
any.err = err
return str
} else {
any.fillCache()
str, err := MarshalToString(any.cache)
any.err = err
return str
}
str, err := MarshalToString(any.val.Interface())
any.err = err
return str
}
func (any *objectAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
var element Any
switch firstPath := path[0].(type) {
case string:
element = any.fillCacheUntil(firstPath)
if element == nil {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
field := any.val.FieldByName(firstPath)
if !field.IsValid() {
return newInvalidAny(path)
}
return Wrap(field.Interface())
case int32:
if '*' == firstPath {
any.fillCache()
mappedAll := map[string]Any{}
for key, value := range any.cache {
mapped := value.Get(path[1:]...)
if mapped.ValueType() != Invalid {
mappedAll[key] = mapped
for i := 0; i < any.val.NumField(); i++ {
field := any.val.Field(i)
if field.CanInterface() {
mapped := Wrap(field.Interface()).Get(path[1:]...)
if mapped.ValueType() != Invalid {
mappedAll[any.val.Type().Field(i).Name] = mapped
}
}
}
return wrapMap(mappedAll)
} else {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
return newInvalidAny(path)
}
default:
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
if len(path) == 1 {
return element
} else {
return element.Get(path[1:]...)
return newInvalidAny(path)
}
}
func (any *objectAny) Keys() []string {
any.fillCache()
keys := make([]string, 0, len(any.cache))
for key := range any.cache {
keys = append(keys, key)
keys := make([]string, 0, any.val.NumField())
for i := 0; i < any.val.NumField(); i++ {
keys = append(keys, any.val.Type().Field(i).Name)
}
return keys
}
func (any *objectAny) Size() int {
any.fillCache()
return len(any.cache)
}
func (any *objectAny) IterateObject() (func() (string, Any, bool), bool) {
if any.cache == nil {
any.cache = map[string]Any{}
}
if any.val.NumField() == 0 {
return nil, false
}
cacheKeys := make([]string, len(any.cache))
i := 0
for key := range any.cache {
cacheKeys[i] = key
i++
}
i = 0
return func() (string, Any, bool) {
if i == any.val.NumField() {
return "", nil, false
}
var fieldName string
var fieldValueAsAny Any
if i == len(cacheKeys) {
fieldName = any.val.Type().Field(i).Name
cacheKeys = append(cacheKeys, fieldName)
fieldValue := any.val.Field(i)
if fieldValue.CanInterface() {
fieldValueAsAny = Wrap(fieldValue.Interface())
any.cache[fieldName] = fieldValueAsAny
} else {
fieldValueAsAny = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", fieldName, any.cache)}
any.cache[fieldName] = fieldValueAsAny
}
} else {
fieldName = cacheKeys[i]
fieldValueAsAny = any.cache[fieldName]
}
i++
return fieldName, fieldValueAsAny, i != any.val.NumField()
}, true
return any.val.NumField()
}
func (any *objectAny) GetObject() map[string]Any {
any.fillCache()
return any.cache
}
func (any *objectAny) SetObject(val map[string]Any) bool {
any.fillCache()
any.cache = val
return true
object := map[string]Any{}
for i := 0; i < any.val.NumField(); i++ {
field := any.val.Field(i)
if field.CanInterface() {
object[any.val.Type().Field(i).Name] = Wrap(field.Interface())
}
}
return object
}
func (any *objectAny) WriteTo(stream *Stream) {
if len(any.cache) == 0 {
// nothing has been parsed yet
stream.WriteVal(any.val)
} else {
any.fillCache()
stream.WriteVal(any.cache)
}
stream.WriteVal(any.val)
}
func (any *objectAny) GetInterface() interface{} {
any.fillCache()
return any.cache
return any.val.Interface()
}
type mapAny struct {
baseAny
err error
cache map[string]Any
val reflect.Value
err error
val reflect.Value
}
func wrapMap(val interface{}) *mapAny {
return &mapAny{baseAny{}, nil, nil, reflect.ValueOf(val)}
return &mapAny{baseAny{}, nil, reflect.ValueOf(val)}
}
func (any *mapAny) ValueType() ValueType {
return Object
}
func (any *mapAny) MustBeValid() Any {
return any
}
func (any *mapAny) Parse() *Iterator {
return nil
}
func (any *mapAny) fillCacheUntil(target string) Any {
if any.cache == nil {
any.cache = map[string]Any{}
}
element, found := any.cache[target]
if found {
return element
}
for _, key := range any.val.MapKeys() {
keyAsStr := key.String()
_, found := any.cache[keyAsStr]
if found {
continue
}
element := Wrap(any.val.MapIndex(key).Interface())
any.cache[keyAsStr] = element
if keyAsStr == target {
return element
}
}
return nil
}
func (any *mapAny) fillCache() {
if any.cache == nil {
any.cache = map[string]Any{}
}
if len(any.cache) == any.val.Len() {
return
}
for _, key := range any.val.MapKeys() {
keyAsStr := key.String()
element := Wrap(any.val.MapIndex(key).Interface())
any.cache[keyAsStr] = element
}
}
func (any *mapAny) LastError() error {
return any.err
}
@ -735,114 +427,66 @@ func (any *mapAny) ToFloat64() float64 {
}
func (any *mapAny) ToString() string {
if len(any.cache) == 0 {
str, err := MarshalToString(any.val.Interface())
any.err = err
return str
} else {
any.fillCache()
str, err := MarshalToString(any.cache)
any.err = err
return str
}
str, err := MarshalToString(any.val.Interface())
any.err = err
return str
}
func (any *mapAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
var element Any
switch firstPath := path[0].(type) {
case string:
element = any.fillCacheUntil(firstPath)
if element == nil {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
case int32:
if '*' == firstPath {
any.fillCache()
mappedAll := map[string]Any{}
for key, value := range any.cache {
mapped := value.Get(path[1:]...)
for _, key := range any.val.MapKeys() {
keyAsStr := key.String()
element := Wrap(any.val.MapIndex(key).Interface())
mapped := element.Get(path[1:]...)
if mapped.ValueType() != Invalid {
mappedAll[key] = mapped
mappedAll[keyAsStr] = mapped
}
}
return wrapMap(mappedAll)
} else {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
return newInvalidAny(path)
}
default:
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
if len(path) == 1 {
return element
} else {
return element.Get(path[1:]...)
value := any.val.MapIndex(reflect.ValueOf(firstPath))
if !value.IsValid() {
return newInvalidAny(path)
}
return Wrap(value.Interface())
}
}
func (any *mapAny) Keys() []string {
any.fillCache()
keys := make([]string, 0, len(any.cache))
for key := range any.cache {
keys = append(keys, key)
keys := make([]string, 0, any.val.Len())
for _, key := range any.val.MapKeys() {
keys = append(keys, key.String())
}
return keys
}
func (any *mapAny) Size() int {
any.fillCache()
return len(any.cache)
}
func (any *mapAny) IterateObject() (func() (string, Any, bool), bool) {
any.fillCache()
if len(any.cache) == 0 {
return nil, false
}
keys := make([]string, len(any.cache))
values := make([]Any, len(any.cache))
i := 0
for k, v := range any.cache {
keys[i] = k
values[i] = v
i++
}
i = 0
return func() (string, Any, bool) {
if i == len(keys) {
return "", nil, false
}
k := keys[i]
v := values[i]
i++
return k, v, i != len(keys)
}, true
return any.val.Len()
}
func (any *mapAny) GetObject() map[string]Any {
any.fillCache()
return any.cache
}
func (any *mapAny) SetObject(val map[string]Any) bool {
any.fillCache()
any.cache = val
return true
object := map[string]Any{}
for _, key := range any.val.MapKeys() {
keyAsStr := key.String()
element := Wrap(any.val.MapIndex(key).Interface())
object[keyAsStr] = element
}
return object
}
func (any *mapAny) WriteTo(stream *Stream) {
if len(any.cache) == 0 {
// nothing has been parsed yet
stream.WriteVal(any.val)
} else {
any.fillCache()
stream.WriteVal(any.cache)
}
stream.WriteVal(any.val)
}
func (any *mapAny) GetInterface() interface{} {
any.fillCache()
return any.cache
return any.val.Interface()
}
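
With the caches gone, every lookup on the lazy object Any re-scans its buffered bytes with a borrowed iterator, and the reflection-backed objectAny/mapAny read straight from the wrapped value. A hedged sketch of the path-based access this enables (same jsoniter import assumption; the multi-level path handling is inferred from the Get implementations above, not shown here):

func exampleObjectGet() {
	data := []byte(`{"user":{"name":"ada","age":36}}`)
	name := jsoniter.ConfigDefault.Get(data, "user", "name")
	fmt.Println(name.ToString()) // ada

	missing := jsoniter.ConfigDefault.Get(data, "user", "email")
	fmt.Println(missing.ValueType() == jsoniter.Invalid) // true
	// missing.MustBeValid() would panic with the "... not found" error built by newInvalidAny.
}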


@ -1,158 +1,36 @@
package jsoniter
import (
"io"
"fmt"
"strconv"
)
type stringLazyAny struct{
type stringAny struct {
baseAny
buf []byte
iter *Iterator
err error
cache string
}
func (any *stringLazyAny) ValueType() ValueType {
return String
}
func (any *stringLazyAny) Parse() *Iterator {
iter := any.iter
if iter == nil {
iter = NewIterator()
any.iter = iter
}
iter.ResetBytes(any.buf)
return iter
}
func (any *stringLazyAny) fillCache() {
if any.err != nil {
return
}
iter := any.Parse()
any.cache = iter.ReadString()
if iter.Error != io.EOF {
iter.reportError("stringLazyAny", "there are bytes left")
}
any.err = iter.Error
}
func (any *stringLazyAny) LastError() error {
return any.err
}
func (any *stringLazyAny) ToBool() bool {
str := any.ToString()
if str == "false" {
return false
}
for _, c := range str {
switch c {
case ' ', '\n', '\r', '\t':
default:
return true
}
}
return false
}
func (any *stringLazyAny) ToInt() int {
iter := any.Parse()
iter.head++
val := iter.ReadInt()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToInt32() int32 {
iter := any.Parse()
iter.head++
val := iter.ReadInt32()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToInt64() int64 {
iter := any.Parse()
iter.head++
val := iter.ReadInt64()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToUint() uint {
iter := any.Parse()
iter.head++
val := iter.ReadUint()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToUint32() uint32 {
iter := any.Parse()
iter.head++
val := iter.ReadUint32()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToUint64() uint64 {
iter := any.Parse()
iter.head++
val := iter.ReadUint64()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToFloat32() float32 {
iter := any.Parse()
iter.head++
val := iter.ReadFloat32()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToFloat64() float64 {
iter := any.Parse()
iter.head++
val := iter.ReadFloat64()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToString() string {
any.fillCache()
return any.cache
}
func (any *stringLazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *stringLazyAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}
type stringAny struct{
baseAny
err error
val string
}
func (any *stringAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
return &invalidAny{baseAny{}, fmt.Errorf("Get %v from simple value", path)}
}
func (any *stringAny) Parse() *Iterator {
return nil
}
func (any *stringAny) ValueType() ValueType {
return String
}
func (any *stringAny) MustBeValid() Any {
return any
}
func (any *stringAny) LastError() error {
return any.err
return nil
}
func (any *stringAny) ToBool() bool {
@ -171,50 +49,42 @@ func (any *stringAny) ToBool() bool {
}
func (any *stringAny) ToInt() int {
parsed, err := strconv.ParseInt(any.val, 10, 64)
any.err = err
parsed, _ := strconv.ParseInt(any.val, 10, 64)
return int(parsed)
}
func (any *stringAny) ToInt32() int32 {
parsed, err := strconv.ParseInt(any.val, 10, 32)
any.err = err
parsed, _ := strconv.ParseInt(any.val, 10, 32)
return int32(parsed)
}
func (any *stringAny) ToInt64() int64 {
parsed, err := strconv.ParseInt(any.val, 10, 64)
any.err = err
parsed, _ := strconv.ParseInt(any.val, 10, 64)
return parsed
}
func (any *stringAny) ToUint() uint {
parsed, err := strconv.ParseUint(any.val, 10, 64)
any.err = err
parsed, _ := strconv.ParseUint(any.val, 10, 64)
return uint(parsed)
}
func (any *stringAny) ToUint32() uint32 {
parsed, err := strconv.ParseUint(any.val, 10, 32)
any.err = err
parsed, _ := strconv.ParseUint(any.val, 10, 32)
return uint32(parsed)
}
func (any *stringAny) ToUint64() uint64 {
parsed, err := strconv.ParseUint(any.val, 10, 64)
any.err = err
parsed, _ := strconv.ParseUint(any.val, 10, 64)
return parsed
}
func (any *stringAny) ToFloat32() float32 {
parsed, err := strconv.ParseFloat(any.val, 32)
any.err = err
parsed, _ := strconv.ParseFloat(any.val, 32)
return float32(parsed)
}
func (any *stringAny) ToFloat64() float64 {
parsed, err := strconv.ParseFloat(any.val, 64)
any.err = err
parsed, _ := strconv.ParseFloat(any.val, 64)
return parsed
}
@ -228,4 +98,4 @@ func (any *stringAny) WriteTo(stream *Stream) {
func (any *stringAny) GetInterface() interface{} {
return any.val
}
}


@ -17,6 +17,10 @@ func (any *uint32Any) ValueType() ValueType {
return Number
}
func (any *uint32Any) MustBeValid() Any {
return any
}
func (any *uint32Any) ToBool() bool {
return any.val != 0
}


@ -2,105 +2,8 @@ package jsoniter
import (
"strconv"
"unsafe"
"io"
)
type uint64LazyAny struct {
baseAny
buf []byte
iter *Iterator
err error
cache uint64
}
func (any *uint64LazyAny) ValueType() ValueType {
return Number
}
func (any *uint64LazyAny) Parse() *Iterator {
iter := any.iter
if iter == nil {
iter = NewIterator()
}
iter.ResetBytes(any.buf)
return iter
}
func (any *uint64LazyAny) fillCache() {
if any.err != nil {
return
}
iter := any.Parse()
any.cache = iter.ReadUint64()
if iter.Error != io.EOF {
iter.reportError("intLazyAny", "there are bytes left")
}
any.err = iter.Error
}
func (any *uint64LazyAny) LastError() error {
return any.err
}
func (any *uint64LazyAny) ToBool() bool {
return any.ToInt64() != 0
}
func (any *uint64LazyAny) ToInt() int {
any.fillCache()
return int(any.cache)
}
func (any *uint64LazyAny) ToInt32() int32 {
any.fillCache()
return int32(any.cache)
}
func (any *uint64LazyAny) ToInt64() int64 {
any.fillCache()
return int64(any.cache)
}
func (any *uint64LazyAny) ToUint() uint {
any.fillCache()
return uint(any.cache)
}
func (any *uint64LazyAny) ToUint32() uint32 {
any.fillCache()
return uint32(any.cache)
}
func (any *uint64LazyAny) ToUint64() uint64 {
any.fillCache()
return any.cache
}
func (any *uint64LazyAny) ToFloat32() float32 {
any.fillCache()
return float32(any.cache)
}
func (any *uint64LazyAny) ToFloat64() float64 {
any.fillCache()
return float64(any.cache)
}
func (any *uint64LazyAny) ToString() string {
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *uint64LazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *uint64LazyAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}
type uint64Any struct {
baseAny
val uint64
@ -114,6 +17,10 @@ func (any *uint64Any) ValueType() ValueType {
return Number
}
func (any *uint64Any) MustBeValid() Any {
return any
}
func (any *uint64Any) ToBool() bool {
return any.val != 0
}
@ -164,4 +71,4 @@ func (any *uint64Any) Parse() *Iterator {
func (any *uint64Any) GetInterface() interface{} {
return any.val
}
}

feature_config.go Normal file

@ -0,0 +1,282 @@
package jsoniter
import (
"encoding/json"
"errors"
"io"
"reflect"
"sync/atomic"
"unsafe"
)
type Config struct {
IndentionStep int
MarshalFloatWith6Digits bool
EscapeHtml bool
SortMapKeys bool
UseNumber bool
}
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
decoderCache unsafe.Pointer
encoderCache unsafe.Pointer
extensions []Extension
streamPool chan *Stream
iteratorPool chan *Iterator
}
type Api interface {
MarshalToString(v interface{}) (string, error)
Marshal(v interface{}) ([]byte, error)
UnmarshalFromString(str string, v interface{}) error
Unmarshal(data []byte, v interface{}) error
Get(data []byte, path ...interface{}) Any
NewEncoder(writer io.Writer) *Encoder
NewDecoder(reader io.Reader) *Decoder
}
var ConfigDefault = Config{
EscapeHtml: true,
}.Froze()
// Trying to be 100% compatible with standard library behavior
var ConfigCompatibleWithStandardLibrary = Config{
EscapeHtml: true,
SortMapKeys: true,
}.Froze()
var ConfigFastest = Config{
EscapeHtml: false,
MarshalFloatWith6Digits: true,
}.Froze()
func (cfg Config) Froze() *frozenConfig {
frozenConfig := &frozenConfig{
sortMapKeys: cfg.SortMapKeys,
indentionStep: cfg.IndentionStep,
streamPool: make(chan *Stream, 16),
iteratorPool: make(chan *Iterator, 16),
}
atomic.StorePointer(&frozenConfig.decoderCache, unsafe.Pointer(&map[string]ValDecoder{}))
atomic.StorePointer(&frozenConfig.encoderCache, unsafe.Pointer(&map[string]ValEncoder{}))
if cfg.MarshalFloatWith6Digits {
frozenConfig.marshalFloatWith6Digits()
}
if cfg.EscapeHtml {
frozenConfig.escapeHtml()
}
if cfg.UseNumber {
frozenConfig.useNumber()
}
frozenConfig.configBeforeFrozen = cfg
return frozenConfig
}
func (cfg *frozenConfig) useNumber() {
cfg.addDecoderToCache(reflect.TypeOf((*interface{})(nil)).Elem(), &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
if iter.WhatIsNext() == Number {
*((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
} else {
*((*interface{})(ptr)) = iter.Read()
}
}})
}
func (cfg *frozenConfig) registerExtension(extension Extension) {
cfg.extensions = append(cfg.extensions, extension)
}
type lossyFloat32Encoder struct {
}
func (encoder *lossyFloat32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteFloat32Lossy(*((*float32)(ptr)))
}
func (encoder *lossyFloat32Encoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *lossyFloat32Encoder) IsEmpty(ptr unsafe.Pointer) bool {
return *((*float32)(ptr)) == 0
}
type lossyFloat64Encoder struct {
}
func (encoder *lossyFloat64Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteFloat64Lossy(*((*float64)(ptr)))
}
func (encoder *lossyFloat64Encoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *lossyFloat64Encoder) IsEmpty(ptr unsafe.Pointer) bool {
return *((*float64)(ptr)) == 0
}
// marshalFloatWith6Digits keeps only 6 digits (10^-6) of precision
// when marshaling floats, trading accuracy for better performance.
func (cfg *frozenConfig) marshalFloatWith6Digits() {
// for better performance
cfg.addEncoderToCache(reflect.TypeOf((*float32)(nil)).Elem(), &lossyFloat32Encoder{})
cfg.addEncoderToCache(reflect.TypeOf((*float64)(nil)).Elem(), &lossyFloat64Encoder{})
}
type htmlEscapedStringEncoder struct {
}
func (encoder *htmlEscapedStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
str := *((*string)(ptr))
stream.WriteStringWithHtmlEscaped(str)
}
func (encoder *htmlEscapedStringEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *htmlEscapedStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return *((*string)(ptr)) == ""
}
func (cfg *frozenConfig) escapeHtml() {
// for better performance
cfg.addEncoderToCache(reflect.TypeOf((*string)(nil)).Elem(), &htmlEscapedStringEncoder{})
}
func (cfg *frozenConfig) addDecoderToCache(cacheKey reflect.Type, decoder ValDecoder) {
done := false
for !done {
ptr := atomic.LoadPointer(&cfg.decoderCache)
cache := *(*map[reflect.Type]ValDecoder)(ptr)
copied := map[reflect.Type]ValDecoder{}
for k, v := range cache {
copied[k] = v
}
copied[cacheKey] = decoder
done = atomic.CompareAndSwapPointer(&cfg.decoderCache, ptr, unsafe.Pointer(&copied))
}
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey reflect.Type, encoder ValEncoder) {
done := false
for !done {
ptr := atomic.LoadPointer(&cfg.encoderCache)
cache := *(*map[reflect.Type]ValEncoder)(ptr)
copied := map[reflect.Type]ValEncoder{}
for k, v := range cache {
copied[k] = v
}
copied[cacheKey] = encoder
done = atomic.CompareAndSwapPointer(&cfg.encoderCache, ptr, unsafe.Pointer(&copied))
}
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey reflect.Type) ValDecoder {
ptr := atomic.LoadPointer(&cfg.decoderCache)
cache := *(*map[reflect.Type]ValDecoder)(ptr)
return cache[cacheKey]
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey reflect.Type) ValEncoder {
ptr := atomic.LoadPointer(&cfg.encoderCache)
cache := *(*map[reflect.Type]ValEncoder)(ptr)
return cache[cacheKey]
}
// cleanDecoders cleans decoders registered or cached
func (cfg *frozenConfig) cleanDecoders() {
typeDecoders = map[string]ValDecoder{}
fieldDecoders = map[string]ValDecoder{}
atomic.StorePointer(&cfg.decoderCache, unsafe.Pointer(&map[string]ValDecoder{}))
}
// cleanEncoders cleans encoders registered or cached
func (cfg *frozenConfig) cleanEncoders() {
typeEncoders = map[string]ValEncoder{}
fieldEncoders = map[string]ValEncoder{}
atomic.StorePointer(&cfg.encoderCache, unsafe.Pointer(&map[string]ValEncoder{}))
}
func (cfg *frozenConfig) MarshalToString(v interface{}) (string, error) {
stream := cfg.BorrowStream(nil)
defer cfg.ReturnStream(stream)
stream.WriteVal(v)
if stream.Error != nil {
return "", stream.Error
}
return string(stream.Buffer()), nil
}
func (cfg *frozenConfig) Marshal(v interface{}) ([]byte, error) {
stream := cfg.BorrowStream(nil)
defer cfg.ReturnStream(stream)
stream.WriteVal(v)
if stream.Error != nil {
return nil, stream.Error
}
result := stream.Buffer()
copied := make([]byte, len(result))
copy(copied, result)
return copied, nil
}
func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error {
data := []byte(str)
data = data[:lastNotSpacePos(data)]
iter := cfg.BorrowIterator(data)
defer cfg.ReturnIterator(iter)
iter.ReadVal(v)
if iter.head == iter.tail {
iter.loadMore()
}
if iter.Error == io.EOF {
return nil
}
if iter.Error == nil {
iter.ReportError("UnmarshalFromString", "there are bytes left after unmarshal")
}
return iter.Error
}
func (cfg *frozenConfig) Get(data []byte, path ...interface{}) Any {
iter := cfg.BorrowIterator(data)
defer cfg.ReturnIterator(iter)
return locatePath(iter, path)
}
func (cfg *frozenConfig) Unmarshal(data []byte, v interface{}) error {
data = data[:lastNotSpacePos(data)]
iter := cfg.BorrowIterator(data)
defer cfg.ReturnIterator(iter)
typ := reflect.TypeOf(v)
if typ.Kind() != reflect.Ptr {
// return non-pointer error
return errors.New("the second param must be ptr type")
}
iter.ReadVal(v)
if iter.head == iter.tail {
iter.loadMore()
}
if iter.Error == io.EOF {
return nil
}
if iter.Error == nil {
iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
}
return iter.Error
}
func (cfg *frozenConfig) NewEncoder(writer io.Writer) *Encoder {
stream := NewStream(cfg, writer, 512)
return &Encoder{stream}
}
func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder {
iter := Parse(cfg, reader, 512)
return &Decoder{iter}
}
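
A hedged sketch of how the new Config/Froze API is meant to be used from a caller's side: freeze the settings once, keep the resulting Api value, and reuse it for every marshal and unmarshal (assumes import "github.com/json-iterator/go", package name jsoniter):

var api jsoniter.Api = jsoniter.Config{
	EscapeHtml:  true,
	SortMapKeys: true,
}.Froze()

func exampleConfig() error {
	out, err := api.Marshal(map[string]int{"a": 1})
	if err != nil {
		return err
	}
	var back map[string]int
	return api.Unmarshal(out, &back) // round-trips through the frozen config's pooled streams/iterators
}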


@ -1,7 +1,12 @@
//
// Besides, jsoniter.Iterator provides a different set of interfaces
// for iterating over given bytes/string/reader
// and yielding parsed elements one by one.
// This set of interfaces reads input as required and gives
// better performance.
package jsoniter
import (
"encoding/base64"
"fmt"
"io"
)
@ -60,16 +65,20 @@ func init() {
// Iterator is a fast and flexible JSON parser
type Iterator struct {
reader io.Reader
buf []byte
head int
tail int
Error error
cfg *frozenConfig
reader io.Reader
buf []byte
head int
tail int
captureStartedAt int
captured []byte
Error error
}
// NewIterator creates an empty Iterator instance
func NewIterator() *Iterator {
func NewIterator(cfg *frozenConfig) *Iterator {
return &Iterator{
cfg: cfg,
reader: nil,
buf: nil,
head: 0,
@ -78,8 +87,9 @@ func NewIterator() *Iterator {
}
// Parse parses a json buffer in io.Reader into an Iterator instance
func Parse(reader io.Reader, bufSize int) *Iterator {
func Parse(cfg *frozenConfig, reader io.Reader, bufSize int) *Iterator {
return &Iterator{
cfg: cfg,
reader: reader,
buf: make([]byte, bufSize),
head: 0,
@ -88,8 +98,9 @@ func Parse(reader io.Reader, bufSize int) *Iterator {
}
// ParseBytes parses a json byte slice into an Iterator instance
func ParseBytes(input []byte) *Iterator {
func ParseBytes(cfg *frozenConfig, input []byte) *Iterator {
return &Iterator{
cfg: cfg,
reader: nil,
buf: input,
head: 0,
@ -98,8 +109,12 @@ func ParseBytes(input []byte) *Iterator {
}
// ParseString parses a json string into an Iterator instance
func ParseString(input string) *Iterator {
return ParseBytes([]byte(input))
func ParseString(cfg *frozenConfig, input string) *Iterator {
return ParseBytes(cfg, []byte(input))
}
func (iter *Iterator) Config() *frozenConfig {
return iter.cfg
}
// Reset can reset an Iterator instance for another json buffer in io.Reader
@ -113,7 +128,6 @@ func (iter *Iterator) Reset(reader io.Reader) *Iterator {
// ResetBytes can reset an Iterator instance for another json byte slice
func (iter *Iterator) ResetBytes(input []byte) *Iterator {
iter.reader = nil
iter.Error = nil
iter.buf = input
iter.head = 0
iter.tail = len(input)
@ -158,7 +172,7 @@ func (iter *Iterator) nextToken() byte {
}
}
func (iter *Iterator) reportError(operation string, msg string) {
func (iter *Iterator) ReportError(operation string, msg string) {
if iter.Error != nil {
if iter.Error != io.EOF {
return
@ -199,10 +213,16 @@ func (iter *Iterator) readByte() (ret byte) {
func (iter *Iterator) loadMore() bool {
if iter.reader == nil {
if iter.Error == nil {
iter.head = iter.tail
iter.Error = io.EOF
}
return false
}
if iter.captureStartedAt != -1 {
iter.captured = append(iter.captured,
iter.buf[iter.captureStartedAt:iter.tail]...)
iter.captureStartedAt = 0
}
for {
n, err := iter.reader.Read(iter.buf)
if n == 0 {
@ -222,7 +242,7 @@ func (iter *Iterator) loadMore() bool {
func (iter *Iterator) unreadByte() {
if iter.head == 0 {
iter.reportError("unreadByte", "unread too many bytes")
iter.ReportError("unreadByte", "unread too many bytes")
return
}
iter.head--
@ -256,24 +276,7 @@ func (iter *Iterator) Read() interface{} {
})
return obj
default:
iter.reportError("Read", fmt.Sprintf("unexpected value type: %v", valueType))
iter.ReportError("Read", fmt.Sprintf("unexpected value type: %v", valueType))
return nil
}
}
// ReadBase64 reads a json object as Base64 in byte slice
func (iter *Iterator) ReadBase64() (ret []byte) {
src := iter.ReadStringAsSlice()
if iter.Error != nil {
return
}
b64 := base64.StdEncoding
ret = make([]byte, b64.DecodedLen(len(src)))
n, err := b64.Decode(ret, src)
if err != nil {
iter.Error = err
return
}
return ret[:n]
}
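
Since every iterator now carries its owning frozenConfig, the constructors take it as the first argument; an external caller can obtain one from Config{}.Froze(). A hedged sketch (same import assumptions as above):

func exampleRead() {
	cfg := jsoniter.Config{}.Froze()
	iter := jsoniter.ParseString(cfg, `[1, 2, 3]`)
	fmt.Println(iter.Read()) // []interface{} with 1, 2, 3 decoded as float64
}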


@ -18,12 +18,11 @@ func (iter *Iterator) ReadArray() (ret bool) {
case ',':
return true
default:
iter.reportError("ReadArray", "expect [ or , or ] or n, but found: " + string([]byte{c}))
iter.ReportError("ReadArray", "expect [ or , or ] or n, but found: "+string([]byte{c}))
return
}
}
func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
c := iter.nextToken()
if c == '[' {
@ -46,6 +45,6 @@ func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
iter.skipFixedBytes(3)
return true // null
}
iter.reportError("ReadArrayCB", "expect [ or n, but found: " + string([]byte{c}))
iter.ReportError("ReadArrayCB", "expect [ or n, but found: "+string([]byte{c}))
return false
}
}


@ -2,12 +2,13 @@ package jsoniter
import (
"io"
"math/big"
"strconv"
"unsafe"
"math/big"
)
var floatDigits []int8
const invalidCharForNumber = int8(-1)
const endOfNumber = int8(-2)
const dotInNumber = int8(-3)
@ -55,7 +56,7 @@ func (iter *Iterator) ReadBigInt() (ret *big.Int) {
var success bool
ret, success = ret.SetString(str, 10)
if !success {
iter.reportError("ReadBigInt", "invalid big int")
iter.ReportError("ReadBigInt", "invalid big int")
return nil
}
return ret
@ -75,7 +76,7 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
value := uint64(0)
c := byte(' ')
i := iter.head
non_decimal_loop:
non_decimal_loop:
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c]
@ -91,22 +92,22 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
if value > uint64SafeToMultiple10 {
return iter.readFloat32SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind); // value = value * 10 + ind;
value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
}
if c == '.' {
i++
decimalPlaces := 0;
decimalPlaces := 0
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c];
ind := floatDigits[c]
switch ind {
case endOfNumber:
if decimalPlaces > 0 && decimalPlaces < len(POW10) {
if decimalPlaces > 0 && decimalPlaces < len(_POW10) {
iter.head = i
return float32(float64(value) / float64(POW10[decimalPlaces]))
return float32(float64(value) / float64(_POW10[decimalPlaces]))
}
// too many decimal places
return iter.readFloat32SlowPath()
return iter.readFloat32SlowPath()
case invalidCharForNumber:
fallthrough
case dotInNumber:
@ -125,7 +126,7 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
func (iter *Iterator) readNumberAsString() (ret string) {
strBuf := [16]byte{}
str := strBuf[0:0]
load_loop:
load_loop:
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
@ -146,7 +147,7 @@ func (iter *Iterator) readNumberAsString() (ret string) {
return
}
if len(str) == 0 {
iter.reportError("readNumberAsString", "invalid number")
iter.ReportError("readNumberAsString", "invalid number")
}
return *(*string)(unsafe.Pointer(&str))
}
@ -178,7 +179,7 @@ func (iter *Iterator) readPositiveFloat64() (ret float64) {
value := uint64(0)
c := byte(' ')
i := iter.head
non_decimal_loop:
non_decimal_loop:
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c]
@ -194,19 +195,19 @@ func (iter *Iterator) readPositiveFloat64() (ret float64) {
if value > uint64SafeToMultiple10 {
return iter.readFloat64SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind); // value = value * 10 + ind;
value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
}
if c == '.' {
i++
decimalPlaces := 0;
decimalPlaces := 0
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c];
ind := floatDigits[c]
switch ind {
case endOfNumber:
if decimalPlaces > 0 && decimalPlaces < len(POW10) {
if decimalPlaces > 0 && decimalPlaces < len(_POW10) {
iter.head = i
return float64(value) / float64(POW10[decimalPlaces])
return float64(value) / float64(_POW10[decimalPlaces])
}
// too many decimal places
return iter.readFloat64SlowPath()


@ -1,23 +1,18 @@
package jsoniter
import (
"math"
"strconv"
)
var intDigits []int8
const uint32SafeToMultiply10 = uint32(0xffffffff) / 10 - 1
const uint64SafeToMultiple10 = uint64(0xffffffffffffffff) / 10 - 1
const int64Max = uint64(0x7fffffffffffffff)
const int32Max = uint32(0x7fffffff)
const int16Max = uint32(0x7fff)
const uint16Max = uint32(0xffff)
const int8Max = uint32(0x7fff)
const uint8Max = uint32(0xffff)
const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1
const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1
func init() {
intDigits = make([]int8, 256)
for i := 0; i < len(floatDigits); i++ {
for i := 0; i < len(intDigits); i++ {
intDigits[i] = invalidCharForNumber
}
for i := int8('0'); i <= int8('9'); i++ {
@ -37,15 +32,15 @@ func (iter *Iterator) ReadInt8() (ret int8) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > int8Max + 1 {
iter.reportError("ReadInt8", "overflow: " + strconv.FormatInt(int64(val), 10))
if val > math.MaxInt8+1 {
iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return -int8(val)
} else {
val := iter.readUint32(c)
if val > int8Max {
iter.reportError("ReadInt8", "overflow: " + strconv.FormatInt(int64(val), 10))
if val > math.MaxInt8 {
iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return int8(val)
@ -54,8 +49,8 @@ func (iter *Iterator) ReadInt8() (ret int8) {
func (iter *Iterator) ReadUint8() (ret uint8) {
val := iter.readUint32(iter.nextToken())
if val > uint8Max {
iter.reportError("ReadUint8", "overflow: " + strconv.FormatInt(int64(val), 10))
if val > math.MaxUint8 {
iter.ReportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return uint8(val)
@ -65,15 +60,15 @@ func (iter *Iterator) ReadInt16() (ret int16) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > int16Max + 1 {
iter.reportError("ReadInt16", "overflow: " + strconv.FormatInt(int64(val), 10))
if val > math.MaxInt16+1 {
iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return -int16(val)
} else {
val := iter.readUint32(c)
if val > int16Max {
iter.reportError("ReadInt16", "overflow: " + strconv.FormatInt(int64(val), 10))
if val > math.MaxInt16 {
iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return int16(val)
@ -82,8 +77,8 @@ func (iter *Iterator) ReadInt16() (ret int16) {
func (iter *Iterator) ReadUint16() (ret uint16) {
val := iter.readUint32(iter.nextToken())
if val > uint16Max {
iter.reportError("ReadUint16", "overflow: " + strconv.FormatInt(int64(val), 10))
if val > math.MaxUint16 {
iter.ReportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return uint16(val)
@ -93,15 +88,15 @@ func (iter *Iterator) ReadInt32() (ret int32) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > int32Max + 1 {
iter.reportError("ReadInt32", "overflow: " + strconv.FormatInt(int64(val), 10))
if val > math.MaxInt32+1 {
iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return -int32(val)
} else {
val := iter.readUint32(c)
if val > int32Max {
iter.reportError("ReadInt32", "overflow: " + strconv.FormatInt(int64(val), 10))
if val > math.MaxInt32 {
iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return int32(val)
@ -118,11 +113,11 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
return 0 // single zero
}
if ind == invalidCharForNumber {
iter.reportError("readUint32", "unexpected character: " + string([]byte{byte(ind)}))
iter.ReportError("readUint32", "unexpected character: "+string([]byte{byte(ind)}))
return
}
value := uint32(ind)
if iter.tail - iter.head > 10 {
if iter.tail-iter.head > 10 {
i := iter.head
ind2 := intDigits[iter.buf[i]]
if ind2 == invalidCharForNumber {
@ -133,7 +128,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind3 := intDigits[iter.buf[i]]
if ind3 == invalidCharForNumber {
iter.head = i
return value * 10 + uint32(ind2)
return value*10 + uint32(ind2)
}
//iter.head = i + 1
//value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
@ -141,35 +136,35 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind4 := intDigits[iter.buf[i]]
if ind4 == invalidCharForNumber {
iter.head = i
return value * 100 + uint32(ind2) * 10 + uint32(ind3)
return value*100 + uint32(ind2)*10 + uint32(ind3)
}
i++
ind5 := intDigits[iter.buf[i]]
if ind5 == invalidCharForNumber {
iter.head = i
return value * 1000 + uint32(ind2) * 100 + uint32(ind3) * 10 + uint32(ind4)
return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4)
}
i++
ind6 := intDigits[iter.buf[i]]
if ind6 == invalidCharForNumber {
iter.head = i
return value * 10000 + uint32(ind2) * 1000 + uint32(ind3) * 100 + uint32(ind4) * 10 + uint32(ind5)
return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5)
}
i++
ind7 := intDigits[iter.buf[i]]
if ind7 == invalidCharForNumber {
iter.head = i
return value * 100000 + uint32(ind2) * 10000 + uint32(ind3) * 1000 + uint32(ind4) * 100 + uint32(ind5) * 10 + uint32(ind6)
return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6)
}
i++
ind8 := intDigits[iter.buf[i]]
if ind8 == invalidCharForNumber {
iter.head = i
return value * 1000000 + uint32(ind2) * 100000 + uint32(ind3) * 10000 + uint32(ind4) * 1000 + uint32(ind5) * 100 + uint32(ind6) * 10 + uint32(ind7)
return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7)
}
i++
ind9 := intDigits[iter.buf[i]]
value = value * 10000000 + uint32(ind2) * 1000000 + uint32(ind3) * 100000 + uint32(ind4) * 10000 + uint32(ind5) * 1000 + uint32(ind6) * 100 + uint32(ind7) * 10 + uint32(ind8)
value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8)
iter.head = i
if ind9 == invalidCharForNumber {
return value
@ -185,7 +180,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
if value > uint32SafeToMultiply10 {
value2 := (value << 3) + (value << 1) + uint32(ind)
if value2 < value {
iter.reportError("readUint32", "overflow")
iter.ReportError("readUint32", "overflow")
return
} else {
value = value2
@ -194,7 +189,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
}
value = (value << 3) + (value << 1) + uint32(ind)
}
if (!iter.loadMore()) {
if !iter.loadMore() {
return value
}
}
@ -204,15 +199,15 @@ func (iter *Iterator) ReadInt64() (ret int64) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint64(iter.readByte())
if val > int64Max + 1 {
iter.reportError("ReadInt64", "overflow: " + strconv.FormatUint(uint64(val), 10))
if val > math.MaxInt64+1 {
iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
return
}
return -int64(val)
} else {
val := iter.readUint64(c)
if val > int64Max {
iter.reportError("ReadInt64", "overflow: " + strconv.FormatUint(uint64(val), 10))
if val > math.MaxInt64 {
iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
return
}
return int64(val)
@ -229,7 +224,7 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
return 0 // single zero
}
if ind == invalidCharForNumber {
iter.reportError("readUint64", "unexpected character: " + string([]byte{byte(ind)}))
iter.ReportError("readUint64", "unexpected character: "+string([]byte{byte(ind)}))
return
}
value := uint64(ind)
@ -243,7 +238,7 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
if value > uint64SafeToMultiple10 {
value2 := (value << 3) + (value << 1) + uint64(ind)
if value2 < value {
iter.reportError("readUint64", "overflow")
iter.ReportError("readUint64", "overflow")
return
} else {
value = value2
@ -252,7 +247,7 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
}
value = (value << 3) + (value << 1) + uint64(ind)
}
if (!iter.loadMore()) {
if !iter.loadMore() {
return value
}
}
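
The removed int8Max/uint8Max constants above were 16-bit values, so byte-sized overflow could slip through; switching to math.MaxInt8 and friends makes the bounds checks report it. A hedged sketch (error text abbreviated):

func exampleOverflow() {
	cfg := jsoniter.Config{}.Froze()
	iter := cfg.BorrowIterator([]byte(`200`))
	defer cfg.ReturnIterator(iter)
	_ = iter.ReadInt8()     // 200 > math.MaxInt8, so the value is discarded
	fmt.Println(iter.Error) // non-nil: "ReadInt8: overflow: 200 ..."
}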


@ -3,6 +3,7 @@ package jsoniter
import (
"fmt"
"unicode"
"unsafe"
)
func (iter *Iterator) ReadObject() (ret string) {
@ -20,14 +21,14 @@ func (iter *Iterator) ReadObject() (ret string) {
if c == '}' {
return "" // end of object
}
iter.reportError("ReadObject", `expect " after {`)
iter.ReportError("ReadObject", `expect " after {`)
return
case ',':
return string(iter.readObjectFieldAsBytes())
case '}':
return "" // end of object
default:
iter.reportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c})))
iter.ReportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c})))
return
}
}
@ -44,10 +45,10 @@ func (iter *Iterator) readFieldHash() int32 {
b += 'a' - 'A'
}
if b == '"' {
iter.head = i+1
iter.head = i + 1
c = iter.nextToken()
if c != ':' {
iter.reportError("readFieldHash", `expect :, but found ` + string([]byte{c}))
iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
}
return int32(hash)
}
@ -55,12 +56,12 @@ func (iter *Iterator) readFieldHash() int32 {
hash *= 0x1000193
}
if !iter.loadMore() {
iter.reportError("readFieldHash", `incomplete field name`)
iter.ReportError("readFieldHash", `incomplete field name`)
return 0
}
}
}
iter.reportError("readFieldHash", `expect ", but found ` + string([]byte{c}))
iter.ReportError("readFieldHash", `expect ", but found `+string([]byte{c}))
return 0
}
@ -79,12 +80,52 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field := string(iter.readObjectFieldAsBytes())
field := iter.readObjectFieldAsBytes()
if !callback(iter, *(*string)(unsafe.Pointer(&field))) {
return false
}
for iter.nextToken() == ',' {
field = iter.readObjectFieldAsBytes()
if !callback(iter, *(*string)(unsafe.Pointer(&field))) {
return false
}
}
return true
}
if c == '}' {
return true
}
iter.ReportError("ReadObjectCB", `expect " after }`)
return false
}
if c == 'n' {
iter.skipFixedBytes(3)
return true // null
}
iter.ReportError("ReadObjectCB", `expect { or n`)
return false
}
func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
c := iter.nextToken()
if c == '{' {
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field := iter.ReadString()
if iter.nextToken() != ':' {
iter.ReportError("ReadMapCB", "expect : after object field")
return false
}
if !callback(iter, field) {
return false
}
for iter.nextToken() == ',' {
field := string(iter.readObjectFieldAsBytes())
field = iter.ReadString()
if iter.nextToken() != ':' {
iter.ReportError("ReadMapCB", "expect : after object field")
return false
}
if !callback(iter, field) {
return false
}
@ -94,14 +135,14 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
if c == '}' {
return true
}
iter.reportError("ReadObjectCB", `expect " after }`)
iter.ReportError("ReadMapCB", `expect " after }`)
return false
}
if c == 'n' {
iter.skipFixedBytes(3)
return true // null
}
iter.reportError("ReadObjectCB", `expect { or n`)
iter.ReportError("ReadMapCB", `expect { or n`)
return false
}
@ -118,7 +159,7 @@ func (iter *Iterator) readObjectStart() bool {
iter.skipFixedBytes(3)
return false
}
iter.reportError("readObjectStart", "expect { or n")
iter.ReportError("readObjectStart", "expect { or n")
return false
}
@ -134,7 +175,7 @@ func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) {
}
}
if iter.buf[iter.head] != ':' {
iter.reportError("readObjectFieldAsBytes", "expect : after object field")
iter.ReportError("readObjectFieldAsBytes", "expect : after object field")
return
}
iter.head++
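
ReadObjectCB now passes field names through an unsafe string view of the raw bytes, and the new ReadMapCB keeps them as ordinary strings for map decoding. A hedged usage sketch of the callback style (same import assumptions as above):

func exampleSumFields() int {
	cfg := jsoniter.Config{}.Froze()
	iter := cfg.BorrowIterator([]byte(`{"a":1,"b":2}`))
	defer cfg.ReturnIterator(iter)
	sum := 0
	iter.ReadMapCB(func(it *jsoniter.Iterator, field string) bool {
		sum += it.ReadInt()
		return true // returning false stops the iteration early
	})
	return sum // 3
}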


@ -25,21 +25,44 @@ func (iter *Iterator) ReadBool() (ret bool) {
iter.skipFixedBytes(4)
return false
}
iter.reportError("ReadBool", "expect t or f")
iter.ReportError("ReadBool", "expect t or f")
return
}
func (iter *Iterator) SkipAndReturnBytes() []byte {
if iter.reader != nil {
panic("reader input does not support this api")
}
before := iter.head
iter.startCapture(iter.head)
iter.Skip()
after := iter.head
return iter.buf[before:after]
return iter.stopCapture()
}
type captureBuffer struct {
startedAt int
captured []byte
}
func (iter *Iterator) startCapture(captureStartedAt int) {
if iter.captured != nil {
panic("already in capture mode")
}
iter.captureStartedAt = captureStartedAt
iter.captured = make([]byte, 0, 32)
}
func (iter *Iterator) stopCapture() []byte {
if iter.captured == nil {
panic("not in capture mode")
}
captured := iter.captured
remaining := iter.buf[iter.captureStartedAt:iter.head]
iter.captureStartedAt = -1
iter.captured = nil
if len(captured) == 0 {
return remaining
} else {
captured = append(captured, remaining...)
return captured
}
}
// Skip skips a json value and positions the iterator at the start of the next json value
func (iter *Iterator) Skip() {
@ -52,13 +75,13 @@ func (iter *Iterator) Skip() {
case 'f':
iter.skipFixedBytes(4) // false
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
iter.skipUntilBreak()
iter.skipNumber()
case '[':
iter.skipArray()
case '{':
iter.skipObject()
default:
iter.reportError("Skip", fmt.Sprintf("do not know how to skip: %v", c))
iter.ReportError("Skip", fmt.Sprintf("do not know how to skip: %v", c))
return
}
}
@ -68,7 +91,7 @@ func (iter *Iterator) skipString() {
end, escaped := iter.findStringEnd()
if end == -1 {
if !iter.loadMore() {
iter.reportError("skipString", "incomplete string")
iter.ReportError("skipString", "incomplete string")
return
}
if escaped {
@ -152,7 +175,7 @@ func (iter *Iterator) skipArray() {
}
}
if !iter.loadMore() {
iter.reportError("skipObject", "incomplete array")
iter.ReportError("skipObject", "incomplete array")
return
}
}
@ -180,14 +203,13 @@ func (iter *Iterator) skipObject() {
}
}
if !iter.loadMore() {
iter.reportError("skipObject", "incomplete object")
iter.ReportError("skipObject", "incomplete object")
return
}
}
}
func (iter *Iterator) skipUntilBreak() {
// true, false, null, number
func (iter *Iterator) skipNumber() {
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
@ -204,15 +226,15 @@ func (iter *Iterator) skipUntilBreak() {
}
func (iter *Iterator) skipFixedBytes(n int) {
iter.head += n;
if (iter.head >= iter.tail) {
more := iter.head - iter.tail;
iter.head += n
if iter.head >= iter.tail {
more := iter.head - iter.tail
if !iter.loadMore() {
if more > 0 {
iter.reportError("skipFixedBytes", "unexpected end");
iter.ReportError("skipFixedBytes", "unexpected end")
}
return
}
iter.head += more;
iter.head += more
}
}
}
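
SkipAndReturnBytes no longer panics for reader-backed iterators: the new capture buffer collects any bytes that get refilled away while skipping. A hedged sketch using the simpler byte-slice path (reader input is expected to work the same way via startCapture/stopCapture):

func exampleSkipBytes() []byte {
	cfg := jsoniter.Config{}.Froze()
	iter := cfg.BorrowIterator([]byte(`{"payload":[1,2,3],"ok":true}`))
	defer cfg.ReturnIterator(iter)
	iter.ReadObject()                // consumes the "payload" field name and ':'
	return iter.SkipAndReturnBytes() // []byte("[1,2,3]")
}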


@ -7,7 +7,7 @@ import (
func (iter *Iterator) ReadString() (ret string) {
c := iter.nextToken()
if c == '"' {
for i := iter.head ; i < iter.tail; i++ {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
if c == '"' {
ret = string(iter.buf[iter.head:i])
@ -22,7 +22,7 @@ func (iter *Iterator) ReadString() (ret string) {
iter.skipFixedBytes(3)
return ""
}
iter.reportError("ReadString", `expects " or n`)
iter.ReportError("ReadString", `expects " or n`)
return
}
@ -45,7 +45,7 @@ func (iter *Iterator) readStringSlowPath() (ret string) {
return
}
if c != '\\' {
iter.reportError("ReadString",
iter.ReportError("ReadString",
`expects \u after utf16 surrogate, but \ not found`)
return
}
@ -54,7 +54,7 @@ func (iter *Iterator) readStringSlowPath() (ret string) {
return
}
if c != 'u' && c != 'U' {
iter.reportError("ReadString",
iter.ReportError("ReadString",
`expects \u after utf16 surrogate, but \u not found`)
return
}
@ -84,7 +84,7 @@ func (iter *Iterator) readStringSlowPath() (ret string) {
case 't':
str = append(str, '\t')
default:
iter.reportError("ReadString",
iter.ReportError("ReadString",
`invalid escape char after \`)
return
}
@ -92,6 +92,7 @@ func (iter *Iterator) readStringSlowPath() (ret string) {
str = append(str, c)
}
}
iter.ReportError("ReadString", "unexpected end of input")
return
}
@ -103,13 +104,13 @@ func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
// for: field name, base64, number
if iter.buf[i] == '"' {
// fast path: reuse the underlying buffer
ret = iter.buf[iter.head : i]
ret = iter.buf[iter.head:i]
iter.head = i + 1
return ret
}
}
readLen := iter.tail - iter.head
copied := make([]byte, readLen, readLen * 2)
copied := make([]byte, readLen, readLen*2)
copy(copied, iter.buf[iter.head:iter.tail])
iter.head = iter.tail
for iter.Error == nil {
@ -121,7 +122,7 @@ func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
}
return copied
}
iter.reportError("ReadString", `expects " or n`)
iter.ReportError("ReadString", `expects " or n`)
return
}
@ -132,13 +133,13 @@ func (iter *Iterator) readU4() (ret rune) {
return
}
if c >= '0' && c <= '9' {
ret = ret * 16 + rune(c - '0')
ret = ret*16 + rune(c-'0')
} else if c >= 'a' && c <= 'f' {
ret = ret * 16 + rune(c - 'a' + 10)
ret = ret*16 + rune(c-'a'+10)
} else if c >= 'A' && c <= 'F' {
ret = ret * 16 + rune(c - 'A' + 10)
ret = ret*16 + rune(c-'A'+10)
} else {
iter.reportError("readU4", "expects 0~9 or a~f")
iter.ReportError("readU4", "expects 0~9 or a~f")
return
}
}
@ -158,14 +159,14 @@ const (
mask3 = 0x0F // 0000 1111
mask4 = 0x07 // 0000 0111
rune1Max = 1 << 7 - 1
rune2Max = 1 << 11 - 1
rune3Max = 1 << 16 - 1
rune1Max = 1<<7 - 1
rune2Max = 1<<11 - 1
rune3Max = 1<<16 - 1
surrogateMin = 0xD800
surrogateMax = 0xDFFF
maxRune = '\U0010FFFF' // Maximum valid Unicode code point.
maxRune = '\U0010FFFF' // Maximum valid Unicode code point.
runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character"
)
@ -176,22 +177,22 @@ func appendRune(p []byte, r rune) []byte {
p = append(p, byte(r))
return p
case i <= rune2Max:
p = append(p, t2 | byte(r >> 6))
p = append(p, tx | byte(r) & maskx)
p = append(p, t2|byte(r>>6))
p = append(p, tx|byte(r)&maskx)
return p
case i > maxRune, surrogateMin <= i && i <= surrogateMax:
r = runeError
fallthrough
case i <= rune3Max:
p = append(p, t3 | byte(r >> 12))
p = append(p, tx | byte(r >> 6) & maskx)
p = append(p, tx | byte(r) & maskx)
p = append(p, t3|byte(r>>12))
p = append(p, tx|byte(r>>6)&maskx)
p = append(p, tx|byte(r)&maskx)
return p
default:
p = append(p, t4 | byte(r >> 18))
p = append(p, tx | byte(r >> 12) & maskx)
p = append(p, tx | byte(r >> 6) & maskx)
p = append(p, tx | byte(r) & maskx)
p = append(p, t4|byte(r>>18))
p = append(p, tx|byte(r>>12)&maskx)
p = append(p, tx|byte(r>>6)&maskx)
p = append(p, tx|byte(r)&maskx)
return p
}
}

feature_pool.go Normal file

@ -0,0 +1,45 @@
package jsoniter
import (
"io"
)
func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
select {
case stream := <-cfg.streamPool:
stream.Reset(writer)
return stream
default:
return NewStream(cfg, writer, 512)
}
}
func (cfg *frozenConfig) ReturnStream(stream *Stream) {
stream.Error = nil
select {
case cfg.streamPool <- stream:
return
default:
return
}
}
func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator {
select {
case iter := <-cfg.iteratorPool:
iter.ResetBytes(data)
return iter
default:
return ParseBytes(cfg, data)
}
}
func (cfg *frozenConfig) ReturnIterator(iter *Iterator) {
iter.Error = nil
select {
case cfg.iteratorPool <- iter:
return
default:
return
}
}
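The borrow/return pairs above form a simple channel-backed pool. A minimal sketch of how a package-internal caller might use them (marshalWithPool is a hypothetical name, not part of this change, and it assumes the buffered bytes stay in stream.buf until flushed):
func marshalWithPool(cfg *frozenConfig, val interface{}) ([]byte, error) {
	stream := cfg.BorrowStream(nil) // reuse an idle Stream, or allocate a fresh one
	defer cfg.ReturnStream(stream)  // hand it back so the next caller can reuse it
	stream.WriteVal(val)
	if stream.Error != nil {
		return nil, stream.Error
	}
	// copy the bytes out before returning the stream, since its buffer is pooled
	out := make([]byte, stream.n)
	copy(out, stream.buf[:stream.n])
	return out, nil
}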


@ -1,239 +1,163 @@
package jsoniter
import (
"encoding"
"encoding/json"
"fmt"
"reflect"
"sync/atomic"
"time"
"unsafe"
"encoding/json"
)
/*
Reflection on type to create decoders, which is then cached
Reflection on value is avoided as we can, as the reflect.Value itself will allocate, with following exceptions
1. create instance of new value, for example *int will need a int to be allocated
2. append to slice, if the existing cap is not enough, allocate will be done using Reflect.New
3. assignment to map, both key and value will be reflect.Value
For a simple struct binding, it will be reflect.Value free and allocation free
*/
type Decoder interface {
decode(ptr unsafe.Pointer, iter *Iterator)
// ValDecoder is an internal type registered to the cache as needed.
// Don't confuse jsoniter.ValDecoder with json.Decoder.
// For json.Decoder's adapter, refer to jsoniter.AdapterDecoder(todo link).
//
// Reflection on the type is used to create decoders, which are then cached.
// Reflection on the value is avoided where possible, since reflect.Value itself allocates, with the following exceptions:
// 1. creating an instance of a new value, for example *int needs an int to be allocated
// 2. appending to a slice, if the existing cap is not enough, allocation is done using reflect.New
// 3. assigning to a map, where both key and value are reflect.Value
// For a simple struct binding, decoding is reflect.Value free and allocation free
type ValDecoder interface {
Decode(ptr unsafe.Pointer, iter *Iterator)
}
type Encoder interface {
isEmpty(ptr unsafe.Pointer) bool
encode(ptr unsafe.Pointer, stream *Stream)
encodeInterface(val interface{}, stream *Stream)
// ValEncoder is an internal type registered to the cache as needed.
// Don't confuse jsoniter.ValEncoder with json.Encoder.
// For json.Encoder's adapter, refer to jsoniter.AdapterEncoder(todo godoc link).
type ValEncoder interface {
IsEmpty(ptr unsafe.Pointer) bool
Encode(ptr unsafe.Pointer, stream *Stream)
EncodeInterface(val interface{}, stream *Stream)
}
func writeToStream(val interface{}, stream *Stream, encoder Encoder) {
func WriteToStream(val interface{}, stream *Stream, encoder ValEncoder) {
e := (*emptyInterface)(unsafe.Pointer(&val))
if e.word == nil {
stream.WriteNil()
return
}
if reflect.TypeOf(val).Kind() == reflect.Ptr {
encoder.encode(unsafe.Pointer(&e.word), stream)
encoder.Encode(unsafe.Pointer(&e.word), stream)
} else {
encoder.encode(e.word, stream)
encoder.Encode(e.word, stream)
}
}
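As a minimal sketch of the newly exported interfaces, a codec for a hypothetical Celsius type would follow the same shape as the built-in codecs later in this diff (Celsius and celsiusCodec are illustrative names, not part of the change):
type Celsius float64

type celsiusCodec struct{}

func (codec *celsiusCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
	*((*Celsius)(ptr)) = Celsius(iter.ReadFloat64())
}

func (codec *celsiusCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteFloat64(float64(*((*Celsius)(ptr))))
}

func (codec *celsiusCodec) EncodeInterface(val interface{}, stream *Stream) {
	WriteToStream(val, stream, codec)
}

func (codec *celsiusCodec) IsEmpty(ptr unsafe.Pointer) bool {
	return *((*Celsius)(ptr)) == 0
}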
type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator)
type EncoderFunc func(ptr unsafe.Pointer, stream *Stream)
type ExtensionFunc func(typ reflect.Type, field *reflect.StructField) ([]string, EncoderFunc, DecoderFunc)
type funcDecoder struct {
fun DecoderFunc
}
func (decoder *funcDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
decoder.fun(ptr, iter)
}
type funcEncoder struct {
fun EncoderFunc
}
func (encoder *funcEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
encoder.fun(ptr, stream)
}
func (encoder *funcEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
}
func (encoder *funcEncoder) isEmpty(ptr unsafe.Pointer) bool {
return false
}
var DECODERS unsafe.Pointer
var ENCODERS unsafe.Pointer
var typeDecoders map[string]Decoder
var fieldDecoders map[string]Decoder
var typeEncoders map[string]Encoder
var fieldEncoders map[string]Encoder
var extensions []ExtensionFunc
var jsonNumberType reflect.Type
var jsonRawMessageType reflect.Type
var jsoniterRawMessageType reflect.Type
var anyType reflect.Type
var marshalerType reflect.Type
var unmarshalerType reflect.Type
var textUnmarshalerType reflect.Type
func init() {
typeDecoders = map[string]Decoder{}
fieldDecoders = map[string]Decoder{}
typeEncoders = map[string]Encoder{}
fieldEncoders = map[string]Encoder{}
extensions = []ExtensionFunc{}
atomic.StorePointer(&DECODERS, unsafe.Pointer(&map[string]Decoder{}))
atomic.StorePointer(&ENCODERS, unsafe.Pointer(&map[string]Encoder{}))
jsonNumberType = reflect.TypeOf((*json.Number)(nil)).Elem()
jsonRawMessageType = reflect.TypeOf((*json.RawMessage)(nil)).Elem()
jsoniterRawMessageType = reflect.TypeOf((*RawMessage)(nil)).Elem()
anyType = reflect.TypeOf((*Any)(nil)).Elem()
marshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem()
unmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()
}
func addDecoderToCache(cacheKey reflect.Type, decoder Decoder) {
done := false
for !done {
ptr := atomic.LoadPointer(&DECODERS)
cache := *(*map[reflect.Type]Decoder)(ptr)
copied := map[reflect.Type]Decoder{}
for k, v := range cache {
copied[k] = v
}
copied[cacheKey] = decoder
done = atomic.CompareAndSwapPointer(&DECODERS, ptr, unsafe.Pointer(&copied))
}
}
func addEncoderToCache(cacheKey reflect.Type, encoder Encoder) {
done := false
for !done {
ptr := atomic.LoadPointer(&ENCODERS)
cache := *(*map[reflect.Type]Encoder)(ptr)
copied := map[reflect.Type]Encoder{}
for k, v := range cache {
copied[k] = v
}
copied[cacheKey] = encoder
done = atomic.CompareAndSwapPointer(&ENCODERS, ptr, unsafe.Pointer(&copied))
}
}
func getDecoderFromCache(cacheKey reflect.Type) Decoder {
ptr := atomic.LoadPointer(&DECODERS)
cache := *(*map[reflect.Type]Decoder)(ptr)
return cache[cacheKey]
}
func getEncoderFromCache(cacheKey reflect.Type) Encoder {
ptr := atomic.LoadPointer(&ENCODERS)
cache := *(*map[reflect.Type]Encoder)(ptr)
return cache[cacheKey]
}
// RegisterTypeDecoder can register a type for json object
func RegisterTypeDecoder(typ string, fun DecoderFunc) {
typeDecoders[typ] = &funcDecoder{fun}
}
// RegisterFieldDecoder can register a type for json field
func RegisterFieldDecoder(typ string, field string, fun DecoderFunc) {
fieldDecoders[fmt.Sprintf("%s/%s", typ, field)] = &funcDecoder{fun}
}
func RegisterTypeEncoder(typ string, fun EncoderFunc) {
typeEncoders[typ] = &funcEncoder{fun}
}
func RegisterFieldEncoder(typ string, field string, fun EncoderFunc) {
fieldEncoders[fmt.Sprintf("%s/%s", typ, field)] = &funcEncoder{fun}
}
// RegisterExtension can register a custom extension
func RegisterExtension(extension ExtensionFunc) {
extensions = append(extensions, extension)
}
// CleanDecoders cleans decoders registered
func CleanDecoders() {
typeDecoders = map[string]Decoder{}
fieldDecoders = map[string]Decoder{}
}
func CleanEncoders() {
typeEncoders = map[string]Encoder{}
fieldEncoders = map[string]Encoder{}
textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
}
type optionalDecoder struct {
valueType reflect.Type
valueDecoder Decoder
valueDecoder ValDecoder
}
func (decoder *optionalDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *optionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*unsafe.Pointer)(ptr)) = nil
} else {
if *((*unsafe.Pointer)(ptr)) == nil {
// the pointer is nil, so we have to allocate memory to hold the value
value := reflect.New(decoder.valueType)
decoder.valueDecoder.decode(unsafe.Pointer(value.Pointer()), iter)
decoder.valueDecoder.Decode(unsafe.Pointer(value.Pointer()), iter)
*((*uintptr)(ptr)) = value.Pointer()
} else {
// reuse existing instance
decoder.valueDecoder.decode(*((*unsafe.Pointer)(ptr)), iter)
decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
}
}
}
type optionalEncoder struct {
valueEncoder Encoder
valueEncoder ValEncoder
}
func (encoder *optionalEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
func (encoder *optionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *((*unsafe.Pointer)(ptr)) == nil {
stream.WriteNil()
} else {
encoder.valueEncoder.encode(*((*unsafe.Pointer)(ptr)), stream)
encoder.valueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
}
}
func (encoder *optionalEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *optionalEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *optionalEncoder) isEmpty(ptr unsafe.Pointer) bool {
func (encoder *optionalEncoder) IsEmpty(ptr unsafe.Pointer) bool {
if *((*unsafe.Pointer)(ptr)) == nil {
return true
} else {
return encoder.valueEncoder.isEmpty(*((*unsafe.Pointer)(ptr)))
return encoder.valueEncoder.IsEmpty(*((*unsafe.Pointer)(ptr)))
}
}
type placeholderEncoder struct {
valueEncoder Encoder
cfg *frozenConfig
cacheKey reflect.Type
}
func (encoder *placeholderEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
encoder.valueEncoder.encode(ptr, stream)
func (encoder *placeholderEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
encoder.getRealEncoder().Encode(ptr, stream)
}
func (encoder *placeholderEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *placeholderEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *placeholderEncoder) isEmpty(ptr unsafe.Pointer) bool {
return encoder.valueEncoder.isEmpty(ptr)
func (encoder *placeholderEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.getRealEncoder().IsEmpty(ptr)
}
func (encoder *placeholderEncoder) getRealEncoder() ValEncoder {
for i := 0; i < 30; i++ {
realDecoder := encoder.cfg.getEncoderFromCache(encoder.cacheKey)
_, isPlaceholder := realDecoder.(*placeholderEncoder)
if isPlaceholder {
time.Sleep(time.Second)
} else {
return realDecoder
}
}
panic(fmt.Sprintf("real encoder not found for cache key: %v", encoder.cacheKey))
}
type placeholderDecoder struct {
valueDecoder Decoder
cfg *frozenConfig
cacheKey reflect.Type
}
func (decoder *placeholderDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
decoder.valueDecoder.decode(ptr, iter)
func (decoder *placeholderDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
for i := 0; i < 30; i++ {
realDecoder := decoder.cfg.getDecoderFromCache(decoder.cacheKey)
_, isPlaceholder := realDecoder.(*placeholderDecoder)
if isPlaceholder {
time.Sleep(time.Second)
} else {
realDecoder.Decode(ptr, iter)
return
}
}
panic(fmt.Sprintf("real decoder not found for cache key: %v", decoder.cacheKey))
}
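The placeholder pair above exists so that recursive types terminate: while the real decoder for a type is still being built, a lookup for that same type finds the placeholder instead of recursing forever. A minimal sketch with a hypothetical recursive type:
type node struct {
	Value    int     `json:"value"`
	Children []*node `json:"children"`
}
// decoderOfType(cfg, reflect.TypeOf(node{})) caches a placeholderDecoder for
// node first; the nested []*node element then resolves to that placeholder,
// and the placeholder waits (up to 30 seconds) for the real decoder to land
// in the cache before delegating to it.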
// emptyInterface is the header for an interface{} value.
@ -260,21 +184,15 @@ type nonEmptyInterface struct {
func (iter *Iterator) ReadVal(obj interface{}) {
typ := reflect.TypeOf(obj)
cacheKey := typ.Elem()
cachedDecoder := getDecoderFromCache(cacheKey)
if cachedDecoder == nil {
decoder, err := decoderOfType(cacheKey)
if err != nil {
iter.Error = err
return
}
cachedDecoder = decoder
addDecoderToCache(cacheKey, decoder)
decoder, err := decoderOfType(iter.cfg, cacheKey)
if err != nil {
iter.Error = err
return
}
e := (*emptyInterface)(unsafe.Pointer(&obj))
cachedDecoder.decode(e.word, iter)
decoder.Decode(e.word, iter)
}
func (stream *Stream) WriteVal(val interface{}) {
if nil == val {
stream.WriteNil()
@ -282,70 +200,71 @@ func (stream *Stream) WriteVal(val interface{}) {
}
typ := reflect.TypeOf(val)
cacheKey := typ
cachedEncoder := getEncoderFromCache(cacheKey)
if cachedEncoder == nil {
encoder, err := encoderOfType(cacheKey)
if err != nil {
stream.Error = err
return
}
cachedEncoder = encoder
addEncoderToCache(cacheKey, encoder)
encoder, err := encoderOfType(stream.cfg, cacheKey)
if err != nil {
stream.Error = err
return
}
cachedEncoder.encodeInterface(val, stream)
encoder.EncodeInterface(val, stream)
}
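A minimal usage sketch of the rewritten entry points, assuming code inside the package with a *frozenConfig named cfg at hand and the usual io/ioutil import for a throwaway writer (the variable names are illustrative):
stream := NewStream(cfg, ioutil.Discard, 512)
stream.WriteVal(map[string]int{"a": 1}) // encoder built once per type, then cached per config

iter := ParseBytes(cfg, []byte(`{"a":1}`))
var out map[string]int
iter.ReadVal(&out) // decoder likewise looked up from the per-config cache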
type prefix string
func (p prefix) addToDecoder(decoder Decoder, err error) (Decoder, error) {
func (p prefix) addToDecoder(decoder ValDecoder, err error) (ValDecoder, error) {
if err != nil {
return nil, fmt.Errorf("%s: %s", p, err.Error())
}
return decoder, err
}
func (p prefix) addToEncoder(encoder Encoder, err error) (Encoder, error) {
func (p prefix) addToEncoder(encoder ValEncoder, err error) (ValEncoder, error) {
if err != nil {
return nil, fmt.Errorf("%s: %s", p, err.Error())
}
return encoder, err
}
func decoderOfType(typ reflect.Type) (Decoder, error) {
typeName := typ.String()
typeDecoder := typeDecoders[typeName]
if typeDecoder != nil {
return typeDecoder, nil
}
if typ.Kind() == reflect.Ptr {
typeDecoder := typeDecoders[typ.Elem().String()]
if typeDecoder != nil {
return &optionalDecoder{typ.Elem(),typeDecoder}, nil
}
}
func decoderOfType(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
cacheKey := typ
cachedDecoder := getDecoderFromCache(cacheKey)
if cachedDecoder != nil {
return cachedDecoder, nil
decoder := cfg.getDecoderFromCache(cacheKey)
if decoder != nil {
return decoder, nil
}
placeholder := &placeholderDecoder{}
addDecoderToCache(cacheKey, placeholder)
newDecoder, err := createDecoderOfType(typ)
placeholder.valueDecoder = newDecoder
addDecoderToCache(cacheKey, newDecoder)
return newDecoder, err
decoder = getTypeDecoderFromExtension(typ)
if decoder != nil {
cfg.addDecoderToCache(cacheKey, decoder)
return decoder, nil
}
decoder = &placeholderDecoder{cfg: cfg, cacheKey: cacheKey}
cfg.addDecoderToCache(cacheKey, decoder)
decoder, err := createDecoderOfType(cfg, typ)
for _, extension := range extensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
cfg.addDecoderToCache(cacheKey, decoder)
return decoder, err
}
func createDecoderOfType(typ reflect.Type) (Decoder, error) {
func createDecoderOfType(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
if typ.String() == "[]uint8" {
return &base64Codec{}, nil
}
if typ.AssignableTo(jsonRawMessageType) {
return &jsonRawMessageCodec{}, nil
}
if typ.AssignableTo(jsoniterRawMessageType) {
return &jsoniterRawMessageCodec{}, nil
}
if typ.AssignableTo(jsonNumberType) {
return &jsonNumberCodec{}, nil
}
if typ.ConvertibleTo(unmarshalerType) {
templateInterface := reflect.New(typ).Elem().Interface()
return &optionalDecoder{typ, &unmarshalerDecoder{extractInterface(templateInterface)}}, nil
var decoder ValDecoder = &unmarshalerDecoder{extractInterface(templateInterface)}
if typ.Kind() != reflect.Struct {
decoder = &optionalDecoder{typ, decoder}
}
return decoder, nil
}
if typ.ConvertibleTo(anyType) {
return &anyCodec{}, nil
@ -386,53 +305,61 @@ func createDecoderOfType(typ reflect.Type) (Decoder, error) {
return &nonEmptyInterfaceCodec{}, nil
}
case reflect.Struct:
return prefix(fmt.Sprintf("[%s]", typ.String())).addToDecoder(decoderOfStruct(typ))
return prefix(fmt.Sprintf("[%s]", typ.String())).addToDecoder(decoderOfStruct(cfg, typ))
case reflect.Array:
return prefix("[array]").addToDecoder(decoderOfArray(cfg, typ))
case reflect.Slice:
return prefix("[slice]").addToDecoder(decoderOfSlice(typ))
return prefix("[slice]").addToDecoder(decoderOfSlice(cfg, typ))
case reflect.Map:
return prefix("[map]").addToDecoder(decoderOfMap(typ))
return prefix("[map]").addToDecoder(decoderOfMap(cfg, typ))
case reflect.Ptr:
return prefix("[optional]").addToDecoder(decoderOfOptional(typ))
return prefix("[optional]").addToDecoder(decoderOfOptional(cfg, typ))
default:
return nil, fmt.Errorf("unsupported type: %v", typ)
}
}
func encoderOfType(typ reflect.Type) (Encoder, error) {
typeName := typ.String()
typeEncoder := typeEncoders[typeName]
if typeEncoder != nil {
return typeEncoder, nil
}
if typ.Kind() == reflect.Ptr {
typeEncoder := typeEncoders[typ.Elem().String()]
if typeEncoder != nil {
return &optionalEncoder{typeEncoder}, nil
}
}
func encoderOfType(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
cacheKey := typ
cachedEncoder := getEncoderFromCache(cacheKey)
if cachedEncoder != nil {
return cachedEncoder, nil
encoder := cfg.getEncoderFromCache(cacheKey)
if encoder != nil {
return encoder, nil
}
placeholder := &placeholderEncoder{}
addEncoderToCache(cacheKey, placeholder)
newEncoder, err := createEncoderOfType(typ)
placeholder.valueEncoder = newEncoder
addEncoderToCache(cacheKey, newEncoder)
return newEncoder, err
encoder = getTypeEncoderFromExtension(typ)
if encoder != nil {
cfg.addEncoderToCache(cacheKey, encoder)
return encoder, nil
}
encoder = &placeholderEncoder{cfg: cfg, cacheKey: cacheKey}
cfg.addEncoderToCache(cacheKey, encoder)
encoder, err := createEncoderOfType(cfg, typ)
for _, extension := range extensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
cfg.addEncoderToCache(cacheKey, encoder)
return encoder, err
}
func createEncoderOfType(typ reflect.Type) (Encoder, error) {
func createEncoderOfType(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
if typ.String() == "[]uint8" {
return &base64Codec{}, nil
}
if typ.AssignableTo(jsonRawMessageType) {
return &jsonRawMessageCodec{}, nil
}
if typ.AssignableTo(jsoniterRawMessageType) {
return &jsoniterRawMessageCodec{}, nil
}
if typ.AssignableTo(jsonNumberType) {
return &jsonNumberCodec{}, nil
}
if typ.ConvertibleTo(marshalerType) {
templateInterface := reflect.New(typ).Elem().Interface()
return &marshalerEncoder{extractInterface(templateInterface)}, nil
var encoder ValEncoder = &marshalerEncoder{extractInterface(templateInterface)}
if typ.Kind() != reflect.Struct {
encoder = &optionalEncoder{encoder}
}
return encoder, nil
}
if typ.ConvertibleTo(anyType) {
return &anyCodec{}, nil
@ -474,58 +401,64 @@ func createEncoderOfType(typ reflect.Type) (Encoder, error) {
return &nonEmptyInterfaceCodec{}, nil
}
case reflect.Struct:
return prefix(fmt.Sprintf("[%s]", typ.String())).addToEncoder(encoderOfStruct(typ))
return prefix(fmt.Sprintf("[%s]", typ.String())).addToEncoder(encoderOfStruct(cfg, typ))
case reflect.Array:
return prefix("[array]").addToEncoder(encoderOfArray(cfg, typ))
case reflect.Slice:
return prefix("[slice]").addToEncoder(encoderOfSlice(typ))
return prefix("[slice]").addToEncoder(encoderOfSlice(cfg, typ))
case reflect.Map:
return prefix("[map]").addToEncoder(encoderOfMap(typ))
return prefix("[map]").addToEncoder(encoderOfMap(cfg, typ))
case reflect.Ptr:
return prefix("[optional]").addToEncoder(encoderOfOptional(typ))
return prefix("[optional]").addToEncoder(encoderOfOptional(cfg, typ))
default:
return nil, fmt.Errorf("unsupported type: %v", typ)
}
}
func decoderOfOptional(typ reflect.Type) (Decoder, error) {
func decoderOfOptional(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
elemType := typ.Elem()
decoder, err := decoderOfType(elemType)
decoder, err := decoderOfType(cfg, elemType)
if err != nil {
return nil, err
}
return &optionalDecoder{elemType, decoder}, nil
}
func encoderOfOptional(typ reflect.Type) (Encoder, error) {
func encoderOfOptional(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
elemType := typ.Elem()
decoder, err := encoderOfType(elemType)
elemEncoder, err := encoderOfType(cfg, elemType)
if err != nil {
return nil, err
}
return &optionalEncoder{ decoder}, nil
encoder := &optionalEncoder{elemEncoder}
if elemType.Kind() == reflect.Map {
encoder = &optionalEncoder{encoder}
}
return encoder, nil
}
func decoderOfMap(typ reflect.Type) (Decoder, error) {
decoder, err := decoderOfType(typ.Elem())
func decoderOfMap(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
decoder, err := decoderOfType(cfg, typ.Elem())
if err != nil {
return nil, err
}
mapInterface := reflect.New(typ).Interface()
return &mapDecoder{typ, typ.Elem(), decoder, extractInterface(mapInterface)}, nil
return &mapDecoder{typ, typ.Key(), typ.Elem(), decoder, extractInterface(mapInterface)}, nil
}
func extractInterface(val interface{}) emptyInterface {
return *((*emptyInterface)(unsafe.Pointer(&val)))
}
func encoderOfMap(typ reflect.Type) (Encoder, error) {
func encoderOfMap(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
elemType := typ.Elem()
encoder, err := encoderOfType(elemType)
encoder, err := encoderOfType(cfg, elemType)
if err != nil {
return nil, err
}
mapInterface := reflect.New(typ).Elem().Interface()
if elemType.Kind() == reflect.Interface && elemType.NumMethod() == 0 {
return &mapInterfaceEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
if cfg.sortMapKeys {
return &sortKeysMapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
} else {
return &mapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
}


@ -1,158 +1,84 @@
package jsoniter
import (
"unsafe"
"reflect"
"io"
"fmt"
"io"
"reflect"
"unsafe"
)
func decoderOfSlice(typ reflect.Type) (Decoder, error) {
decoder, err := decoderOfType(typ.Elem())
func decoderOfArray(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
decoder, err := decoderOfType(cfg, typ.Elem())
if err != nil {
return nil, err
}
return &sliceDecoder{typ, typ.Elem(), decoder}, nil
return &arrayDecoder{typ, typ.Elem(), decoder}, nil
}
func encoderOfSlice(typ reflect.Type) (Encoder, error) {
encoder, err := encoderOfType(typ.Elem())
func encoderOfArray(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
encoder, err := encoderOfType(cfg, typ.Elem())
if err != nil {
return nil, err
}
if typ.Elem().Kind() == reflect.Map {
encoder = &optionalEncoder{ encoder}
encoder = &optionalEncoder{encoder}
}
return &sliceEncoder{typ, typ.Elem(), encoder}, nil
return &arrayEncoder{typ, typ.Elem(), encoder}, nil
}
type sliceEncoder struct {
sliceType reflect.Type
type arrayEncoder struct {
arrayType reflect.Type
elemType reflect.Type
elemEncoder Encoder
elemEncoder ValEncoder
}
func (encoder *sliceEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
slice := (*sliceHeader)(ptr)
if slice.Len == 0 {
stream.WriteEmptyArray()
func (encoder *arrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if ptr == nil {
stream.WriteNil()
return
}
stream.WriteArrayStart()
elemPtr := uintptr(slice.Data)
encoder.elemEncoder.encode(unsafe.Pointer(elemPtr), stream)
for i := 1; i < slice.Len; i++ {
elemPtr := uintptr(ptr)
encoder.elemEncoder.Encode(unsafe.Pointer(elemPtr), stream)
for i := 1; i < encoder.arrayType.Len(); i++ {
stream.WriteMore()
elemPtr += encoder.elemType.Size()
encoder.elemEncoder.encode(unsafe.Pointer(elemPtr), stream)
encoder.elemEncoder.Encode(unsafe.Pointer(elemPtr), stream)
}
stream.WriteArrayEnd()
if stream.Error != nil && stream.Error != io.EOF {
stream.Error = fmt.Errorf("%v: %s", encoder.sliceType, stream.Error.Error())
stream.Error = fmt.Errorf("%v: %s", encoder.arrayType, stream.Error.Error())
}
}
func (encoder *sliceEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *arrayEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *sliceEncoder) isEmpty(ptr unsafe.Pointer) bool {
slice := (*sliceHeader)(ptr)
return slice.Len == 0
func (encoder *arrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return false
}
type sliceDecoder struct {
sliceType reflect.Type
type arrayDecoder struct {
arrayType reflect.Type
elemType reflect.Type
elemDecoder Decoder
elemDecoder ValDecoder
}
// sliceHeader is a safe version of SliceHeader used within this package.
type sliceHeader struct {
Data unsafe.Pointer
Len int
Cap int
}
func (decoder *sliceDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *arrayDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
decoder.doDecode(ptr, iter)
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.sliceType, iter.Error.Error())
iter.Error = fmt.Errorf("%v: %s", decoder.arrayType, iter.Error.Error())
}
}
func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
slice := (*sliceHeader)(ptr)
reuseSlice(slice, decoder.sliceType, 4)
if !iter.ReadArray() {
return
}
func (decoder *arrayDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
offset := uintptr(0)
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
if !iter.ReadArray() {
slice.Len = 1
return
}
offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
if !iter.ReadArray() {
slice.Len = 2
return
}
offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
if !iter.ReadArray() {
slice.Len = 3
return
}
offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
slice.Len = 4
for iter.ReadArray() {
growOne(slice, decoder.sliceType, decoder.elemType)
offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
}
}
// grow grows the slice s so that it can hold extra more values, allocating
// more capacity if needed. It also returns the old and new slice lengths.
func growOne(slice *sliceHeader, sliceType reflect.Type, elementType reflect.Type) {
newLen := slice.Len + 1
if newLen <= slice.Cap {
slice.Len = newLen
return
}
newCap := slice.Cap
if newCap == 0 {
newCap = 1
} else {
for newCap < newLen {
if slice.Len < 1024 {
newCap += newCap
} else {
newCap += newCap / 4
}
for ; iter.ReadArray(); offset += decoder.elemType.Size() {
if offset < decoder.arrayType.Size() {
decoder.elemDecoder.Decode(unsafe.Pointer(uintptr(ptr)+offset), iter)
} else {
iter.Skip()
}
}
dst := unsafe.Pointer(reflect.MakeSlice(sliceType, newLen, newCap).Pointer())
// copy old array into new array
originalBytesCount := uintptr(slice.Len) * elementType.Size()
srcPtr := (*[1 << 30]byte)(slice.Data)
dstPtr := (*[1 << 30]byte)(dst)
for i := uintptr(0); i < originalBytesCount; i++ {
dstPtr[i] = srcPtr[i]
}
slice.Len = newLen
slice.Cap = newCap
slice.Data = dst
}
func reuseSlice(slice *sliceHeader, sliceType reflect.Type, expectedCap int) {
if expectedCap <= slice.Cap {
return
}
dst := unsafe.Pointer(reflect.MakeSlice(sliceType, 0, expectedCap).Pointer())
slice.Cap = expectedCap
slice.Data = dst
}
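A minimal sketch of what the new fixed-array support does with surplus elements (assuming the package-level Unmarshal helper routes through these decoders):
var arr [2]int
_ = Unmarshal([]byte(`[1,2,3]`), &arr)
// arr == [2]int{1, 2}; the third element is consumed by iter.Skip() once the
// write offset reaches arrayType.Size() in doDecode above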


@ -0,0 +1,299 @@
package jsoniter
import (
"fmt"
"reflect"
"strings"
"unicode"
"unsafe"
)
var typeDecoders = map[string]ValDecoder{}
var fieldDecoders = map[string]ValDecoder{}
var typeEncoders = map[string]ValEncoder{}
var fieldEncoders = map[string]ValEncoder{}
var extensions = []Extension{}
type StructDescriptor struct {
Type reflect.Type
Fields []*Binding
}
func (structDescriptor *StructDescriptor) GetField(fieldName string) *Binding {
for _, binding := range structDescriptor.Fields {
if binding.Field.Name == fieldName {
return binding
}
}
return nil
}
type Binding struct {
Field *reflect.StructField
FromNames []string
ToNames []string
Encoder ValEncoder
Decoder ValDecoder
}
type Extension interface {
UpdateStructDescriptor(structDescriptor *StructDescriptor)
CreateDecoder(typ reflect.Type) ValDecoder
CreateEncoder(typ reflect.Type) ValEncoder
DecorateDecoder(typ reflect.Type, decoder ValDecoder) ValDecoder
DecorateEncoder(typ reflect.Type, encoder ValEncoder) ValEncoder
}
type DummyExtension struct {
}
func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
}
func (extension *DummyExtension) CreateDecoder(typ reflect.Type) ValDecoder {
return nil
}
func (extension *DummyExtension) CreateEncoder(typ reflect.Type) ValEncoder {
return nil
}
func (extension *DummyExtension) DecorateDecoder(typ reflect.Type, decoder ValDecoder) ValDecoder {
return decoder
}
func (extension *DummyExtension) DecorateEncoder(typ reflect.Type, encoder ValEncoder) ValEncoder {
return encoder
}
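A minimal sketch of a custom extension built on DummyExtension (the upperCaseExtension name and behavior are hypothetical, purely to show the hook):
type upperCaseExtension struct {
	DummyExtension
}

func (extension *upperCaseExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
	for _, binding := range structDescriptor.Fields {
		upper := strings.ToUpper(binding.Field.Name)
		binding.FromNames = []string{upper} // accepted on decode
		binding.ToNames = []string{upper}   // emitted on encode
	}
}

// registered once, typically in an init function:
// RegisterExtension(&upperCaseExtension{})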
type funcDecoder struct {
fun DecoderFunc
}
func (decoder *funcDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
decoder.fun(ptr, iter)
}
type funcEncoder struct {
fun EncoderFunc
isEmptyFunc func(ptr unsafe.Pointer) bool
}
func (encoder *funcEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
encoder.fun(ptr, stream)
}
func (encoder *funcEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *funcEncoder) IsEmpty(ptr unsafe.Pointer) bool {
if encoder.isEmptyFunc == nil {
return false
}
return encoder.isEmptyFunc(ptr)
}
func RegisterTypeDecoderFunc(typ string, fun DecoderFunc) {
typeDecoders[typ] = &funcDecoder{fun}
}
func RegisterTypeDecoder(typ string, decoder ValDecoder) {
typeDecoders[typ] = decoder
}
func RegisterFieldDecoderFunc(typ string, field string, fun DecoderFunc) {
RegisterFieldDecoder(typ, field, &funcDecoder{fun})
}
func RegisterFieldDecoder(typ string, field string, decoder ValDecoder) {
fieldDecoders[fmt.Sprintf("%s/%s", typ, field)] = decoder
}
func RegisterTypeEncoderFunc(typ string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
typeEncoders[typ] = &funcEncoder{fun, isEmptyFunc}
}
func RegisterTypeEncoder(typ string, encoder ValEncoder) {
typeEncoders[typ] = encoder
}
func RegisterFieldEncoderFunc(typ string, field string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
RegisterFieldEncoder(typ, field, &funcEncoder{fun, isEmptyFunc})
}
func RegisterFieldEncoder(typ string, field string, encoder ValEncoder) {
fieldEncoders[fmt.Sprintf("%s/%s", typ, field)] = encoder
}
func RegisterExtension(extension Extension) {
extensions = append(extensions, extension)
}
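A minimal usage sketch of the function-based registration hooks above (assuming the time package is imported where this runs; the choice of time format is illustrative):
RegisterTypeDecoderFunc("time.Time", func(ptr unsafe.Pointer, iter *Iterator) {
	t, err := time.Parse(time.RFC3339, iter.ReadString())
	if err != nil {
		iter.ReportError("decode time.Time", err.Error())
		return
	}
	*((*time.Time)(ptr)) = t
})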
func getTypeDecoderFromExtension(typ reflect.Type) ValDecoder {
decoder := _getTypeDecoderFromExtension(typ)
if decoder != nil {
for _, extension := range extensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
}
return decoder
}
func _getTypeDecoderFromExtension(typ reflect.Type) ValDecoder {
for _, extension := range extensions {
decoder := extension.CreateDecoder(typ)
if decoder != nil {
return decoder
}
}
typeName := typ.String()
decoder := typeDecoders[typeName]
if decoder != nil {
return decoder
}
if typ.Kind() == reflect.Ptr {
decoder := typeDecoders[typ.Elem().String()]
if decoder != nil {
return &optionalDecoder{typ.Elem(), decoder}
}
}
return nil
}
func getTypeEncoderFromExtension(typ reflect.Type) ValEncoder {
encoder := _getTypeEncoderFromExtension(typ)
if encoder != nil {
for _, extension := range extensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
}
return encoder
}
func _getTypeEncoderFromExtension(typ reflect.Type) ValEncoder {
for _, extension := range extensions {
encoder := extension.CreateEncoder(typ)
if encoder != nil {
return encoder
}
}
typeName := typ.String()
encoder := typeEncoders[typeName]
if encoder != nil {
return encoder
}
if typ.Kind() == reflect.Ptr {
encoder := typeEncoders[typ.Elem().String()]
if encoder != nil {
return &optionalEncoder{encoder}
}
}
return nil
}
func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, error) {
anonymousBindings := []*Binding{}
bindings := []*Binding{}
for i := 0; i < typ.NumField(); i++ {
field := typ.Field(i)
if field.Anonymous {
if field.Type.Kind() == reflect.Struct {
structDescriptor, err := describeStruct(cfg, field.Type)
if err != nil {
return nil, err
}
for _, binding := range structDescriptor.Fields {
anonymousBindings = append(anonymousBindings, binding)
}
} else if field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct {
structDescriptor, err := describeStruct(cfg, field.Type.Elem())
if err != nil {
return nil, err
}
for _, binding := range structDescriptor.Fields {
binding.Encoder = &optionalEncoder{binding.Encoder}
binding.Encoder = &structFieldEncoder{&field, binding.Encoder, false}
binding.Decoder = &optionalDecoder{field.Type, binding.Decoder}
binding.Decoder = &structFieldDecoder{&field, binding.Decoder}
anonymousBindings = append(anonymousBindings, binding)
}
}
} else {
tagParts := strings.Split(field.Tag.Get("json"), ",")
fieldNames := calcFieldNames(field.Name, tagParts[0])
fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name)
decoder := fieldDecoders[fieldCacheKey]
if decoder == nil {
var err error
decoder, err = decoderOfType(cfg, field.Type)
if err != nil {
return nil, err
}
}
encoder := fieldEncoders[fieldCacheKey]
if encoder == nil {
var err error
encoder, err = encoderOfType(cfg, field.Type)
if err != nil {
return nil, err
}
// map is stored as pointer in the struct
if field.Type.Kind() == reflect.Map {
encoder = &optionalEncoder{encoder}
}
}
binding := &Binding{
Field: &field,
FromNames: fieldNames,
ToNames: fieldNames,
Decoder: decoder,
Encoder: encoder,
}
bindings = append(bindings, binding)
}
}
structDescriptor := &StructDescriptor{
Type: typ,
Fields: bindings,
}
for _, extension := range extensions {
extension.UpdateStructDescriptor(structDescriptor)
}
for _, binding := range structDescriptor.Fields {
shouldOmitEmpty := false
tagParts := strings.Split(binding.Field.Tag.Get("json"), ",")
for _, tagPart := range tagParts[1:] {
if tagPart == "omitempty" {
shouldOmitEmpty = true
} else if tagPart == "string" {
binding.Decoder = &stringModeDecoder{binding.Decoder}
binding.Encoder = &stringModeEncoder{binding.Encoder}
}
}
binding.Decoder = &structFieldDecoder{binding.Field, binding.Decoder}
binding.Encoder = &structFieldEncoder{binding.Field, binding.Encoder, shouldOmitEmpty}
}
// insert anonymous bindings at the head
structDescriptor.Fields = append(anonymousBindings, structDescriptor.Fields...)
return structDescriptor, nil
}
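A minimal sketch of the json tags the loop above recognizes (the user type is hypothetical):
type user struct {
	Name  string `json:"name"`            // renamed field
	Age   int    `json:"age,string"`      // wrapped by stringModeEncoder/stringModeDecoder
	Email string `json:"email,omitempty"` // dropped from output when the encoder reports it empty
	Token string `json:"-"`               // never encoded or decoded
}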
func calcFieldNames(originalFieldName string, tagProvidedFieldName string) []string {
// tag => exported? => original
isNotExported := unicode.IsLower(rune(originalFieldName[0]))
var fieldNames []string
// tagParts[0] is always present, even when there is no tag
switch tagProvidedFieldName {
case "":
if isNotExported {
fieldNames = []string{}
} else {
fieldNames = []string{originalFieldName}
}
case "-":
fieldNames = []string{}
default:
fieldNames = []string{tagProvidedFieldName}
}
return fieldNames
}


@ -1,18 +1,23 @@
package jsoniter
import (
"unsafe"
"encoding"
"encoding/json"
"reflect"
"sort"
"strconv"
"unsafe"
)
type mapDecoder struct {
mapType reflect.Type
keyType reflect.Type
elemType reflect.Type
elemDecoder Decoder
elemDecoder ValDecoder
mapInterface emptyInterface
}
func (decoder *mapDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
// dark magic to cast unsafe.Pointer back to interface{} using reflect.Type
mapInterface := decoder.mapInterface
mapInterface.word = ptr
@ -21,22 +26,57 @@ func (decoder *mapDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
if realVal.IsNil() {
realVal.Set(reflect.MakeMap(realVal.Type()))
}
for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
iter.ReadMapCB(func(iter *Iterator, keyStr string) bool {
elem := reflect.New(decoder.elemType)
decoder.elemDecoder.decode(unsafe.Pointer(elem.Pointer()), iter)
decoder.elemDecoder.Decode(unsafe.Pointer(elem.Pointer()), iter)
// to put into map, we have to use reflection
realVal.SetMapIndex(reflect.ValueOf(string([]byte(field))), elem.Elem())
}
keyType := decoder.keyType
switch {
case keyType.Kind() == reflect.String:
realVal.SetMapIndex(reflect.ValueOf(keyStr), elem.Elem())
return true
case keyType.Implements(textUnmarshalerType):
textUnmarshaler := reflect.New(keyType.Elem()).Interface().(encoding.TextUnmarshaler)
err := textUnmarshaler.UnmarshalText([]byte(keyStr))
if err != nil {
iter.ReportError("read map key as TextUnmarshaler", err.Error())
return false
}
realVal.SetMapIndex(reflect.ValueOf(textUnmarshaler), elem.Elem())
return true
default:
switch keyType.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
n, err := strconv.ParseInt(keyStr, 10, 64)
if err != nil || reflect.Zero(keyType).OverflowInt(n) {
iter.ReportError("read map key as int64", "read int64 failed")
return false
}
realVal.SetMapIndex(reflect.ValueOf(n).Convert(keyType), elem.Elem())
return true
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
n, err := strconv.ParseUint(keyStr, 10, 64)
if err != nil || reflect.Zero(keyType).OverflowUint(n) {
iter.ReportError("read map key as uint64", "read uint64 failed")
return false
}
realVal.SetMapIndex(reflect.ValueOf(n).Convert(keyType), elem.Elem())
return true
}
}
iter.ReportError("read map key", "unexpected map key type "+keyType.String())
return true
})
}
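A minimal sketch of the widened key handling (assuming the package-level Unmarshal helper):
var byID map[int64]string
_ = Unmarshal([]byte(`{"1":"one","2":"two"}`), &byID)
// string keys are used as-is, integer keys go through strconv.ParseInt/ParseUint
// with an overflow check, and key types implementing encoding.TextUnmarshaler
// are fed the raw key text via UnmarshalText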
type mapEncoder struct {
mapType reflect.Type
elemType reflect.Type
elemEncoder Encoder
elemEncoder ValEncoder
mapInterface emptyInterface
}
func (encoder *mapEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
mapInterface := encoder.mapInterface
mapInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
@ -47,18 +87,50 @@ func (encoder *mapEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
if i != 0 {
stream.WriteMore()
}
stream.WriteObjectField(key.String())
encodeMapKey(key, stream)
stream.writeByte(':')
val := realVal.MapIndex(key).Interface()
encoder.elemEncoder.encodeInterface(val, stream)
encoder.elemEncoder.EncodeInterface(val, stream)
}
stream.WriteObjectEnd()
}
func (encoder *mapEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func encodeMapKey(key reflect.Value, stream *Stream) {
if key.Kind() == reflect.String {
stream.WriteString(key.String())
return
}
if tm, ok := key.Interface().(encoding.TextMarshaler); ok {
buf, err := tm.MarshalText()
if err != nil {
stream.Error = err
return
}
stream.writeByte('"')
stream.Write(buf)
stream.writeByte('"')
return
}
switch key.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
stream.writeByte('"')
stream.WriteInt64(key.Int())
stream.writeByte('"')
return
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
stream.writeByte('"')
stream.WriteUint64(key.Uint())
stream.writeByte('"')
return
}
stream.Error = &json.UnsupportedTypeError{key.Type()}
}
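On the encode side, a minimal sketch of what the helpers above produce (assuming a config with sortMapKeys enabled and its Marshal helper):
sorted, _ := Marshal(map[string]int{"b": 2, "a": 1})
// sorted == []byte(`{"a":1,"b":2}`): sortKeysMapEncoder orders string keys via stringValues

quoted, _ := Marshal(map[int]string{1: "one"})
// quoted == []byte(`{"1":"one"}`): encodeMapKey writes numeric keys through
// WriteInt64/WriteUint64 wrapped in quotes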
func (encoder *mapEncoder) isEmpty(ptr unsafe.Pointer) bool {
func (encoder *mapEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *mapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
mapInterface := encoder.mapInterface
mapInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
@ -66,40 +138,53 @@ func (encoder *mapEncoder) isEmpty(ptr unsafe.Pointer) bool {
return realVal.Len() == 0
}
type mapInterfaceEncoder struct {
type sortKeysMapEncoder struct {
mapType reflect.Type
elemType reflect.Type
elemEncoder Encoder
elemEncoder ValEncoder
mapInterface emptyInterface
}
func (encoder *mapInterfaceEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
mapInterface := encoder.mapInterface
mapInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
realVal := reflect.ValueOf(*realInterface)
// Extract and sort the keys.
var sv stringValues = realVal.MapKeys()
sort.Sort(sv)
stream.WriteObjectStart()
for i, key := range realVal.MapKeys() {
for i, key := range sv {
if i != 0 {
stream.WriteMore()
}
stream.WriteObjectField(key.String())
encodeMapKey(key, stream)
stream.writeByte(':')
val := realVal.MapIndex(key).Interface()
encoder.elemEncoder.encode(unsafe.Pointer(&val), stream)
encoder.elemEncoder.EncodeInterface(val, stream)
}
stream.WriteObjectEnd()
}
func (encoder *mapInterfaceEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
// stringValues is a slice of reflect.Value holding *reflect.StringValue.
// It implements the methods to sort by string.
type stringValues []reflect.Value
func (sv stringValues) Len() int { return len(sv) }
func (sv stringValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] }
func (sv stringValues) Less(i, j int) bool { return sv.get(i) < sv.get(j) }
func (sv stringValues) get(i int) string { return sv[i].String() }
func (encoder *sortKeysMapEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *mapInterfaceEncoder) isEmpty(ptr unsafe.Pointer) bool {
func (encoder *sortKeysMapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
mapInterface := encoder.mapInterface
mapInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
realVal := reflect.ValueOf(*realInterface)
return realVal.Len() == 0
}
}


@ -1,301 +1,306 @@
package jsoniter
import (
"unsafe"
"encoding/base64"
"encoding/json"
"unsafe"
)
type stringCodec struct {
}
func (codec *stringCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *stringCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*string)(ptr)) = iter.ReadString()
}
func (codec *stringCodec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *stringCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
str := *((*string)(ptr))
stream.WriteString(str)
}
func (encoder *stringCodec) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (codec *stringCodec) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, codec)
}
func (codec *stringCodec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *stringCodec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*string)(ptr)) == ""
}
type intCodec struct {
}
func (codec *intCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *intCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*int)(ptr)) = iter.ReadInt()
}
func (codec *intCodec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *intCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt(*((*int)(ptr)))
}
func (encoder *intCodec) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *intCodec) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (codec *intCodec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *intCodec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*int)(ptr)) == 0
}
type int8Codec struct {
}
func (codec *int8Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *int8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*int8)(ptr)) = iter.ReadInt8()
}
func (codec *int8Codec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *int8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt8(*((*int8)(ptr)))
}
func (encoder *int8Codec) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *int8Codec) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (codec *int8Codec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *int8Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*int8)(ptr)) == 0
}
type int16Codec struct {
}
func (codec *int16Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *int16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*int16)(ptr)) = iter.ReadInt16()
}
func (codec *int16Codec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *int16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt16(*((*int16)(ptr)))
}
func (encoder *int16Codec) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *int16Codec) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (codec *int16Codec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *int16Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*int16)(ptr)) == 0
}
type int32Codec struct {
}
func (codec *int32Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *int32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*int32)(ptr)) = iter.ReadInt32()
}
func (codec *int32Codec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *int32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt32(*((*int32)(ptr)))
}
func (encoder *int32Codec) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *int32Codec) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (codec *int32Codec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *int32Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*int32)(ptr)) == 0
}
type int64Codec struct {
}
func (codec *int64Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *int64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*int64)(ptr)) = iter.ReadInt64()
}
func (codec *int64Codec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *int64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt64(*((*int64)(ptr)))
}
func (encoder *int64Codec) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *int64Codec) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (codec *int64Codec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *int64Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*int64)(ptr)) == 0
}
type uintCodec struct {
}
func (codec *uintCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *uintCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*uint)(ptr)) = iter.ReadUint()
}
func (codec *uintCodec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *uintCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint(*((*uint)(ptr)))
}
func (encoder *uintCodec) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *uintCodec) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (codec *uintCodec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *uintCodec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*uint)(ptr)) == 0
}
type uint8Codec struct {
}
func (codec *uint8Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *uint8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*uint8)(ptr)) = iter.ReadUint8()
}
func (codec *uint8Codec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *uint8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint8(*((*uint8)(ptr)))
}
func (encoder *uint8Codec) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *uint8Codec) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (codec *uint8Codec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *uint8Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*uint8)(ptr)) == 0
}
type uint16Codec struct {
}
func (decoder *uint16Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *uint16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*uint16)(ptr)) = iter.ReadUint16()
}
func (codec *uint16Codec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *uint16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint16(*((*uint16)(ptr)))
}
func (encoder *uint16Codec) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *uint16Codec) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (codec *uint16Codec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *uint16Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*uint16)(ptr)) == 0
}
type uint32Codec struct {
}
func (codec *uint32Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *uint32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*uint32)(ptr)) = iter.ReadUint32()
}
func (codec *uint32Codec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *uint32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint32(*((*uint32)(ptr)))
}
func (encoder *uint32Codec) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *uint32Codec) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (codec *uint32Codec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *uint32Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*uint32)(ptr)) == 0
}
type uint64Codec struct {
}
func (codec *uint64Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *uint64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*uint64)(ptr)) = iter.ReadUint64()
}
func (codec *uint64Codec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *uint64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint64(*((*uint64)(ptr)))
}
func (encoder *uint64Codec) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *uint64Codec) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (codec *uint64Codec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *uint64Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*uint64)(ptr)) == 0
}
type float32Codec struct {
}
func (codec *float32Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *float32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*float32)(ptr)) = iter.ReadFloat32()
}
func (codec *float32Codec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *float32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteFloat32(*((*float32)(ptr)))
}
func (encoder *float32Codec) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *float32Codec) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (codec *float32Codec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *float32Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*float32)(ptr)) == 0
}
type float64Codec struct {
}
func (codec *float64Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *float64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*float64)(ptr)) = iter.ReadFloat64()
}
func (codec *float64Codec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *float64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteFloat64(*((*float64)(ptr)))
}
func (encoder *float64Codec) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *float64Codec) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (codec *float64Codec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *float64Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*float64)(ptr)) == 0
}
type boolCodec struct {
}
func (codec *boolCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *boolCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*bool)(ptr)) = iter.ReadBool()
}
func (codec *boolCodec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *boolCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteBool(*((*bool)(ptr)))
}
func (encoder *boolCodec) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *boolCodec) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (codec *boolCodec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool {
return !(*((*bool)(ptr)))
}
type emptyInterfaceCodec struct {
}
func (codec *emptyInterfaceCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *emptyInterfaceCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*interface{})(ptr)) = iter.Read()
}
func (codec *emptyInterfaceCodec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *emptyInterfaceCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteVal(*((*interface{})(ptr)))
}
func (encoder *emptyInterfaceCodec) encodeInterface(val interface{}, stream *Stream) {
func (encoder *emptyInterfaceCodec) EncodeInterface(val interface{}, stream *Stream) {
stream.WriteVal(val)
}
func (codec *emptyInterfaceCodec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *emptyInterfaceCodec) IsEmpty(ptr unsafe.Pointer) bool {
return ptr == nil
}
type nonEmptyInterfaceCodec struct {
}
func (codec *nonEmptyInterfaceCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *nonEmptyInterfaceCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
nonEmptyInterface := (*nonEmptyInterface)(ptr)
if nonEmptyInterface.itab == nil {
iter.ReportError("read non-empty interface", "do not know which concrete type to decode to")
return
}
var i interface{}
e := (*emptyInterface)(unsafe.Pointer(&i))
e.typ = nonEmptyInterface.itab.typ
@ -304,7 +309,7 @@ func (codec *nonEmptyInterfaceCodec) decode(ptr unsafe.Pointer, iter *Iterator)
nonEmptyInterface.word = e.word
}
func (codec *nonEmptyInterfaceCodec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *nonEmptyInterfaceCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
nonEmptyInterface := (*nonEmptyInterface)(ptr)
var i interface{}
e := (*emptyInterface)(unsafe.Pointer(&i))
@ -313,11 +318,11 @@ func (codec *nonEmptyInterfaceCodec) encode(ptr unsafe.Pointer, stream *Stream)
stream.WriteVal(i)
}
func (encoder *nonEmptyInterfaceCodec) encodeInterface(val interface{}, stream *Stream) {
func (encoder *nonEmptyInterfaceCodec) EncodeInterface(val interface{}, stream *Stream) {
stream.WriteVal(val)
}
func (codec *nonEmptyInterfaceCodec) isEmpty(ptr unsafe.Pointer) bool {
func (codec *nonEmptyInterfaceCodec) IsEmpty(ptr unsafe.Pointer) bool {
nonEmptyInterface := (*nonEmptyInterface)(ptr)
return nonEmptyInterface.word == nil
}
@ -325,86 +330,166 @@ func (codec *nonEmptyInterfaceCodec) isEmpty(ptr unsafe.Pointer) bool {
type anyCodec struct {
}
func (codec *anyCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *anyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*Any)(ptr)) = iter.ReadAny()
}
func (codec *anyCodec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *anyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
(*((*Any)(ptr))).WriteTo(stream)
}
func (encoder *anyCodec) encodeInterface(val interface{}, stream *Stream) {
func (encoder *anyCodec) EncodeInterface(val interface{}, stream *Stream) {
(val.(Any)).WriteTo(stream)
}
func (encoder *anyCodec) isEmpty(ptr unsafe.Pointer) bool {
func (encoder *anyCodec) IsEmpty(ptr unsafe.Pointer) bool {
return (*((*Any)(ptr))).Size() == 0
}
type jsonNumberCodec struct {
}
func (codec *jsonNumberCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString()))
}
func (codec *jsonNumberCodec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteRaw(string(*((*json.Number)(ptr))))
}
func (encoder *jsonNumberCodec) encodeInterface(val interface{}, stream *Stream) {
func (encoder *jsonNumberCodec) EncodeInterface(val interface{}, stream *Stream) {
stream.WriteRaw(string(val.(json.Number)))
}
func (encoder *jsonNumberCodec) isEmpty(ptr unsafe.Pointer) bool {
func (encoder *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
return len(*((*json.Number)(ptr))) == 0
}
type jsonRawMessageCodec struct {
}
func (codec *jsonRawMessageCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*json.RawMessage)(ptr)) = json.RawMessage(iter.SkipAndReturnBytes())
}
func (codec *jsonRawMessageCodec) encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
}
func (encoder *jsonRawMessageCodec) encodeInterface(val interface{}, stream *Stream) {
func (encoder *jsonRawMessageCodec) EncodeInterface(val interface{}, stream *Stream) {
stream.WriteRaw(string(val.(json.RawMessage)))
}
func (encoder *jsonRawMessageCodec) isEmpty(ptr unsafe.Pointer) bool {
func (encoder *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
return len(*((*json.RawMessage)(ptr))) == 0
}
type stringNumberDecoder struct {
elemDecoder Decoder
type jsoniterRawMessageCodec struct {
}
func (decoder *stringNumberDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*RawMessage)(ptr)) = RawMessage(iter.SkipAndReturnBytes())
}
func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteRaw(string(*((*RawMessage)(ptr))))
}
func (encoder *jsoniterRawMessageCodec) EncodeInterface(val interface{}, stream *Stream) {
stream.WriteRaw(string(val.(RawMessage)))
}
func (encoder *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
return len(*((*RawMessage)(ptr))) == 0
}
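The two raw-message codecs above just skip the value and echo its bytes back out on encode. For reference, this mirrors the pass-through behaviour of json.RawMessage in the standard library; a minimal sketch (standard library only, not this package's internals):
package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    // The "payload" field is kept as raw bytes instead of being parsed.
    var msg struct {
        Kind    string          `json:"kind"`
        Payload json.RawMessage `json:"payload"`
    }
    input := []byte(`{"kind":"point","payload":{"x":1,"y":2}}`)
    if err := json.Unmarshal(input, &msg); err != nil {
        panic(err)
    }
    fmt.Println(string(msg.Payload)) // {"x":1,"y":2}

    // On re-encode the raw bytes are written out verbatim.
    out, _ := json.Marshal(msg)
    fmt.Println(string(out)) // {"kind":"point","payload":{"x":1,"y":2}}
}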
type base64Codec struct {
}
func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
encoding := base64.StdEncoding
src := iter.SkipAndReturnBytes()
src = src[1 : len(src)-1]
decodedLen := encoding.DecodedLen(len(src))
dst := make([]byte, decodedLen)
_, err := encoding.Decode(dst, src)
if err != nil {
iter.ReportError("decode base64", err.Error())
} else {
*((*[]byte)(ptr)) = dst
}
}
func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
encoding := base64.StdEncoding
stream.writeByte('"')
src := *((*[]byte)(ptr))
toGrow := encoding.EncodedLen(len(src))
stream.ensure(toGrow)
encoding.Encode(stream.buf[stream.n:], src)
stream.n += toGrow
stream.writeByte('"')
}
func (encoder *base64Codec) EncodeInterface(val interface{}, stream *Stream) {
encoding := base64.StdEncoding
stream.writeByte('"')
src := val.([]byte)
toGrow := encoding.EncodedLen(len(src))
stream.ensure(toGrow)
encoding.Encode(stream.buf[stream.n:], src)
stream.n += toGrow
stream.writeByte('"')
}
func (encoder *base64Codec) IsEmpty(ptr unsafe.Pointer) bool {
return len(*((*[]byte)(ptr))) == 0
}
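base64Codec decodes a quoted base64 string into a []byte and encodes []byte values back out as a quoted base64 string, matching how encoding/json treats byte slices. A small reference round trip with the standard library:
package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    // []byte fields are serialized as base64-encoded JSON strings.
    data, _ := json.Marshal(map[string][]byte{"blob": []byte("hello")})
    fmt.Println(string(data)) // {"blob":"aGVsbG8="}

    var decoded map[string][]byte
    _ = json.Unmarshal(data, &decoded)
    fmt.Println(string(decoded["blob"])) // hello
}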
type stringModeDecoder struct {
elemDecoder ValDecoder
}
func (decoder *stringModeDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
c := iter.nextToken()
if c != '"' {
iter.reportError("stringNumberDecoder", `expect "`)
iter.ReportError("stringModeDecoder", `expect "`)
return
}
decoder.elemDecoder.decode(ptr, iter)
decoder.elemDecoder.Decode(ptr, iter)
if iter.Error != nil {
return
}
c = iter.readByte()
if c != '"' {
iter.reportError("stringNumberDecoder", `expect "`)
iter.ReportError("stringModeDecoder", `expect "`)
return
}
}
type stringModeEncoder struct {
elemEncoder ValEncoder
}
func (encoder *stringModeEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.writeByte('"')
encoder.elemEncoder.Encode(ptr, stream)
stream.writeByte('"')
}
func (encoder *stringModeEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *stringModeEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.elemEncoder.IsEmpty(ptr)
}
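stringModeDecoder and stringModeEncoder implement the `json:",string"` tag option: the wrapped value is read from, and written as, a quoted JSON string. The equivalent standard-library behaviour, for reference:
package main

import (
    "encoding/json"
    "fmt"
)

type Order struct {
    // The ",string" option wraps the number in quotes on the wire.
    ID int64 `json:"id,string"`
}

func main() {
    out, _ := json.Marshal(Order{ID: 42})
    fmt.Println(string(out)) // {"id":"42"}

    var o Order
    _ = json.Unmarshal([]byte(`{"id":"42"}`), &o)
    fmt.Println(o.ID) // 42
}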
type marshalerEncoder struct {
templateInterface emptyInterface
}
func (encoder *marshalerEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
templateInterface := encoder.templateInterface
templateInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&templateInterface))
@ -416,11 +501,11 @@ func (encoder *marshalerEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
stream.Write(bytes)
}
}
func (encoder *marshalerEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *marshalerEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *marshalerEncoder) isEmpty(ptr unsafe.Pointer) bool {
func (encoder *marshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
templateInterface := encoder.templateInterface
templateInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&templateInterface))
@ -437,7 +522,7 @@ type unmarshalerDecoder struct {
templateInterface emptyInterface
}
func (decoder *unmarshalerDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *unmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
templateInterface := decoder.templateInterface
templateInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&templateInterface))
@ -445,6 +530,6 @@ func (decoder *unmarshalerDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
bytes := iter.SkipAndReturnBytes()
err := unmarshaler.UnmarshalJSON(bytes)
if err != nil {
iter.reportError("unmarshaler", err.Error())
iter.ReportError("unmarshaler", err.Error())
}
}
}

View File

@ -1,144 +1,46 @@
package jsoniter
import (
"io"
"fmt"
"io"
"reflect"
"unsafe"
"strings"
"unicode"
)
func encoderOfStruct(typ reflect.Type) (Encoder, error) {
structEncoder_ := &structEncoder{}
func encoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
fields := map[string]*structFieldEncoder{}
for _, field := range listStructFields(typ) {
fieldEncoderKey := fmt.Sprintf("%s/%s", typ.String(), field.Name)
var extensionProvidedFieldNames []string
for _, extension := range extensions {
alternativeFieldNames, fun, _ := extension(typ, field)
if alternativeFieldNames != nil {
extensionProvidedFieldNames = alternativeFieldNames
}
if fun != nil {
fieldEncoders[fieldEncoderKey] = &funcEncoder{fun}
}
structDescriptor, err := describeStruct(cfg, typ)
if err != nil {
return nil, err
}
for _, binding := range structDescriptor.Fields {
for _, toName := range binding.ToNames {
fields[toName] = binding.Encoder.(*structFieldEncoder)
}
tagParts := strings.Split(field.Tag.Get("json"), ",")
// if fieldNames set by extension, use theirs, otherwise try tags
fieldNames := calcFieldNames(field.Name, tagParts[0], extensionProvidedFieldNames)
omitempty := false
for _, tagPart := range tagParts {
if tagPart == "omitempty" {
omitempty = true
}
}
encoder := fieldEncoders[fieldEncoderKey]
var err error
if encoder == nil && len(fieldNames) > 0 {
encoder, err = encoderOfType(field.Type)
if err != nil {
return prefix(fmt.Sprintf("{%s}", field.Name)).addToEncoder(encoder, err)
}
// map is stored as pointer in the struct
if field.Type.Kind() == reflect.Map {
encoder = &optionalEncoder{encoder}
}
}
for _, fieldName := range fieldNames {
fields[fieldName] = &structFieldEncoder{field, fieldName, encoder, omitempty}
}
}
if len(fields) == 0 {
return &emptyStructEncoder{}, nil
}
for _, field := range fields {
structEncoder_.fields = append(structEncoder_.fields, field)
}
return structEncoder_, nil
return &structEncoder{fields}, nil
}
func listStructFields(typ reflect.Type) []*reflect.StructField {
fields := []*reflect.StructField{}
for i := 0; i < typ.NumField(); i++ {
field := typ.Field(i)
if field.Anonymous {
fields = append(fields, listStructFields(field.Type)...)
} else {
fields = append(fields, &field)
}
}
return fields
}
func decoderOfStruct(typ reflect.Type) (Decoder, error) {
func decoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
fields := map[string]*structFieldDecoder{}
for i := 0; i < typ.NumField(); i++ {
field := typ.Field(i)
fieldDecoderKey := fmt.Sprintf("%s/%s", typ.String(), field.Name)
var extensionProviedFieldNames []string
for _, extension := range extensions {
alternativeFieldNames, _, fun := extension(typ, &field)
if alternativeFieldNames != nil {
extensionProviedFieldNames = alternativeFieldNames
}
if fun != nil {
fieldDecoders[fieldDecoderKey] = &funcDecoder{fun}
}
}
decoder := fieldDecoders[fieldDecoderKey]
tagParts := strings.Split(field.Tag.Get("json"), ",")
fieldNames := calcFieldNames(field.Name, tagParts[0], extensionProviedFieldNames)
if decoder == nil && len(fieldNames) > 0 {
var err error
decoder, err = decoderOfType(field.Type)
if err != nil {
return prefix(fmt.Sprintf("{%s}", field.Name)).addToDecoder(decoder, err)
}
}
if len(tagParts) > 1 && tagParts[1] == "string" {
decoder = &stringNumberDecoder{decoder}
}
for _, fieldName := range fieldNames {
fields[fieldName] = &structFieldDecoder{&field, decoder}
structDescriptor, err := describeStruct(cfg, typ)
if err != nil {
return nil, err
}
for _, binding := range structDescriptor.Fields {
for _, fromName := range binding.FromNames {
fields[fromName] = binding.Decoder.(*structFieldDecoder)
}
}
return createStructDecoder(typ, fields)
}
func calcFieldNames(originalFieldName string, tagProvidedFieldName string, extensionProvidedFieldNames []string) []string {
// tag => extension => exported? => original
isNotExported := unicode.IsLower(rune(originalFieldName[0]))
var fieldNames []string
/// tagParts[0] always present, even if no tags
switch tagProvidedFieldName {
case "":
if extensionProvidedFieldNames != nil {
fieldNames = extensionProvidedFieldNames
} else {
if isNotExported {
fieldNames = []string{}
} else {
fieldNames = []string{originalFieldName}
}
}
case "-":
fieldNames = []string{}
default:
fieldNames = []string{tagProvidedFieldName}
}
return fieldNames
}
func EnableUnexportedStructFieldsSupport() {
RegisterExtension(func(type_ reflect.Type, field *reflect.StructField) ([]string, EncoderFunc, DecoderFunc) {
return []string{field.Name}, nil, nil
})
}
func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) (Decoder, error) {
func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) (ValDecoder, error) {
knownHash := map[int32]struct{}{
0: struct{}{},
0: {},
}
switch len(fields) {
case 0:
@ -203,7 +105,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &threeFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3}, nil
case 4:
var fieldName1 int32
var fieldName2 int32
@ -236,8 +138,8 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &fourFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4}, nil
case 5:
var fieldName1 int32
var fieldName2 int32
@ -275,8 +177,8 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &fiveFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5}, nil
case 6:
var fieldName1 int32
var fieldName2 int32
@ -319,8 +221,8 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &sixFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6}, nil
case 7:
var fieldName1 int32
var fieldName2 int32
@ -368,9 +270,9 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &sevenFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7}, nil
case 8:
var fieldName1 int32
var fieldName2 int32
@ -423,9 +325,9 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &eightFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8}, nil
case 9:
var fieldName1 int32
var fieldName2 int32
@ -483,9 +385,9 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &nineFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9}, nil
case 10:
var fieldName1 int32
var fieldName2 int32
@ -548,10 +450,10 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &tenFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9,
fieldName10, fieldDecoder10}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9,
fieldName10, fieldDecoder10}, nil
}
return &generalStructDecoder{typ, fields}, nil
}
@ -561,7 +463,7 @@ type generalStructDecoder struct {
fields map[string]*structFieldDecoder
}
func (decoder *generalStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.readObjectStart() {
return
}
@ -571,7 +473,7 @@ func (decoder *generalStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator)
if fieldDecoder == nil {
iter.Skip()
} else {
fieldDecoder.decode(ptr, iter)
fieldDecoder.Decode(ptr, iter)
}
for iter.nextToken() == ',' {
fieldBytes = iter.readObjectFieldAsBytes()
@ -580,7 +482,7 @@ func (decoder *generalStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator)
if fieldDecoder == nil {
iter.Skip()
} else {
fieldDecoder.decode(ptr, iter)
fieldDecoder.Decode(ptr, iter)
}
}
if iter.Error != nil && iter.Error != io.EOF {
@ -592,7 +494,7 @@ type skipDecoder struct {
typ reflect.Type
}
func (decoder *skipDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *skipDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
iter.Skip()
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
@ -605,13 +507,13 @@ type oneFieldStructDecoder struct {
fieldDecoder *structFieldDecoder
}
func (decoder *oneFieldStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.readObjectStart() {
return
}
for {
if iter.readFieldHash() == decoder.fieldHash {
decoder.fieldDecoder.decode(ptr, iter)
decoder.fieldDecoder.Decode(ptr, iter)
} else {
iter.Skip()
}
@ -632,16 +534,16 @@ type twoFieldsStructDecoder struct {
fieldDecoder2 *structFieldDecoder
}
func (decoder *twoFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.readObjectStart() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
decoder.fieldDecoder1.decode(ptr, iter)
decoder.fieldDecoder1.Decode(ptr, iter)
case decoder.fieldHash2:
decoder.fieldDecoder2.decode(ptr, iter)
decoder.fieldDecoder2.Decode(ptr, iter)
default:
iter.Skip()
}
@ -664,18 +566,18 @@ type threeFieldsStructDecoder struct {
fieldDecoder3 *structFieldDecoder
}
func (decoder *threeFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.readObjectStart() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
decoder.fieldDecoder1.decode(ptr, iter)
decoder.fieldDecoder1.Decode(ptr, iter)
case decoder.fieldHash2:
decoder.fieldDecoder2.decode(ptr, iter)
decoder.fieldDecoder2.Decode(ptr, iter)
case decoder.fieldHash3:
decoder.fieldDecoder3.decode(ptr, iter)
decoder.fieldDecoder3.Decode(ptr, iter)
default:
iter.Skip()
}
@ -700,20 +602,20 @@ type fourFieldsStructDecoder struct {
fieldDecoder4 *structFieldDecoder
}
func (decoder *fourFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.readObjectStart() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
decoder.fieldDecoder1.decode(ptr, iter)
decoder.fieldDecoder1.Decode(ptr, iter)
case decoder.fieldHash2:
decoder.fieldDecoder2.decode(ptr, iter)
decoder.fieldDecoder2.Decode(ptr, iter)
case decoder.fieldHash3:
decoder.fieldDecoder3.decode(ptr, iter)
decoder.fieldDecoder3.Decode(ptr, iter)
case decoder.fieldHash4:
decoder.fieldDecoder4.decode(ptr, iter)
decoder.fieldDecoder4.Decode(ptr, iter)
default:
iter.Skip()
}
@ -740,22 +642,22 @@ type fiveFieldsStructDecoder struct {
fieldDecoder5 *structFieldDecoder
}
func (decoder *fiveFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.readObjectStart() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
decoder.fieldDecoder1.decode(ptr, iter)
decoder.fieldDecoder1.Decode(ptr, iter)
case decoder.fieldHash2:
decoder.fieldDecoder2.decode(ptr, iter)
decoder.fieldDecoder2.Decode(ptr, iter)
case decoder.fieldHash3:
decoder.fieldDecoder3.decode(ptr, iter)
decoder.fieldDecoder3.Decode(ptr, iter)
case decoder.fieldHash4:
decoder.fieldDecoder4.decode(ptr, iter)
decoder.fieldDecoder4.Decode(ptr, iter)
case decoder.fieldHash5:
decoder.fieldDecoder5.decode(ptr, iter)
decoder.fieldDecoder5.Decode(ptr, iter)
default:
iter.Skip()
}
@ -784,24 +686,24 @@ type sixFieldsStructDecoder struct {
fieldDecoder6 *structFieldDecoder
}
func (decoder *sixFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.readObjectStart() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
decoder.fieldDecoder1.decode(ptr, iter)
decoder.fieldDecoder1.Decode(ptr, iter)
case decoder.fieldHash2:
decoder.fieldDecoder2.decode(ptr, iter)
decoder.fieldDecoder2.Decode(ptr, iter)
case decoder.fieldHash3:
decoder.fieldDecoder3.decode(ptr, iter)
decoder.fieldDecoder3.Decode(ptr, iter)
case decoder.fieldHash4:
decoder.fieldDecoder4.decode(ptr, iter)
decoder.fieldDecoder4.Decode(ptr, iter)
case decoder.fieldHash5:
decoder.fieldDecoder5.decode(ptr, iter)
decoder.fieldDecoder5.Decode(ptr, iter)
case decoder.fieldHash6:
decoder.fieldDecoder6.decode(ptr, iter)
decoder.fieldDecoder6.Decode(ptr, iter)
default:
iter.Skip()
}
@ -832,26 +734,26 @@ type sevenFieldsStructDecoder struct {
fieldDecoder7 *structFieldDecoder
}
func (decoder *sevenFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.readObjectStart() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
decoder.fieldDecoder1.decode(ptr, iter)
decoder.fieldDecoder1.Decode(ptr, iter)
case decoder.fieldHash2:
decoder.fieldDecoder2.decode(ptr, iter)
decoder.fieldDecoder2.Decode(ptr, iter)
case decoder.fieldHash3:
decoder.fieldDecoder3.decode(ptr, iter)
decoder.fieldDecoder3.Decode(ptr, iter)
case decoder.fieldHash4:
decoder.fieldDecoder4.decode(ptr, iter)
decoder.fieldDecoder4.Decode(ptr, iter)
case decoder.fieldHash5:
decoder.fieldDecoder5.decode(ptr, iter)
decoder.fieldDecoder5.Decode(ptr, iter)
case decoder.fieldHash6:
decoder.fieldDecoder6.decode(ptr, iter)
decoder.fieldDecoder6.Decode(ptr, iter)
case decoder.fieldHash7:
decoder.fieldDecoder7.decode(ptr, iter)
decoder.fieldDecoder7.Decode(ptr, iter)
default:
iter.Skip()
}
@ -884,28 +786,28 @@ type eightFieldsStructDecoder struct {
fieldDecoder8 *structFieldDecoder
}
func (decoder *eightFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.readObjectStart() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
decoder.fieldDecoder1.decode(ptr, iter)
decoder.fieldDecoder1.Decode(ptr, iter)
case decoder.fieldHash2:
decoder.fieldDecoder2.decode(ptr, iter)
decoder.fieldDecoder2.Decode(ptr, iter)
case decoder.fieldHash3:
decoder.fieldDecoder3.decode(ptr, iter)
decoder.fieldDecoder3.Decode(ptr, iter)
case decoder.fieldHash4:
decoder.fieldDecoder4.decode(ptr, iter)
decoder.fieldDecoder4.Decode(ptr, iter)
case decoder.fieldHash5:
decoder.fieldDecoder5.decode(ptr, iter)
decoder.fieldDecoder5.Decode(ptr, iter)
case decoder.fieldHash6:
decoder.fieldDecoder6.decode(ptr, iter)
decoder.fieldDecoder6.Decode(ptr, iter)
case decoder.fieldHash7:
decoder.fieldDecoder7.decode(ptr, iter)
decoder.fieldDecoder7.Decode(ptr, iter)
case decoder.fieldHash8:
decoder.fieldDecoder8.decode(ptr, iter)
decoder.fieldDecoder8.Decode(ptr, iter)
default:
iter.Skip()
}
@ -940,30 +842,30 @@ type nineFieldsStructDecoder struct {
fieldDecoder9 *structFieldDecoder
}
func (decoder *nineFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.readObjectStart() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
decoder.fieldDecoder1.decode(ptr, iter)
decoder.fieldDecoder1.Decode(ptr, iter)
case decoder.fieldHash2:
decoder.fieldDecoder2.decode(ptr, iter)
decoder.fieldDecoder2.Decode(ptr, iter)
case decoder.fieldHash3:
decoder.fieldDecoder3.decode(ptr, iter)
decoder.fieldDecoder3.Decode(ptr, iter)
case decoder.fieldHash4:
decoder.fieldDecoder4.decode(ptr, iter)
decoder.fieldDecoder4.Decode(ptr, iter)
case decoder.fieldHash5:
decoder.fieldDecoder5.decode(ptr, iter)
decoder.fieldDecoder5.Decode(ptr, iter)
case decoder.fieldHash6:
decoder.fieldDecoder6.decode(ptr, iter)
decoder.fieldDecoder6.Decode(ptr, iter)
case decoder.fieldHash7:
decoder.fieldDecoder7.decode(ptr, iter)
decoder.fieldDecoder7.Decode(ptr, iter)
case decoder.fieldHash8:
decoder.fieldDecoder8.decode(ptr, iter)
decoder.fieldDecoder8.Decode(ptr, iter)
case decoder.fieldHash9:
decoder.fieldDecoder9.decode(ptr, iter)
decoder.fieldDecoder9.Decode(ptr, iter)
default:
iter.Skip()
}
@ -1000,32 +902,32 @@ type tenFieldsStructDecoder struct {
fieldDecoder10 *structFieldDecoder
}
func (decoder *tenFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.readObjectStart() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
decoder.fieldDecoder1.decode(ptr, iter)
decoder.fieldDecoder1.Decode(ptr, iter)
case decoder.fieldHash2:
decoder.fieldDecoder2.decode(ptr, iter)
decoder.fieldDecoder2.Decode(ptr, iter)
case decoder.fieldHash3:
decoder.fieldDecoder3.decode(ptr, iter)
decoder.fieldDecoder3.Decode(ptr, iter)
case decoder.fieldHash4:
decoder.fieldDecoder4.decode(ptr, iter)
decoder.fieldDecoder4.Decode(ptr, iter)
case decoder.fieldHash5:
decoder.fieldDecoder5.decode(ptr, iter)
decoder.fieldDecoder5.Decode(ptr, iter)
case decoder.fieldHash6:
decoder.fieldDecoder6.decode(ptr, iter)
decoder.fieldDecoder6.Decode(ptr, iter)
case decoder.fieldHash7:
decoder.fieldDecoder7.decode(ptr, iter)
decoder.fieldDecoder7.Decode(ptr, iter)
case decoder.fieldHash8:
decoder.fieldDecoder8.decode(ptr, iter)
decoder.fieldDecoder8.Decode(ptr, iter)
case decoder.fieldHash9:
decoder.fieldDecoder9.decode(ptr, iter)
decoder.fieldDecoder9.Decode(ptr, iter)
case decoder.fieldHash10:
decoder.fieldDecoder10.decode(ptr, iter)
decoder.fieldDecoder10.Decode(ptr, iter)
default:
iter.Skip()
}
@ -1040,12 +942,12 @@ func (decoder *tenFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator
type structFieldDecoder struct {
field *reflect.StructField
fieldDecoder Decoder
fieldDecoder ValDecoder
}
func (decoder *structFieldDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *structFieldDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
fieldPtr := uintptr(ptr) + decoder.field.Offset
decoder.fieldDecoder.decode(unsafe.Pointer(fieldPtr), iter)
decoder.fieldDecoder.Decode(unsafe.Pointer(fieldPtr), iter)
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%s: %s", decoder.field.Name, iter.Error.Error())
}
@ -1053,73 +955,78 @@ func (decoder *structFieldDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
type structFieldEncoder struct {
field *reflect.StructField
fieldName string
fieldEncoder Encoder
fieldEncoder ValEncoder
omitempty bool
}
func (encoder *structFieldEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
func (encoder *structFieldEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
fieldPtr := uintptr(ptr) + encoder.field.Offset
stream.WriteObjectField(encoder.fieldName)
encoder.fieldEncoder.encode(unsafe.Pointer(fieldPtr), stream)
encoder.fieldEncoder.Encode(unsafe.Pointer(fieldPtr), stream)
if stream.Error != nil && stream.Error != io.EOF {
stream.Error = fmt.Errorf("%s: %s", encoder.field.Name, stream.Error.Error())
}
}
func (encoder *structFieldEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *structFieldEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *structFieldEncoder) isEmpty(ptr unsafe.Pointer) bool {
func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool {
fieldPtr := uintptr(ptr) + encoder.field.Offset
return encoder.fieldEncoder.isEmpty(unsafe.Pointer(fieldPtr))
return encoder.fieldEncoder.IsEmpty(unsafe.Pointer(fieldPtr))
}
type structEncoder struct {
fields []*structFieldEncoder
fields map[string]*structFieldEncoder
}
func (encoder *structEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
func (encoder *structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteObjectStart()
isNotFirst := false
for _, field := range encoder.fields {
if field.omitempty && field.isEmpty(ptr) {
for fieldName, field := range encoder.fields {
if field.omitempty && field.IsEmpty(ptr) {
continue
}
if isNotFirst {
stream.WriteMore()
}
field.encode(ptr, stream)
stream.WriteObjectField(fieldName)
field.Encode(ptr, stream)
isNotFirst = true
}
stream.WriteObjectEnd()
}
func (encoder *structEncoder) encodeInterface(val interface{}, stream *Stream) {
var encoderToUse Encoder
func (encoder *structEncoder) EncodeInterface(val interface{}, stream *Stream) {
var encoderToUse ValEncoder
encoderToUse = encoder
if len(encoder.fields) == 1 {
firstEncoder := encoder.fields[0].fieldEncoder
var firstField *structFieldEncoder
var firstFieldName string
for fieldName, field := range encoder.fields {
firstFieldName = fieldName
firstField = field
}
firstEncoder := firstField.fieldEncoder
firstEncoderName := reflect.TypeOf(firstEncoder).String()
// interface{} has inline optimization for this case
if firstEncoderName == "*jsoniter.optionalEncoder" {
encoderToUse = &structEncoder{
fields: []*structFieldEncoder{{
field: encoder.fields[0].field,
fieldName: encoder.fields[0].fieldName,
fieldEncoder: firstEncoder.(*optionalEncoder).valueEncoder,
omitempty: encoder.fields[0].omitempty,
}},
fields: map[string]*structFieldEncoder{
firstFieldName: {
field: firstField.field,
fieldEncoder: firstEncoder.(*optionalEncoder).valueEncoder,
omitempty: firstField.omitempty,
}},
}
}
}
writeToStream(val, stream, encoderToUse)
WriteToStream(val, stream, encoderToUse)
}
func (encoder *structEncoder) isEmpty(ptr unsafe.Pointer) bool {
func (encoder *structEncoder) IsEmpty(ptr unsafe.Pointer) bool {
for _, field := range encoder.fields {
if !field.isEmpty(ptr) {
if !field.IsEmpty(ptr) {
return false
}
}
@ -1129,14 +1036,14 @@ func (encoder *structEncoder) isEmpty(ptr unsafe.Pointer) bool {
type emptyStructEncoder struct {
}
func (encoder *emptyStructEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
func (encoder *emptyStructEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteEmptyObject()
}
func (encoder *emptyStructEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
func (encoder *emptyStructEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *emptyStructEncoder) isEmpty(ptr unsafe.Pointer) bool {
func (encoder *emptyStructEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return true
}
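structFieldEncoder and structEncoder skip a field when it carries omitempty and its encoder reports IsEmpty, which is the usual `json:",omitempty"` semantics. A quick reference sketch with the standard library:
package main

import (
    "encoding/json"
    "fmt"
)

type Profile struct {
    Name string `json:"name"`
    Bio  string `json:"bio,omitempty"` // dropped from the output when empty
}

func main() {
    out, _ := json.Marshal(Profile{Name: "gopher"})
    fmt.Println(string(out)) // {"name":"gopher"}
}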

feature_reflect_slice.go Normal file (162 lines added)

@ -0,0 +1,162 @@
package jsoniter
import (
"fmt"
"io"
"reflect"
"unsafe"
)
func decoderOfSlice(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
decoder, err := decoderOfType(cfg, typ.Elem())
if err != nil {
return nil, err
}
return &sliceDecoder{typ, typ.Elem(), decoder}, nil
}
func encoderOfSlice(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
encoder, err := encoderOfType(cfg, typ.Elem())
if err != nil {
return nil, err
}
if typ.Elem().Kind() == reflect.Map {
encoder = &optionalEncoder{encoder}
}
return &sliceEncoder{typ, typ.Elem(), encoder}, nil
}
type sliceEncoder struct {
sliceType reflect.Type
elemType reflect.Type
elemEncoder ValEncoder
}
func (encoder *sliceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
slice := (*sliceHeader)(ptr)
if slice.Data == nil {
stream.WriteNil()
return
}
if slice.Len == 0 {
stream.WriteEmptyArray()
return
}
stream.WriteArrayStart()
elemPtr := uintptr(slice.Data)
encoder.elemEncoder.Encode(unsafe.Pointer(elemPtr), stream)
for i := 1; i < slice.Len; i++ {
stream.WriteMore()
elemPtr += encoder.elemType.Size()
encoder.elemEncoder.Encode(unsafe.Pointer(elemPtr), stream)
}
stream.WriteArrayEnd()
if stream.Error != nil && stream.Error != io.EOF {
stream.Error = fmt.Errorf("%v: %s", encoder.sliceType, stream.Error.Error())
}
}
func (encoder *sliceEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *sliceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
slice := (*sliceHeader)(ptr)
return slice.Len == 0
}
type sliceDecoder struct {
sliceType reflect.Type
elemType reflect.Type
elemDecoder ValDecoder
}
// sliceHeader is a safe version of SliceHeader used within this package.
type sliceHeader struct {
Data unsafe.Pointer
Len int
Cap int
}
func (decoder *sliceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
decoder.doDecode(ptr, iter)
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.sliceType, iter.Error.Error())
}
}
func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
slice := (*sliceHeader)(ptr)
reuseSlice(slice, decoder.sliceType, 4)
if !iter.ReadArray() {
return
}
offset := uintptr(0)
decoder.elemDecoder.Decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
if !iter.ReadArray() {
slice.Len = 1
return
}
offset += decoder.elemType.Size()
decoder.elemDecoder.Decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
if !iter.ReadArray() {
slice.Len = 2
return
}
offset += decoder.elemType.Size()
decoder.elemDecoder.Decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
if !iter.ReadArray() {
slice.Len = 3
return
}
offset += decoder.elemType.Size()
decoder.elemDecoder.Decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
slice.Len = 4
for iter.ReadArray() {
growOne(slice, decoder.sliceType, decoder.elemType)
offset += decoder.elemType.Size()
decoder.elemDecoder.Decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
}
}
// growOne grows the slice so that it can hold one more value,
// allocating more capacity if needed.
func growOne(slice *sliceHeader, sliceType reflect.Type, elementType reflect.Type) {
newLen := slice.Len + 1
if newLen <= slice.Cap {
slice.Len = newLen
return
}
newCap := slice.Cap
if newCap == 0 {
newCap = 1
} else {
for newCap < newLen {
if slice.Len < 1024 {
newCap += newCap
} else {
newCap += newCap / 4
}
}
}
dst := unsafe.Pointer(reflect.MakeSlice(sliceType, newLen, newCap).Pointer())
// copy old array into new array
originalBytesCount := uintptr(slice.Len) * elementType.Size()
srcPtr := (*[1 << 30]byte)(slice.Data)
dstPtr := (*[1 << 30]byte)(dst)
for i := uintptr(0); i < originalBytesCount; i++ {
dstPtr[i] = srcPtr[i]
}
slice.Len = newLen
slice.Cap = newCap
slice.Data = dst
}
func reuseSlice(slice *sliceHeader, sliceType reflect.Type, expectedCap int) {
if expectedCap <= slice.Cap {
return
}
dst := unsafe.Pointer(reflect.MakeSlice(sliceType, 0, expectedCap).Pointer())
slice.Cap = expectedCap
slice.Data = dst
}
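growOne doubles the capacity while the slice is still short and switches to roughly 25% growth once the length reaches 1024. A standalone sketch of that sizing policy (the nextCap helper is illustrative only, not part of this package):
package main

import "fmt"

// nextCap mirrors the growth policy used by growOne above:
// double while the slice is still small, grow by ~25% once it is large.
func nextCap(length, capacity int) int {
    newLen := length + 1
    if newLen <= capacity {
        return capacity
    }
    newCap := capacity
    if newCap == 0 {
        return 1
    }
    for newCap < newLen {
        if length < 1024 {
            newCap += newCap
        } else {
            newCap += newCap / 4
        }
    }
    return newCap
}

func main() {
    fmt.Println(nextCap(4, 4))       // 8
    fmt.Println(nextCap(2048, 2048)) // 2560
}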

View File

@ -5,16 +5,23 @@ import (
)
type Stream struct {
out io.Writer
buf []byte
n int
Error error
indention int
IndentionStep int
cfg *frozenConfig
out io.Writer
buf []byte
n int
Error error
indention int
}
func NewStream(out io.Writer, bufSize int) *Stream {
return &Stream{out, make([]byte, bufSize), 0, nil, 0, 0}
func NewStream(cfg *frozenConfig, out io.Writer, bufSize int) *Stream {
return &Stream{
cfg: cfg,
out: out,
buf: make([]byte, bufSize),
n: 0,
Error: nil,
indention: 0,
}
}
func (b *Stream) Reset(out io.Writer) {
@ -32,24 +39,32 @@ func (b *Stream) Buffered() int {
return b.n
}
func (b *Stream) Buffer() []byte {
return b.buf[:b.n]
}
// Write writes the contents of p into the buffer.
// It returns the number of bytes written.
// If nn < len(p), it also returns an error explaining
// why the write is short.
func (b *Stream) Write(p []byte) (nn int, err error) {
for len(p) > b.Available() && b.Error == nil {
var n int
if b.Buffered() == 0 {
// Large write, empty buffer.
// Write directly from p to avoid copy.
n, b.Error = b.out.Write(p)
if b.out == nil {
b.growAtLeast(len(p))
} else {
n = copy(b.buf[b.n:], p)
b.n += n
b.Flush()
var n int
if b.Buffered() == 0 {
// Large write, empty buffer.
// Write directly from p to avoid copy.
n, b.Error = b.out.Write(p)
} else {
n = copy(b.buf[b.n:], p)
b.n += n
b.Flush()
}
nn += n
p = p[n:]
}
nn += n
p = p[n:]
}
if b.Error != nil {
return nn, b.Error
@ -60,14 +75,13 @@ func (b *Stream) Write(p []byte) (nn int, err error) {
return nn, nil
}
// writeByte writes a single byte.
func (b *Stream) writeByte(c byte) {
if b.Error != nil {
return
}
if b.Available() <= 0 && b.Flush() != nil {
return
if b.Available() < 1 {
b.growAtLeast(1)
}
b.buf[b.n] = c
b.n++
@ -77,11 +91,11 @@ func (b *Stream) writeTwoBytes(c1 byte, c2 byte) {
if b.Error != nil {
return
}
if b.Available() <= 1 && b.Flush() != nil {
return
if b.Available() < 2 {
b.growAtLeast(2)
}
b.buf[b.n] = c1
b.buf[b.n + 1] = c2
b.buf[b.n+1] = c2
b.n += 2
}
@ -89,12 +103,12 @@ func (b *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) {
if b.Error != nil {
return
}
if b.Available() <= 2 && b.Flush() != nil {
return
if b.Available() < 3 {
b.growAtLeast(3)
}
b.buf[b.n] = c1
b.buf[b.n + 1] = c2
b.buf[b.n + 2] = c3
b.buf[b.n+1] = c2
b.buf[b.n+2] = c3
b.n += 3
}
@ -102,13 +116,13 @@ func (b *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) {
if b.Error != nil {
return
}
if b.Available() <= 3 && b.Flush() != nil {
return
if b.Available() < 4 {
b.growAtLeast(4)
}
b.buf[b.n] = c1
b.buf[b.n + 1] = c2
b.buf[b.n + 2] = c3
b.buf[b.n + 3] = c4
b.buf[b.n+1] = c2
b.buf[b.n+2] = c3
b.buf[b.n+3] = c4
b.n += 4
}
@ -116,19 +130,22 @@ func (b *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) {
if b.Error != nil {
return
}
if b.Available() <= 3 && b.Flush() != nil {
return
if b.Available() < 5 {
b.growAtLeast(5)
}
b.buf[b.n] = c1
b.buf[b.n + 1] = c2
b.buf[b.n + 2] = c3
b.buf[b.n + 3] = c4
b.buf[b.n + 4] = c5
b.buf[b.n+1] = c2
b.buf[b.n+2] = c3
b.buf[b.n+3] = c4
b.buf[b.n+4] = c5
b.n += 5
}
// Flush writes any buffered data to the underlying io.Writer.
func (b *Stream) Flush() error {
if b.out == nil {
return nil
}
if b.Error != nil {
return b.Error
}
@ -141,7 +158,7 @@ func (b *Stream) Flush() error {
}
if err != nil {
if n > 0 && n < b.n {
copy(b.buf[0:b.n - n], b.buf[n:b.n])
copy(b.buf[0:b.n-n], b.buf[n:b.n])
}
b.n -= n
b.Error = err
@ -151,13 +168,28 @@ func (b *Stream) Flush() error {
return nil
}
func (b *Stream) WriteRaw(s string) {
for len(s) > b.Available() && b.Error == nil {
n := copy(b.buf[b.n:], s)
b.n += n
s = s[n:]
b.Flush()
func (b *Stream) ensure(minimal int) {
available := b.Available()
if available < minimal {
if b.n > 1024 {
b.Flush()
}
b.growAtLeast(minimal)
}
}
func (b *Stream) growAtLeast(minimal int) {
toGrow := len(b.buf)
if toGrow < minimal {
toGrow = minimal
}
newBuf := make([]byte, len(b.buf)+toGrow)
copy(newBuf, b.Buffer())
b.buf = newBuf
}
func (b *Stream) WriteRaw(s string) {
b.ensure(len(s))
if b.Error != nil {
return
}
@ -165,69 +197,6 @@ func (b *Stream) WriteRaw(s string) {
b.n += n
}
func (stream *Stream) WriteString(s string) {
valLen := len(s)
toWriteLen := valLen
bufLengthMinusTwo := len(stream.buf) - 2 // make room for the quotes
if stream.n + toWriteLen > bufLengthMinusTwo {
toWriteLen = bufLengthMinusTwo - stream.n
}
if toWriteLen < 0 {
stream.Flush()
if stream.n + toWriteLen > bufLengthMinusTwo {
toWriteLen = bufLengthMinusTwo - stream.n
}
}
n := stream.n
stream.buf[n] = '"'
n++
// write string, the fast path, without utf8 and escape support
i := 0
for ; i < toWriteLen; i++ {
c := s[i]
if c > 31 && c != '"' && c != '\\' {
stream.buf[n] = c
n++
} else {
break;
}
}
if i == valLen {
stream.buf[n] = '"'
n++
stream.n = n
return
}
stream.n = n
// for the remaining parts, we process them char by char
stream.writeStringSlowPath(s, i, valLen);
stream.writeByte('"')
}
func (stream *Stream) writeStringSlowPath(s string, i int, valLen int) {
for ; i < valLen; i++ {
c := s[i]
switch (c) {
case '"':
stream.writeTwoBytes('\\', '"')
case '\\':
stream.writeTwoBytes('\\', '\\')
case '\b':
stream.writeTwoBytes('\\', 'b')
case '\f':
stream.writeTwoBytes('\\', 'f')
case '\n':
stream.writeTwoBytes('\\', 'n')
case '\r':
stream.writeTwoBytes('\\', 'r')
case '\t':
stream.writeTwoBytes('\\', 't')
default:
stream.writeByte(c);
}
}
}
func (stream *Stream) WriteNil() {
stream.writeFourBytes('n', 'u', 'l', 'l')
}
@ -249,7 +218,7 @@ func (stream *Stream) WriteBool(val bool) {
}
func (stream *Stream) WriteObjectStart() {
stream.indention += stream.IndentionStep
stream.indention += stream.cfg.indentionStep
stream.writeByte('{')
stream.writeIndention(0)
}
@ -260,8 +229,8 @@ func (stream *Stream) WriteObjectField(field string) {
}
func (stream *Stream) WriteObjectEnd() {
stream.writeIndention(stream.IndentionStep)
stream.indention -= stream.IndentionStep
stream.writeIndention(stream.cfg.indentionStep)
stream.indention -= stream.cfg.indentionStep
stream.writeByte('}')
}
@ -276,7 +245,7 @@ func (stream *Stream) WriteMore() {
}
func (stream *Stream) WriteArrayStart() {
stream.indention += stream.IndentionStep
stream.indention += stream.cfg.indentionStep
stream.writeByte('[')
stream.writeIndention(0)
}
@ -287,27 +256,20 @@ func (stream *Stream) WriteEmptyArray() {
}
func (stream *Stream) WriteArrayEnd() {
stream.writeIndention(stream.IndentionStep)
stream.indention -= stream.IndentionStep
stream.writeIndention(stream.cfg.indentionStep)
stream.indention -= stream.cfg.indentionStep
stream.writeByte(']')
}
func (stream *Stream) writeIndention(delta int) {
if (stream.indention == 0) {
if stream.indention == 0 {
return
}
stream.writeByte('\n')
toWrite := stream.indention - delta
i := 0
for {
for ; i < toWrite && stream.n < len(stream.buf); i++ {
stream.buf[stream.n] = ' '
stream.n ++
}
if i == toWrite {
break;
} else {
stream.Flush()
}
stream.ensure(toWrite)
for i := 0; i < toWrite && stream.n < len(stream.buf); i++ {
stream.buf[stream.n] = ' '
stream.n++
}
}
}

View File

@ -2,13 +2,12 @@ package jsoniter
import (
"strconv"
"unsafe"
)
var POW10 []uint64
var _POW10 []uint64
func init() {
POW10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000}
_POW10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000}
}
func (stream *Stream) WriteFloat32(val float32) {
@ -21,26 +20,24 @@ func (stream *Stream) WriteFloat32Lossy(val float32) {
val = -val
}
if val > 0x4ffffff {
stream.WriteRaw(strconv.FormatFloat(float64(val), 'f', -1, 32));
stream.WriteRaw(strconv.FormatFloat(float64(val), 'f', -1, 32))
return
}
precision := 6
exp := uint64(1000000) // 6
lval := uint64(float64(val) * float64(exp) + 0.5)
lval := uint64(float64(val)*float64(exp) + 0.5)
stream.WriteUint64(lval / exp)
fval := lval % exp
if fval == 0 {
return
}
stream.writeByte('.')
if stream.Available() < 10 {
stream.Flush()
}
for p := precision - 1; p > 0 && fval < POW10[p]; p-- {
stream.ensure(10)
for p := precision - 1; p > 0 && fval < _POW10[p]; p-- {
stream.writeByte('0')
}
stream.WriteUint64(fval)
for stream.buf[stream.n - 1] == '0' {
for stream.buf[stream.n-1] == '0' {
stream.n--
}
}
@ -55,38 +52,24 @@ func (stream *Stream) WriteFloat64Lossy(val float64) {
val = -val
}
if val > 0x4ffffff {
stream.WriteRaw(strconv.FormatFloat(val, 'f', -1, 64));
stream.WriteRaw(strconv.FormatFloat(val, 'f', -1, 64))
return
}
precision := 6
exp := uint64(1000000) // 6
lval := uint64(val * float64(exp) + 0.5)
lval := uint64(val*float64(exp) + 0.5)
stream.WriteUint64(lval / exp)
fval := lval % exp
if fval == 0 {
return
}
stream.writeByte('.')
if stream.Available() < 10 {
stream.Flush()
}
for p := precision - 1; p > 0 && fval < POW10[p]; p-- {
stream.ensure(10)
for p := precision - 1; p > 0 && fval < _POW10[p]; p-- {
stream.writeByte('0')
}
stream.WriteUint64(fval)
for stream.buf[stream.n - 1] == '0' {
for stream.buf[stream.n-1] == '0' {
stream.n--
}
}
func EnableLossyFloatMarshalling() {
// for better performance
RegisterTypeEncoder("float32", func(ptr unsafe.Pointer, stream *Stream) {
val := *((*float32)(ptr))
stream.WriteFloat32Lossy(val)
})
RegisterTypeEncoder("float64", func(ptr unsafe.Pointer, stream *Stream) {
val := *((*float64)(ptr))
stream.WriteFloat64Lossy(val)
})
}
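WriteFloat64Lossy formats small values as fixed point with at most six fractional digits and trims trailing zeros, falling back to strconv.FormatFloat for large values. A standalone sketch of the fixed-point path (the lossy helper is illustrative only, not this package's API):
package main

import (
    "fmt"
    "strings"
)

// lossy mirrors the fixed-point path above: scale by 1e6, round,
// print integer and fractional parts, then trim trailing zeros.
func lossy(val float64) string {
    exp := uint64(1000000)
    lval := uint64(val*float64(exp) + 0.5)
    s := fmt.Sprintf("%d", lval/exp)
    frac := lval % exp
    if frac == 0 {
        return s
    }
    s += "." + fmt.Sprintf("%06d", frac)
    return strings.TrimRight(s, "0")
}

func main() {
    fmt.Println(lossy(0.1))        // 0.1
    fmt.Println(lossy(1.23456789)) // 1.234568
}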

View File

@ -1,50 +1,15 @@
package jsoniter
var digits []uint8
var digitTens []uint8
var digitOnes []uint8
var DIGITS []uint32
var _DIGITS []uint32
func init() {
digits = []uint8{
'0', '1', '2', '3', '4', '5',
'6', '7', '8', '9', 'a', 'b',
'c', 'd', 'e', 'f', 'g', 'h',
'i', 'j', 'k', 'l', 'm', 'n',
'o', 'p', 'q', 'r', 's', 't',
'u', 'v', 'w', 'x', 'y', 'z',
}
digitTens = []uint8{
'0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
'1', '1', '1', '1', '1', '1', '1', '1', '1', '1',
'2', '2', '2', '2', '2', '2', '2', '2', '2', '2',
'3', '3', '3', '3', '3', '3', '3', '3', '3', '3',
'4', '4', '4', '4', '4', '4', '4', '4', '4', '4',
'5', '5', '5', '5', '5', '5', '5', '5', '5', '5',
'6', '6', '6', '6', '6', '6', '6', '6', '6', '6',
'7', '7', '7', '7', '7', '7', '7', '7', '7', '7',
'8', '8', '8', '8', '8', '8', '8', '8', '8', '8',
'9', '9', '9', '9', '9', '9', '9', '9', '9', '9',
}
digitOnes = []uint8{
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
}
DIGITS = make([]uint32, 1000)
_DIGITS = make([]uint32, 1000)
for i := uint32(0); i < 1000; i++ {
DIGITS[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i % 10 + '0';
_DIGITS[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0'
if i < 10 {
DIGITS[i] += 2 << 24
_DIGITS[i] += 2 << 24
} else if i < 100 {
DIGITS[i] += 1 << 24
_DIGITS[i] += 1 << 24
}
}
}
@ -67,56 +32,48 @@ func writeFirstBuf(buf []byte, v uint32, n int) int {
func writeBuf(buf []byte, v uint32, n int) {
buf[n] = byte(v >> 16)
buf[n + 1] = byte(v >> 8)
buf[n + 2] = byte(v)
buf[n+1] = byte(v >> 8)
buf[n+2] = byte(v)
}
func (stream *Stream) WriteUint8(val uint8) {
if stream.Available() < 3 {
stream.Flush()
}
stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n)
stream.ensure(3)
stream.n = writeFirstBuf(stream.buf, _DIGITS[val], stream.n)
}
func (stream *Stream) WriteInt8(nval int8) {
if stream.Available() < 4 {
stream.Flush()
}
stream.ensure(4)
n := stream.n
var val uint8
if (nval < 0) {
if nval < 0 {
val = uint8(-nval)
stream.buf[n] = '-'
n++
} else {
val = uint8(nval)
}
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
stream.n = writeFirstBuf(stream.buf, _DIGITS[val], n)
}
func (stream *Stream) WriteUint16(val uint16) {
if stream.Available() < 5 {
stream.Flush()
}
stream.ensure(5)
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n)
stream.n = writeFirstBuf(stream.buf, _DIGITS[val], stream.n)
return
}
r1 := val - q1 * 1000;
n := writeFirstBuf(stream.buf, DIGITS[q1], stream.n)
writeBuf(stream.buf, DIGITS[r1], n)
r1 := val - q1*1000
n := writeFirstBuf(stream.buf, _DIGITS[q1], stream.n)
writeBuf(stream.buf, _DIGITS[r1], n)
stream.n = n + 3
return
}
func (stream *Stream) WriteInt16(nval int16) {
if stream.Available() < 6 {
stream.Flush()
}
stream.ensure(6)
n := stream.n
var val uint16
if (nval < 0) {
if nval < 0 {
val = uint16(-nval)
stream.buf[n] = '-'
n++
@ -125,57 +82,53 @@ func (stream *Stream) WriteInt16(nval int16) {
}
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
stream.n = writeFirstBuf(stream.buf, _DIGITS[val], n)
return
}
r1 := val - q1 * 1000;
n = writeFirstBuf(stream.buf, DIGITS[q1], n)
writeBuf(stream.buf, DIGITS[r1], n)
r1 := val - q1*1000
n = writeFirstBuf(stream.buf, _DIGITS[q1], n)
writeBuf(stream.buf, _DIGITS[r1], n)
stream.n = n + 3
return
}
func (stream *Stream) WriteUint32(val uint32) {
if stream.Available() < 10 {
stream.Flush()
}
stream.ensure(10)
n := stream.n
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
stream.n = writeFirstBuf(stream.buf, _DIGITS[val], n)
return
}
r1 := val - q1 * 1000;
r1 := val - q1*1000
q2 := q1 / 1000
if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n)
writeBuf(stream.buf, DIGITS[r1], n)
n := writeFirstBuf(stream.buf, _DIGITS[q1], n)
writeBuf(stream.buf, _DIGITS[r1], n)
stream.n = n + 3
return
}
r2 := q1 - q2 * 1000
r2 := q1 - q2*1000
q3 := q2 / 1000
if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n)
n = writeFirstBuf(stream.buf, _DIGITS[q2], n)
} else {
r3 := q2 - q3 * 1000
r3 := q2 - q3*1000
stream.buf[n] = byte(q3 + '0')
n++
writeBuf(stream.buf, DIGITS[r3], n)
writeBuf(stream.buf, _DIGITS[r3], n)
n += 3
}
writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3)
writeBuf(stream.buf, _DIGITS[r2], n)
writeBuf(stream.buf, _DIGITS[r1], n+3)
stream.n = n + 6
}
func (stream *Stream) WriteInt32(nval int32) {
if stream.Available() < 11 {
stream.Flush()
}
stream.ensure(11)
n := stream.n
var val uint32
if (nval < 0) {
if nval < 0 {
val = uint32(-nval)
stream.buf[n] = '-'
n++
@ -184,106 +137,102 @@ func (stream *Stream) WriteInt32(nval int32) {
}
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
stream.n = writeFirstBuf(stream.buf, _DIGITS[val], n)
return
}
r1 := val - q1 * 1000;
r1 := val - q1*1000
q2 := q1 / 1000
if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n)
writeBuf(stream.buf, DIGITS[r1], n)
n := writeFirstBuf(stream.buf, _DIGITS[q1], n)
writeBuf(stream.buf, _DIGITS[r1], n)
stream.n = n + 3
return
}
r2 := q1 - q2 * 1000
r2 := q1 - q2*1000
q3 := q2 / 1000
if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n)
n = writeFirstBuf(stream.buf, _DIGITS[q2], n)
} else {
r3 := q2 - q3 * 1000
r3 := q2 - q3*1000
stream.buf[n] = byte(q3 + '0')
n++
writeBuf(stream.buf, DIGITS[r3], n)
writeBuf(stream.buf, _DIGITS[r3], n)
n += 3
}
writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3)
writeBuf(stream.buf, _DIGITS[r2], n)
writeBuf(stream.buf, _DIGITS[r1], n+3)
stream.n = n + 6
}
func (stream *Stream) WriteUint64(val uint64) {
if stream.Available() < 20 {
stream.Flush()
}
stream.ensure(20)
n := stream.n
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
stream.n = writeFirstBuf(stream.buf, _DIGITS[val], n)
return
}
r1 := val - q1 * 1000;
r1 := val - q1*1000
q2 := q1 / 1000
if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n)
writeBuf(stream.buf, DIGITS[r1], n)
n := writeFirstBuf(stream.buf, _DIGITS[q1], n)
writeBuf(stream.buf, _DIGITS[r1], n)
stream.n = n + 3
return
}
r2 := q1 - q2 * 1000
r2 := q1 - q2*1000
q3 := q2 / 1000
if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n)
writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3)
n = writeFirstBuf(stream.buf, _DIGITS[q2], n)
writeBuf(stream.buf, _DIGITS[r2], n)
writeBuf(stream.buf, _DIGITS[r1], n+3)
stream.n = n + 6
return
}
r3 := q2 - q3 * 1000
r3 := q2 - q3*1000
q4 := q3 / 1000
if q4 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q3], n)
writeBuf(stream.buf, DIGITS[r3], n)
writeBuf(stream.buf, DIGITS[r2], n + 3)
writeBuf(stream.buf, DIGITS[r1], n + 6)
n = writeFirstBuf(stream.buf, _DIGITS[q3], n)
writeBuf(stream.buf, _DIGITS[r3], n)
writeBuf(stream.buf, _DIGITS[r2], n+3)
writeBuf(stream.buf, _DIGITS[r1], n+6)
stream.n = n + 9
return
}
r4 := q3 - q4 * 1000
r4 := q3 - q4*1000
q5 := q4 / 1000
if q5 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q4], n)
writeBuf(stream.buf, DIGITS[r4], n)
writeBuf(stream.buf, DIGITS[r3], n + 3)
writeBuf(stream.buf, DIGITS[r2], n + 6)
writeBuf(stream.buf, DIGITS[r1], n + 9)
n = writeFirstBuf(stream.buf, _DIGITS[q4], n)
writeBuf(stream.buf, _DIGITS[r4], n)
writeBuf(stream.buf, _DIGITS[r3], n+3)
writeBuf(stream.buf, _DIGITS[r2], n+6)
writeBuf(stream.buf, _DIGITS[r1], n+9)
stream.n = n + 12
return
}
r5 := q4 - q5 * 1000
r5 := q4 - q5*1000
q6 := q5 / 1000
if q6 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q5], n)
n = writeFirstBuf(stream.buf, _DIGITS[q5], n)
} else {
n = writeFirstBuf(stream.buf, DIGITS[q6], n)
r6 := q5 - q6 * 1000
writeBuf(stream.buf, DIGITS[r6], n)
n = writeFirstBuf(stream.buf, _DIGITS[q6], n)
r6 := q5 - q6*1000
writeBuf(stream.buf, _DIGITS[r6], n)
n += 3
}
writeBuf(stream.buf, DIGITS[r5], n)
writeBuf(stream.buf, DIGITS[r4], n + 3)
writeBuf(stream.buf, DIGITS[r3], n + 6)
writeBuf(stream.buf, DIGITS[r2], n + 9)
writeBuf(stream.buf, DIGITS[r1], n + 12)
writeBuf(stream.buf, _DIGITS[r5], n)
writeBuf(stream.buf, _DIGITS[r4], n+3)
writeBuf(stream.buf, _DIGITS[r3], n+6)
writeBuf(stream.buf, _DIGITS[r2], n+9)
writeBuf(stream.buf, _DIGITS[r1], n+12)
stream.n = n + 15
}
func (stream *Stream) WriteInt64(nval int64) {
if stream.Available() < 20 {
stream.Flush()
}
stream.ensure(20)
n := stream.n
var val uint64
if (nval < 0) {
if nval < 0 {
val = uint64(-nval)
stream.buf[n] = '-'
n++
@ -292,63 +241,63 @@ func (stream *Stream) WriteInt64(nval int64) {
}
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
stream.n = writeFirstBuf(stream.buf, _DIGITS[val], n)
return
}
r1 := val - q1 * 1000;
r1 := val - q1*1000
q2 := q1 / 1000
if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n)
writeBuf(stream.buf, DIGITS[r1], n)
n := writeFirstBuf(stream.buf, _DIGITS[q1], n)
writeBuf(stream.buf, _DIGITS[r1], n)
stream.n = n + 3
return
}
r2 := q1 - q2 * 1000
r2 := q1 - q2*1000
q3 := q2 / 1000
if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n)
writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3)
n = writeFirstBuf(stream.buf, _DIGITS[q2], n)
writeBuf(stream.buf, _DIGITS[r2], n)
writeBuf(stream.buf, _DIGITS[r1], n+3)
stream.n = n + 6
return
}
r3 := q2 - q3 * 1000
r3 := q2 - q3*1000
q4 := q3 / 1000
if q4 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q3], n)
writeBuf(stream.buf, DIGITS[r3], n)
writeBuf(stream.buf, DIGITS[r2], n + 3)
writeBuf(stream.buf, DIGITS[r1], n + 6)
n = writeFirstBuf(stream.buf, _DIGITS[q3], n)
writeBuf(stream.buf, _DIGITS[r3], n)
writeBuf(stream.buf, _DIGITS[r2], n+3)
writeBuf(stream.buf, _DIGITS[r1], n+6)
stream.n = n + 9
return
}
r4 := q3 - q4 * 1000
r4 := q3 - q4*1000
q5 := q4 / 1000
if q5 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q4], n)
writeBuf(stream.buf, DIGITS[r4], n)
writeBuf(stream.buf, DIGITS[r3], n + 3)
writeBuf(stream.buf, DIGITS[r2], n + 6)
writeBuf(stream.buf, DIGITS[r1], n + 9)
n = writeFirstBuf(stream.buf, _DIGITS[q4], n)
writeBuf(stream.buf, _DIGITS[r4], n)
writeBuf(stream.buf, _DIGITS[r3], n+3)
writeBuf(stream.buf, _DIGITS[r2], n+6)
writeBuf(stream.buf, _DIGITS[r1], n+9)
stream.n = n + 12
return
}
r5 := q4 - q5 * 1000
r5 := q4 - q5*1000
q6 := q5 / 1000
if q6 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q5], n)
n = writeFirstBuf(stream.buf, _DIGITS[q5], n)
} else {
stream.buf[n] = byte(q6 + '0')
n++
r6 := q5 - q6 * 1000
writeBuf(stream.buf, DIGITS[r6], n)
r6 := q5 - q6*1000
writeBuf(stream.buf, _DIGITS[r6], n)
n += 3
}
writeBuf(stream.buf, DIGITS[r5], n)
writeBuf(stream.buf, DIGITS[r4], n + 3)
writeBuf(stream.buf, DIGITS[r3], n + 6)
writeBuf(stream.buf, DIGITS[r2], n + 9)
writeBuf(stream.buf, DIGITS[r1], n + 12)
writeBuf(stream.buf, _DIGITS[r5], n)
writeBuf(stream.buf, _DIGITS[r4], n+3)
writeBuf(stream.buf, _DIGITS[r3], n+6)
writeBuf(stream.buf, _DIGITS[r2], n+9)
writeBuf(stream.buf, _DIGITS[r1], n+12)
stream.n = n + 15
}
@ -358,4 +307,4 @@ func (stream *Stream) WriteInt(val int) {
func (stream *Stream) WriteUint(val uint) {
stream.WriteUint64(uint64(val))
}
}
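_DIGITS packs, for each value 0-999, its three ASCII digits into the low three bytes of a uint32, with the count of leading digits to skip stored in the top byte, so the writers above can emit up to three characters per table lookup. A small sketch decoding one entry (illustrative only):
package main

import "fmt"

func main() {
    // Rebuild one _DIGITS entry the same way init() above does.
    i := uint32(7)
    entry := ((i/100 + '0') << 16) + ((i/10%10 + '0') << 8) + i%10 + '0'
    if i < 10 {
        entry += 2 << 24
    } else if i < 100 {
        entry += 1 << 24
    }
    skip := entry >> 24 // leading zero digits to drop for the first chunk
    digits := []byte{byte(entry >> 16), byte(entry >> 8), byte(entry)}
    fmt.Println(skip, string(digits))  // 2 007
    fmt.Println(string(digits[skip:])) // 7
}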

feature_stream_string.go Normal file (395 lines added)

@ -0,0 +1,395 @@
package jsoniter
import (
"unicode/utf8"
)
// htmlSafeSet holds the value true if the ASCII character with the given
// array position can be safely represented inside a JSON string, embedded
// inside of HTML <script> tags, without any additional escaping.
//
// All values are true except for the ASCII control characters (0-31), the
// double quote ("), the backslash character ("\"), HTML opening and closing
// tags ("<" and ">"), and the ampersand ("&").
var htmlSafeSet = [utf8.RuneSelf]bool{
' ': true,
'!': true,
'"': false,
'#': true,
'$': true,
'%': true,
'&': false,
'\'': true,
'(': true,
')': true,
'*': true,
'+': true,
',': true,
'-': true,
'.': true,
'/': true,
'0': true,
'1': true,
'2': true,
'3': true,
'4': true,
'5': true,
'6': true,
'7': true,
'8': true,
'9': true,
':': true,
';': true,
'<': false,
'=': true,
'>': false,
'?': true,
'@': true,
'A': true,
'B': true,
'C': true,
'D': true,
'E': true,
'F': true,
'G': true,
'H': true,
'I': true,
'J': true,
'K': true,
'L': true,
'M': true,
'N': true,
'O': true,
'P': true,
'Q': true,
'R': true,
'S': true,
'T': true,
'U': true,
'V': true,
'W': true,
'X': true,
'Y': true,
'Z': true,
'[': true,
'\\': false,
']': true,
'^': true,
'_': true,
'`': true,
'a': true,
'b': true,
'c': true,
'd': true,
'e': true,
'f': true,
'g': true,
'h': true,
'i': true,
'j': true,
'k': true,
'l': true,
'm': true,
'n': true,
'o': true,
'p': true,
'q': true,
'r': true,
's': true,
't': true,
'u': true,
'v': true,
'w': true,
'x': true,
'y': true,
'z': true,
'{': true,
'|': true,
'}': true,
'~': true,
'\u007f': true,
}
// safeSet holds the value true if the ASCII character with the given array
// position can be represented inside a JSON string without any further
// escaping.
//
// All values are true except for the ASCII control characters (0-31), the
// double quote ("), and the backslash character ("\").
var safeSet = [utf8.RuneSelf]bool{
' ': true,
'!': true,
'"': false,
'#': true,
'$': true,
'%': true,
'&': true,
'\'': true,
'(': true,
')': true,
'*': true,
'+': true,
',': true,
'-': true,
'.': true,
'/': true,
'0': true,
'1': true,
'2': true,
'3': true,
'4': true,
'5': true,
'6': true,
'7': true,
'8': true,
'9': true,
':': true,
';': true,
'<': true,
'=': true,
'>': true,
'?': true,
'@': true,
'A': true,
'B': true,
'C': true,
'D': true,
'E': true,
'F': true,
'G': true,
'H': true,
'I': true,
'J': true,
'K': true,
'L': true,
'M': true,
'N': true,
'O': true,
'P': true,
'Q': true,
'R': true,
'S': true,
'T': true,
'U': true,
'V': true,
'W': true,
'X': true,
'Y': true,
'Z': true,
'[': true,
'\\': false,
']': true,
'^': true,
'_': true,
'`': true,
'a': true,
'b': true,
'c': true,
'd': true,
'e': true,
'f': true,
'g': true,
'h': true,
'i': true,
'j': true,
'k': true,
'l': true,
'm': true,
'n': true,
'o': true,
'p': true,
'q': true,
'r': true,
's': true,
't': true,
'u': true,
'v': true,
'w': true,
'x': true,
'y': true,
'z': true,
'{': true,
'|': true,
'}': true,
'~': true,
'\u007f': true,
}
var hex = "0123456789abcdef"
func (stream *Stream) WriteStringWithHtmlEscaped(s string) {
stream.ensure(32)
valLen := len(s)
toWriteLen := valLen
bufLengthMinusTwo := len(stream.buf) - 2 // make room for the quotes
if stream.n+toWriteLen > bufLengthMinusTwo {
toWriteLen = bufLengthMinusTwo - stream.n
}
n := stream.n
stream.buf[n] = '"'
n++
// fast path: copy bytes that need neither UTF-8 decoding nor escaping
i := 0
for ; i < toWriteLen; i++ {
c := s[i]
if c <= utf8.RuneSelf && htmlSafeSet[c] {
stream.buf[n] = c
n++
} else {
break
}
}
if i == valLen {
stream.buf[n] = '"'
n++
stream.n = n
return
}
stream.n = n
writeStringSlowPathWithHtmlEscaped(stream, i, s, valLen)
}
func writeStringSlowPathWithHtmlEscaped(stream *Stream, i int, s string, valLen int) {
start := i
// for the remaining parts, we process them char by char
for ; i < valLen; i++ {
if b := s[i]; b < utf8.RuneSelf {
if htmlSafeSet[b] {
i++
continue
}
if start < i {
stream.WriteRaw(s[start:i])
}
switch b {
case '\\', '"':
stream.writeTwoBytes('\\', b)
case '\n':
stream.writeTwoBytes('\\', 'n')
case '\r':
stream.writeTwoBytes('\\', 'r')
case '\t':
stream.writeTwoBytes('\\', 't')
default:
// This encodes bytes < 0x20 except for \t, \n and \r.
// This HTML-escaping variant also escapes <, >, and &
// because they can lead to security holes when
// user-controlled strings are rendered into JSON
// and served to some browsers.
stream.WriteRaw(`\u00`)
stream.writeTwoBytes(hex[b>>4], hex[b&0xF])
}
i++
start = i
continue
}
c, size := utf8.DecodeRuneInString(s[i:])
if c == utf8.RuneError && size == 1 {
if start < i {
stream.WriteRaw(s[start:i])
}
start = i
continue
}
// U+2028 is LINE SEPARATOR.
// U+2029 is PARAGRAPH SEPARATOR.
// They are both technically valid characters in JSON strings,
// but don't work in JSONP, which has to be evaluated as JavaScript,
// and can lead to security holes there. It is valid JSON to
// escape them, so we do so unconditionally.
// See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
if c == '\u2028' || c == '\u2029' {
if start < i {
stream.WriteRaw(s[start:i])
}
stream.WriteRaw(`\u202`)
stream.writeByte(hex[c&0xF])
i += size
start = i
continue
}
i += size
}
if start < len(s) {
stream.WriteRaw(s[start:])
}
stream.writeByte('"')
}
func (stream *Stream) WriteString(s string) {
stream.ensure(32)
valLen := len(s)
toWriteLen := valLen
bufLengthMinusTwo := len(stream.buf) - 2 // make room for the quotes
if stream.n+toWriteLen > bufLengthMinusTwo {
toWriteLen = bufLengthMinusTwo - stream.n
}
n := stream.n
stream.buf[n] = '"'
n++
// fast path: copy bytes that need neither UTF-8 decoding nor escaping
i := 0
for ; i < toWriteLen; i++ {
c := s[i]
if c > 31 && c != '"' && c != '\\' {
stream.buf[n] = c
n++
} else {
break
}
}
if i == valLen {
stream.buf[n] = '"'
n++
stream.n = n
return
}
stream.n = n
writeStringSlowPath(stream, i, s, valLen)
}
func writeStringSlowPath(stream *Stream, i int, s string, valLen int) {
start := i
// for the remaining parts, we process them char by char
for ; i < valLen; i++ {
if b := s[i]; b < utf8.RuneSelf {
if safeSet[b] {
i++
continue
}
if start < i {
stream.WriteRaw(s[start:i])
}
switch b {
case '\\', '"':
stream.writeTwoBytes('\\', b)
case '\n':
stream.writeTwoBytes('\\', 'n')
case '\r':
stream.writeTwoBytes('\\', 'r')
case '\t':
stream.writeTwoBytes('\\', 't')
default:
// This encodes bytes < 0x20 except for \t, \n and \r.
// <, >, and & are left unescaped in this path; use
// WriteStringWithHtmlEscaped above when the output is
// embedded in HTML.
stream.WriteRaw(`\u00`)
stream.writeTwoBytes(hex[b>>4], hex[b&0xF])
}
i++
start = i
continue
}
if start < i {
stream.WriteRaw(s[start:i])
}
start = i
continue
}
if start < len(s) {
stream.WriteRaw(s[start:])
}
stream.writeByte('"')
}
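
A minimal usage sketch contrasting the two writers above, assuming this snapshot's exported Stream API (NewStream(cfg, writer, size), Flush) and the github.com/json-iterator/go import path; the method names may differ in later versions of the library.

package main

import (
	"bytes"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	in := `<a> & "b"`

	buf := &bytes.Buffer{}
	stream := jsoniter.NewStream(jsoniter.ConfigDefault, buf, 4096)
	stream.WriteString(in) // only ", \ and control bytes are escaped
	stream.Flush()
	fmt.Println(buf.String()) // "<a> & \"b\""

	buf.Reset()
	stream = jsoniter.NewStream(jsoniter.ConfigDefault, buf, 4096)
	stream.WriteStringWithHtmlEscaped(in) // <, > and & become \u003c, \u003e, \u0026
	stream.Flush()
	fmt.Println(buf.String()) // "\u003ca\u003e \u0026 \"b\""
}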


@ -1,11 +1,11 @@
package jsoniter
import (
"testing"
"github.com/json-iterator/go/require"
"encoding/json"
"bytes"
"encoding/json"
"github.com/json-iterator/go/require"
"io/ioutil"
"testing"
)
func Test_new_decoder(t *testing.T) {
@ -37,10 +37,34 @@ func Test_new_encoder(t *testing.T) {
should := require.New(t)
buf1 := &bytes.Buffer{}
encoder1 := json.NewEncoder(buf1)
encoder1.SetEscapeHTML(false)
encoder1.Encode([]int{1})
should.Equal("[1]\n", buf1.String())
buf2 := &bytes.Buffer{}
encoder2 := NewEncoder(buf2)
encoder2.SetEscapeHTML(false)
encoder2.Encode([]int{1})
should.Equal("[1]", buf2.String())
}
}
func Test_use_number(t *testing.T) {
should := require.New(t)
decoder1 := json.NewDecoder(bytes.NewBufferString(`123`))
decoder1.UseNumber()
decoder2 := NewDecoder(bytes.NewBufferString(`123`))
decoder2.UseNumber()
var obj1 interface{}
should.Nil(decoder1.Decode(&obj1))
should.Equal(json.Number("123"), obj1)
var obj2 interface{}
should.Nil(decoder2.Decode(&obj2))
should.Equal(json.Number("123"), obj2)
}
func Test_use_number_for_unmarshal(t *testing.T) {
should := require.New(t)
api := Config{UseNumber: true}.Froze()
var obj interface{}
should.Nil(api.UnmarshalFromString("123", &obj))
should.Equal(json.Number("123"), obj)
}
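
As a hedged sketch of the pattern exercised above: a Config can be frozen once into a reusable API value, so options such as UseNumber or MarshalFloatWith6Digits do not have to be re-specified per call. Combining both options in one Config below is illustrative, not taken from the tests.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	api := jsoniter.Config{
		UseNumber:               true,
		MarshalFloatWith6Digits: true,
	}.Froze()

	var v interface{}
	if err := api.UnmarshalFromString(`123`, &v); err == nil {
		fmt.Printf("%T %v\n", v, v) // json.Number 123
	}

	out, err := api.MarshalToString(float32(0.1234567))
	if err == nil {
		fmt.Println(out) // 0.123457
	}
}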


@ -0,0 +1,96 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"testing"
)
func Test_read_empty_array_as_any(t *testing.T) {
should := require.New(t)
any := Get([]byte("[]"))
should.Equal(Array, any.Get().ValueType())
should.Equal(Invalid, any.Get(0.3).ValueType())
should.Equal(0, any.Size())
should.Equal(Array, any.ValueType())
should.Nil(any.LastError())
should.Equal(0, any.ToInt())
should.Equal(int32(0), any.ToInt32())
should.Equal(int64(0), any.ToInt64())
should.Equal(uint(0), any.ToUint())
should.Equal(uint32(0), any.ToUint32())
should.Equal(uint64(0), any.ToUint64())
should.Equal(float32(0), any.ToFloat32())
should.Equal(float64(0), any.ToFloat64())
}
func Test_read_one_element_array_as_any(t *testing.T) {
should := require.New(t)
any := Get([]byte("[1]"))
should.Equal(1, any.Size())
}
func Test_read_two_element_array_as_any(t *testing.T) {
should := require.New(t)
any := Get([]byte("[1,2]"))
should.Equal(1, any.Get(0).ToInt())
should.Equal(2, any.Size())
should.True(any.ToBool())
should.Equal(1, any.ToInt())
should.Equal(1, any.GetArray()[0].ToInt())
should.Equal([]interface{}{float64(1), float64(2)}, any.GetInterface())
stream := NewStream(ConfigDefault, nil, 32)
any.WriteTo(stream)
should.Equal("[1,2]", string(stream.Buffer()))
arr := []int{}
any.ToVal(&arr)
should.Equal([]int{1, 2}, arr)
}
func Test_wrap_array(t *testing.T) {
should := require.New(t)
any := Wrap([]int{1, 2, 3})
should.Equal("[1,2,3]", any.ToString())
}
func Test_array_lazy_any_get(t *testing.T) {
should := require.New(t)
any := Get([]byte("[1,[2,3],4]"))
should.Equal(3, any.Get(1, 1).ToInt())
should.Equal("[1,[2,3],4]", any.ToString())
}
func Test_array_lazy_any_get_all(t *testing.T) {
should := require.New(t)
any := Get([]byte("[[1],[2],[3,4]]"))
should.Equal("[1,2,3]", any.Get('*', 0).ToString())
any = Get([]byte("[[[1],[2],[3,4]]]"), 0, '*', 0)
should.Equal("[1,2,3]", any.ToString())
}
func Test_array_wrapper_any_get_all(t *testing.T) {
should := require.New(t)
any := wrapArray([][]int{
{1, 2},
{3, 4},
{5, 6},
})
should.Equal("[1,3,5]", any.Get('*', 0).ToString())
should.Equal(Array, any.ValueType())
should.True(any.ToBool())
should.Equal(1, any.Get(0, 0).ToInt())
}
func Test_array_lazy_any_get_invalid(t *testing.T) {
should := require.New(t)
any := Get([]byte("[]"))
should.Equal(Invalid, any.Get(1, 1).ValueType())
should.NotNil(any.Get(1, 1).LastError())
should.Equal(Invalid, any.Get("1").ValueType())
should.NotNil(any.Get("1").LastError())
}
func Test_invalid_array(t *testing.T) {
should := require.New(t)
any := Get([]byte("["), 0)
should.Equal(Invalid, any.ValueType())
}

jsoniter_any_bool_test.go

@ -0,0 +1,12 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"testing"
)
func Test_read_bool_as_any(t *testing.T) {
should := require.New(t)
any := Get([]byte("true"))
should.True(any.ToBool())
}


@ -0,0 +1,14 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"testing"
)
func Test_read_float_as_any(t *testing.T) {
should := require.New(t)
any := Get([]byte("12.3"))
should.Equal(float64(12.3), any.ToFloat64())
should.Equal("12.3", any.ToString())
should.True(any.ToBool())
}

jsoniter_any_int_test.go

@ -0,0 +1,22 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"io"
"testing"
)
func Test_read_int64_as_any(t *testing.T) {
should := require.New(t)
any := Get([]byte("1234"))
should.Equal(1234, any.ToInt())
should.Equal(io.EOF, any.LastError())
should.Equal("1234", any.ToString())
should.True(any.ToBool())
}
func Test_int_lazy_any_get(t *testing.T) {
should := require.New(t)
any := Get([]byte("1234"))
should.Equal(Invalid, any.Get(1, "2").ValueType())
}

jsoniter_any_map_test.go

@ -0,0 +1,14 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"testing"
)
func Test_wrap_map(t *testing.T) {
should := require.New(t)
any := Wrap(map[string]string{"Field1": "hello"})
should.Equal("hello", any.Get("Field1").ToString())
any = Wrap(map[string]string{"Field1": "hello"})
should.Equal(1, any.Size())
}

jsoniter_any_null_test.go

@ -0,0 +1,15 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"testing"
)
func Test_read_null_as_any(t *testing.T) {
should := require.New(t)
any := Get([]byte(`null`))
should.Equal(0, any.ToInt())
should.Equal(float64(0), any.ToFloat64())
should.Equal("", any.ToString())
should.False(any.ToBool())
}


@ -0,0 +1,75 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"testing"
)
func Test_read_object_as_any(t *testing.T) {
should := require.New(t)
any := Get([]byte(`{"a":"b","c":"d"}`))
should.Equal(`{"a":"b","c":"d"}`, any.ToString())
// partial parse
should.Equal("b", any.Get("a").ToString())
should.Equal("d", any.Get("c").ToString())
should.Equal(2, len(any.Keys()))
any = Get([]byte(`{"a":"b","c":"d"}`))
// full parse
should.Equal(2, len(any.Keys()))
should.Equal(2, any.Size())
should.True(any.ToBool())
should.Equal(1, any.ToInt())
should.Equal(Object, any.ValueType())
should.Nil(any.LastError())
should.Equal("b", any.GetObject()["a"].ToString())
obj := struct {
A string
}{}
any.ToVal(&obj)
should.Equal("b", obj.A)
}
func Test_object_lazy_any_get(t *testing.T) {
should := require.New(t)
any := Get([]byte(`{"a":{"b":{"c":"d"}}}`))
should.Equal("d", any.Get("a", "b", "c").ToString())
}
func Test_object_lazy_any_get_all(t *testing.T) {
should := require.New(t)
any := Get([]byte(`{"a":[0],"b":[1]}`))
should.Contains(any.Get('*', 0).ToString(), `"a":0`)
}
func Test_object_lazy_any_get_invalid(t *testing.T) {
should := require.New(t)
any := Get([]byte(`{}`))
should.Equal(Invalid, any.Get("a", "b", "c").ValueType())
should.Equal(Invalid, any.Get(1).ValueType())
}
func Test_wrap_object(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
field2 string
}
any := Wrap(TestObject{"hello", "world"})
should.Equal("hello", any.Get("Field1").ToString())
any = Wrap(TestObject{"hello", "world"})
should.Equal(2, any.Size())
should.Equal(`{"Field1":"hello"}`, any.Get('*').ToString())
}
func Test_any_within_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 Any
Field2 Any
}
obj := TestObject{}
err := UnmarshalFromString(`{"Field1": "hello", "Field2": [1,2,3]}`, &obj)
should.Nil(err)
should.Equal("hello", obj.Field1.ToString())
should.Equal("[1,2,3]", obj.Field2.ToString())
}
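
A brief consolidated sketch of the Any path lookups the tests above exercise (nested keys and indexes, the '*' wildcard, and ToVal binding); the expected outputs are assumed from those tests and are not guaranteed for other input shapes.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	any := jsoniter.Get([]byte(`[[1,2],[3,4],[5,6]]`))

	fmt.Println(any.Size())                 // 3
	fmt.Println(any.Get(1, 0).ToInt())      // 3: second row, first column
	fmt.Println(any.Get('*', 0).ToString()) // [1,3,5]: wildcard over rows

	var rows [][]int
	any.ToVal(&rows)
	fmt.Println(rows) // [[1 2] [3 4] [5 6]]

	// An out-of-range path yields an Invalid value plus a LastError.
	bad := any.Get(9, 9)
	fmt.Println(bad.ValueType() == jsoniter.Invalid, bad.LastError() != nil) // true true
}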


@ -0,0 +1,25 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"testing"
)
func Test_read_string_as_any(t *testing.T) {
should := require.New(t)
any := Get([]byte(`"hello"`))
should.Equal("hello", any.ToString())
should.True(any.ToBool())
any = Get([]byte(`" "`))
should.False(any.ToBool())
any = Get([]byte(`"false"`))
should.False(any.ToBool())
any = Get([]byte(`"123"`))
should.Equal(123, any.ToInt())
}
func Test_wrap_string(t *testing.T) {
should := require.New(t)
any := WrapString("123")
should.Equal(123, any.ToInt())
}


@ -1,19 +1,18 @@
package jsoniter
import (
"encoding/json"
"testing"
"github.com/json-iterator/go/require"
"bytes"
"io"
"encoding/json"
"github.com/json-iterator/go/require"
"testing"
)
func Test_empty_array(t *testing.T) {
should := require.New(t)
iter := ParseString(`[]`)
iter := ParseString(ConfigDefault, `[]`)
cont := iter.ReadArray()
should.False(cont)
iter = ParseString(`[]`)
iter = ParseString(ConfigDefault, `[]`)
iter.ReadArrayCB(func(iter *Iterator) bool {
should.FailNow("should not call")
return true
@ -22,11 +21,11 @@ func Test_empty_array(t *testing.T) {
func Test_one_element(t *testing.T) {
should := require.New(t)
iter := ParseString(`[1]`)
iter := ParseString(ConfigDefault, `[1]`)
should.True(iter.ReadArray())
should.Equal(1, iter.ReadInt())
should.False(iter.ReadArray())
iter = ParseString(`[1]`)
iter = ParseString(ConfigDefault, `[1]`)
iter.ReadArrayCB(func(iter *Iterator) bool {
should.Equal(1, iter.ReadInt())
return true
@ -35,124 +34,18 @@ func Test_one_element(t *testing.T) {
func Test_two_elements(t *testing.T) {
should := require.New(t)
iter := ParseString(`[1,2]`)
iter := ParseString(ConfigDefault, `[1,2]`)
should.True(iter.ReadArray())
should.Equal(int64(1), iter.ReadInt64())
should.True(iter.ReadArray())
should.Equal(int64(2), iter.ReadInt64())
should.False(iter.ReadArray())
iter = ParseString(`[1,2]`)
iter = ParseString(ConfigDefault, `[1,2]`)
should.Equal([]interface{}{float64(1), float64(2)}, iter.Read())
}
func Test_read_empty_array_as_any(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("[]")
should.Nil(err)
should.Equal(0, any.Size())
}
func Test_read_one_element_array_as_any(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("[1]")
should.Nil(err)
should.Equal(1, any.Size())
}
func Test_read_two_element_array_as_any(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("[1,2]")
should.Nil(err)
should.Equal(1, any.Get(0).ToInt())
should.Equal(2, any.Size())
should.True(any.ToBool())
should.Equal(1, any.ToInt())
}
func Test_read_array_with_any_iterator(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("[1,2]")
should.Nil(err)
var element Any
var elements []int
for next, hasNext := any.IterateArray(); hasNext; {
element, hasNext = next()
elements = append(elements, element.ToInt())
}
should.Equal([]int{1, 2}, elements)
}
func Test_wrap_array(t *testing.T) {
should := require.New(t)
any := Wrap([]int{1,2,3})
should.Equal("[1,2,3]", any.ToString())
var element Any
var elements []int
for next, hasNext := any.IterateArray(); hasNext; {
element, hasNext = next()
elements = append(elements, element.ToInt())
}
should.Equal([]int{1, 2, 3}, elements)
any = Wrap([]int{1,2,3})
should.Equal(3, any.Size())
any = Wrap([]int{1,2,3})
should.Equal(2, any.Get(1).ToInt())
}
func Test_array_lazy_any_get(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("[1,[2,3],4]")
should.Nil(err)
should.Equal(3, any.Get(1,1).ToInt())
should.Equal("[1,[2,3],4]", any.ToString())
}
func Test_array_lazy_any_get_all(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("[[1],[2],[3,4]]")
should.Nil(err)
should.Equal("[1,2,3]", any.Get('*',0).ToString())
}
func Test_array_wrapper_any_get_all(t *testing.T) {
should := require.New(t)
any := wrapArray([][]int{
[]int{1, 2},
[]int{3, 4},
[]int{5, 6},
})
should.Equal("[1,3,5]", any.Get('*',0).ToString())
}
func Test_array_lazy_any_get_invalid(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("[]")
should.Nil(err)
should.Equal(Invalid, any.Get(1,1).ValueType())
should.NotNil(any.Get(1,1).LastError())
should.Equal(Invalid, any.Get("1").ValueType())
should.NotNil(any.Get("1").LastError())
}
func Test_array_lazy_any_set(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("[1,[2,3],4]")
should.Nil(err)
any.GetArray()[0] = WrapInt64(2)
str, err := MarshalToString(any)
should.Nil(err)
should.Equal("[2,[2,3],4]", str)
}
func Test_invalid_array(t *testing.T) {
_, err := UnmarshalAnyFromString("[")
if err == nil || err == io.EOF {
t.FailNow()
}
}
func Test_whitespace_in_head(t *testing.T) {
iter := ParseString(` [1]`)
iter := ParseString(ConfigDefault, ` [1]`)
cont := iter.ReadArray()
if cont != true {
t.FailNow()
@ -163,7 +56,7 @@ func Test_whitespace_in_head(t *testing.T) {
}
func Test_whitespace_after_array_start(t *testing.T) {
iter := ParseString(`[ 1]`)
iter := ParseString(ConfigDefault, `[ 1]`)
cont := iter.ReadArray()
if cont != true {
t.FailNow()
@ -174,7 +67,7 @@ func Test_whitespace_after_array_start(t *testing.T) {
}
func Test_whitespace_before_array_end(t *testing.T) {
iter := ParseString(`[1 ]`)
iter := ParseString(ConfigDefault, `[1 ]`)
cont := iter.ReadArray()
if cont != true {
t.FailNow()
@ -189,7 +82,7 @@ func Test_whitespace_before_array_end(t *testing.T) {
}
func Test_whitespace_before_comma(t *testing.T) {
iter := ParseString(`[1 ,2]`)
iter := ParseString(ConfigDefault, `[1 ,2]`)
cont := iter.ReadArray()
if cont != true {
t.FailNow()
@ -213,8 +106,7 @@ func Test_whitespace_before_comma(t *testing.T) {
func Test_write_array(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream.IndentionStep = 2
stream := NewStream(Config{IndentionStep: 2}.Froze(), buf, 4096)
stream.WriteArrayStart()
stream.WriteInt(1)
stream.WriteMore()
@ -228,7 +120,7 @@ func Test_write_array(t *testing.T) {
func Test_write_val_array(t *testing.T) {
should := require.New(t)
val := []int{1, 2, 3}
str, err := MarshalToString(val)
str, err := MarshalToString(&val)
should.Nil(err)
should.Equal("[1,2,3]", str)
}
@ -244,7 +136,7 @@ func Test_write_val_empty_array(t *testing.T) {
func Test_write_array_of_interface_in_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field []interface{}
Field []interface{}
Field2 string
}
val := TestObject{[]interface{}{1, 2}, ""}
@ -254,20 +146,45 @@ func Test_write_array_of_interface_in_struct(t *testing.T) {
should.Contains(str, `"Field2":""`)
}
func Test_json_RawMessage(t *testing.T) {
func Test_encode_byte_array(t *testing.T) {
should := require.New(t)
var data json.RawMessage
should.Nil(Unmarshal([]byte(`[1,2,3]`), &data))
should.Equal(`[1,2,3]`, string(data))
str, err := MarshalToString(data)
bytes, err := json.Marshal([]byte{1, 2, 3})
should.Nil(err)
should.Equal(`[1,2,3]`, str)
should.Equal(`"AQID"`, string(bytes))
bytes, err = Marshal([]byte{1, 2, 3})
should.Nil(err)
should.Equal(`"AQID"`, string(bytes))
}
func Test_decode_byte_array(t *testing.T) {
should := require.New(t)
data := []byte{}
err := json.Unmarshal([]byte(`"AQID"`), &data)
should.Nil(err)
should.Equal([]byte{1, 2, 3}, data)
err = Unmarshal([]byte(`"AQID"`), &data)
should.Nil(err)
should.Equal([]byte{1, 2, 3}, data)
}
func Test_decode_slice(t *testing.T) {
should := require.New(t)
slice := make([]string, 0, 5)
UnmarshalFromString(`["hello", "world"]`, &slice)
should.Equal([]string{"hello", "world"}, slice)
}
func Test_decode_large_slice(t *testing.T) {
should := require.New(t)
slice := make([]int, 0, 1)
UnmarshalFromString(`[1,2,3,4,5,6,7,8,9]`, &slice)
should.Equal([]int{1, 2, 3, 4, 5, 6, 7, 8, 9}, slice)
}
func Benchmark_jsoniter_array(b *testing.B) {
b.ReportAllocs()
input := []byte(`[1,2,3,4,5,6,7,8,9]`)
iter := ParseBytes(input)
iter := ParseBytes(ConfigDefault, input)
b.ResetTimer()
for n := 0; n < b.N; n++ {
iter.ResetBytes(input)


@ -1,36 +1,30 @@
package jsoniter
import (
"testing"
"bytes"
"encoding/json"
"github.com/json-iterator/go/require"
"testing"
)
func Test_true(t *testing.T) {
should := require.New(t)
iter := ParseString(`true`)
iter := ParseString(ConfigDefault, `true`)
should.True(iter.ReadBool())
iter = ParseString(`true`)
iter = ParseString(ConfigDefault, `true`)
should.Equal(true, iter.Read())
}
func Test_false(t *testing.T) {
should := require.New(t)
iter := ParseString(`false`)
iter := ParseString(ConfigDefault, `false`)
should.False(iter.ReadBool())
}
func Test_read_bool_as_any(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("true")
should.Nil(err)
should.True(any.ToBool())
}
func Test_write_true_false(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteTrue()
stream.WriteFalse()
stream.Flush()
@ -38,13 +32,49 @@ func Test_write_true_false(t *testing.T) {
should.Equal("truefalse", buf.String())
}
func Test_write_val_bool(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteVal(true)
stream.Flush()
should.Nil(stream.Error)
should.Equal("true", buf.String())
}
}
func Test_encode_string_bool(t *testing.T) {
type TestObject struct {
Field bool `json:",omitempty,string"`
}
should := require.New(t)
output, err := json.Marshal(TestObject{true})
should.Nil(err)
should.Equal(`{"Field":"true"}`, string(output))
output, err = Marshal(TestObject{true})
should.Nil(err)
should.Equal(`{"Field":"true"}`, string(output))
}
func Test_decode_string_bool(t *testing.T) {
type TestObject struct {
Field bool `json:",omitempty,string"`
}
should := require.New(t)
obj := TestObject{}
err := json.Unmarshal([]byte(`{"Field":"true"}`), &obj)
should.Nil(err)
should.True(obj.Field)
obj = TestObject{}
err = json.Unmarshal([]byte(`{"Field":true}`), &obj)
should.NotNil(err)
obj = TestObject{}
err = Unmarshal([]byte(`{"Field":"true"}`), &obj)
should.Nil(err)
should.True(obj.Field)
obj = TestObject{}
err = Unmarshal([]byte(`{"Field":true}`), &obj)
should.NotNil(err)
}


@ -1,17 +1,16 @@
package jsoniter
import (
"reflect"
"encoding/json"
"github.com/json-iterator/go/require"
"strconv"
"testing"
"time"
"unsafe"
"github.com/json-iterator/go/require"
"encoding/json"
)
func Test_customize_type_decoder(t *testing.T) {
RegisterTypeDecoder("time.Time", func(ptr unsafe.Pointer, iter *Iterator) {
RegisterTypeDecoderFunc("time.Time", func(ptr unsafe.Pointer, iter *Iterator) {
t, err := time.ParseInLocation("2006-01-02 15:04:05", iter.ReadString(), time.UTC)
if err != nil {
iter.Error = err
@ -19,7 +18,7 @@ func Test_customize_type_decoder(t *testing.T) {
}
*((*time.Time)(ptr)) = t
})
defer CleanDecoders()
defer ConfigDefault.cleanDecoders()
val := time.Time{}
err := Unmarshal([]byte(`"2016-12-05 08:43:28"`), &val)
if err != nil {
@ -33,11 +32,11 @@ func Test_customize_type_decoder(t *testing.T) {
func Test_customize_type_encoder(t *testing.T) {
should := require.New(t)
RegisterTypeEncoder("time.Time", func(ptr unsafe.Pointer, stream *Stream) {
RegisterTypeEncoderFunc("time.Time", func(ptr unsafe.Pointer, stream *Stream) {
t := *((*time.Time)(ptr))
stream.WriteString(t.UTC().Format("2006-01-02 15:04:05"))
})
defer CleanEncoders()
}, nil)
defer ConfigDefault.cleanEncoders()
val := time.Unix(0, 0)
str, err := MarshalToString(val)
should.Nil(err)
@ -45,12 +44,13 @@ func Test_customize_type_encoder(t *testing.T) {
}
func Test_customize_byte_array_encoder(t *testing.T) {
ConfigDefault.cleanEncoders()
should := require.New(t)
RegisterTypeEncoder("[]uint8", func(ptr unsafe.Pointer, stream *Stream) {
RegisterTypeEncoderFunc("[]uint8", func(ptr unsafe.Pointer, stream *Stream) {
t := *((*[]byte)(ptr))
stream.WriteString(string(t))
})
defer CleanEncoders()
}, nil)
defer ConfigDefault.cleanEncoders()
val := []byte("abc")
str, err := MarshalToString(val)
should.Nil(err)
@ -59,9 +59,8 @@ func Test_customize_byte_array_encoder(t *testing.T) {
func Test_customize_float_marshal(t *testing.T) {
should := require.New(t)
EnableLossyFloatMarshalling()
defer CleanEncoders()
str, err := MarshalToString(float32(1.23456789))
json := Config{MarshalFloatWith6Digits: true}.Froze()
str, err := json.MarshalToString(float32(1.23456789))
should.Nil(err)
should.Equal("1.234568", str)
}
@ -71,10 +70,10 @@ type Tom struct {
}
func Test_customize_field_decoder(t *testing.T) {
RegisterFieldDecoder("jsoniter.Tom", "field1", func(ptr unsafe.Pointer, iter *Iterator) {
RegisterFieldDecoderFunc("jsoniter.Tom", "field1", func(ptr unsafe.Pointer, iter *Iterator) {
*((*string)(ptr)) = strconv.Itoa(iter.ReadInt())
})
defer CleanDecoders()
defer ConfigDefault.cleanDecoders()
tom := Tom{}
err := Unmarshal([]byte(`{"field1": 100}`), &tom)
if err != nil {
@ -86,22 +85,30 @@ type TestObject1 struct {
field1 string
}
type testExtension struct {
DummyExtension
}
func (extension *testExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
if structDescriptor.Type.String() != "jsoniter.TestObject1" {
return
}
binding := structDescriptor.GetField("field1")
binding.Encoder = &funcEncoder{fun: func(ptr unsafe.Pointer, stream *Stream) {
str := *((*string)(ptr))
val, _ := strconv.Atoi(str)
stream.WriteInt(val)
}}
binding.Decoder = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
*((*string)(ptr)) = strconv.Itoa(iter.ReadInt())
}}
binding.ToNames = []string{"field-1"}
binding.FromNames = []string{"field-1"}
}
func Test_customize_field_by_extension(t *testing.T) {
should := require.New(t)
RegisterExtension(func(type_ reflect.Type, field *reflect.StructField) ([]string, EncoderFunc, DecoderFunc) {
if type_.String() == "jsoniter.TestObject1" && field.Name == "field1" {
encode := func(ptr unsafe.Pointer, stream *Stream) {
str := *((*string)(ptr))
val, _ := strconv.Atoi(str)
stream.WriteInt(val)
}
decode := func(ptr unsafe.Pointer, iter *Iterator) {
*((*string)(ptr)) = strconv.Itoa(iter.ReadInt())
}
return []string{"field-1"}, encode, decode
}
return nil, nil, nil
})
RegisterExtension(&testExtension{})
obj := TestObject1{}
err := UnmarshalFromString(`{"field-1": 100}`, &obj)
should.Nil(err)
@ -111,59 +118,61 @@ func Test_customize_field_by_extension(t *testing.T) {
should.Equal(`{"field-1":100}`, str)
}
func Test_unexported_fields(t *testing.T) {
EnableUnexportedStructFieldsSupport()
should := require.New(t)
type TestObject struct {
field1 string
field2 string `json:"field-2"`
}
obj := TestObject{}
obj.field1 = "hello"
should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("hello", obj.field1)
should.Nil(UnmarshalFromString(`{"field1": "world", "field-2": "abc"}`, &obj))
should.Equal("world", obj.field1)
should.Equal("abc", obj.field2)
str, err := MarshalToString(obj)
should.Nil(err)
should.Contains(str, `"field-2":"abc"`)
}
//func Test_unexported_fields(t *testing.T) {
// jsoniter := Config{SupportUnexportedStructFields: true}.Froze()
// should := require.New(t)
// type TestObject struct {
// field1 string
// field2 string `json:"field-2"`
// }
// obj := TestObject{}
// obj.field1 = "hello"
// should.Nil(jsoniter.UnmarshalFromString(`{}`, &obj))
// should.Equal("hello", obj.field1)
// should.Nil(jsoniter.UnmarshalFromString(`{"field1": "world", "field-2": "abc"}`, &obj))
// should.Equal("world", obj.field1)
// should.Equal("abc", obj.field2)
// str, err := jsoniter.MarshalToString(obj)
// should.Nil(err)
// should.Contains(str, `"field-2":"abc"`)
//}
type ObjectImplementedMarshaler int
type timeImplementedMarshaler time.Time
func (obj *ObjectImplementedMarshaler) MarshalJSON() ([]byte, error) {
return []byte(`"hello"`), nil
func (obj timeImplementedMarshaler) MarshalJSON() ([]byte, error) {
seconds := time.Time(obj).Unix()
return []byte(strconv.FormatInt(seconds, 10)), nil
}
func Test_marshaler(t *testing.T) {
type TestObject struct {
Field *ObjectImplementedMarshaler
Field timeImplementedMarshaler
}
should := require.New(t)
val := ObjectImplementedMarshaler(100)
obj := TestObject{&val}
val := timeImplementedMarshaler(time.Unix(123, 0))
obj := TestObject{val}
bytes, err := json.Marshal(obj)
should.Nil(err)
should.Equal(`{"Field":"hello"}`, string(bytes))
should.Equal(`{"Field":123}`, string(bytes))
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{"Field":"hello"}`, str)
should.Equal(`{"Field":123}`, str)
}
func Test_marshaler_and_encoder(t *testing.T) {
type TestObject struct {
Field *ObjectImplementedMarshaler
Field *timeImplementedMarshaler
}
ConfigDefault.cleanEncoders()
should := require.New(t)
RegisterTypeEncoder("jsoniter.ObjectImplementedMarshaler", func(ptr unsafe.Pointer, stream *Stream) {
RegisterTypeEncoderFunc("jsoniter.timeImplementedMarshaler", func(ptr unsafe.Pointer, stream *Stream) {
stream.WriteString("hello from encoder")
})
val := ObjectImplementedMarshaler(100)
}, nil)
val := timeImplementedMarshaler(time.Unix(123, 0))
obj := TestObject{&val}
bytes, err := json.Marshal(obj)
should.Nil(err)
should.Equal(`{"Field":"hello"}`, string(bytes))
should.Equal(`{"Field":123}`, string(bytes))
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{"Field":"hello from encoder"}`, str)
@ -178,7 +187,7 @@ func (obj *ObjectImplementedUnmarshaler) UnmarshalJSON([]byte) error {
func Test_unmarshaler(t *testing.T) {
type TestObject struct {
Field *ObjectImplementedUnmarshaler
Field *ObjectImplementedUnmarshaler
Field2 string
}
should := require.New(t)
@ -195,11 +204,12 @@ func Test_unmarshaler(t *testing.T) {
func Test_unmarshaler_and_decoder(t *testing.T) {
type TestObject struct {
Field *ObjectImplementedUnmarshaler
Field *ObjectImplementedUnmarshaler
Field2 string
}
ConfigDefault.cleanDecoders()
should := require.New(t)
RegisterTypeDecoder("jsoniter.ObjectImplementedUnmarshaler", func(ptr unsafe.Pointer, iter *Iterator) {
RegisterTypeDecoderFunc("jsoniter.ObjectImplementedUnmarshaler", func(ptr unsafe.Pointer, iter *Iterator) {
*(*ObjectImplementedUnmarshaler)(ptr) = 10
iter.Skip()
})
@ -212,4 +222,23 @@ func Test_unmarshaler_and_decoder(t *testing.T) {
err = Unmarshal([]byte(`{"Field":"hello"}`), &obj)
should.Nil(err)
should.Equal(10, int(*obj.Field))
}
}
type tmString string
type tmStruct struct {
String tmString
}
func (s tmStruct) MarshalJSON() ([]byte, error) {
var b []byte
b = append(b, '"')
b = append(b, s.String...)
b = append(b, '"')
return b, nil
}
func Test_marshaler_on_struct(t *testing.T) {
fixed := tmStruct{"hello"}
//json.Marshal(fixed)
Marshal(fixed)
}


@ -1,9 +1,10 @@
package jsoniter
import (
"encoding/json"
"fmt"
"testing"
"github.com/json-iterator/go/require"
"testing"
)
func Test_bind_api_demo(t *testing.T) {
@ -15,10 +16,71 @@ func Test_bind_api_demo(t *testing.T) {
}
func Test_iterator_api_demo(t *testing.T) {
iter := ParseString(`[0,1,2,3]`)
iter := ParseString(ConfigDefault, `[0,1,2,3]`)
total := 0
for iter.ReadArray() {
total += iter.ReadInt()
}
fmt.Println(total)
}
type People struct {
Name string
Gender string
Age int
Address string
Mobile string
Country string
Height int
}
func jsoniterMarshal(p *People) error {
_, err := Marshal(p)
if nil != err {
return err
}
return nil
}
func stdMarshal(p *People) error {
_, err := json.Marshal(p)
if nil != err {
return err
}
return nil
}
func BenchmarkJsoniterMarshal(b *testing.B) {
var p People
p.Address = "上海市徐汇区漕宝路"
p.Age = 30
p.Country = "中国"
p.Gender = "male"
p.Height = 170
p.Mobile = "18502120533"
p.Name = "Elvin"
b.ReportAllocs()
for i := 0; i < b.N; i++ {
err := jsoniterMarshal(&p)
if nil != err {
b.Error(err)
}
}
}
func BenchmarkStdMarshal(b *testing.B) {
var p People
p.Address = "上海市徐汇区漕宝路"
p.Age = 30
p.Country = "中国"
p.Gender = "male"
p.Height = 170
p.Mobile = "18502120533"
p.Name = "Elvin"
b.ReportAllocs()
for i := 0; i < b.N; i++ {
err := stdMarshal(&p)
if nil != err {
b.Error(err)
}
}
}


@ -1,62 +1,62 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"io"
"testing"
"github.com/json-iterator/go/require"
)
func Test_string_end(t *testing.T) {
end, escaped := ParseString(`abc"`).findStringEnd()
end, escaped := ParseString(ConfigDefault, `abc"`).findStringEnd()
if end != 4 {
t.Fatal(end)
}
if escaped != false {
t.Fatal(escaped)
}
end, escaped = ParseString(`abc\\"`).findStringEnd()
end, escaped = ParseString(ConfigDefault, `abc\\"`).findStringEnd()
if end != 6 {
t.Fatal(end)
}
if escaped != true {
t.Fatal(escaped)
}
end, escaped = ParseString(`abc\\\\"`).findStringEnd()
end, escaped = ParseString(ConfigDefault, `abc\\\\"`).findStringEnd()
if end != 8 {
t.Fatal(end)
}
if escaped != true {
t.Fatal(escaped)
}
end, escaped = ParseString(`abc\"`).findStringEnd()
end, escaped = ParseString(ConfigDefault, `abc\"`).findStringEnd()
if end != -1 {
t.Fatal(end)
}
if escaped != false {
t.Fatal(escaped)
}
end, escaped = ParseString(`abc\`).findStringEnd()
end, escaped = ParseString(ConfigDefault, `abc\`).findStringEnd()
if end != -1 {
t.Fatal(end)
}
if escaped != true {
t.Fatal(escaped)
}
end, escaped = ParseString(`abc\\`).findStringEnd()
end, escaped = ParseString(ConfigDefault, `abc\\`).findStringEnd()
if end != -1 {
t.Fatal(end)
}
if escaped != false {
t.Fatal(escaped)
}
end, escaped = ParseString(`\\`).findStringEnd()
end, escaped = ParseString(ConfigDefault, `\\`).findStringEnd()
if end != -1 {
t.Fatal(end)
}
if escaped != false {
t.Fatal(escaped)
}
end, escaped = ParseString(`\`).findStringEnd()
end, escaped = ParseString(ConfigDefault, `\`).findStringEnd()
if end != -1 {
t.Fatal(end)
}
@ -91,54 +91,54 @@ func (reader *StagedReader) Read(p []byte) (n int, err error) {
func Test_skip_string(t *testing.T) {
should := require.New(t)
iter := ParseString(`"abc`)
iter := ParseString(ConfigDefault, `"abc`)
iter.skipString()
should.Equal(1, iter.head)
iter = ParseString(`\""abc`)
iter = ParseString(ConfigDefault, `\""abc`)
iter.skipString()
should.Equal(3, iter.head)
reader := &StagedReader{
r1: `abc`,
r2: `"`,
}
iter = Parse(reader, 4096)
iter = Parse(ConfigDefault, reader, 4096)
iter.skipString()
should.Equal(1, iter.head)
reader = &StagedReader{
r1: `abc`,
r2: `1"`,
}
iter = Parse(reader, 4096)
iter = Parse(ConfigDefault, reader, 4096)
iter.skipString()
should.Equal(2, iter.head)
reader = &StagedReader{
r1: `abc\`,
r2: `"`,
}
iter = Parse(reader, 4096)
iter = Parse(ConfigDefault, reader, 4096)
iter.skipString()
should.NotNil(iter.Error)
reader = &StagedReader{
r1: `abc\`,
r2: `""`,
}
iter = Parse(reader, 4096)
iter = Parse(ConfigDefault, reader, 4096)
iter.skipString()
should.Equal(2, iter.head)
}
func Test_skip_object(t *testing.T) {
iter := ParseString(`}`)
iter := ParseString(ConfigDefault, `}`)
iter.skipObject()
if iter.head != 1 {
t.Fatal(iter.head)
}
iter = ParseString(`a}`)
iter = ParseString(ConfigDefault, `a}`)
iter.skipObject()
if iter.head != 2 {
t.Fatal(iter.head)
}
iter = ParseString(`{}}a`)
iter = ParseString(ConfigDefault, `{}}a`)
iter.skipObject()
if iter.head != 3 {
t.Fatal(iter.head)
@ -147,12 +147,12 @@ func Test_skip_object(t *testing.T) {
r1: `{`,
r2: `}}a`,
}
iter = Parse(reader, 4096)
iter = Parse(ConfigDefault, reader, 4096)
iter.skipObject()
if iter.head != 2 {
t.Fatal(iter.head)
}
iter = ParseString(`"}"}a`)
iter = ParseString(ConfigDefault, `"}"}a`)
iter.skipObject()
if iter.head != 4 {
t.Fatal(iter.head)


@ -0,0 +1,37 @@
package jsoniter
import (
"encoding/json"
"github.com/json-iterator/go/require"
"testing"
)
func Test_encode_fixed_array(t *testing.T) {
should := require.New(t)
type FixedArray [2]float64
fixed := FixedArray{0.1, 1.0}
output, err := MarshalToString(fixed)
should.Nil(err)
should.Equal("[0.1,1]", output)
}
func Test_encode_fixed_array_of_map(t *testing.T) {
should := require.New(t)
type FixedArray [2]map[string]string
fixed := FixedArray{map[string]string{"1": "2"}, map[string]string{"3": "4"}}
output, err := MarshalToString(fixed)
should.Nil(err)
should.Equal(`[{"1":"2"},{"3":"4"}]`, output)
}
func Test_decode_fixed_array(t *testing.T) {
should := require.New(t)
type FixedArray [2]float64
var fixed FixedArray
should.Nil(json.Unmarshal([]byte("[1,2,3]"), &fixed))
should.Equal(float64(1), fixed[0])
should.Equal(float64(2), fixed[1])
should.Nil(Unmarshal([]byte("[1,2,3]"), &fixed))
should.Equal(float64(1), fixed[0])
should.Equal(float64(2), fixed[1])
}


@ -1,17 +1,17 @@
package jsoniter
import (
"bytes"
"encoding/json"
"fmt"
"testing"
"github.com/json-iterator/go/require"
"bytes"
"strconv"
"testing"
)
func Test_read_big_float(t *testing.T) {
should := require.New(t)
iter := ParseString(`12.3`)
iter := ParseString(ConfigDefault, `12.3`)
val := iter.ReadBigFloat()
val64, _ := val.Float64()
should.Equal(12.3, val64)
@ -19,7 +19,7 @@ func Test_read_big_float(t *testing.T) {
func Test_read_big_int(t *testing.T) {
should := require.New(t)
iter := ParseString(`92233720368547758079223372036854775807`)
iter := ParseString(ConfigDefault, `92233720368547758079223372036854775807`)
val := iter.ReadBigInt()
should.NotNil(val)
should.Equal(`92233720368547758079223372036854775807`, val.String())
@ -31,14 +31,14 @@ func Test_read_float(t *testing.T) {
// non-streaming
t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
should := require.New(t)
iter := ParseString(input + ",")
iter := ParseString(ConfigDefault, input+",")
expected, err := strconv.ParseFloat(input, 32)
should.Nil(err)
should.Equal(float32(expected), iter.ReadFloat32())
})
t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
should := require.New(t)
iter := ParseString(input + ",")
iter := ParseString(ConfigDefault, input+",")
expected, err := strconv.ParseFloat(input, 64)
should.Nil(err)
should.Equal(expected, iter.ReadFloat64())
@ -46,14 +46,14 @@ func Test_read_float(t *testing.T) {
// streaming
t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
should := require.New(t)
iter := Parse(bytes.NewBufferString(input + ","), 2)
iter := Parse(ConfigDefault, bytes.NewBufferString(input+","), 2)
expected, err := strconv.ParseFloat(input, 32)
should.Nil(err)
should.Equal(float32(expected), iter.ReadFloat32())
})
t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
should := require.New(t)
iter := Parse(bytes.NewBufferString(input + ","), 2)
iter := Parse(ConfigDefault, bytes.NewBufferString(input+","), 2)
expected, err := strconv.ParseFloat(input, 64)
should.Nil(err)
should.Equal(expected, iter.ReadFloat64())
@ -63,19 +63,10 @@ func Test_read_float(t *testing.T) {
func Test_read_float_as_interface(t *testing.T) {
should := require.New(t)
iter := ParseString(`12.3`)
iter := ParseString(ConfigDefault, `12.3`)
should.Equal(float64(12.3), iter.Read())
}
func Test_read_float_as_any(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("12.3")
should.Nil(err)
should.Equal(float64(12.3), any.ToFloat64())
should.Equal("12.3", any.ToString())
should.True(any.ToBool())
}
func Test_wrap_float(t *testing.T) {
should := require.New(t)
str, err := MarshalToString(WrapFloat64(12.3))
@ -85,12 +76,12 @@ func Test_wrap_float(t *testing.T) {
func Test_write_float32(t *testing.T) {
vals := []float32{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff,
-0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
-0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
for _, val := range vals {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteFloat32Lossy(val)
stream.Flush()
should.Nil(stream.Error)
@ -99,7 +90,7 @@ func Test_write_float32(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteVal(val)
stream.Flush()
should.Nil(stream.Error)
@ -108,7 +99,7 @@ func Test_write_float32(t *testing.T) {
}
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 10)
stream := NewStream(ConfigDefault, buf, 10)
stream.WriteRaw("abcdefg")
stream.WriteFloat32Lossy(1.123456)
stream.Flush()
@ -118,12 +109,12 @@ func Test_write_float32(t *testing.T) {
func Test_write_float64(t *testing.T) {
vals := []float64{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff,
-0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
-0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
for _, val := range vals {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteFloat64Lossy(val)
stream.Flush()
should.Nil(stream.Error)
@ -132,7 +123,7 @@ func Test_write_float64(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteVal(val)
stream.Flush()
should.Nil(stream.Error)
@ -141,7 +132,7 @@ func Test_write_float64(t *testing.T) {
}
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 10)
stream := NewStream(ConfigDefault, buf, 10)
stream.WriteRaw("abcdefg")
stream.WriteFloat64Lossy(1.123456)
stream.Flush()
@ -151,17 +142,41 @@ func Test_write_float64(t *testing.T) {
func Test_read_float64_cursor(t *testing.T) {
should := require.New(t)
iter := ParseString("[1.23456789\n,2,3]")
iter := ParseString(ConfigDefault, "[1.23456789\n,2,3]")
should.True(iter.ReadArray())
should.Equal(1.23456789, iter.Read())
should.True(iter.ReadArray())
should.Equal(float64(2), iter.Read())
}
func Test_read_float_scientific(t *testing.T) {
should := require.New(t)
var obj interface{}
should.Nil(UnmarshalFromString(`1e1`, &obj))
should.Equal(float64(10), obj)
should.Nil(json.Unmarshal([]byte(`1e1`), &obj))
should.Equal(float64(10), obj)
should.Nil(UnmarshalFromString(`1.0e1`, &obj))
should.Equal(float64(10), obj)
should.Nil(json.Unmarshal([]byte(`1.0e1`), &obj))
should.Equal(float64(10), obj)
}
func Test_lossy_float_marshal(t *testing.T) {
should := require.New(t)
api := Config{MarshalFloatWith6Digits: true}.Froze()
output, err := api.MarshalToString(float64(0.1234567))
should.Nil(err)
should.Equal("0.123457", output)
output, err = api.MarshalToString(float32(0.1234567))
should.Nil(err)
should.Equal("0.123457", output)
}
func Benchmark_jsoniter_float(b *testing.B) {
b.ReportAllocs()
input := []byte(`1.1123,`)
iter := NewIterator()
iter := NewIterator(ConfigDefault)
for n := 0; n < b.N; n++ {
iter.ResetBytes(input)
iter.ReadFloat64()


@ -3,17 +3,16 @@ package jsoniter
import (
"bytes"
"encoding/json"
"testing"
"github.com/json-iterator/go/require"
"fmt"
"strconv"
"github.com/json-iterator/go/require"
"io/ioutil"
"io"
"strconv"
"testing"
)
func Test_read_uint64_invalid(t *testing.T) {
should := require.New(t)
iter := ParseString(",")
iter := ParseString(ConfigDefault, ",")
iter.ReadUint64()
should.NotNil(iter.Error)
}
@ -23,7 +22,7 @@ func Test_read_int8(t *testing.T) {
for _, input := range inputs {
t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
should := require.New(t)
iter := ParseString(input)
iter := ParseString(ConfigDefault, input)
expected, err := strconv.ParseInt(input, 10, 8)
should.Nil(err)
should.Equal(int8(expected), iter.ReadInt8())
@ -36,7 +35,7 @@ func Test_read_int16(t *testing.T) {
for _, input := range inputs {
t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
should := require.New(t)
iter := ParseString(input)
iter := ParseString(ConfigDefault, input)
expected, err := strconv.ParseInt(input, 10, 16)
should.Nil(err)
should.Equal(int16(expected), iter.ReadInt16())
@ -49,14 +48,14 @@ func Test_read_int32(t *testing.T) {
for _, input := range inputs {
t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
should := require.New(t)
iter := ParseString(input)
iter := ParseString(ConfigDefault, input)
expected, err := strconv.ParseInt(input, 10, 32)
should.Nil(err)
should.Equal(int32(expected), iter.ReadInt32())
})
t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
should := require.New(t)
iter := Parse(bytes.NewBufferString(input), 2)
iter := Parse(ConfigDefault, bytes.NewBufferString(input), 2)
expected, err := strconv.ParseInt(input, 10, 32)
should.Nil(err)
should.Equal(int32(expected), iter.ReadInt32())
@ -83,7 +82,7 @@ func Test_read_int64_array(t *testing.T) {
func Test_read_int32_overflow(t *testing.T) {
should := require.New(t)
input := "123456789123456789,"
iter := ParseString(input)
iter := ParseString(ConfigDefault, input)
iter.ReadInt32()
should.NotNil(iter.Error)
}
@ -93,14 +92,14 @@ func Test_read_int64(t *testing.T) {
for _, input := range inputs {
t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
should := require.New(t)
iter := ParseString(input)
iter := ParseString(ConfigDefault, input)
expected, err := strconv.ParseInt(input, 10, 64)
should.Nil(err)
should.Equal(expected, iter.ReadInt64())
})
t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
should := require.New(t)
iter := Parse(bytes.NewBufferString(input), 2)
iter := Parse(ConfigDefault, bytes.NewBufferString(input), 2)
expected, err := strconv.ParseInt(input, 10, 64)
should.Nil(err)
should.Equal(expected, iter.ReadInt64())
@ -111,28 +110,11 @@ func Test_read_int64(t *testing.T) {
func Test_read_int64_overflow(t *testing.T) {
should := require.New(t)
input := "123456789123456789123456789123456789,"
iter := ParseString(input)
iter := ParseString(ConfigDefault, input)
iter.ReadInt64()
should.NotNil(iter.Error)
}
func Test_read_int64_as_any(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("1234")
should.Nil(err)
should.Equal(1234, any.ToInt())
should.Equal(io.EOF, any.LastError())
should.Equal("1234", any.ToString())
should.True(any.ToBool())
}
func Test_int_lazy_any_get(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("1234")
should.Nil(err)
should.Equal(Invalid, any.Get(1, "2").ValueType())
}
func Test_wrap_int(t *testing.T) {
should := require.New(t)
str, err := MarshalToString(WrapInt64(100))
@ -146,7 +128,7 @@ func Test_write_uint8(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteUint8(val)
stream.Flush()
should.Nil(stream.Error)
@ -155,7 +137,7 @@ func Test_write_uint8(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteVal(val)
stream.Flush()
should.Nil(stream.Error)
@ -164,7 +146,7 @@ func Test_write_uint8(t *testing.T) {
}
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 3)
stream := NewStream(ConfigDefault, buf, 3)
stream.WriteRaw("a")
stream.WriteUint8(100) // should clear buffer
stream.Flush()
@ -178,7 +160,7 @@ func Test_write_int8(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteInt8(val)
stream.Flush()
should.Nil(stream.Error)
@ -187,7 +169,7 @@ func Test_write_int8(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteVal(val)
stream.Flush()
should.Nil(stream.Error)
@ -196,7 +178,7 @@ func Test_write_int8(t *testing.T) {
}
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4)
stream := NewStream(ConfigDefault, buf, 4)
stream.WriteRaw("a")
stream.WriteInt8(-100) // should clear buffer
stream.Flush()
@ -210,7 +192,7 @@ func Test_write_uint16(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteUint16(val)
stream.Flush()
should.Nil(stream.Error)
@ -219,7 +201,7 @@ func Test_write_uint16(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteVal(val)
stream.Flush()
should.Nil(stream.Error)
@ -228,7 +210,7 @@ func Test_write_uint16(t *testing.T) {
}
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 5)
stream := NewStream(ConfigDefault, buf, 5)
stream.WriteRaw("a")
stream.WriteUint16(10000) // should clear buffer
stream.Flush()
@ -242,7 +224,7 @@ func Test_write_int16(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteInt16(val)
stream.Flush()
should.Nil(stream.Error)
@ -251,7 +233,7 @@ func Test_write_int16(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteVal(val)
stream.Flush()
should.Nil(stream.Error)
@ -260,7 +242,7 @@ func Test_write_int16(t *testing.T) {
}
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 6)
stream := NewStream(ConfigDefault, buf, 6)
stream.WriteRaw("a")
stream.WriteInt16(-10000) // should clear buffer
stream.Flush()
@ -274,7 +256,7 @@ func Test_write_uint32(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteUint32(val)
stream.Flush()
should.Nil(stream.Error)
@ -283,7 +265,7 @@ func Test_write_uint32(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteVal(val)
stream.Flush()
should.Nil(stream.Error)
@ -292,7 +274,7 @@ func Test_write_uint32(t *testing.T) {
}
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 10)
stream := NewStream(ConfigDefault, buf, 10)
stream.WriteRaw("a")
stream.WriteUint32(0xffffffff) // should clear buffer
stream.Flush()
@ -306,7 +288,7 @@ func Test_write_int32(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteInt32(val)
stream.Flush()
should.Nil(stream.Error)
@ -315,7 +297,7 @@ func Test_write_int32(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteVal(val)
stream.Flush()
should.Nil(stream.Error)
@ -324,7 +306,7 @@ func Test_write_int32(t *testing.T) {
}
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 11)
stream := NewStream(ConfigDefault, buf, 11)
stream.WriteRaw("a")
stream.WriteInt32(-0x7fffffff) // should clear buffer
stream.Flush()
@ -340,7 +322,7 @@ func Test_write_uint64(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteUint64(val)
stream.Flush()
should.Nil(stream.Error)
@ -349,7 +331,7 @@ func Test_write_uint64(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteVal(val)
stream.Flush()
should.Nil(stream.Error)
@ -358,7 +340,7 @@ func Test_write_uint64(t *testing.T) {
}
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 10)
stream := NewStream(ConfigDefault, buf, 10)
stream.WriteRaw("a")
stream.WriteUint64(0xffffffff) // should clear buffer
stream.Flush()
@ -374,7 +356,7 @@ func Test_write_int64(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteInt64(val)
stream.Flush()
should.Nil(stream.Error)
@ -383,7 +365,7 @@ func Test_write_int64(t *testing.T) {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteVal(val)
stream.Flush()
should.Nil(stream.Error)
@ -392,7 +374,7 @@ func Test_write_int64(t *testing.T) {
}
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 10)
stream := NewStream(ConfigDefault, buf, 10)
stream.WriteRaw("a")
stream.WriteInt64(0xffffffff) // should clear buffer
stream.Flush()
@ -403,7 +385,7 @@ func Test_write_int64(t *testing.T) {
func Test_write_val_int(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteVal(1001)
stream.Flush()
should.Nil(stream.Error)
@ -413,7 +395,7 @@ func Test_write_val_int(t *testing.T) {
func Test_write_val_int_ptr(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
val := 1001
stream.WriteVal(&val)
stream.Flush()
@ -433,7 +415,7 @@ func Test_json_number(t *testing.T) {
}
func Benchmark_jsoniter_encode_int(b *testing.B) {
stream := NewStream(ioutil.Discard, 64)
stream := NewStream(ConfigDefault, ioutil.Discard, 64)
for n := 0; n < b.N; n++ {
stream.n = 0
stream.WriteUint64(0xffffffff)
@ -447,7 +429,7 @@ func Benchmark_itoa(b *testing.B) {
}
func Benchmark_jsoniter_int(b *testing.B) {
iter := NewIterator()
iter := NewIterator(ConfigDefault)
input := []byte(`100`)
for n := 0; n < b.N; n++ {
iter.ResetBytes(input)


@ -1,8 +1,9 @@
package jsoniter
import (
"testing"
"encoding/json"
"github.com/json-iterator/go/require"
"testing"
"unsafe"
)
@ -16,7 +17,7 @@ func Test_write_array_of_interface(t *testing.T) {
func Test_write_map_of_interface(t *testing.T) {
should := require.New(t)
val := map[string]interface{}{"hello":"world"}
val := map[string]interface{}{"hello": "world"}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"hello":"world"}`, str)
@ -27,7 +28,7 @@ func Test_write_map_of_interface_in_struct(t *testing.T) {
Field map[string]interface{}
}
should := require.New(t)
val := TestObject{map[string]interface{}{"hello":"world"}}
val := TestObject{map[string]interface{}{"hello": "world"}}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"Field":{"hello":"world"}}`, str)
@ -35,11 +36,11 @@ func Test_write_map_of_interface_in_struct(t *testing.T) {
func Test_write_map_of_interface_in_struct_with_two_fields(t *testing.T) {
type TestObject struct {
Field map[string]interface{}
Field map[string]interface{}
Field2 string
}
should := require.New(t)
val := TestObject{map[string]interface{}{"hello":"world"}, ""}
val := TestObject{map[string]interface{}{"hello": "world"}, ""}
str, err := MarshalToString(val)
should.Nil(err)
should.Contains(str, `"Field":{"hello":"world"}`)
@ -59,7 +60,7 @@ func Test_write_map_of_custom_interface(t *testing.T) {
should := require.New(t)
myStr := MyString("world")
should.Equal("world", myStr.Hello())
val := map[string]MyInterface{"hello":myStr}
val := map[string]MyInterface{"hello": myStr}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"hello":"world"}`, str)
@ -85,7 +86,7 @@ func Test_read_interface(t *testing.T) {
func Test_read_custom_interface(t *testing.T) {
should := require.New(t)
var val MyInterface
RegisterTypeDecoder("jsoniter.MyInterface", func(ptr unsafe.Pointer, iter *Iterator) {
RegisterTypeDecoderFunc("jsoniter.MyInterface", func(ptr unsafe.Pointer, iter *Iterator) {
*((*MyInterface)(ptr)) = MyString(iter.ReadString())
})
err := UnmarshalFromString(`"hello"`, &val)
@ -137,4 +138,27 @@ func Test_encode_object_contain_non_empty_interface(t *testing.T) {
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{"Field":"hello"}`, str)
}
}
func Test_nil_non_empty_interface(t *testing.T) {
ConfigDefault.cleanEncoders()
ConfigDefault.cleanDecoders()
type TestObject struct {
Field []MyInterface
}
should := require.New(t)
obj := TestObject{}
b := []byte(`{"Field":["AAA"]}`)
should.NotNil(json.Unmarshal(b, &obj))
should.NotNil(Unmarshal(b, &obj))
}
func Test_read_large_number_as_interface(t *testing.T) {
should := require.New(t)
var val interface{}
err := Config{UseNumber: true}.Froze().UnmarshalFromString(`123456789123456789123456789`, &val)
should.Nil(err)
output, err := MarshalToString(val)
should.Nil(err)
should.Equal(`123456789123456789123456789`, output)
}
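
Test_read_large_number_as_interface above leans on the UseNumber option so that an integer too large for float64 survives a decode/encode round trip. A small sketch of the same option used from outside the package (import alias as in the fuzz tests below):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	api := jsoniter.Config{UseNumber: true}.Froze()
	var val interface{}
	// Without UseNumber the digits would be squeezed through float64 and lose precision.
	if err := api.UnmarshalFromString(`123456789123456789123456789`, &val); err != nil {
		panic(err)
	}
	out, _ := jsoniter.MarshalToString(val)
	fmt.Println(out) // 123456789123456789123456789
}
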

@ -7,7 +7,7 @@ import (
)
func Test_read_by_one(t *testing.T) {
iter := Parse(bytes.NewBufferString("abc"), 1)
iter := Parse(ConfigDefault, bytes.NewBufferString("abc"), 1)
b := iter.readByte()
if iter.Error != nil {
t.Fatal(iter.Error)
@ -34,7 +34,7 @@ func Test_read_by_one(t *testing.T) {
}
func Test_read_by_two(t *testing.T) {
iter := Parse(bytes.NewBufferString("abc"), 2)
iter := Parse(ConfigDefault, bytes.NewBufferString("abc"), 2)
b := iter.readByte()
if iter.Error != nil {
t.Fatal(iter.Error)
@ -67,7 +67,7 @@ func Test_read_by_two(t *testing.T) {
}
func Test_read_until_eof(t *testing.T) {
iter := Parse(bytes.NewBufferString("abc"), 2)
iter := Parse(ConfigDefault, bytes.NewBufferString("abc"), 2)
iter.readByte()
iter.readByte()
b := iter.readByte()

File diff suppressed because one or more lines are too long

@ -27,7 +27,7 @@ func Benchmark_jsoniter_large_file(b *testing.B) {
b.ReportAllocs()
for n := 0; n < b.N; n++ {
file, _ := os.Open("/tmp/large-file.json")
iter := Parse(file, 4096)
iter := Parse(ConfigDefault, file, 4096)
count := 0
for iter.ReadArray() {
iter.Skip()

@ -1,13 +1,15 @@
package jsoniter
import (
"testing"
"encoding/json"
"github.com/json-iterator/go/require"
"math/big"
"testing"
)
func Test_read_map(t *testing.T) {
should := require.New(t)
iter := ParseString(`{"hello": "world"}`)
iter := ParseString(ConfigDefault, `{"hello": "world"}`)
m := map[string]string{"1": "2"}
iter.ReadVal(&m)
copy(iter.buf, []byte{0, 0, 0, 0, 0, 0})
@ -16,36 +18,17 @@ func Test_read_map(t *testing.T) {
func Test_read_map_of_interface(t *testing.T) {
should := require.New(t)
iter := ParseString(`{"hello": "world"}`)
iter := ParseString(ConfigDefault, `{"hello": "world"}`)
m := map[string]interface{}{"1": "2"}
iter.ReadVal(&m)
should.Equal(map[string]interface{}{"1": "2", "hello": "world"}, m)
iter = ParseString(`{"hello": "world"}`)
iter = ParseString(ConfigDefault, `{"hello": "world"}`)
should.Equal(map[string]interface{}{"hello": "world"}, iter.Read())
}
func Test_wrap_map(t *testing.T) {
should := require.New(t)
any := Wrap(map[string]string{"Field1": "hello"})
should.Equal("hello", any.Get("Field1").ToString())
any = Wrap(map[string]string{"Field1": "hello"})
should.Equal(1, any.Size())
any = Wrap(map[string]string{"Field1": "hello"})
vals := map[string]string{}
var k string
var v Any
for next, hasNext := any.IterateObject(); hasNext; {
k, v, hasNext = next()
if v.ValueType() == String {
vals[k] = v.ToString()
}
}
should.Equal(map[string]string{"Field1":"hello"}, vals)
}
func Test_map_wrapper_any_get_all(t *testing.T) {
should := require.New(t)
any := Wrap(map[string][]int{"Field1": []int{1, 2}})
any := Wrap(map[string][]int{"Field1": {1, 2}})
should.Equal(`{"Field1":1}`, any.Get('*', 0).ToString())
}
@ -66,4 +49,74 @@ func Test_slice_of_map(t *testing.T) {
val = []map[string]string{}
should.Nil(UnmarshalFromString(str, &val))
should.Equal("2", val[0]["1"])
}
}
func Test_encode_int_key_map(t *testing.T) {
should := require.New(t)
val := map[int]string{1: "2"}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"1":"2"}`, str)
}
func Test_decode_int_key_map(t *testing.T) {
should := require.New(t)
var val map[int]string
should.Nil(UnmarshalFromString(`{"1":"2"}`, &val))
should.Equal(map[int]string{1: "2"}, val)
}
func Test_encode_TextMarshaler_key_map(t *testing.T) {
should := require.New(t)
f, _, _ := big.ParseFloat("1", 10, 64, big.ToZero)
val := map[*big.Float]string{f: "2"}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"1":"2"}`, str)
}
func Test_decode_TextMarshaler_key_map(t *testing.T) {
should := require.New(t)
var val map[*big.Float]string
should.Nil(UnmarshalFromString(`{"1":"2"}`, &val))
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"1":"2"}`, str)
}
func Test_map_key_with_escaped_char(t *testing.T) {
type Ttest struct {
Map map[string]string
}
var jsonBytes = []byte(`
{
"Map":{
"k\"ey": "val"
}
}`)
should := require.New(t)
{
var obj Ttest
should.Nil(json.Unmarshal(jsonBytes, &obj))
should.Equal(map[string]string{"k\"ey": "val"}, obj.Map)
}
{
var obj Ttest
should.Nil(Unmarshal(jsonBytes, &obj))
should.Equal(map[string]string{"k\"ey": "val"}, obj.Map)
}
}
func Test_encode_map_with_sorted_keys(t *testing.T) {
should := require.New(t)
m := map[string]interface{}{
"3": 3,
"1": 1,
"2": 2,
}
bytes, err := json.Marshal(m)
should.Nil(err)
output, err := ConfigCompatibleWithStandardLibrary.MarshalToString(m)
should.Nil(err)
should.Equal(string(bytes), output)
}
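
The new map tests above cover non-string keys: int keys and encoding.TextMarshaler keys are written as JSON string keys and read back again. A short round-trip sketch of the int-keyed case (illustrative only):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Int keys are rendered as JSON strings on encode...
	str, err := jsoniter.MarshalToString(map[int]string{1: "2"})
	fmt.Println(str, err) // {"1":"2"} <nil>

	// ...and parsed back into ints on decode.
	var decoded map[int]string
	err = jsoniter.UnmarshalFromString(`{"1":"2"}`, &decoded)
	fmt.Println(decoded, err) // map[1:2] <nil>
}
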

@ -15,7 +15,7 @@ type Level2 struct {
}
func Test_nested(t *testing.T) {
iter := ParseString(`{"hello": [{"world": "value1"}, {"world": "value2"}]}`)
iter := ParseString(ConfigDefault, `{"hello": [{"world": "value1"}, {"world": "value2"}]}`)
l1 := Level1{}
for l1Field := iter.ReadObject(); l1Field != ""; l1Field = iter.ReadObject() {
switch l1Field {
@ -28,14 +28,14 @@ func Test_nested(t *testing.T) {
case "world":
l2.World = iter.ReadString()
default:
iter.reportError("bind l2", "unexpected field: "+l2Field)
iter.ReportError("bind l2", "unexpected field: "+l2Field)
}
}
l2Array = append(l2Array, l2)
}
l1.Hello = l2Array
default:
iter.reportError("bind l1", "unexpected field: "+l1Field)
iter.ReportError("bind l1", "unexpected field: "+l1Field)
}
}
if !reflect.DeepEqual(l1, Level1{
@ -50,7 +50,7 @@ func Test_nested(t *testing.T) {
func Benchmark_jsoniter_nested(b *testing.B) {
for n := 0; n < b.N; n++ {
iter := ParseString(`{"hello": [{"world": "value1"}, {"world": "value2"}]}`)
iter := ParseString(ConfigDefault, `{"hello": [{"world": "value1"}, {"world": "value2"}]}`)
l1 := Level1{}
for l1Field := iter.ReadObject(); l1Field != ""; l1Field = iter.ReadObject() {
switch l1Field {

@ -1,30 +1,24 @@
package jsoniter
import (
"testing"
"github.com/json-iterator/go/require"
"bytes"
"encoding/json"
"github.com/json-iterator/go/require"
"testing"
)
func Test_read_null(t *testing.T) {
should := require.New(t)
iter := ParseString(`null`)
iter := ParseString(ConfigDefault, `null`)
should.True(iter.ReadNil())
iter = ParseString(`null`)
iter = ParseString(ConfigDefault, `null`)
should.Nil(iter.Read())
iter = ParseString(`null`)
any, err := UnmarshalAnyFromString(`null`)
should.Nil(err)
should.Equal(0, any.ToInt())
should.Equal(float64(0), any.ToFloat64())
should.Equal("", any.ToString())
should.False(any.ToBool())
}
func Test_write_null(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteNil()
stream.Flush()
should.Nil(stream.Error)
@ -40,7 +34,7 @@ func Test_encode_null(t *testing.T) {
func Test_decode_null_object(t *testing.T) {
should := require.New(t)
iter := ParseString(`[null,"a"]`)
iter := ParseString(ConfigDefault, `[null,"a"]`)
iter.ReadArray()
if iter.ReadObject() != "" {
t.FailNow()
@ -58,7 +52,7 @@ func Test_decode_null_object(t *testing.T) {
}
func Test_decode_null_array(t *testing.T) {
iter := ParseString(`[null,"a"]`)
iter := ParseString(ConfigDefault, `[null,"a"]`)
iter.ReadArray()
if iter.ReadArray() != false {
t.FailNow()
@ -71,7 +65,7 @@ func Test_decode_null_array(t *testing.T) {
func Test_decode_null_string(t *testing.T) {
should := require.New(t)
iter := ParseString(`[null,"a"]`)
iter := ParseString(ConfigDefault, `[null,"a"]`)
should.True(iter.ReadArray())
should.Equal("", iter.ReadString())
should.True(iter.ReadArray())
@ -79,7 +73,7 @@ func Test_decode_null_string(t *testing.T) {
}
func Test_decode_null_skip(t *testing.T) {
iter := ParseString(`[null,"a"]`)
iter := ParseString(ConfigDefault, `[null,"a"]`)
iter.ReadArray()
iter.Skip()
iter.ReadArray()
@ -87,3 +81,39 @@ func Test_decode_null_skip(t *testing.T) {
t.FailNow()
}
}
func Test_encode_nil_map(t *testing.T) {
should := require.New(t)
type Ttest map[string]string
var obj1 Ttest
output, err := json.Marshal(obj1)
should.Nil(err)
should.Equal("null", string(output))
output, err = json.Marshal(&obj1)
should.Nil(err)
should.Equal("null", string(output))
output, err = Marshal(obj1)
should.Nil(err)
should.Equal("null", string(output))
output, err = Marshal(&obj1)
should.Nil(err)
should.Equal("null", string(output))
}
func Test_encode_nil_array(t *testing.T) {
should := require.New(t)
type Ttest []string
var obj1 Ttest
output, err := json.Marshal(obj1)
should.Nil(err)
should.Equal("null", string(output))
output, err = json.Marshal(&obj1)
should.Nil(err)
should.Equal("null", string(output))
output, err = Marshal(obj1)
should.Nil(err)
should.Equal("null", string(output))
output, err = Marshal(&obj1)
should.Nil(err)
should.Equal("null", string(output))
}

@ -1,18 +1,18 @@
package jsoniter
import (
"encoding/json"
"testing"
"github.com/json-iterator/go/require"
"bytes"
"fmt"
"github.com/json-iterator/go/require"
"testing"
)
func Test_empty_object(t *testing.T) {
should := require.New(t)
iter := ParseString(`{}`)
iter := ParseString(ConfigDefault, `{}`)
field := iter.ReadObject()
should.Equal("", field)
iter = ParseString(`{}`)
iter = ParseString(ConfigDefault, `{}`)
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
should.FailNow("should not call")
return true
@ -21,14 +21,14 @@ func Test_empty_object(t *testing.T) {
func Test_one_field(t *testing.T) {
should := require.New(t)
iter := ParseString(`{"a": "b"}`)
iter := ParseString(ConfigDefault, `{"a": "b"}`)
field := iter.ReadObject()
should.Equal("a", field)
value := iter.ReadString()
should.Equal("b", value)
field = iter.ReadObject()
should.Equal("", field)
iter = ParseString(`{"a": "b"}`)
iter = ParseString(ConfigDefault, `{"a": "b"}`)
should.True(iter.ReadObjectCB(func(iter *Iterator, field string) bool {
should.Equal("a", field)
return true
@ -37,7 +37,7 @@ func Test_one_field(t *testing.T) {
func Test_two_field(t *testing.T) {
should := require.New(t)
iter := ParseString(`{ "a": "b" , "c": "d" }`)
iter := ParseString(ConfigDefault, `{ "a": "b" , "c": "d" }`)
field := iter.ReadObject()
should.Equal("a", field)
value := iter.ReadString()
@ -48,7 +48,7 @@ func Test_two_field(t *testing.T) {
should.Equal("d", value)
field = iter.ReadObject()
should.Equal("", field)
iter = ParseString(`{"field1": "1", "field2": 2}`)
iter = ParseString(ConfigDefault, `{"field1": "1", "field2": 2}`)
for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
switch field {
case "field1":
@ -56,147 +56,11 @@ func Test_two_field(t *testing.T) {
case "field2":
iter.ReadInt64()
default:
iter.reportError("bind object", "unexpected field")
iter.ReportError("bind object", "unexpected field")
}
}
}
func Test_read_object_as_any(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString(`{"a":"b","c":"d"}`)
should.Nil(err)
should.Equal(`{"a":"b","c":"d"}`, any.ToString())
// partial parse
should.Equal("b", any.Get("a").ToString())
should.Equal("d", any.Get("c").ToString())
should.Equal(2, len(any.Keys()))
any, err = UnmarshalAnyFromString(`{"a":"b","c":"d"}`)
// full parse
should.Equal(2, len(any.Keys()))
should.Equal(2, any.Size())
should.True(any.ToBool())
should.Equal(1, any.ToInt())
}
func Test_object_any_lazy_iterator(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString(`{"a":"b","c":"d"}`)
should.Nil(err)
// iterator parse
vals := map[string]string{}
var k string
var v Any
next, hasNext := any.IterateObject()
should.True(hasNext)
k, v, hasNext = next()
should.True(hasNext)
vals[k] = v.ToString()
// trigger full parse
should.Equal(2, len(any.Keys()))
k, v, hasNext = next()
should.False(hasNext)
vals[k] = v.ToString()
should.Equal(map[string]string{"a":"b", "c":"d"}, vals)
vals = map[string]string{}
for next, hasNext := any.IterateObject(); hasNext; {
k, v, hasNext = next()
if v.ValueType() == String {
vals[k] = v.ToString()
}
}
should.Equal(map[string]string{"a":"b", "c":"d"}, vals)
}
func Test_object_any_with_two_lazy_iterators(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString(`{"a":"b","c":"d","e":"f"}`)
should.Nil(err)
var k string
var v Any
next1, hasNext1 := any.IterateObject()
next2, hasNext2 := any.IterateObject()
should.True(hasNext1)
k, v, hasNext1 = next1()
should.True(hasNext1)
should.Equal("a", k)
should.Equal("b", v.ToString())
should.True(hasNext2)
k, v, hasNext2 = next2()
should.True(hasNext2)
should.Equal("a", k)
should.Equal("b", v.ToString())
k, v, hasNext1 = next1()
should.True(hasNext1)
should.Equal("c", k)
should.Equal("d", v.ToString())
k, v, hasNext2 = next2()
should.True(hasNext2)
should.Equal("c", k)
should.Equal("d", v.ToString())
}
func Test_object_lazy_any_get(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString(`{"a":{"b":{"c":"d"}}}`)
should.Nil(err)
should.Equal("d", any.Get("a", "b", "c").ToString())
}
func Test_object_lazy_any_get_all(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString(`{"a":[0],"b":[1]}`)
should.Nil(err)
should.Contains(any.Get('*', 0).ToString(), `"a":0`)
}
func Test_object_lazy_any_get_invalid(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString(`{}`)
should.Nil(err)
should.Equal(Invalid, any.Get("a", "b", "c").ValueType())
should.Equal(Invalid, any.Get(1).ValueType())
}
func Test_object_lazy_any_set(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString(`{"a":{"b":{"c":"d"}}}`)
should.Nil(err)
any.GetObject()["a"] = WrapInt64(1)
str, err := MarshalToString(any)
should.Nil(err)
should.Equal(`{"a":1}`, str)
}
func Test_wrap_object(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
field2 string
}
any := Wrap(TestObject{"hello", "world"})
should.Equal("hello", any.Get("Field1").ToString())
any = Wrap(TestObject{"hello", "world"})
should.Equal(2, any.Size())
any = Wrap(TestObject{"hello", "world"})
vals := map[string]string{}
var k string
var v Any
for next, hasNext := any.IterateObject(); hasNext; {
k, v, hasNext = next()
if v.ValueType() == String {
vals[k] = v.ToString()
}
}
should.Equal(map[string]string{"Field1":"hello"}, vals)
}
func Test_object_wrapper_any_get_all(t *testing.T) {
should := require.New(t)
type TestObject struct {
@ -210,8 +74,7 @@ func Test_object_wrapper_any_get_all(t *testing.T) {
func Test_write_object(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream.IndentionStep = 2
stream := NewStream(Config{IndentionStep: 2}.Froze(), buf, 4096)
stream.WriteObjectStart()
stream.WriteObjectField("hello")
stream.WriteInt(1)
@ -224,34 +87,348 @@ func Test_write_object(t *testing.T) {
should.Equal("{\n \"hello\":1,\n \"world\":2\n}", buf.String())
}
func Benchmark_jsoniter_object(b *testing.B) {
type TestObj struct {
func Test_decode_one_field_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
Field2 uint64
}
for n := 0; n < b.N; n++ {
iter := ParseString(`{"field1": "1", "field2": 2}`)
obj := TestObj{}
for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
switch field {
case "field1":
obj.Field1 = iter.ReadString()
case "field2":
obj.Field2 = iter.ReadUint64()
default:
iter.reportError("bind object", "unexpected field")
}
}
}
obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"field1": "hello"}`, &obj))
should.Equal("hello", obj.Field1)
}
func Benchmark_json_object(b *testing.B) {
type TestObj struct {
func Test_decode_two_fields_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
Field2 uint64
Field2 string
}
for n := 0; n < b.N; n++ {
result := TestObj{}
json.Unmarshal([]byte(`{"field1": "1", "field2": 2}`), &result)
obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b"}`, &obj))
should.Equal("a", obj.Field1)
should.Equal("b", obj.Field2)
}
func Test_decode_three_fields_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
Field2 string
Field3 string
}
obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b", "Field3": "c"}`, &obj))
should.Equal("a", obj.Field1)
should.Equal("b", obj.Field2)
should.Equal("c", obj.Field3)
}
func Test_decode_four_fields_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
Field2 string
Field3 string
Field4 string
}
obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b", "Field3": "c", "Field4": "d"}`, &obj))
should.Equal("a", obj.Field1)
should.Equal("b", obj.Field2)
should.Equal("c", obj.Field3)
should.Equal("d", obj.Field4)
}
func Test_decode_five_fields_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
Field2 string
Field3 string
Field4 string
Field5 string
}
obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b", "Field3": "c", "Field4": "d", "Field5": "e"}`, &obj))
should.Equal("a", obj.Field1)
should.Equal("b", obj.Field2)
should.Equal("c", obj.Field3)
should.Equal("d", obj.Field4)
should.Equal("e", obj.Field5)
}
func Test_decode_ten_fields_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
Field2 string
Field3 string
Field4 string
Field5 string
Field6 string
Field7 string
Field8 string
Field9 string
Field10 string
}
obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b", "Field3": "c", "Field4": "d", "Field5": "e"}`, &obj))
should.Equal("a", obj.Field1)
should.Equal("b", obj.Field2)
should.Equal("c", obj.Field3)
should.Equal("d", obj.Field4)
should.Equal("e", obj.Field5)
}
func Test_decode_struct_field_with_tag(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string `json:"field-1"`
Field2 string `json:"-"`
Field3 int `json:",string"`
}
obj := TestObject{Field2: "world"}
UnmarshalFromString(`{"field-1": "hello", "field2": "", "Field3": "100"}`, &obj)
should.Equal("hello", obj.Field1)
should.Equal("world", obj.Field2)
should.Equal(100, obj.Field3)
}
func Test_decode_struct_field_with_tag_string(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 int `json:",string"`
}
obj := TestObject{Field1: 100}
should.Nil(UnmarshalFromString(`{"Field1": "100"}`, &obj))
should.Equal(100, obj.Field1)
}
func Test_write_val_zero_field_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
}
obj := TestObject{}
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{}`, str)
}
func Test_write_val_one_field_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string `json:"field-1"`
}
obj := TestObject{"hello"}
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{"field-1":"hello"}`, str)
}
func Test_mixed(t *testing.T) {
should := require.New(t)
type AA struct {
ID int `json:"id"`
Payload map[string]interface{} `json:"payload"`
buf *bytes.Buffer `json:"-"`
}
aa := AA{}
err := UnmarshalFromString(` {"id":1, "payload":{"account":"123","password":"456"}}`, &aa)
should.Nil(err)
should.Equal(1, aa.ID)
should.Equal("123", aa.Payload["account"])
}
func Test_omit_empty(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string `json:"field-1,omitempty"`
Field2 string `json:"field-2,omitempty"`
Field3 string `json:"field-3,omitempty"`
}
obj := TestObject{}
obj.Field2 = "hello"
str, err := MarshalToString(&obj)
should.Nil(err)
should.Equal(`{"field-2":"hello"}`, str)
}
func Test_recursive_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
Me *TestObject
}
obj := TestObject{}
str, err := MarshalToString(obj)
should.Nil(err)
should.Contains(str, `"Field1":""`)
should.Contains(str, `"Me":null`)
err = UnmarshalFromString(str, &obj)
should.Nil(err)
}
func Test_one_field_struct(t *testing.T) {
should := require.New(t)
type YetYetAnotherObject struct {
Field string
}
type YetAnotherObject struct {
Field *YetYetAnotherObject
}
type AnotherObject struct {
Field *YetAnotherObject
}
type TestObject struct {
Me *AnotherObject
}
obj := TestObject{&AnotherObject{&YetAnotherObject{&YetYetAnotherObject{"abc"}}}}
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{"Me":{"Field":{"Field":{"Field":"abc"}}}}`, str)
str, err = MarshalToString(&obj)
should.Nil(err)
should.Equal(`{"Me":{"Field":{"Field":{"Field":"abc"}}}}`, str)
}
func Test_encode_anonymous_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field string
}
str, err := MarshalToString(struct {
TestObject
Field int
}{
Field: 100,
})
should.Nil(err)
should.Equal(`{"Field":100}`, str)
}
func Test_decode_anonymous_struct(t *testing.T) {
should := require.New(t)
type Inner struct {
Key string `json:"key"`
}
type Outer struct {
Inner
}
var outer Outer
j := []byte("{\"key\":\"value\"}")
should.Nil(Unmarshal(j, &outer))
should.Equal("value", outer.Key)
}
func Test_multiple_level_anonymous_struct(t *testing.T) {
type Level1 struct {
Field1 string
}
type Level2 struct {
Level1
Field2 string
}
type Level3 struct {
Level2
Field3 string
}
should := require.New(t)
obj := Level3{Level2{Level1{"1"}, "2"}, "3"}
output, err := MarshalToString(obj)
should.Nil(err)
fmt.Println(output)
should.Contains(output, `"Field1":"1"`)
should.Contains(output, `"Field2":"2"`)
should.Contains(output, `"Field3":"3"`)
}
func Test_multiple_level_anonymous_struct_with_ptr(t *testing.T) {
type Level1 struct {
Field1 string
Field2 string
Field4 string
}
type Level2 struct {
*Level1
Field2 string
Field3 string
}
type Level3 struct {
*Level2
Field3 string
}
should := require.New(t)
obj := Level3{&Level2{&Level1{"1", "", "4"}, "2", ""}, "3"}
output, err := MarshalToString(obj)
should.Nil(err)
should.Contains(output, `"Field1":"1"`)
should.Contains(output, `"Field2":"2"`)
should.Contains(output, `"Field3":"3"`)
should.Contains(output, `"Field4":"4"`)
}
func Test_shadow_struct_field(t *testing.T) {
should := require.New(t)
type omit *struct{}
type CacheItem struct {
Key string `json:"key"`
MaxAge int `json:"cacheAge"`
}
output, err := MarshalToString(struct {
*CacheItem
// Omit bad keys
OmitMaxAge omit `json:"cacheAge,omitempty"`
// Add nice keys
MaxAge int `json:"max_age"`
}{
CacheItem: &CacheItem{
Key: "value",
MaxAge: 100,
},
MaxAge: 20,
})
should.Nil(err)
should.Contains(output, `"key":"value"`)
should.Contains(output, `"max_age":20`)
}
func Test_decode_nested(t *testing.T) {
type StructOfString struct {
Field1 string
Field2 string
}
iter := ParseString(ConfigDefault, `[{"field1": "hello"}, null, {"field2": "world"}]`)
slice := []*StructOfString{}
iter.ReadVal(&slice)
if len(slice) != 3 {
fmt.Println(iter.Error)
t.Fatal(len(slice))
}
if slice[0].Field1 != "hello" {
fmt.Println(iter.Error)
t.Fatal(slice[0])
}
if slice[1] != nil {
fmt.Println(iter.Error)
t.Fatal(slice[1])
}
if slice[2].Field2 != "world" {
fmt.Println(iter.Error)
t.Fatal(slice[2])
}
}
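
Besides the config argument, the hunks above rename iter.reportError to the exported iter.ReportError, which is what a hand-written binding loop calls when it meets an unknown field. A compact sketch of such a loop (the struct and field names are made up for illustration):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type pair struct {
	Key   string
	Count int64
}

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"key": "a", "count": 2}`)
	p := pair{}
	for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
		switch field {
		case "key":
			p.Key = iter.ReadString()
		case "count":
			p.Count = iter.ReadInt64()
		default:
			// ReportError records the problem on iter.Error.
			iter.ReportError("bind pair", "unexpected field: "+field)
		}
	}
	fmt.Println(p, iter.Error) // {a 2} <nil>
}
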

@ -1,8 +1,8 @@
package jsoniter
import (
"testing"
"github.com/json-iterator/go/require"
"testing"
)
func Test_encode_optional_int_pointer(t *testing.T) {
@ -21,26 +21,26 @@ func Test_encode_optional_int_pointer(t *testing.T) {
func Test_decode_struct_with_optional_field(t *testing.T) {
should := require.New(t)
type TestObject struct {
field1 *string
field2 *string
Field1 *string
Field2 *string
}
obj := TestObject{}
UnmarshalFromString(`{"field1": null, "field2": "world"}`, &obj)
should.Nil(obj.field1)
should.Equal("world", *obj.field2)
should.Nil(obj.Field1)
should.Equal("world", *obj.Field2)
}
func Test_encode_struct_with_optional_field(t *testing.T) {
should := require.New(t)
type TestObject struct {
field1 *string
field2 *string
Field1 *string
Field2 *string
}
obj := TestObject{}
world := "world"
obj.field2 = &world
obj.Field2 = &world
str, err := MarshalToString(obj)
should.Nil(err)
should.Contains(str, `"field1":null`)
should.Contains(str, `"field2":"world"`)
}
should.Contains(str, `"Field1":null`)
should.Contains(str, `"Field2":"world"`)
}

@ -0,0 +1,74 @@
package jsoniter
import (
"encoding/json"
"github.com/json-iterator/go/require"
"testing"
)
func Test_json_RawMessage(t *testing.T) {
should := require.New(t)
var data json.RawMessage
should.Nil(Unmarshal([]byte(`[1,2,3]`), &data))
should.Equal(`[1,2,3]`, string(data))
str, err := MarshalToString(data)
should.Nil(err)
should.Equal(`[1,2,3]`, str)
}
func Test_jsoniter_RawMessage(t *testing.T) {
should := require.New(t)
var data RawMessage
should.Nil(Unmarshal([]byte(`[1,2,3]`), &data))
should.Equal(`[1,2,3]`, string(data))
str, err := MarshalToString(data)
should.Nil(err)
should.Equal(`[1,2,3]`, str)
}
func Test_json_RawMessage_in_struct(t *testing.T) {
type TestObject struct {
Field1 string
Field2 json.RawMessage
}
should := require.New(t)
var data TestObject
should.Nil(Unmarshal([]byte(`{"field1": "hello", "field2": [1,2,3]}`), &data))
should.Equal(` [1,2,3]`, string(data.Field2))
should.Equal(`hello`, data.Field1)
}
func Test_decode_map_of_raw_message(t *testing.T) {
should := require.New(t)
type RawMap map[string]*json.RawMessage
b := []byte("{\"test\":[{\"key\":\"value\"}]}")
var rawMap RawMap
should.Nil(Unmarshal(b, &rawMap))
should.Equal(`[{"key":"value"}]`, string(*rawMap["test"]))
type Inner struct {
Key string `json:"key"`
}
var inner []Inner
Unmarshal(*rawMap["test"], &inner)
should.Equal("value", inner[0].Key)
}
func Test_encode_map_of_raw_message(t *testing.T) {
should := require.New(t)
type RawMap map[string]*json.RawMessage
value := json.RawMessage("[]")
rawMap := RawMap{"hello": &value}
output, err := MarshalToString(rawMap)
should.Nil(err)
should.Equal(`{"hello":[]}`, output)
}
func Test_encode_map_of_jsoniter_raw_message(t *testing.T) {
should := require.New(t)
type RawMap map[string]*RawMessage
value := RawMessage("[]")
rawMap := RawMap{"hello": &value}
output, err := MarshalToString(rawMap)
should.Nil(err)
should.Equal(`{"hello":[]}`, output)
}

@ -1,12 +1,12 @@
package jsoniter
import (
"testing"
"fmt"
"testing"
)
func Test_reflect_str(t *testing.T) {
iter := ParseString(`"hello"`)
iter := ParseString(ConfigDefault, `"hello"`)
str := ""
iter.ReadVal(&str)
if str != "hello" {
@ -16,7 +16,7 @@ func Test_reflect_str(t *testing.T) {
}
func Test_reflect_ptr_str(t *testing.T) {
iter := ParseString(`"hello"`)
iter := ParseString(ConfigDefault, `"hello"`)
var str *string
iter.ReadVal(&str)
if *str != "hello" {
@ -25,7 +25,7 @@ func Test_reflect_ptr_str(t *testing.T) {
}
func Test_reflect_int(t *testing.T) {
iter := ParseString(`123`)
iter := ParseString(ConfigDefault, `123`)
val := int(0)
iter.ReadVal(&val)
if val != 123 {
@ -34,7 +34,7 @@ func Test_reflect_int(t *testing.T) {
}
func Test_reflect_int8(t *testing.T) {
iter := ParseString(`123`)
iter := ParseString(ConfigDefault, `123`)
val := int8(0)
iter.ReadVal(&val)
if val != 123 {
@ -43,7 +43,7 @@ func Test_reflect_int8(t *testing.T) {
}
func Test_reflect_int16(t *testing.T) {
iter := ParseString(`123`)
iter := ParseString(ConfigDefault, `123`)
val := int16(0)
iter.ReadVal(&val)
if val != 123 {
@ -52,7 +52,7 @@ func Test_reflect_int16(t *testing.T) {
}
func Test_reflect_int32(t *testing.T) {
iter := ParseString(`123`)
iter := ParseString(ConfigDefault, `123`)
val := int32(0)
iter.ReadVal(&val)
if val != 123 {
@ -61,7 +61,7 @@ func Test_reflect_int32(t *testing.T) {
}
func Test_reflect_int64(t *testing.T) {
iter := ParseString(`123`)
iter := ParseString(ConfigDefault, `123`)
val := int64(0)
iter.ReadVal(&val)
if val != 123 {
@ -70,7 +70,7 @@ func Test_reflect_int64(t *testing.T) {
}
func Test_reflect_uint(t *testing.T) {
iter := ParseString(`123`)
iter := ParseString(ConfigDefault, `123`)
val := uint(0)
iter.ReadVal(&val)
if val != 123 {
@ -79,7 +79,7 @@ func Test_reflect_uint(t *testing.T) {
}
func Test_reflect_uint8(t *testing.T) {
iter := ParseString(`123`)
iter := ParseString(ConfigDefault, `123`)
val := uint8(0)
iter.ReadVal(&val)
if val != 123 {
@ -88,7 +88,7 @@ func Test_reflect_uint8(t *testing.T) {
}
func Test_reflect_uint16(t *testing.T) {
iter := ParseString(`123`)
iter := ParseString(ConfigDefault, `123`)
val := uint16(0)
iter.ReadVal(&val)
if val != 123 {
@ -97,7 +97,7 @@ func Test_reflect_uint16(t *testing.T) {
}
func Test_reflect_uint32(t *testing.T) {
iter := ParseString(`123`)
iter := ParseString(ConfigDefault, `123`)
val := uint32(0)
iter.ReadVal(&val)
if val != 123 {
@ -106,7 +106,7 @@ func Test_reflect_uint32(t *testing.T) {
}
func Test_reflect_uint64(t *testing.T) {
iter := ParseString(`123`)
iter := ParseString(ConfigDefault, `123`)
val := uint64(0)
iter.ReadVal(&val)
if val != 123 {
@ -115,7 +115,7 @@ func Test_reflect_uint64(t *testing.T) {
}
func Test_reflect_byte(t *testing.T) {
iter := ParseString(`123`)
iter := ParseString(ConfigDefault, `123`)
val := byte(0)
iter.ReadVal(&val)
if val != 123 {
@ -124,7 +124,7 @@ func Test_reflect_byte(t *testing.T) {
}
func Test_reflect_float32(t *testing.T) {
iter := ParseString(`1.23`)
iter := ParseString(ConfigDefault, `1.23`)
val := float32(0)
iter.ReadVal(&val)
if val != 1.23 {
@ -134,7 +134,7 @@ func Test_reflect_float32(t *testing.T) {
}
func Test_reflect_float64(t *testing.T) {
iter := ParseString(`1.23`)
iter := ParseString(ConfigDefault, `1.23`)
val := float64(0)
iter.ReadVal(&val)
if val != 1.23 {
@ -144,11 +144,11 @@ func Test_reflect_float64(t *testing.T) {
}
func Test_reflect_bool(t *testing.T) {
iter := ParseString(`true`)
iter := ParseString(ConfigDefault, `true`)
val := false
iter.ReadVal(&val)
if val != true {
fmt.Println(iter.Error)
t.Fatal(val)
}
}
}

@ -1,241 +0,0 @@
package jsoniter
import (
"testing"
"github.com/json-iterator/go/require"
"bytes"
)
func Test_decode_one_field_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
}
obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"field1": "hello"}`, &obj))
should.Equal("hello", obj.Field1)
}
func Test_decode_two_fields_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
Field2 string
}
obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b"}`, &obj))
should.Equal("a", obj.Field1)
should.Equal("b", obj.Field2)
}
func Test_decode_three_fields_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
Field2 string
Field3 string
}
obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b", "Field3": "c"}`, &obj))
should.Equal("a", obj.Field1)
should.Equal("b", obj.Field2)
should.Equal("c", obj.Field3)
}
func Test_decode_four_fields_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
Field2 string
Field3 string
Field4 string
}
obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b", "Field3": "c", "Field4": "d"}`, &obj))
should.Equal("a", obj.Field1)
should.Equal("b", obj.Field2)
should.Equal("c", obj.Field3)
should.Equal("d", obj.Field4)
}
func Test_decode_five_fields_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
Field2 string
Field3 string
Field4 string
Field5 string
}
obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b", "Field3": "c", "Field4": "d", "Field5": "e"}`, &obj))
should.Equal("a", obj.Field1)
should.Equal("b", obj.Field2)
should.Equal("c", obj.Field3)
should.Equal("d", obj.Field4)
should.Equal("e", obj.Field5)
}
func Test_decode_ten_fields_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
Field2 string
Field3 string
Field4 string
Field5 string
Field6 string
Field7 string
Field8 string
Field9 string
Field10 string
}
obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b", "Field3": "c", "Field4": "d", "Field5": "e"}`, &obj))
should.Equal("a", obj.Field1)
should.Equal("b", obj.Field2)
should.Equal("c", obj.Field3)
should.Equal("d", obj.Field4)
should.Equal("e", obj.Field5)
}
func Test_decode_struct_field_with_tag(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string `json:"field-1"`
Field2 string `json:"-"`
Field3 int `json:",string"`
}
obj := TestObject{Field2: "world"}
UnmarshalFromString(`{"field-1": "hello", "field2": "", "Field3": "100"}`, &obj)
should.Equal("hello", obj.Field1)
should.Equal("world", obj.Field2)
should.Equal(100, obj.Field3)
}
func Test_write_val_zero_field_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
}
obj := TestObject{}
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{}`, str)
}
func Test_write_val_one_field_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string `json:"field-1"`
}
obj := TestObject{"hello"}
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{"field-1":"hello"}`, str)
}
func Test_mixed(t *testing.T) {
should := require.New(t)
type AA struct {
ID int `json:"id"`
Payload map[string]interface{} `json:"payload"`
buf *bytes.Buffer `json:"-"`
}
aa := AA{}
err := UnmarshalFromString(` {"id":1, "payload":{"account":"123","password":"456"}}`, &aa)
should.Nil(err)
should.Equal(1, aa.ID)
should.Equal("123", aa.Payload["account"])
}
func Test_omit_empty(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string `json:"field-1,omitempty"`
Field2 string `json:"field-2,omitempty"`
Field3 string `json:"field-3,omitempty"`
}
obj := TestObject{}
obj.Field2 = "hello"
str, err := MarshalToString(&obj)
should.Nil(err)
should.Equal(`{"field-2":"hello"}`, str)
}
func Test_any_within_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 Any
Field2 Any
}
obj := TestObject{}
err := UnmarshalFromString(`{"Field1": "hello", "Field2": [1,2,3]}`, &obj)
should.Nil(err)
should.Equal("hello", obj.Field1.ToString())
should.Equal("[1,2,3]", obj.Field2.ToString())
}
func Test_recursive_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
Me *TestObject
}
obj := TestObject{}
str, err := MarshalToString(obj)
should.Nil(err)
should.Contains(str, `"Field1":""`)
should.Contains(str, `"Me":null`)
err = UnmarshalFromString(str, &obj)
should.Nil(err)
}
func Test_one_field_struct(t *testing.T) {
should := require.New(t)
type YetYetAnotherObject struct {
Field string
}
type YetAnotherObject struct {
Field *YetYetAnotherObject
}
type AnotherObject struct {
Field *YetAnotherObject
}
type TestObject struct {
Me *AnotherObject
}
obj := TestObject{&AnotherObject{&YetAnotherObject{&YetYetAnotherObject{"abc"}}}}
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{"Me":{"Field":{"Field":{"Field":"abc"}}}}`, str)
str, err = MarshalToString(&obj)
should.Nil(err)
should.Equal(`{"Me":{"Field":{"Field":{"Field":"abc"}}}}`, str)
}
func Test_anonymous_struct_marshal(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field string
}
str, err := MarshalToString(struct{
TestObject
Field int
}{
Field: 100,
})
should.Nil(err)
should.Equal(`{"Field":100}`, str)
}

@ -1,115 +0,0 @@
package jsoniter
import (
"encoding/json"
"fmt"
"testing"
"unsafe"
"github.com/json-iterator/go/require"
)
func Test_decode_slice(t *testing.T) {
should := require.New(t)
slice := make([]string, 0, 5)
UnmarshalFromString(`["hello", "world"]`, &slice)
should.Equal([]string{"hello", "world"}, slice)
}
func Test_decode_large_slice(t *testing.T) {
should := require.New(t)
slice := make([]int, 0, 1)
UnmarshalFromString(`[1,2,3,4,5,6,7,8,9]`, &slice)
should.Equal([]int{1, 2, 3, 4, 5, 6, 7, 8, 9}, slice)
}
func Test_decode_nested(t *testing.T) {
type StructOfString struct {
field1 string
field2 string
}
iter := ParseString(`[{"field1": "hello"}, null, {"field2": "world"}]`)
slice := []*StructOfString{}
iter.ReadVal(&slice)
if len(slice) != 3 {
fmt.Println(iter.Error)
t.Fatal(len(slice))
}
if slice[0].field1 != "hello" {
fmt.Println(iter.Error)
t.Fatal(slice[0])
}
if slice[1] != nil {
fmt.Println(iter.Error)
t.Fatal(slice[1])
}
if slice[2].field2 != "world" {
fmt.Println(iter.Error)
t.Fatal(slice[2])
}
}
func Test_decode_base64(t *testing.T) {
iter := ParseString(`"YWJj"`)
val := []byte{}
RegisterTypeDecoder("[]uint8", func(ptr unsafe.Pointer, iter *Iterator) {
*((*[]byte)(ptr)) = iter.ReadBase64()
})
defer CleanDecoders()
iter.ReadVal(&val)
if "abc" != string(val) {
t.Fatal(string(val))
}
}
type StructOfTagOne struct {
Field1 string `json:"field1"`
Field2 string `json:"field2"`
Field3 int `json:"field3,string"`
Field4 int `json:"field4,string"`
}
func Benchmark_jsoniter_reflect(b *testing.B) {
b.ReportAllocs()
iter := NewIterator()
Struct := &StructOfTagOne{}
//var Struct *StructOfTagOne
input := []byte(`{"field3": "100", "field4": "100"}`)
//input := []byte(`null`)
for n := 0; n < b.N; n++ {
iter.ResetBytes(input)
iter.ReadVal(&Struct)
}
}
func Benchmark_jsoniter_direct(b *testing.B) {
b.ReportAllocs()
for n := 0; n < b.N; n++ {
//iter := ParseString(`{"field1": "hello", "field2": "world"}`)
//struct_ := StructOfString{}
//for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
// switch field {
// case "field1":
// struct_.Field1 = iter.ReadString()
// case "field2":
// struct_.Field2 = iter.ReadString()
// default:
// iter.Skip()
// }
//}
iter := ParseString(`["hello", "world"]`)
array := make([]string, 0, 2)
for iter.ReadArray() {
array = append(array, iter.ReadString())
}
}
}
func Benchmark_json_reflect(b *testing.B) {
b.ReportAllocs()
for n := 0; n < b.N; n++ {
Struct := StructOfTagOne{}
json.Unmarshal([]byte(`{"field3": "100"}`), &Struct)
//array := make([]string, 0, 2)
//json.Unmarshal([]byte(`["hello", "world"]`), &array)
}
}

@ -1,12 +1,14 @@
package jsoniter
import (
"bytes"
"encoding/json"
"github.com/json-iterator/go/require"
"testing"
)
func Test_skip_number(t *testing.T) {
iter := ParseString(`[-0.12, "b"]`)
iter := ParseString(ConfigDefault, `[-0.12, "b"]`)
iter.ReadArray()
iter.Skip()
iter.ReadArray()
@ -16,7 +18,7 @@ func Test_skip_number(t *testing.T) {
}
func Test_skip_null(t *testing.T) {
iter := ParseString(`[null , "b"]`)
iter := ParseString(ConfigDefault, `[null , "b"]`)
iter.ReadArray()
iter.Skip()
iter.ReadArray()
@ -26,7 +28,7 @@ func Test_skip_null(t *testing.T) {
}
func Test_skip_true(t *testing.T) {
iter := ParseString(`[true , "b"]`)
iter := ParseString(ConfigDefault, `[true , "b"]`)
iter.ReadArray()
iter.Skip()
iter.ReadArray()
@ -36,7 +38,7 @@ func Test_skip_true(t *testing.T) {
}
func Test_skip_false(t *testing.T) {
iter := ParseString(`[false , "b"]`)
iter := ParseString(ConfigDefault, `[false , "b"]`)
iter.ReadArray()
iter.Skip()
iter.ReadArray()
@ -46,7 +48,7 @@ func Test_skip_false(t *testing.T) {
}
func Test_skip_array(t *testing.T) {
iter := ParseString(`[[1, [2, [3], 4]], "b"]`)
iter := ParseString(ConfigDefault, `[[1, [2, [3], 4]], "b"]`)
iter.ReadArray()
iter.Skip()
iter.ReadArray()
@ -56,7 +58,7 @@ func Test_skip_array(t *testing.T) {
}
func Test_skip_empty_array(t *testing.T) {
iter := ParseString(`[ [ ], "b"]`)
iter := ParseString(ConfigDefault, `[ [ ], "b"]`)
iter.ReadArray()
iter.Skip()
iter.ReadArray()
@ -66,7 +68,7 @@ func Test_skip_empty_array(t *testing.T) {
}
func Test_skip_nested(t *testing.T) {
iter := ParseString(`[ {"a" : [{"b": "c"}], "d": 102 }, "b"]`)
iter := ParseString(ConfigDefault, `[ {"a" : [{"b": "c"}], "d": 102 }, "b"]`)
iter.ReadArray()
iter.Skip()
iter.ReadArray()
@ -75,6 +77,22 @@ func Test_skip_nested(t *testing.T) {
}
}
func Test_skip_and_return_bytes(t *testing.T) {
should := require.New(t)
iter := ParseString(ConfigDefault, `[ {"a" : [{"b": "c"}], "d": 102 }, "b"]`)
iter.ReadArray()
skipped := iter.SkipAndReturnBytes()
should.Equal(`{"a" : [{"b": "c"}], "d": 102 }`, string(skipped))
}
func Test_skip_and_return_bytes_with_reader(t *testing.T) {
should := require.New(t)
iter := Parse(ConfigDefault, bytes.NewBufferString(`[ {"a" : [{"b": "c"}], "d": 102 }, "b"]`), 4)
iter.ReadArray()
skipped := iter.SkipAndReturnBytes()
should.Equal(`{"a" : [{"b": "c"}], "d": 102 }`, string(skipped))
}
type TestResp struct {
Code uint64
}
@ -106,7 +124,7 @@ func Benchmark_jsoniter_skip(b *testing.B) {
}`)
for n := 0; n < b.N; n++ {
result := TestResp{}
iter := ParseBytes(input)
iter := ParseBytes(ConfigDefault, input)
for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
switch field {
case "code":

jsoniter_stream_test.go (new file, 53 lines)

@ -0,0 +1,53 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"testing"
)
func Test_writeByte_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(ConfigDefault, nil, 1)
stream.writeByte('1')
should.Equal("1", string(stream.Buffer()))
should.Equal(1, len(stream.buf))
stream.writeByte('2')
should.Equal("12", string(stream.Buffer()))
should.Equal(2, len(stream.buf))
stream.writeThreeBytes('3', '4', '5')
should.Equal("12345", string(stream.Buffer()))
}
func Test_writeBytes_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(ConfigDefault, nil, 1)
stream.Write([]byte{'1', '2'})
should.Equal("12", string(stream.Buffer()))
should.Equal(3, len(stream.buf))
stream.Write([]byte{'3', '4', '5', '6', '7'})
should.Equal("1234567", string(stream.Buffer()))
should.Equal(8, len(stream.buf))
}
func Test_writeIndention_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(Config{IndentionStep: 2}.Froze(), nil, 1)
stream.WriteVal([]int{1, 2, 3})
should.Equal("[\n 1,\n 2,\n 3\n]", string(stream.Buffer()))
}
func Test_writeRaw_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(ConfigDefault, nil, 1)
stream.WriteRaw("123")
should.Nil(stream.Error)
should.Equal("123", string(stream.Buffer()))
}
func Test_writeString_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(ConfigDefault, nil, 0)
stream.WriteString("123")
should.Nil(stream.Error)
should.Equal(`"123"`, string(stream.Buffer()))
}

@ -3,36 +3,37 @@ package jsoniter
import (
"bytes"
"encoding/json"
"testing"
"github.com/json-iterator/go/require"
"fmt"
"github.com/json-iterator/go/require"
"testing"
"unicode/utf8"
)
func Test_read_normal_string(t *testing.T) {
cases := map[string]string{
`"0123456789012345678901234567890123456789"`: `0123456789012345678901234567890123456789`,
`""`: ``,
`""`: ``,
`"hello"`: `hello`,
}
for input, output := range cases {
t.Run(fmt.Sprintf("%v:%v", input, output), func(t *testing.T) {
should := require.New(t)
iter := ParseString(input)
iter := ParseString(ConfigDefault, input)
should.Equal(output, iter.ReadString())
})
t.Run(fmt.Sprintf("%v:%v", input, output), func(t *testing.T) {
should := require.New(t)
iter := Parse(bytes.NewBufferString(input), 2)
iter := Parse(ConfigDefault, bytes.NewBufferString(input), 2)
should.Equal(output, iter.ReadString())
})
t.Run(fmt.Sprintf("%v:%v", input, output), func(t *testing.T) {
should := require.New(t)
iter := ParseString(input)
iter := ParseString(ConfigDefault, input)
should.Equal(output, string(iter.ReadStringAsSlice()))
})
t.Run(fmt.Sprintf("%v:%v", input, output), func(t *testing.T) {
should := require.New(t)
iter := Parse(bytes.NewBufferString(input), 2)
iter := Parse(ConfigDefault, bytes.NewBufferString(input), 2)
should.Equal(output, string(iter.ReadStringAsSlice()))
})
}
@ -40,20 +41,20 @@ func Test_read_normal_string(t *testing.T) {
func Test_read_exotic_string(t *testing.T) {
cases := map[string]string{
`"hel\"lo"`: `hel"lo`,
`"hel\nlo"`: "hel\nlo",
`"hel\"lo"`: `hel"lo`,
`"hel\nlo"`: "hel\nlo",
`"\u4e2d\u6587"`: "中文",
`"\ud83d\udc4a"`: "\xf0\x9f\x91\x8a", // surrogate
}
for input, output := range cases {
t.Run(fmt.Sprintf("%v:%v", input, output), func(t *testing.T) {
should := require.New(t)
iter := ParseString(input)
iter := ParseString(ConfigDefault, input)
should.Equal(output, iter.ReadString())
})
t.Run(fmt.Sprintf("%v:%v", input, output), func(t *testing.T) {
should := require.New(t)
iter := Parse(bytes.NewBufferString(input), 2)
iter := Parse(ConfigDefault, bytes.NewBufferString(input), 2)
should.Equal(output, iter.ReadString())
})
}
@ -61,30 +62,10 @@ func Test_read_exotic_string(t *testing.T) {
func Test_read_string_as_interface(t *testing.T) {
should := require.New(t)
iter := ParseString(`"hello"`)
iter := ParseString(ConfigDefault, `"hello"`)
should.Equal("hello", iter.Read())
}
func Test_read_string_as_any(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString(`"hello"`)
should.Nil(err)
should.Equal("hello", any.ToString())
should.True(any.ToBool())
any, err = UnmarshalAnyFromString(`" "`)
should.False(any.ToBool())
any, err = UnmarshalAnyFromString(`"false"`)
should.False(any.ToBool())
any, err = UnmarshalAnyFromString(`"123"`)
should.Equal(123, any.ToInt())
}
func Test_wrap_string(t *testing.T) {
should := require.New(t)
any := WrapString("123")
should.Equal(123, any.ToInt())
}
func Test_write_string(t *testing.T) {
should := require.New(t)
str, err := MarshalToString("hello")
@ -98,22 +79,80 @@ func Test_write_string(t *testing.T) {
func Test_write_val_string(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream := NewStream(ConfigDefault, buf, 4096)
stream.WriteVal("hello")
stream.Flush()
should.Nil(stream.Error)
should.Equal(`"hello"`, buf.String())
}
func Test_decode_slash(t *testing.T) {
should := require.New(t)
var obj interface{}
should.NotNil(json.Unmarshal([]byte("\\"), &obj))
should.NotNil(UnmarshalFromString("\\", &obj))
}
func Test_html_escape(t *testing.T) {
should := require.New(t)
output, err := json.Marshal(`>`)
should.Nil(err)
should.Equal(`"\u003e"`, string(output))
output, err = ConfigCompatibleWithStandardLibrary.Marshal(`>`)
should.Nil(err)
should.Equal(`"\u003e"`, string(output))
}
func Test_string_encode_with_std(t *testing.T) {
should := require.New(t)
for i := 0; i < utf8.RuneSelf; i++ {
input := string([]byte{byte(i)})
stdOutputBytes, err := json.Marshal(input)
should.Nil(err)
stdOutput := string(stdOutputBytes)
jsoniterOutputBytes, err := ConfigCompatibleWithStandardLibrary.Marshal(input)
should.Nil(err)
jsoniterOutput := string(jsoniterOutputBytes)
should.Equal(stdOutput, jsoniterOutput)
}
}
func Test_string_encode_with_std_without_html_escape(t *testing.T) {
api := Config{EscapeHtml: false}.Froze()
should := require.New(t)
for i := 0; i < utf8.RuneSelf; i++ {
input := string([]byte{byte(i)})
buf := &bytes.Buffer{}
encoder := json.NewEncoder(buf)
encoder.SetEscapeHTML(false)
err := encoder.Encode(input)
should.Nil(err)
stdOutput := buf.String()
stdOutput = stdOutput[:len(stdOutput)-1]
jsoniterOutputBytes, err := api.Marshal(input)
should.Nil(err)
jsoniterOutput := string(jsoniterOutputBytes)
should.Equal(stdOutput, jsoniterOutput)
}
}
func Test_unicode(t *testing.T) {
should := require.New(t)
output, _ := MarshalToString(map[string]interface{}{"a": "数字山谷"})
should.Equal(`{"a":"数字山谷"}`, output)
output, _ = Config{EscapeHtml: false}.Froze().MarshalToString(map[string]interface{}{"a": "数字山谷"})
should.Equal(`{"a":"数字山谷"}`, output)
}
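
The escape tests above pin down two configs: ConfigCompatibleWithStandardLibrary HTML-escapes the same characters as encoding/json, while Config{EscapeHtml: false} leaves them alone. A minimal sketch of picking between the two (illustrative only):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Matches encoding/json, so '>' becomes \u003e.
	std, _ := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(`>`)
	fmt.Println(string(std)) // "\u003e"

	// With HTML escaping disabled the character is written as-is.
	raw, _ := jsoniter.Config{EscapeHtml: false}.Froze().Marshal(`>`)
	fmt.Println(string(raw)) // ">"
}
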
func Benchmark_jsoniter_unicode(b *testing.B) {
for n := 0; n < b.N; n++ {
iter := ParseString(`"\ud83d\udc4a"`)
iter := ParseString(ConfigDefault, `"\ud83d\udc4a"`)
iter.ReadString()
}
}
func Benchmark_jsoniter_ascii(b *testing.B) {
iter := NewIterator()
iter := NewIterator(ConfigDefault)
input := []byte(`"hello, world! hello, world!"`)
b.ResetTimer()
for n := 0; n < b.N; n++ {
@ -123,7 +162,7 @@ func Benchmark_jsoniter_ascii(b *testing.B) {
}
func Benchmark_jsoniter_string_as_bytes(b *testing.B) {
iter := ParseString(`"hello, world!"`)
iter := ParseString(ConfigDefault, `"hello, world!"`)
b.ResetTimer()
for n := 0; n < b.N; n++ {
iter.ResetBytes(iter.buf)

@ -0,0 +1,144 @@
package test
import (
"bytes"
"encoding/json"
"testing"
"github.com/davecgh/go-spew/spew"
fuzz "github.com/google/gofuzz"
jsoniter "github.com/json-iterator/go"
)
func Test_Roundtrip(t *testing.T) {
fz := fuzz.New().MaxDepth(10).NilChance(0.3)
for i := 0; i < 1000; i++ {
var before T
fz.Fuzz(&before)
jbStd, err := json.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with stdlib: %v", err)
}
jbIter, err := jsoniter.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with jsoniter: %v", err)
}
if string(jbStd) != string(jbIter) {
t.Errorf("marshal expected:\n %s\ngot:\n %s\nobj:\n %s",
indent(jbStd, " "), indent(jbIter, " "), dump(before))
}
var afterStd T
err = json.Unmarshal(jbIter, &afterStd)
if err != nil {
t.Errorf("failed to unmarshal with stdlib: %v", err)
}
var afterIter T
err = jsoniter.Unmarshal(jbIter, &afterIter)
if err != nil {
t.Errorf("failed to unmarshal with jsoniter: %v", err)
}
if fingerprint(afterStd) != fingerprint(afterIter) {
t.Errorf("unmarshal expected:\n %s\ngot:\n %s\nvia:\n %s",
dump(afterStd), dump(afterIter), indent(jbIter, " "))
}
}
}
const indentStr = "> "
func fingerprint(obj interface{}) string {
c := spew.ConfigState{
SortKeys: true,
SpewKeys: true,
}
return c.Sprintf("%v", obj)
}
func dump(obj interface{}) string {
cfg := spew.ConfigState{
Indent: indentStr,
}
return cfg.Sdump(obj)
}
func indent(src []byte, prefix string) string {
var buf bytes.Buffer
json.Indent(&buf, src, prefix, indentStr)
return buf.String()
}
func BenchmarkStandardMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := json.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkStandardUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = json.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}
func BenchmarkJSONIterMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := jsoniter.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkJSONIterUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = jsoniter.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}

@ -0,0 +1,3 @@
package test
type T bool

@ -0,0 +1,144 @@
package test
import (
"bytes"
"encoding/json"
"testing"
"github.com/davecgh/go-spew/spew"
fuzz "github.com/google/gofuzz"
jsoniter "github.com/json-iterator/go"
)
func Test_Roundtrip(t *testing.T) {
fz := fuzz.New().MaxDepth(10).NilChance(0.3)
for i := 0; i < 1000; i++ {
var before T
fz.Fuzz(&before)
jbStd, err := json.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with stdlib: %v", err)
}
jbIter, err := jsoniter.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with jsoniter: %v", err)
}
if string(jbStd) != string(jbIter) {
t.Errorf("marshal expected:\n %s\ngot:\n %s\nobj:\n %s",
indent(jbStd, " "), indent(jbIter, " "), dump(before))
}
var afterStd T
err = json.Unmarshal(jbIter, &afterStd)
if err != nil {
t.Errorf("failed to unmarshal with stdlib: %v", err)
}
var afterIter T
err = jsoniter.Unmarshal(jbIter, &afterIter)
if err != nil {
t.Errorf("failed to unmarshal with jsoniter: %v", err)
}
if fingerprint(afterStd) != fingerprint(afterIter) {
t.Errorf("unmarshal expected:\n %s\ngot:\n %s\nvia:\n %s",
dump(afterStd), dump(afterIter), indent(jbIter, " "))
}
}
}
const indentStr = "> "
func fingerprint(obj interface{}) string {
c := spew.ConfigState{
SortKeys: true,
SpewKeys: true,
}
return c.Sprintf("%v", obj)
}
func dump(obj interface{}) string {
cfg := spew.ConfigState{
Indent: indentStr,
}
return cfg.Sdump(obj)
}
func indent(src []byte, prefix string) string {
var buf bytes.Buffer
json.Indent(&buf, src, prefix, indentStr)
return buf.String()
}
func BenchmarkStandardMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := json.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkStandardUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = json.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}
func BenchmarkJSONIterMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := jsoniter.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkJSONIterUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = jsoniter.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}

@ -0,0 +1,3 @@
package test
type T bool

@ -0,0 +1,144 @@
package test
import (
"bytes"
"encoding/json"
"testing"
"github.com/davecgh/go-spew/spew"
fuzz "github.com/google/gofuzz"
jsoniter "github.com/json-iterator/go"
)
func Test_Roundtrip(t *testing.T) {
fz := fuzz.New().MaxDepth(10).NilChance(0.3)
for i := 0; i < 1000; i++ {
var before T
fz.Fuzz(&before)
jbStd, err := json.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with stdlib: %v", err)
}
jbIter, err := jsoniter.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with jsoniter: %v", err)
}
if string(jbStd) != string(jbIter) {
t.Errorf("marshal expected:\n %s\ngot:\n %s\nobj:\n %s",
indent(jbStd, " "), indent(jbIter, " "), dump(before))
}
var afterStd T
err = json.Unmarshal(jbIter, &afterStd)
if err != nil {
t.Errorf("failed to unmarshal with stdlib: %v", err)
}
var afterIter T
err = jsoniter.Unmarshal(jbIter, &afterIter)
if err != nil {
t.Errorf("failed to unmarshal with jsoniter: %v", err)
}
if fingerprint(afterStd) != fingerprint(afterIter) {
t.Errorf("unmarshal expected:\n %s\ngot:\n %s\nvia:\n %s",
dump(afterStd), dump(afterIter), indent(jbIter, " "))
}
}
}
const indentStr = "> "
func fingerprint(obj interface{}) string {
c := spew.ConfigState{
SortKeys: true,
SpewKeys: true,
}
return c.Sprintf("%v", obj)
}
func dump(obj interface{}) string {
cfg := spew.ConfigState{
Indent: indentStr,
}
return cfg.Sdump(obj)
}
func indent(src []byte, prefix string) string {
var buf bytes.Buffer
json.Indent(&buf, src, prefix, indentStr)
return buf.String()
}
func BenchmarkStandardMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := json.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkStandardUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = json.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}
func BenchmarkJSONIterMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := jsoniter.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkJSONIterUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = jsoniter.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}


@@ -0,0 +1,3 @@
package test
type T byte


@@ -0,0 +1,144 @@
package test
import (
"bytes"
"encoding/json"
"testing"
"github.com/davecgh/go-spew/spew"
fuzz "github.com/google/gofuzz"
jsoniter "github.com/json-iterator/go"
)
func Test_Roundtrip(t *testing.T) {
fz := fuzz.New().MaxDepth(10).NilChance(0.3)
for i := 0; i < 1000; i++ {
var before T
fz.Fuzz(&before)
jbStd, err := json.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with stdlib: %v", err)
}
jbIter, err := jsoniter.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with jsoniter: %v", err)
}
if string(jbStd) != string(jbIter) {
t.Errorf("marshal expected:\n %s\ngot:\n %s\nobj:\n %s",
indent(jbStd, " "), indent(jbIter, " "), dump(before))
}
var afterStd T
err = json.Unmarshal(jbIter, &afterStd)
if err != nil {
t.Errorf("failed to unmarshal with stdlib: %v", err)
}
var afterIter T
err = jsoniter.Unmarshal(jbIter, &afterIter)
if err != nil {
t.Errorf("failed to unmarshal with jsoniter: %v", err)
}
if fingerprint(afterStd) != fingerprint(afterIter) {
t.Errorf("unmarshal expected:\n %s\ngot:\n %s\nvia:\n %s",
dump(afterStd), dump(afterIter), indent(jbIter, " "))
}
}
}
const indentStr = "> "
func fingerprint(obj interface{}) string {
c := spew.ConfigState{
SortKeys: true,
SpewKeys: true,
}
return c.Sprintf("%v", obj)
}
func dump(obj interface{}) string {
cfg := spew.ConfigState{
Indent: indentStr,
}
return cfg.Sdump(obj)
}
func indent(src []byte, prefix string) string {
var buf bytes.Buffer
json.Indent(&buf, src, prefix, indentStr)
return buf.String()
}
func BenchmarkStandardMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := json.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkStandardUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = json.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}
func BenchmarkJSONIterMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := jsoniter.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkJSONIterUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = jsoniter.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}


@@ -0,0 +1,3 @@
package test
type T byte


@@ -0,0 +1,144 @@
package test
import (
"bytes"
"encoding/json"
"testing"
"github.com/davecgh/go-spew/spew"
fuzz "github.com/google/gofuzz"
jsoniter "github.com/json-iterator/go"
)
func Test_Roundtrip(t *testing.T) {
fz := fuzz.New().MaxDepth(10).NilChance(0.3)
for i := 0; i < 1000; i++ {
var before T
fz.Fuzz(&before)
jbStd, err := json.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with stdlib: %v", err)
}
jbIter, err := jsoniter.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with jsoniter: %v", err)
}
if string(jbStd) != string(jbIter) {
t.Errorf("marshal expected:\n %s\ngot:\n %s\nobj:\n %s",
indent(jbStd, " "), indent(jbIter, " "), dump(before))
}
var afterStd T
err = json.Unmarshal(jbIter, &afterStd)
if err != nil {
t.Errorf("failed to unmarshal with stdlib: %v", err)
}
var afterIter T
err = jsoniter.Unmarshal(jbIter, &afterIter)
if err != nil {
t.Errorf("failed to unmarshal with jsoniter: %v", err)
}
if fingerprint(afterStd) != fingerprint(afterIter) {
t.Errorf("unmarshal expected:\n %s\ngot:\n %s\nvia:\n %s",
dump(afterStd), dump(afterIter), indent(jbIter, " "))
}
}
}
const indentStr = "> "
func fingerprint(obj interface{}) string {
c := spew.ConfigState{
SortKeys: true,
SpewKeys: true,
}
return c.Sprintf("%v", obj)
}
func dump(obj interface{}) string {
cfg := spew.ConfigState{
Indent: indentStr,
}
return cfg.Sdump(obj)
}
func indent(src []byte, prefix string) string {
var buf bytes.Buffer
json.Indent(&buf, src, prefix, indentStr)
return buf.String()
}
func BenchmarkStandardMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := json.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkStandardUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = json.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}
func BenchmarkJSONIterMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := jsoniter.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkJSONIterUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = jsoniter.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}


@@ -0,0 +1,3 @@
package test
type T float32


@@ -0,0 +1,144 @@
package test
import (
"bytes"
"encoding/json"
"testing"
"github.com/davecgh/go-spew/spew"
fuzz "github.com/google/gofuzz"
jsoniter "github.com/json-iterator/go"
)
func Test_Roundtrip(t *testing.T) {
fz := fuzz.New().MaxDepth(10).NilChance(0.3)
for i := 0; i < 1000; i++ {
var before T
fz.Fuzz(&before)
jbStd, err := json.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with stdlib: %v", err)
}
jbIter, err := jsoniter.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with jsoniter: %v", err)
}
if string(jbStd) != string(jbIter) {
t.Errorf("marshal expected:\n %s\ngot:\n %s\nobj:\n %s",
indent(jbStd, " "), indent(jbIter, " "), dump(before))
}
var afterStd T
err = json.Unmarshal(jbIter, &afterStd)
if err != nil {
t.Errorf("failed to unmarshal with stdlib: %v", err)
}
var afterIter T
err = jsoniter.Unmarshal(jbIter, &afterIter)
if err != nil {
t.Errorf("failed to unmarshal with jsoniter: %v", err)
}
if fingerprint(afterStd) != fingerprint(afterIter) {
t.Errorf("unmarshal expected:\n %s\ngot:\n %s\nvia:\n %s",
dump(afterStd), dump(afterIter), indent(jbIter, " "))
}
}
}
const indentStr = "> "
func fingerprint(obj interface{}) string {
c := spew.ConfigState{
SortKeys: true,
SpewKeys: true,
}
return c.Sprintf("%v", obj)
}
func dump(obj interface{}) string {
cfg := spew.ConfigState{
Indent: indentStr,
}
return cfg.Sdump(obj)
}
func indent(src []byte, prefix string) string {
var buf bytes.Buffer
json.Indent(&buf, src, prefix, indentStr)
return buf.String()
}
func BenchmarkStandardMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := json.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkStandardUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = json.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}
func BenchmarkJSONIterMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := jsoniter.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkJSONIterUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = jsoniter.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}


@@ -0,0 +1,3 @@
package test
type T float32


@@ -0,0 +1,144 @@
package test
import (
"bytes"
"encoding/json"
"testing"
"github.com/davecgh/go-spew/spew"
fuzz "github.com/google/gofuzz"
jsoniter "github.com/json-iterator/go"
)
func Test_Roundtrip(t *testing.T) {
fz := fuzz.New().MaxDepth(10).NilChance(0.3)
for i := 0; i < 1000; i++ {
var before T
fz.Fuzz(&before)
jbStd, err := json.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with stdlib: %v", err)
}
jbIter, err := jsoniter.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with jsoniter: %v", err)
}
if string(jbStd) != string(jbIter) {
t.Errorf("marshal expected:\n %s\ngot:\n %s\nobj:\n %s",
indent(jbStd, " "), indent(jbIter, " "), dump(before))
}
var afterStd T
err = json.Unmarshal(jbIter, &afterStd)
if err != nil {
t.Errorf("failed to unmarshal with stdlib: %v", err)
}
var afterIter T
err = jsoniter.Unmarshal(jbIter, &afterIter)
if err != nil {
t.Errorf("failed to unmarshal with jsoniter: %v", err)
}
if fingerprint(afterStd) != fingerprint(afterIter) {
t.Errorf("unmarshal expected:\n %s\ngot:\n %s\nvia:\n %s",
dump(afterStd), dump(afterIter), indent(jbIter, " "))
}
}
}
const indentStr = "> "
func fingerprint(obj interface{}) string {
c := spew.ConfigState{
SortKeys: true,
SpewKeys: true,
}
return c.Sprintf("%v", obj)
}
func dump(obj interface{}) string {
cfg := spew.ConfigState{
Indent: indentStr,
}
return cfg.Sdump(obj)
}
func indent(src []byte, prefix string) string {
var buf bytes.Buffer
json.Indent(&buf, src, prefix, indentStr)
return buf.String()
}
func BenchmarkStandardMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := json.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkStandardUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = json.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}
func BenchmarkJSONIterMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := jsoniter.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkJSONIterUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = jsoniter.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}


@@ -0,0 +1,3 @@
package test
type T float64


@@ -0,0 +1,144 @@
package test
import (
"bytes"
"encoding/json"
"testing"
"github.com/davecgh/go-spew/spew"
fuzz "github.com/google/gofuzz"
jsoniter "github.com/json-iterator/go"
)
func Test_Roundtrip(t *testing.T) {
fz := fuzz.New().MaxDepth(10).NilChance(0.3)
for i := 0; i < 1000; i++ {
var before T
fz.Fuzz(&before)
jbStd, err := json.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with stdlib: %v", err)
}
jbIter, err := jsoniter.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with jsoniter: %v", err)
}
if string(jbStd) != string(jbIter) {
t.Errorf("marshal expected:\n %s\ngot:\n %s\nobj:\n %s",
indent(jbStd, " "), indent(jbIter, " "), dump(before))
}
var afterStd T
err = json.Unmarshal(jbIter, &afterStd)
if err != nil {
t.Errorf("failed to unmarshal with stdlib: %v", err)
}
var afterIter T
err = jsoniter.Unmarshal(jbIter, &afterIter)
if err != nil {
t.Errorf("failed to unmarshal with jsoniter: %v", err)
}
if fingerprint(afterStd) != fingerprint(afterIter) {
t.Errorf("unmarshal expected:\n %s\ngot:\n %s\nvia:\n %s",
dump(afterStd), dump(afterIter), indent(jbIter, " "))
}
}
}
const indentStr = "> "
func fingerprint(obj interface{}) string {
c := spew.ConfigState{
SortKeys: true,
SpewKeys: true,
}
return c.Sprintf("%v", obj)
}
func dump(obj interface{}) string {
cfg := spew.ConfigState{
Indent: indentStr,
}
return cfg.Sdump(obj)
}
func indent(src []byte, prefix string) string {
var buf bytes.Buffer
json.Indent(&buf, src, prefix, indentStr)
return buf.String()
}
func BenchmarkStandardMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := json.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkStandardUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = json.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}
func BenchmarkJSONIterMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := jsoniter.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkJSONIterUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = jsoniter.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}


@@ -0,0 +1,3 @@
package test
type T float64


@@ -0,0 +1,144 @@
package test
import (
"bytes"
"encoding/json"
"testing"
"github.com/davecgh/go-spew/spew"
fuzz "github.com/google/gofuzz"
jsoniter "github.com/json-iterator/go"
)
func Test_Roundtrip(t *testing.T) {
fz := fuzz.New().MaxDepth(10).NilChance(0.3)
for i := 0; i < 1000; i++ {
var before T
fz.Fuzz(&before)
jbStd, err := json.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with stdlib: %v", err)
}
jbIter, err := jsoniter.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with jsoniter: %v", err)
}
if string(jbStd) != string(jbIter) {
t.Errorf("marshal expected:\n %s\ngot:\n %s\nobj:\n %s",
indent(jbStd, " "), indent(jbIter, " "), dump(before))
}
var afterStd T
err = json.Unmarshal(jbIter, &afterStd)
if err != nil {
t.Errorf("failed to unmarshal with stdlib: %v", err)
}
var afterIter T
err = jsoniter.Unmarshal(jbIter, &afterIter)
if err != nil {
t.Errorf("failed to unmarshal with jsoniter: %v", err)
}
if fingerprint(afterStd) != fingerprint(afterIter) {
t.Errorf("unmarshal expected:\n %s\ngot:\n %s\nvia:\n %s",
dump(afterStd), dump(afterIter), indent(jbIter, " "))
}
}
}
const indentStr = "> "
func fingerprint(obj interface{}) string {
c := spew.ConfigState{
SortKeys: true,
SpewKeys: true,
}
return c.Sprintf("%v", obj)
}
func dump(obj interface{}) string {
cfg := spew.ConfigState{
Indent: indentStr,
}
return cfg.Sdump(obj)
}
func indent(src []byte, prefix string) string {
var buf bytes.Buffer
json.Indent(&buf, src, prefix, indentStr)
return buf.String()
}
func BenchmarkStandardMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := json.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkStandardUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = json.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}
func BenchmarkJSONIterMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := jsoniter.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkJSONIterUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = jsoniter.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}


@@ -0,0 +1,3 @@
package test
type T int16


@@ -0,0 +1,144 @@
package test
import (
"bytes"
"encoding/json"
"testing"
"github.com/davecgh/go-spew/spew"
fuzz "github.com/google/gofuzz"
jsoniter "github.com/json-iterator/go"
)
func Test_Roundtrip(t *testing.T) {
fz := fuzz.New().MaxDepth(10).NilChance(0.3)
for i := 0; i < 1000; i++ {
var before T
fz.Fuzz(&before)
jbStd, err := json.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with stdlib: %v", err)
}
jbIter, err := jsoniter.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with jsoniter: %v", err)
}
if string(jbStd) != string(jbIter) {
t.Errorf("marshal expected:\n %s\ngot:\n %s\nobj:\n %s",
indent(jbStd, " "), indent(jbIter, " "), dump(before))
}
var afterStd T
err = json.Unmarshal(jbIter, &afterStd)
if err != nil {
t.Errorf("failed to unmarshal with stdlib: %v", err)
}
var afterIter T
err = jsoniter.Unmarshal(jbIter, &afterIter)
if err != nil {
t.Errorf("failed to unmarshal with jsoniter: %v", err)
}
if fingerprint(afterStd) != fingerprint(afterIter) {
t.Errorf("unmarshal expected:\n %s\ngot:\n %s\nvia:\n %s",
dump(afterStd), dump(afterIter), indent(jbIter, " "))
}
}
}
const indentStr = "> "
func fingerprint(obj interface{}) string {
c := spew.ConfigState{
SortKeys: true,
SpewKeys: true,
}
return c.Sprintf("%v", obj)
}
func dump(obj interface{}) string {
cfg := spew.ConfigState{
Indent: indentStr,
}
return cfg.Sdump(obj)
}
func indent(src []byte, prefix string) string {
var buf bytes.Buffer
json.Indent(&buf, src, prefix, indentStr)
return buf.String()
}
func BenchmarkStandardMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := json.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkStandardUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = json.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}
func BenchmarkJSONIterMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := jsoniter.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkJSONIterUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = jsoniter.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}


@@ -0,0 +1,3 @@
package test
type T int16


@@ -0,0 +1,144 @@
package test
import (
"bytes"
"encoding/json"
"testing"
"github.com/davecgh/go-spew/spew"
fuzz "github.com/google/gofuzz"
jsoniter "github.com/json-iterator/go"
)
func Test_Roundtrip(t *testing.T) {
fz := fuzz.New().MaxDepth(10).NilChance(0.3)
for i := 0; i < 1000; i++ {
var before T
fz.Fuzz(&before)
jbStd, err := json.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with stdlib: %v", err)
}
jbIter, err := jsoniter.Marshal(before)
if err != nil {
t.Errorf("failed to marshal with jsoniter: %v", err)
}
if string(jbStd) != string(jbIter) {
t.Errorf("marshal expected:\n %s\ngot:\n %s\nobj:\n %s",
indent(jbStd, " "), indent(jbIter, " "), dump(before))
}
var afterStd T
err = json.Unmarshal(jbIter, &afterStd)
if err != nil {
t.Errorf("failed to unmarshal with stdlib: %v", err)
}
var afterIter T
err = jsoniter.Unmarshal(jbIter, &afterIter)
if err != nil {
t.Errorf("failed to unmarshal with jsoniter: %v", err)
}
if fingerprint(afterStd) != fingerprint(afterIter) {
t.Errorf("unmarshal expected:\n %s\ngot:\n %s\nvia:\n %s",
dump(afterStd), dump(afterIter), indent(jbIter, " "))
}
}
}
const indentStr = "> "
func fingerprint(obj interface{}) string {
c := spew.ConfigState{
SortKeys: true,
SpewKeys: true,
}
return c.Sprintf("%v", obj)
}
func dump(obj interface{}) string {
cfg := spew.ConfigState{
Indent: indentStr,
}
return cfg.Sdump(obj)
}
func indent(src []byte, prefix string) string {
var buf bytes.Buffer
json.Indent(&buf, src, prefix, indentStr)
return buf.String()
}
func BenchmarkStandardMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := json.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkStandardUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = json.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}
func BenchmarkJSONIterMarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var obj T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&obj)
for i := 0; i < t.N; i++ {
jb, err := jsoniter.Marshal(obj)
if err != nil {
t.Fatalf("failed to marshal:\n input: %s\n error: %v", dump(obj), err)
}
_ = jb
}
}
func BenchmarkJSONIterUnmarshal(t *testing.B) {
t.ReportAllocs()
t.ResetTimer()
var before T
fz := fuzz.NewWithSeed(0).MaxDepth(10).NilChance(0.3)
fz.Fuzz(&before)
jb, err := json.Marshal(before)
if err != nil {
t.Fatalf("failed to marshal: %v", err)
}
for i := 0; i < t.N; i++ {
var after T
err = jsoniter.Unmarshal(jb, &after)
if err != nil {
t.Fatalf("failed to unmarshal:\n input: %q\n error: %v", string(jb), err)
}
}
}


@@ -0,0 +1,3 @@
package test
type T int32

Some files were not shown because too many files have changed in this diff.