mirror of https://github.com/json-iterator/go.git synced 2025-06-15 22:50:24 +02:00

91 Commits

Author SHA1 Message Date
787edc95b0 Revert "WIP: Tests to compare against stdlib" 2017-06-12 15:07:52 +08:00
6e5817b773 Merge pull request #55 from thockin/output_tests
WIP: Tests to compare against stdlib
2017-06-12 01:47:50 -05:00
7480e41836 Add output tests for maps of builtins
This tests for exact stdlib compatibility.
2017-06-11 21:09:56 -07:00
9215b3c508 Add output tests for builtin types
This fuzzes a type, marshals it with stdlib and json-iterator, compares,
then unmarshals with stdlib and json-iterator and compares.  This is
checking for literal, byte-for-byte compatibility.

In every case the test is exactly the same.

It also includes benchmark functions to compare stdlib vs json-iterator.

This depends on a couple PRs to be merged in gofuzz.
2017-06-11 21:04:59 -07:00
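The round-trip pattern described in that commit message can be sketched roughly as below. This is an illustrative reconstruction, not the PR's actual test code; the package name, test name, and fuzzed type are assumptions.

```go
package output_tests

import (
	"encoding/json"
	"testing"

	fuzz "github.com/google/gofuzz"
	jsoniter "github.com/json-iterator/go"
)

// Fuzz a builtin value, marshal it with both encoding/json and jsoniter and
// require byte-for-byte identical output, then unmarshal with both libraries
// and require identical results.
func TestInt64RoundTrip(t *testing.T) {
	var before int64
	fuzz.New().Fuzz(&before)

	stdBytes, stdErr := json.Marshal(before)
	iterBytes, iterErr := jsoniter.Marshal(before)
	if stdErr != nil || iterErr != nil {
		t.Fatalf("marshal errors: std=%v jsoniter=%v", stdErr, iterErr)
	}
	if string(stdBytes) != string(iterBytes) {
		t.Fatalf("marshal mismatch: std=%s jsoniter=%s", stdBytes, iterBytes)
	}

	var stdAfter, iterAfter int64
	if err := json.Unmarshal(stdBytes, &stdAfter); err != nil {
		t.Fatal(err)
	}
	if err := jsoniter.Unmarshal(stdBytes, &iterAfter); err != nil {
		t.Fatal(err)
	}
	if stdAfter != iterAfter {
		t.Fatalf("unmarshal mismatch: std=%d jsoniter=%d", stdAfter, iterAfter)
	}
}
```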
64e500f3c8 Merge branch 'master' of https://github.com/json-iterator/go 2017-06-12 10:13:22 +08:00
3307ce3ba2 #50 map key unlike object field, can contain escaped char 2017-06-12 10:13:13 +08:00
6f50f15678 decoder/encoder;float precision doc 2017-06-11 16:30:31 +08:00
cee09816e3 decoder/encoder;float precision doc 2017-06-11 16:28:31 +08:00
cdbad22d22 test more package description 2017-06-11 15:35:45 +08:00
b0c9f047e2 test more than one package description 2017-06-11 15:32:58 +08:00
6bd13c2948 Merge branch 'master' of https://github.com/json-iterator/go 2017-06-09 17:06:38 +08:00
84ad508437 #48 should return error if concrete type unknown 2017-06-09 17:06:27 +08:00
4f909776cf Merge pull request #49 from zhaitianduo/master
Use jsoniter instead of json in example
2017-06-09 03:32:31 -05:00
962c470806 fix import not use 2017-06-09 16:28:20 +08:00
46d443fbad use jsoniter for example 2017-06-09 16:25:58 +08:00
2608d40f2a example unmarshal 2017-06-08 12:08:47 +08:00
3cf822853f example unmarshal 2017-06-08 12:07:03 +08:00
26708bccc9 report error when string end not found 2017-06-08 09:46:19 +08:00
d75b539bad add test for scientific float 2017-06-07 21:34:56 +08:00
cfffa29c8a gofmt 2017-06-06 23:27:00 +08:00
925df245d3 good enough indent implementation 2017-06-06 23:18:37 +08:00
962a8cd303 #40 support UseNumber 2017-06-06 23:15:15 +08:00
6509ba05df Merge pull request #41 from 1046102779/master
When parsing, report an error immediately if the output parameter is not a pointer type, to avoid crashing the program
2017-06-06 10:03:06 -05:00
579dbf3c1d Merge pull request #42 from 1046102779/patch-1
Change floatDigits to intDigits
2017-06-06 10:02:26 -05:00
aa5181db67 Change floatDigits to intDigits 2017-06-06 21:08:04 +08:00
67be6df2b1 Update feature_adapter.go 2017-06-06 20:01:43 +08:00
0f5379494a unmarshal failed return non-pointer error 2017-06-06 19:36:33 +08:00
d09e2419ba update benchmark 2017-06-06 16:55:32 +08:00
e1a71f6ba1 update benchmark 2017-06-06 16:54:26 +08:00
dcb78991c4 flush when buffer is large enough 2017-06-06 14:16:54 +08:00
9e8238cdc6 remove unused file 2017-06-06 12:41:13 +08:00
a4e5abf492 support []byte; marshal without copy 2017-06-06 09:44:56 +08:00
3979955e69 support TextMarshaler as map key 2017-06-06 00:09:33 +08:00
5fd09f0e02 remove mapInterfaceEncoder 2017-06-05 23:56:37 +08:00
af4982b22c support decode int key map 2017-06-05 23:53:48 +08:00
29dc1d407d write map with int key 2017-06-05 23:01:00 +08:00
5b27aaa62c update test 2017-06-05 22:10:01 +08:00
106636a191 update test 2017-06-05 22:08:28 +08:00
f50c4cfbbe Merge branch 'master' of https://github.com/json-iterator/go 2017-06-05 22:05:02 +08:00
87149ae489 add simple marshal benchmark 2017-06-05 22:04:52 +08:00
c0a4ad72e1 example test 2017-06-05 20:37:08 +08:00
404c0ee44b Decoder doc 2017-06-05 19:57:20 +08:00
10c1506f87 link test 2017-06-05 19:38:34 +08:00
9a43fe6468 adapter api comment 2017-06-05 19:31:30 +08:00
95e03f2937 Marshal comment 2017-06-05 19:19:46 +08:00
4406ed9e62 Marshal comment 2017-06-05 19:18:12 +08:00
ff027701f5 Marshal comment 2017-06-05 19:15:56 +08:00
c69b61f879 Marshal comment 2017-06-05 19:14:40 +08:00
d97f5db769 Marshal comment 2017-06-05 19:11:16 +08:00
45bbb40a9f #34 implement NewEncoder 2017-06-02 18:46:44 +08:00
e36f926072 fix random go test failure 2017-06-02 17:34:40 +08:00
59e71bacc8 #36 handle anonymous 2017-06-02 16:52:20 +08:00
5cb0d35610 eof is not error 2017-06-02 16:06:33 +08:00
69b742e73a #34 support More() and Buffered() 2017-06-02 16:00:12 +08:00
a7f992f0e1 #35 fix json.Number matches string 2017-06-02 15:43:58 +08:00
4cc44e7380 #34 add decoder adapter 2017-06-02 15:38:20 +08:00
5310d4aa9a syntax highlight 2017-06-02 11:20:54 +08:00
2051e3b8ae simplify readme 2017-06-02 11:20:24 +08:00
fe9fa8900e #31 support json.RawMessage 2017-06-02 10:50:23 +08:00
ad3a7fde32 #30 support json.Number 2017-06-02 10:21:43 +08:00
377b892102 support big float and int 2017-05-31 12:40:50 +08:00
707ed3b091 support non empty interface 2017-05-27 00:36:21 +08:00
a7a7c7879a allocate less buffer for writing to []byte 2017-05-26 07:52:54 +08:00
f20f74519d RegisterTypeEncoder and RegisterTypeDecoder should have higher priority 2017-05-24 23:39:34 +08:00
7d2ae80c37 #27 support json.Unmarshaler 2017-05-24 16:04:11 +08:00
f6f159e108 #27 support json.Marshaler 2017-05-24 14:34:00 +08:00
e5a1e704ad #25 make fielding binding case insensitive 2017-05-24 13:16:09 +08:00
7d5f90261e #28 extension should support specifying encoder 2017-05-24 10:58:56 +08:00
6126a6d3ca #23 hide unexported fields by default 2017-05-24 09:39:11 +08:00
5fbe4e387d #21 #22 marshal float precisely by default 2017-05-24 09:08:15 +08:00
fc44cb2d91 #26 do not enforce pointer as optional 2017-05-23 18:46:11 +08:00
7e046e6aa7 simplify read string, and support null 2017-05-23 18:32:39 +08:00
5488fde97f fix one field struct interface{} optimization compatibility 2017-05-23 17:44:50 +08:00
53f8d370b5 fix wrap any 2017-05-19 19:44:27 +08:00
3f1fcaff87 demonstrate how to customize float encoding 2017-05-11 08:00:50 +08:00
1df353727b customize []byte encoder 2017-05-06 20:52:36 +08:00
b893a0359d trim end space 2017-05-05 17:44:09 +08:00
a92111261c fix struct with one pointer field 2017-05-05 17:27:41 +08:00
91b9e828b7 support recursive type 2017-05-05 16:51:05 +08:00
6bd835aeb1 test type encoder 2017-05-05 08:22:19 +08:00
90888390bc fix readFloat64SlowPath not advancing the cursor 2017-05-02 10:15:21 +08:00
ccb972f58c merge 2017-04-28 09:10:06 +08:00
8711c74c85 support Any as field type 2017-04-28 09:09:24 +08:00
abcf2759ed Merge pull request #14 from eruca/master
fix the omitempty bug
2017-04-18 10:23:05 +10:00
e5476f70e7 #16 fix slice of map 2017-04-16 14:05:08 +08:00
b986d86f26 add test 2017-03-11 18:19:39 +08:00
9a138f8b6a fix bug where an extra comma was added
The previous code would emit an extra comma when a non-first field was empty, producing output like
```
{"account":"1120","name":"nick",,"department":"fsaf","role":1,"privilege":32,,"created_at":1489226500}
```
2017-03-11 18:17:34 +08:00
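For illustration only (not part of that commit), a minimal sketch of the behaviour after the fix; the struct, field names, and values are invented:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

type Account struct {
	Account string `json:"account"`
	Name    string `json:"name"`
	Email   string `json:"email,omitempty"` // empty here, so it must be skipped cleanly
	Role    int    `json:"role"`
}

func main() {
	b, err := jsoniter.Marshal(Account{Account: "1120", Name: "nick", Role: 1})
	if err != nil {
		panic(err)
	}
	// The omitted field must not leave a dangling comma behind:
	fmt.Println(string(b)) // {"account":"1120","name":"nick","role":1}
}
```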
d1aa59e34e #12 implement omitempty 2017-03-08 07:38:25 -08:00
ceb8c8a733 create map if nil 2017-03-07 18:36:58 -08:00
62028f1ede Merge pull request #11 from Kisesy/patch-1
fix #10
2017-02-26 09:42:45 -06:00
696f962eda fix https://github.com/json-iterator/go/issues/10
Update feature_iter_string.go

Update feature_iter_string.go
2017-02-25 13:30:55 +08:00
47 changed files with 2192 additions and 828 deletions

10
.idea/libraries/Go_SDK.xml generated Normal file
View File

@ -0,0 +1,10 @@
<component name="libraryTable">
<library name="Go SDK">
<CLASSES>
<root url="file:///usr/local/go/src" />
</CLASSES>
<SOURCES>
<root url="file:///usr/local/go/src" />
</SOURCES>
</library>
</component>

View File

@ -2,61 +2,55 @@
 jsoniter (json-iterator) is fast and flexible JSON parser available in [Java](https://github.com/json-iterator/java) and [Go](https://github.com/json-iterator/go)
-# Why jsoniter?
-* Jsoniter is the fastest JSON parser. It could be up to 10x faster than normal parser, data binding included. Shameless self [benchmark](http://jsoniter.com/benchmark.html)
-* Extremely flexible api. You can mix and match three different styles: bind-api, any-api or iterator-api. Checkout your [api choices](http://jsoniter.com/api.html)
-* Unique iterator api can iterate through JSON directly, zero memory allocation! See how [iterator](http://jsoniter.com/api.html#iterator-api) works
-# Show off
-Here is a quick show off, for more complete report you can checkout the full [benchmark](http://jsoniter.com/benchmark.html) with [in-depth optimization](http://jsoniter.com/benchmark.html#optimization-used) to back the numbers up
-![go-medium](http://jsoniter.com/benchmarks/go-medium.png)
-# Bind-API is the best
-Bind-api should always be the first choice. Given this JSON document `[0,1,2,3]`
-Parse with Go bind-api
-```go
-import "github.com/json-iterator/go"
-iter := jsoniter.ParseString(`[0,1,2,3]`)
-val := iter.Read()
-fmt.Println(val)
-```
-# Iterator-API for quick extraction
-When you do not need to get all the data back, just extract some.
-Parse with Go iterator-api
-```go
-import "github.com/json-iterator/go"
-iter := ParseString(`[0, [1, 2], [3, 4], 5]`)
-count := 0
-for iter.ReadArray() {
-    iter.Skip()
-    count++
-}
-fmt.Println(count) // 4
-```
-# Any-API for maximum flexibility
-Parse with Go any-api
-```go
-import "github.com/json-iterator/go"
-iter := jsoniter.ParseString(`[{"field1":"11","field2":"12"},{"field1":"21","field2":"22"}]`)
-val := iter.ReadAny()
-fmt.Println(val.ToInt(1, "field2")) // 22
-```
-Notice you can extract from nested data structure, and convert any type to the type to you want.
+# Benchmark
+![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png)
+Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go
+Raw Result (easyjson requires static code generation)
+| | ns/op | allocation bytes | allocation times |
+| --- | --- | --- | --- |
+| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
+| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
+| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
+| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
+| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
+| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
+# Usage
+100% compatibility with standard lib
+Replace
+```go
+import "encoding/json"
+json.Marshal(&data)
+```
+with
+```go
+import "github.com/json-iterator/go"
+jsoniter.Marshal(&data)
+```
+Replace
+```go
+import "encoding/json"
+json.Unmarshal(input, &data)
+```
+with
+```go
+import "github.com/json-iterator/go"
+jsoniter.Unmarshal(input, &data)
+```
 # How to get
 ```

47
example_test.go Normal file
View File

@ -0,0 +1,47 @@
package jsoniter_test
import (
"fmt"
"os"
"github.com/json-iterator/go"
)
func ExampleMarshal() {
type ColorGroup struct {
ID int
Name string
Colors []string
}
group := ColorGroup{
ID: 1,
Name: "Reds",
Colors: []string{"Crimson", "Red", "Ruby", "Maroon"},
}
b, err := jsoniter.Marshal(group)
if err != nil {
fmt.Println("error:", err)
}
os.Stdout.Write(b)
// Output:
// {"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]}
}
func ExampleUnmarshal() {
var jsonBlob = []byte(`[
{"Name": "Platypus", "Order": "Monotremata"},
{"Name": "Quoll", "Order": "Dasyuromorphia"}
]`)
type Animal struct {
Name string
Order string
}
var animals []Animal
err := jsoniter.Unmarshal(jsonBlob, &animals)
if err != nil {
fmt.Println("error:", err)
}
fmt.Printf("%+v", animals)
// Output:
// [{Name:Platypus Order:Monotremata} {Name:Quoll Order:Dasyuromorphia}]
}

View File

@ -1,13 +1,37 @@
// Package jsoniter implements encoding and decoding of JSON as defined in
// RFC 4627 and provides interfaces with identical syntax of standard lib encoding/json.
// Converting from encoding/json to jsoniter is no more than replacing the package with jsoniter
// and variable type declarations (if any).
// jsoniter interfaces gives 100% compatibility with code using standard lib.
//
// "JSON and Go"
// (https://golang.org/doc/articles/json_and_go.html)
// gives a description of how Marshal/Unmarshal operate
// between arbitrary or predefined json objects and bytes,
// and it applies to jsoniter.Marshal/Unmarshal as well.
package jsoniter package jsoniter
import ( import (
"io"
"bytes" "bytes"
"encoding/json"
"errors"
"io"
"reflect"
"unsafe"
) )
// Unmarshal adapts to json/encoding APIs // Unmarshal adapts to json/encoding Unmarshal API
//
// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v.
// Refer to https://godoc.org/encoding/json#Unmarshal for more information
func Unmarshal(data []byte, v interface{}) error { func Unmarshal(data []byte, v interface{}) error {
data = data[:lastNotSpacePos(data)]
iter := ParseBytes(data) iter := ParseBytes(data)
typ := reflect.TypeOf(v)
if typ.Kind() != reflect.Ptr {
// return non-pointer error
return errors.New("the second param must be ptr type")
}
iter.ReadVal(v) iter.ReadVal(v)
if iter.head == iter.tail { if iter.head == iter.tail {
iter.loadMore() iter.loadMore()
@ -21,7 +45,9 @@ func Unmarshal(data []byte, v interface{}) error {
return iter.Error return iter.Error
} }
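A small, hedged illustration of the pointer check added above; the values and variable names are arbitrary:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	var n int
	// A non-pointer target is rejected with an error instead of crashing.
	err := jsoniter.Unmarshal([]byte(`1`), n)
	fmt.Println(err) // the second param must be ptr type

	// Passing a pointer works as usual.
	err = jsoniter.Unmarshal([]byte(`1`), &n)
	fmt.Println(n, err) // 1 <nil>
}
```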
// UnmarshalAny adapts to
func UnmarshalAny(data []byte) (Any, error) { func UnmarshalAny(data []byte) (Any, error) {
data = data[:lastNotSpacePos(data)]
iter := ParseBytes(data) iter := ParseBytes(data)
any := iter.ReadAny() any := iter.ReadAny()
if iter.head == iter.tail { if iter.head == iter.tail {
@ -36,8 +62,18 @@ func UnmarshalAny(data []byte) (Any, error) {
return any, iter.Error return any, iter.Error
} }
func lastNotSpacePos(data []byte) int {
for i := len(data) - 1; i >= 0; i-- {
if data[i] != ' ' && data[i] != '\t' && data[i] != '\r' && data[i] != '\n' {
return i + 1
}
}
return 0
}
func UnmarshalFromString(str string, v interface{}) error { func UnmarshalFromString(str string, v interface{}) error {
data := []byte(str) data := []byte(str)
data = data[:lastNotSpacePos(data)]
iter := ParseBytes(data) iter := ParseBytes(data)
iter.ReadVal(v) iter.ReadVal(v)
if iter.head == iter.tail { if iter.head == iter.tail {
@ -54,6 +90,7 @@ func UnmarshalFromString(str string, v interface{}) error {
func UnmarshalAnyFromString(str string) (Any, error) { func UnmarshalAnyFromString(str string) (Any, error) {
data := []byte(str) data := []byte(str)
data = data[:lastNotSpacePos(data)]
iter := ParseBytes(data) iter := ParseBytes(data)
any := iter.ReadAny() any := iter.ReadAny()
if iter.head == iter.tail { if iter.head == iter.tail {
@ -68,15 +105,17 @@ func UnmarshalAnyFromString(str string) (Any, error) {
return nil, iter.Error return nil, iter.Error
} }
// Marshal adapts to json/encoding Marshal API
//
// Marshal returns the JSON encoding of v, adapts to json/encoding Marshal API
// Refer to https://godoc.org/encoding/json#Marshal for more information
func Marshal(v interface{}) ([]byte, error) { func Marshal(v interface{}) ([]byte, error) {
buf := &bytes.Buffer{} stream := NewStream(nil, 256)
stream := NewStream(buf, 4096)
stream.WriteVal(v) stream.WriteVal(v)
stream.Flush()
if stream.Error != nil { if stream.Error != nil {
return nil, stream.Error return nil, stream.Error
} }
return buf.Bytes(), nil return stream.Buffer(), nil
} }
func MarshalToString(v interface{}) (string, error) { func MarshalToString(v interface{}) (string, error) {
@ -86,3 +125,67 @@ func MarshalToString(v interface{}) (string, error) {
} }
return string(buf), nil return string(buf), nil
} }
// NewDecoder adapts to json/stream NewDecoder API.
//
// NewDecoder returns a new decoder that reads from r.
//
// Instead of a json/encoding Decoder, an AdaptedDecoder is returned
// Refer to https://godoc.org/encoding/json#NewDecoder for more information
func NewDecoder(reader io.Reader) *AdaptedDecoder {
iter := Parse(reader, 512)
return &AdaptedDecoder{iter}
}
// AdaptedDecoder reads and decodes JSON values from an input stream.
// AdaptedDecoder provides identical APIs with json/stream Decoder (Token() and UseNumber() are in progress)
type AdaptedDecoder struct {
iter *Iterator
}
func (adapter *AdaptedDecoder) Decode(obj interface{}) error {
adapter.iter.ReadVal(obj)
err := adapter.iter.Error
if err == io.EOF {
return nil
}
return adapter.iter.Error
}
func (adapter *AdaptedDecoder) More() bool {
return adapter.iter.head != adapter.iter.tail
}
func (adapter *AdaptedDecoder) Buffered() io.Reader {
remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail]
return bytes.NewReader(remaining)
}
func (decoder *AdaptedDecoder) UseNumber() {
RegisterTypeDecoder("interface {}", func(ptr unsafe.Pointer, iter *Iterator) {
if iter.WhatIsNext() == Number {
*((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
} else {
*((*interface{})(ptr)) = iter.Read()
}
})
}
func NewEncoder(writer io.Writer) *AdaptedEncoder {
stream := NewStream(writer, 512)
return &AdaptedEncoder{stream}
}
type AdaptedEncoder struct {
stream *Stream
}
func (adapter *AdaptedEncoder) Encode(val interface{}) error {
adapter.stream.WriteVal(val)
adapter.stream.Flush()
return adapter.stream.Error
}
func (adapter *AdaptedEncoder) SetIndent(prefix, indent string) {
adapter.stream.IndentionStep = len(indent)
}
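A brief, hedged sketch of how the adapters above might be used; the input literals are invented, and the json.Number behaviour follows from the "interface {}" type decoder registered by UseNumber:

```go
package main

import (
	"fmt"
	"os"
	"strings"

	"github.com/json-iterator/go"
)

func main() {
	// Decoding from a reader; UseNumber keeps numbers as json.Number.
	dec := jsoniter.NewDecoder(strings.NewReader(`{"n": 1}`))
	dec.UseNumber()
	var v map[string]interface{}
	if err := dec.Decode(&v); err != nil {
		panic(err)
	}
	fmt.Printf("%T %v\n", v["n"], v["n"])

	// Encoding to a writer through the stream-backed encoder.
	enc := jsoniter.NewEncoder(os.Stdout)
	if err := enc.Encode(map[string]int{"a": 1}); err != nil {
		panic(err)
	}
}
```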

View File

@ -98,6 +98,10 @@ func Wrap(val interface{}) Any {
if val == nil { if val == nil {
return &nilAny{} return &nilAny{}
} }
asAny, isAny := val.(Any)
if isAny {
return asAny
}
type_ := reflect.TypeOf(val) type_ := reflect.TypeOf(val)
switch type_.Kind() { switch type_.Kind() {
case reflect.Slice: case reflect.Slice:

View File

@ -1,9 +1,9 @@
package jsoniter package jsoniter
import ( import (
"unsafe"
"fmt" "fmt"
"reflect" "reflect"
"unsafe"
) )
type arrayLazyAny struct { type arrayLazyAny struct {
@ -44,7 +44,7 @@ func (any *arrayLazyAny) fillCacheUntil(target int) Any {
return any.cache[target] return any.cache[target]
} }
iter := any.Parse() iter := any.Parse()
if (len(any.remaining) == len(any.buf)) { if len(any.remaining) == len(any.buf) {
iter.head++ iter.head++
c := iter.nextToken() c := iter.nextToken()
if c != ']' { if c != ']' {
@ -337,9 +337,9 @@ func (any *arrayLazyAny) GetInterface() interface{} {
type arrayAny struct { type arrayAny struct {
baseAny baseAny
err error err error
cache []Any cache []Any
val reflect.Value val reflect.Value
} }
func wrapArray(val interface{}) *arrayAny { func wrapArray(val interface{}) *arrayAny {

View File

@ -2,15 +2,15 @@ package jsoniter
import ( import (
"io" "io"
"unsafe"
"strconv" "strconv"
"unsafe"
) )
type float64LazyAny struct { type float64LazyAny struct {
baseAny baseAny
buf []byte buf []byte
iter *Iterator iter *Iterator
err error err error
cache float64 cache float64
} }

View File

@ -2,8 +2,8 @@ package jsoniter
import ( import (
"io" "io"
"unsafe"
"strconv" "strconv"
"unsafe"
) )
type int64LazyAny struct { type int64LazyAny struct {

View File

@ -1,9 +1,9 @@
package jsoniter package jsoniter
import ( import (
"unsafe"
"fmt" "fmt"
"reflect" "reflect"
"unsafe"
) )
type objectLazyAny struct { type objectLazyAny struct {
@ -322,6 +322,7 @@ func (any *objectLazyAny) IterateObject() (func() (string, Any, bool), bool) {
any.err = iter.Error any.err = iter.Error
return key, value, true return key, value, true
} else { } else {
nextKey = ""
remaining = nil remaining = nil
any.remaining = nil any.remaining = nil
any.err = iter.Error any.err = iter.Error

View File

@ -5,7 +5,7 @@ import (
"strconv" "strconv"
) )
type stringLazyAny struct{ type stringLazyAny struct {
baseAny baseAny
buf []byte buf []byte
iter *Iterator iter *Iterator
@ -136,9 +136,9 @@ func (any *stringLazyAny) GetInterface() interface{} {
return any.cache return any.cache
} }
type stringAny struct{ type stringAny struct {
baseAny baseAny
err error err error
val string val string
} }
@ -146,7 +146,6 @@ func (any *stringAny) Parse() *Iterator {
return nil return nil
} }
func (any *stringAny) ValueType() ValueType { func (any *stringAny) ValueType() ValueType {
return String return String
} }

View File

@ -1,12 +1,11 @@
package jsoniter package jsoniter
import ( import (
"io"
"strconv" "strconv"
"unsafe" "unsafe"
"io"
) )
type uint64LazyAny struct { type uint64LazyAny struct {
baseAny baseAny
buf []byte buf []byte

View File

@ -1,3 +1,9 @@
//
// Besides, jsoniter.Iterator provides a different set of interfaces
// iterating given bytes/string/reader
// and yielding parsed elements one by one.
// This set of interfaces reads input as required and gives
// better performance.
package jsoniter package jsoniter
import ( import (
@ -276,4 +282,3 @@ func (iter *Iterator) ReadBase64() (ret []byte) {
} }
return ret[:n] return ret[:n]
} }

View File

@ -18,12 +18,11 @@ func (iter *Iterator) ReadArray() (ret bool) {
case ',': case ',':
return true return true
default: default:
iter.reportError("ReadArray", "expect [ or , or ] or n, but found: " + string([]byte{c})) iter.reportError("ReadArray", "expect [ or , or ] or n, but found: "+string([]byte{c}))
return return
} }
} }
func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) { func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
c := iter.nextToken() c := iter.nextToken()
if c == '[' { if c == '[' {
@ -46,6 +45,6 @@ func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
iter.skipFixedBytes(3) iter.skipFixedBytes(3)
return true // null return true // null
} }
iter.reportError("ReadArrayCB", "expect [ or n, but found: " + string([]byte{c})) iter.reportError("ReadArrayCB", "expect [ or n, but found: "+string([]byte{c}))
return false return false
} }

View File

@ -2,11 +2,13 @@ package jsoniter
import ( import (
"io" "io"
"math/big"
"strconv" "strconv"
"unsafe" "unsafe"
) )
var floatDigits []int8 var floatDigits []int8
const invalidCharForNumber = int8(-1) const invalidCharForNumber = int8(-1)
const endOfNumber = int8(-2) const endOfNumber = int8(-2)
const dotInNumber = int8(-3) const dotInNumber = int8(-3)
@ -19,11 +21,45 @@ func init() {
for i := int8('0'); i <= int8('9'); i++ { for i := int8('0'); i <= int8('9'); i++ {
floatDigits[i] = i - int8('0') floatDigits[i] = i - int8('0')
} }
floatDigits[','] = endOfNumber; floatDigits[','] = endOfNumber
floatDigits[']'] = endOfNumber; floatDigits[']'] = endOfNumber
floatDigits['}'] = endOfNumber; floatDigits['}'] = endOfNumber
floatDigits[' '] = endOfNumber; floatDigits[' '] = endOfNumber
floatDigits['.'] = dotInNumber; floatDigits['\t'] = endOfNumber
floatDigits['\n'] = endOfNumber
floatDigits['.'] = dotInNumber
}
func (iter *Iterator) ReadBigFloat() (ret *big.Float) {
str := iter.readNumberAsString()
if iter.Error != nil && iter.Error != io.EOF {
return nil
}
prec := 64
if len(str) > prec {
prec = len(str)
}
val, _, err := big.ParseFloat(str, 10, uint(prec), big.ToZero)
if err != nil {
iter.Error = err
return nil
}
return val
}
func (iter *Iterator) ReadBigInt() (ret *big.Int) {
str := iter.readNumberAsString()
if iter.Error != nil && iter.Error != io.EOF {
return nil
}
ret = big.NewInt(0)
var success bool
ret, success = ret.SetString(str, 10)
if !success {
iter.reportError("ReadBigInt", "invalid big int")
return nil
}
return ret
} }
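A hedged usage sketch for the two big-number readers above; the numeric literals are arbitrary examples:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	// Integers wider than int64 keep full precision via math/big.
	iter := jsoniter.ParseString(`123456789012345678901234567890`)
	bigInt := iter.ReadBigInt()
	fmt.Println(bigInt.String())

	// Floats with more digits than float64 can hold are parsed with
	// enough precision to cover every digit of the literal.
	iter = jsoniter.ParseString(`3.14159265358979323846264338327950288419716939937510`)
	bigFloat := iter.ReadBigFloat()
	fmt.Println(bigFloat.Text('g', 50))
}
```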
func (iter *Iterator) ReadFloat32() (ret float32) { func (iter *Iterator) ReadFloat32() (ret float32) {
@ -40,7 +76,7 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
value := uint64(0) value := uint64(0)
c := byte(' ') c := byte(' ')
i := iter.head i := iter.head
non_decimal_loop: non_decimal_loop:
for ; i < iter.tail; i++ { for ; i < iter.tail; i++ {
c = iter.buf[i] c = iter.buf[i]
ind := floatDigits[c] ind := floatDigits[c]
@ -56,14 +92,14 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
if value > uint64SafeToMultiple10 { if value > uint64SafeToMultiple10 {
return iter.readFloat32SlowPath() return iter.readFloat32SlowPath()
} }
value = (value << 3) + (value << 1) + uint64(ind); // value = value * 10 + ind; value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
} }
if c == '.' { if c == '.' {
i++ i++
decimalPlaces := 0; decimalPlaces := 0
for ; i < iter.tail; i++ { for ; i < iter.tail; i++ {
c = iter.buf[i] c = iter.buf[i]
ind := floatDigits[c]; ind := floatDigits[c]
switch ind { switch ind {
case endOfNumber: case endOfNumber:
if decimalPlaces > 0 && decimalPlaces < len(POW10) { if decimalPlaces > 0 && decimalPlaces < len(POW10) {
@ -71,7 +107,7 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
return float32(float64(value) / float64(POW10[decimalPlaces])) return float32(float64(value) / float64(POW10[decimalPlaces]))
} }
// too many decimal places // too many decimal places
return iter.readFloat32SlowPath() return iter.readFloat32SlowPath()
case invalidCharForNumber: case invalidCharForNumber:
fallthrough fallthrough
case dotInNumber: case dotInNumber:
@ -87,10 +123,10 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
return iter.readFloat32SlowPath() return iter.readFloat32SlowPath()
} }
func (iter *Iterator) readFloat32SlowPath() (ret float32) { func (iter *Iterator) readNumberAsString() (ret string) {
strBuf := [16]byte{} strBuf := [16]byte{}
str := strBuf[0:0] str := strBuf[0:0]
load_loop: load_loop:
for { for {
for i := iter.head; i < iter.tail; i++ { for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i] c := iter.buf[i]
@ -99,6 +135,7 @@ func (iter *Iterator) readFloat32SlowPath() (ret float32) {
str = append(str, c) str = append(str, c)
continue continue
default: default:
iter.head = i
break load_loop break load_loop
} }
} }
@ -109,7 +146,18 @@ func (iter *Iterator) readFloat32SlowPath() (ret float32) {
if iter.Error != nil && iter.Error != io.EOF { if iter.Error != nil && iter.Error != io.EOF {
return return
} }
val, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&str)), 32) if len(str) == 0 {
iter.reportError("readNumberAsString", "invalid number")
}
return *(*string)(unsafe.Pointer(&str))
}
func (iter *Iterator) readFloat32SlowPath() (ret float32) {
str := iter.readNumberAsString()
if iter.Error != nil && iter.Error != io.EOF {
return
}
val, err := strconv.ParseFloat(str, 32)
if err != nil { if err != nil {
iter.Error = err iter.Error = err
return return
@ -131,7 +179,7 @@ func (iter *Iterator) readPositiveFloat64() (ret float64) {
value := uint64(0) value := uint64(0)
c := byte(' ') c := byte(' ')
i := iter.head i := iter.head
non_decimal_loop: non_decimal_loop:
for ; i < iter.tail; i++ { for ; i < iter.tail; i++ {
c = iter.buf[i] c = iter.buf[i]
ind := floatDigits[c] ind := floatDigits[c]
@ -147,14 +195,14 @@ func (iter *Iterator) readPositiveFloat64() (ret float64) {
if value > uint64SafeToMultiple10 { if value > uint64SafeToMultiple10 {
return iter.readFloat64SlowPath() return iter.readFloat64SlowPath()
} }
value = (value << 3) + (value << 1) + uint64(ind); // value = value * 10 + ind; value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
} }
if c == '.' { if c == '.' {
i++ i++
decimalPlaces := 0; decimalPlaces := 0
for ; i < iter.tail; i++ { for ; i < iter.tail; i++ {
c = iter.buf[i] c = iter.buf[i]
ind := floatDigits[c]; ind := floatDigits[c]
switch ind { switch ind {
case endOfNumber: case endOfNumber:
if decimalPlaces > 0 && decimalPlaces < len(POW10) { if decimalPlaces > 0 && decimalPlaces < len(POW10) {
@ -179,28 +227,11 @@ func (iter *Iterator) readPositiveFloat64() (ret float64) {
} }
func (iter *Iterator) readFloat64SlowPath() (ret float64) { func (iter *Iterator) readFloat64SlowPath() (ret float64) {
strBuf := [16]byte{} str := iter.readNumberAsString()
str := strBuf[0:0]
load_loop:
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
str = append(str, c)
continue
default:
break load_loop
}
}
if !iter.loadMore() {
break
}
}
if iter.Error != nil && iter.Error != io.EOF { if iter.Error != nil && iter.Error != io.EOF {
return return
} }
val, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&str)), 64) val, err := strconv.ParseFloat(str, 64)
if err != nil { if err != nil {
iter.Error = err iter.Error = err
return return

View File

@ -6,8 +6,8 @@ import (
var intDigits []int8 var intDigits []int8
const uint32SafeToMultiply10 = uint32(0xffffffff) / 10 - 1 const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1
const uint64SafeToMultiple10 = uint64(0xffffffffffffffff) / 10 - 1 const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1
const int64Max = uint64(0x7fffffffffffffff) const int64Max = uint64(0x7fffffffffffffff)
const int32Max = uint32(0x7fffffff) const int32Max = uint32(0x7fffffff)
const int16Max = uint32(0x7fff) const int16Max = uint32(0x7fff)
@ -17,7 +17,7 @@ const uint8Max = uint32(0xffff)
func init() { func init() {
intDigits = make([]int8, 256) intDigits = make([]int8, 256)
for i := 0; i < len(floatDigits); i++ { for i := 0; i < len(intDigits); i++ {
intDigits[i] = invalidCharForNumber intDigits[i] = invalidCharForNumber
} }
for i := int8('0'); i <= int8('9'); i++ { for i := int8('0'); i <= int8('9'); i++ {
@ -37,15 +37,15 @@ func (iter *Iterator) ReadInt8() (ret int8) {
c := iter.nextToken() c := iter.nextToken()
if c == '-' { if c == '-' {
val := iter.readUint32(iter.readByte()) val := iter.readUint32(iter.readByte())
if val > int8Max + 1 { if val > int8Max+1 {
iter.reportError("ReadInt8", "overflow: " + strconv.FormatInt(int64(val), 10)) iter.reportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
return return
} }
return -int8(val) return -int8(val)
} else { } else {
val := iter.readUint32(c) val := iter.readUint32(c)
if val > int8Max { if val > int8Max {
iter.reportError("ReadInt8", "overflow: " + strconv.FormatInt(int64(val), 10)) iter.reportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
return return
} }
return int8(val) return int8(val)
@ -55,7 +55,7 @@ func (iter *Iterator) ReadInt8() (ret int8) {
func (iter *Iterator) ReadUint8() (ret uint8) { func (iter *Iterator) ReadUint8() (ret uint8) {
val := iter.readUint32(iter.nextToken()) val := iter.readUint32(iter.nextToken())
if val > uint8Max { if val > uint8Max {
iter.reportError("ReadUint8", "overflow: " + strconv.FormatInt(int64(val), 10)) iter.reportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10))
return return
} }
return uint8(val) return uint8(val)
@ -65,15 +65,15 @@ func (iter *Iterator) ReadInt16() (ret int16) {
c := iter.nextToken() c := iter.nextToken()
if c == '-' { if c == '-' {
val := iter.readUint32(iter.readByte()) val := iter.readUint32(iter.readByte())
if val > int16Max + 1 { if val > int16Max+1 {
iter.reportError("ReadInt16", "overflow: " + strconv.FormatInt(int64(val), 10)) iter.reportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
return return
} }
return -int16(val) return -int16(val)
} else { } else {
val := iter.readUint32(c) val := iter.readUint32(c)
if val > int16Max { if val > int16Max {
iter.reportError("ReadInt16", "overflow: " + strconv.FormatInt(int64(val), 10)) iter.reportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
return return
} }
return int16(val) return int16(val)
@ -83,7 +83,7 @@ func (iter *Iterator) ReadInt16() (ret int16) {
func (iter *Iterator) ReadUint16() (ret uint16) { func (iter *Iterator) ReadUint16() (ret uint16) {
val := iter.readUint32(iter.nextToken()) val := iter.readUint32(iter.nextToken())
if val > uint16Max { if val > uint16Max {
iter.reportError("ReadUint16", "overflow: " + strconv.FormatInt(int64(val), 10)) iter.reportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10))
return return
} }
return uint16(val) return uint16(val)
@ -93,15 +93,15 @@ func (iter *Iterator) ReadInt32() (ret int32) {
c := iter.nextToken() c := iter.nextToken()
if c == '-' { if c == '-' {
val := iter.readUint32(iter.readByte()) val := iter.readUint32(iter.readByte())
if val > int32Max + 1 { if val > int32Max+1 {
iter.reportError("ReadInt32", "overflow: " + strconv.FormatInt(int64(val), 10)) iter.reportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
return return
} }
return -int32(val) return -int32(val)
} else { } else {
val := iter.readUint32(c) val := iter.readUint32(c)
if val > int32Max { if val > int32Max {
iter.reportError("ReadInt32", "overflow: " + strconv.FormatInt(int64(val), 10)) iter.reportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
return return
} }
return int32(val) return int32(val)
@ -118,11 +118,11 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
return 0 // single zero return 0 // single zero
} }
if ind == invalidCharForNumber { if ind == invalidCharForNumber {
iter.reportError("readUint32", "unexpected character: " + string([]byte{byte(ind)})) iter.reportError("readUint32", "unexpected character: "+string([]byte{byte(ind)}))
return return
} }
value := uint32(ind) value := uint32(ind)
if iter.tail - iter.head > 10 { if iter.tail-iter.head > 10 {
i := iter.head i := iter.head
ind2 := intDigits[iter.buf[i]] ind2 := intDigits[iter.buf[i]]
if ind2 == invalidCharForNumber { if ind2 == invalidCharForNumber {
@ -133,7 +133,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind3 := intDigits[iter.buf[i]] ind3 := intDigits[iter.buf[i]]
if ind3 == invalidCharForNumber { if ind3 == invalidCharForNumber {
iter.head = i iter.head = i
return value * 10 + uint32(ind2) return value*10 + uint32(ind2)
} }
//iter.head = i + 1 //iter.head = i + 1
//value = value * 100 + uint32(ind2) * 10 + uint32(ind3) //value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
@ -141,35 +141,35 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind4 := intDigits[iter.buf[i]] ind4 := intDigits[iter.buf[i]]
if ind4 == invalidCharForNumber { if ind4 == invalidCharForNumber {
iter.head = i iter.head = i
return value * 100 + uint32(ind2) * 10 + uint32(ind3) return value*100 + uint32(ind2)*10 + uint32(ind3)
} }
i++ i++
ind5 := intDigits[iter.buf[i]] ind5 := intDigits[iter.buf[i]]
if ind5 == invalidCharForNumber { if ind5 == invalidCharForNumber {
iter.head = i iter.head = i
return value * 1000 + uint32(ind2) * 100 + uint32(ind3) * 10 + uint32(ind4) return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4)
} }
i++ i++
ind6 := intDigits[iter.buf[i]] ind6 := intDigits[iter.buf[i]]
if ind6 == invalidCharForNumber { if ind6 == invalidCharForNumber {
iter.head = i iter.head = i
return value * 10000 + uint32(ind2) * 1000 + uint32(ind3) * 100 + uint32(ind4) * 10 + uint32(ind5) return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5)
} }
i++ i++
ind7 := intDigits[iter.buf[i]] ind7 := intDigits[iter.buf[i]]
if ind7 == invalidCharForNumber { if ind7 == invalidCharForNumber {
iter.head = i iter.head = i
return value * 100000 + uint32(ind2) * 10000 + uint32(ind3) * 1000 + uint32(ind4) * 100 + uint32(ind5) * 10 + uint32(ind6) return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6)
} }
i++ i++
ind8 := intDigits[iter.buf[i]] ind8 := intDigits[iter.buf[i]]
if ind8 == invalidCharForNumber { if ind8 == invalidCharForNumber {
iter.head = i iter.head = i
return value * 1000000 + uint32(ind2) * 100000 + uint32(ind3) * 10000 + uint32(ind4) * 1000 + uint32(ind5) * 100 + uint32(ind6) * 10 + uint32(ind7) return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7)
} }
i++ i++
ind9 := intDigits[iter.buf[i]] ind9 := intDigits[iter.buf[i]]
value = value * 10000000 + uint32(ind2) * 1000000 + uint32(ind3) * 100000 + uint32(ind4) * 10000 + uint32(ind5) * 1000 + uint32(ind6) * 100 + uint32(ind7) * 10 + uint32(ind8) value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8)
iter.head = i iter.head = i
if ind9 == invalidCharForNumber { if ind9 == invalidCharForNumber {
return value return value
@ -194,7 +194,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
} }
value = (value << 3) + (value << 1) + uint32(ind) value = (value << 3) + (value << 1) + uint32(ind)
} }
if (!iter.loadMore()) { if !iter.loadMore() {
return value return value
} }
} }
@ -204,15 +204,15 @@ func (iter *Iterator) ReadInt64() (ret int64) {
c := iter.nextToken() c := iter.nextToken()
if c == '-' { if c == '-' {
val := iter.readUint64(iter.readByte()) val := iter.readUint64(iter.readByte())
if val > int64Max + 1 { if val > int64Max+1 {
iter.reportError("ReadInt64", "overflow: " + strconv.FormatUint(uint64(val), 10)) iter.reportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
return return
} }
return -int64(val) return -int64(val)
} else { } else {
val := iter.readUint64(c) val := iter.readUint64(c)
if val > int64Max { if val > int64Max {
iter.reportError("ReadInt64", "overflow: " + strconv.FormatUint(uint64(val), 10)) iter.reportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
return return
} }
return int64(val) return int64(val)
@ -229,7 +229,7 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
return 0 // single zero return 0 // single zero
} }
if ind == invalidCharForNumber { if ind == invalidCharForNumber {
iter.reportError("readUint64", "unexpected character: " + string([]byte{byte(ind)})) iter.reportError("readUint64", "unexpected character: "+string([]byte{byte(ind)}))
return return
} }
value := uint64(ind) value := uint64(ind)
@ -252,7 +252,7 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
} }
value = (value << 3) + (value << 1) + uint64(ind) value = (value << 3) + (value << 1) + uint64(ind)
} }
if (!iter.loadMore()) { if !iter.loadMore() {
return value return value
} }
} }

View File

@ -1,5 +1,10 @@
package jsoniter package jsoniter
import (
"fmt"
"unicode"
)
func (iter *Iterator) ReadObject() (ret string) { func (iter *Iterator) ReadObject() (ret string) {
c := iter.nextToken() c := iter.nextToken()
switch c { switch c {
@ -22,7 +27,7 @@ func (iter *Iterator) ReadObject() (ret string) {
case '}': case '}':
return "" // end of object return "" // end of object
default: default:
iter.reportError("ReadObject", `expect { or , or } or n`) iter.reportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c})))
return return
} }
} }
@ -35,11 +40,14 @@ func (iter *Iterator) readFieldHash() int32 {
for i := iter.head; i < iter.tail; i++ { for i := iter.head; i < iter.tail; i++ {
// require ascii string and no escape // require ascii string and no escape
b := iter.buf[i] b := iter.buf[i]
if 'A' <= b && b <= 'Z' {
b += 'a' - 'A'
}
if b == '"' { if b == '"' {
iter.head = i+1 iter.head = i + 1
c = iter.nextToken() c = iter.nextToken()
if c != ':' { if c != ':' {
iter.reportError("readFieldHash", `expect :, but found ` + string([]byte{c})) iter.reportError("readFieldHash", `expect :, but found `+string([]byte{c}))
} }
return int32(hash) return int32(hash)
} }
@ -52,14 +60,14 @@ func (iter *Iterator) readFieldHash() int32 {
} }
} }
} }
iter.reportError("readFieldHash", `expect ", but found ` + string([]byte{c})) iter.reportError("readFieldHash", `expect ", but found `+string([]byte{c}))
return 0 return 0
} }
func calcHash(str string) int32 { func calcHash(str string) int32 {
hash := int64(0x811c9dc5) hash := int64(0x811c9dc5)
for _, b := range str { for _, b := range str {
hash ^= int64(b) hash ^= int64(unicode.ToLower(b))
hash *= 0x1000193 hash *= 0x1000193
} }
return int32(hash) return int32(hash)
@ -76,7 +84,7 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
return false return false
} }
for iter.nextToken() == ',' { for iter.nextToken() == ',' {
field := string(iter.readObjectFieldAsBytes()) field = string(iter.readObjectFieldAsBytes())
if !callback(iter, field) { if !callback(iter, field) {
return false return false
} }
@ -97,6 +105,46 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
return false return false
} }
func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
c := iter.nextToken()
if c == '{' {
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field := iter.ReadString()
if iter.nextToken() != ':' {
iter.reportError("ReadMapCB", "expect : after object field")
return false
}
if !callback(iter, field) {
return false
}
for iter.nextToken() == ',' {
field = iter.ReadString()
if iter.nextToken() != ':' {
iter.reportError("ReadMapCB", "expect : after object field")
return false
}
if !callback(iter, field) {
return false
}
}
return true
}
if c == '}' {
return true
}
iter.reportError("ReadMapCB", `expect " after }`)
return false
}
if c == 'n' {
iter.skipFixedBytes(3)
return true // null
}
iter.reportError("ReadMapCB", `expect { or n`)
return false
}
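A hedged sketch of driving ReadMapCB directly; the input object and the summing callback are invented:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(`{"a":1,"b":2}`)
	total := int64(0)
	iter.ReadMapCB(func(it *jsoniter.Iterator, key string) bool {
		total += it.ReadInt64()
		return true // returning false would stop the iteration
	})
	fmt.Println(total) // 3
}
```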
func (iter *Iterator) readObjectStart() bool { func (iter *Iterator) readObjectStart() bool {
c := iter.nextToken() c := iter.nextToken()
if c == '{' { if c == '{' {
@ -106,8 +154,11 @@ func (iter *Iterator) readObjectStart() bool {
} }
iter.unreadByte() iter.unreadByte()
return true return true
} else if c == 'n' {
iter.skipFixedBytes(3)
return false
} }
iter.reportError("readObjectStart", "expect { ") iter.reportError("readObjectStart", "expect { or n")
return false return false
} }

View File

@ -29,6 +29,15 @@ func (iter *Iterator) ReadBool() (ret bool) {
return return
} }
func (iter *Iterator) SkipAndReturnBytes() []byte {
if iter.reader != nil {
panic("reader input does not support this api")
}
before := iter.head
iter.Skip()
after := iter.head
return iter.buf[before:after]
}
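A hedged example of SkipAndReturnBytes; the guard above makes it panic for reader-backed input, so this sketch parses from an in-memory string (the document literal is invented):

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(`{"skipped":{"a":[1,2,3]},"kept":true}`)
	iter.ReadObject()                // move past the first field name
	raw := iter.SkipAndReturnBytes() // raw bytes of the skipped value
	fmt.Println(string(raw))         // {"a":[1,2,3]}
}
```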
// Skip skips a json object and positions to relatively the next json object // Skip skips a json object and positions to relatively the next json object
func (iter *Iterator) Skip() { func (iter *Iterator) Skip() {
@ -193,15 +202,15 @@ func (iter *Iterator) skipUntilBreak() {
} }
func (iter *Iterator) skipFixedBytes(n int) { func (iter *Iterator) skipFixedBytes(n int) {
iter.head += n; iter.head += n
if (iter.head >= iter.tail) { if iter.head >= iter.tail {
more := iter.head - iter.tail; more := iter.head - iter.tail
if !iter.loadMore() { if !iter.loadMore() {
if more > 0 { if more > 0 {
iter.reportError("skipFixedBytes", "unexpected end"); iter.reportError("skipFixedBytes", "unexpected end")
} }
return return
} }
iter.head += more; iter.head += more
} }
} }

View File

@ -2,64 +2,42 @@ package jsoniter
import ( import (
"unicode/utf16" "unicode/utf16"
"unsafe"
) )
// TODO: avoid append
func (iter *Iterator) ReadString() (ret string) { func (iter *Iterator) ReadString() (ret string) {
c := iter.nextToken() c := iter.nextToken()
if c == '"' { if c == '"' {
copied := make([]byte, 32) for i := iter.head; i < iter.tail; i++ {
j := 0 c := iter.buf[i]
fast_loop: if c == '"' {
for { ret = string(iter.buf[iter.head:i])
i := iter.head iter.head = i + 1
for ; i < iter.tail && j < len(copied); i++ { return ret
c := iter.buf[i] } else if c == '\\' {
if c == '"' { break
iter.head = i + 1
copied = copied[:j]
return *(*string)(unsafe.Pointer(&copied))
} else if c == '\\' {
iter.head = i
break fast_loop
}
copied[j] = c
j++
}
if i == iter.tail {
if iter.loadMore() {
i = iter.head
continue
} else {
iter.reportError("ReadString", "incomplete string")
return
}
}
iter.head = i
if j == len(copied) {
newBuf := make([]byte, len(copied) * 2)
copy(newBuf, copied)
copied = newBuf
} }
} }
return iter.readStringSlowPath(copied[:j]) return iter.readStringSlowPath()
} else if c == 'n' {
iter.skipFixedBytes(3)
return ""
} }
iter.reportError("ReadString", `expects " or n`) iter.reportError("ReadString", `expects " or n`)
return return
} }
func (iter *Iterator) readStringSlowPath(str []byte) (ret string) { func (iter *Iterator) readStringSlowPath() (ret string) {
var str []byte
var c byte var c byte
for iter.Error == nil { for iter.Error == nil {
c = iter.readByte() c = iter.readByte()
if c == '"' { if c == '"' {
return *(*string)(unsafe.Pointer(&str)) return string(str)
} }
if c == '\\' { if c == '\\' {
c = iter.readByte() c = iter.readByte()
switch c { switch c {
case 'u': case 'u', 'U':
r := iter.readU4() r := iter.readU4()
if utf16.IsSurrogate(r) { if utf16.IsSurrogate(r) {
c = iter.readByte() c = iter.readByte()
@ -75,7 +53,7 @@ func (iter *Iterator) readStringSlowPath(str []byte) (ret string) {
if iter.Error != nil { if iter.Error != nil {
return return
} }
if c != 'u' { if c != 'u' && c != 'U' {
iter.reportError("ReadString", iter.reportError("ReadString",
`expects \u after utf16 surrogate, but \u not found`) `expects \u after utf16 surrogate, but \u not found`)
return return
@ -114,6 +92,7 @@ func (iter *Iterator) readStringSlowPath(str []byte) (ret string) {
str = append(str, c) str = append(str, c)
} }
} }
iter.reportError("ReadString", "unexpected end of input")
return return
} }
@ -125,13 +104,13 @@ func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
// for: field name, base64, number // for: field name, base64, number
if iter.buf[i] == '"' { if iter.buf[i] == '"' {
// fast path: reuse the underlying buffer // fast path: reuse the underlying buffer
ret = iter.buf[iter.head : i] ret = iter.buf[iter.head:i]
iter.head = i + 1 iter.head = i + 1
return ret return ret
} }
} }
readLen := iter.tail - iter.head readLen := iter.tail - iter.head
copied := make([]byte, readLen, readLen * 2) copied := make([]byte, readLen, readLen*2)
copy(copied, iter.buf[iter.head:iter.tail]) copy(copied, iter.buf[iter.head:iter.tail])
iter.head = iter.tail iter.head = iter.tail
for iter.Error == nil { for iter.Error == nil {
@ -154,9 +133,11 @@ func (iter *Iterator) readU4() (ret rune) {
return return
} }
if c >= '0' && c <= '9' { if c >= '0' && c <= '9' {
ret = ret * 16 + rune(c - '0') ret = ret*16 + rune(c-'0')
} else if c >= 'a' && c <= 'f' { } else if c >= 'a' && c <= 'f' {
ret = ret * 16 + rune(c - 'a' + 10) ret = ret*16 + rune(c-'a'+10)
} else if c >= 'A' && c <= 'F' {
ret = ret*16 + rune(c-'A'+10)
} else { } else {
iter.reportError("readU4", "expects 0~9 or a~f") iter.reportError("readU4", "expects 0~9 or a~f")
return return
@ -178,14 +159,14 @@ const (
mask3 = 0x0F // 0000 1111 mask3 = 0x0F // 0000 1111
mask4 = 0x07 // 0000 0111 mask4 = 0x07 // 0000 0111
rune1Max = 1 << 7 - 1 rune1Max = 1<<7 - 1
rune2Max = 1 << 11 - 1 rune2Max = 1<<11 - 1
rune3Max = 1 << 16 - 1 rune3Max = 1<<16 - 1
surrogateMin = 0xD800 surrogateMin = 0xD800
surrogateMax = 0xDFFF surrogateMax = 0xDFFF
maxRune = '\U0010FFFF' // Maximum valid Unicode code point. maxRune = '\U0010FFFF' // Maximum valid Unicode code point.
runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character" runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character"
) )
@ -196,22 +177,22 @@ func appendRune(p []byte, r rune) []byte {
p = append(p, byte(r)) p = append(p, byte(r))
return p return p
case i <= rune2Max: case i <= rune2Max:
p = append(p, t2 | byte(r >> 6)) p = append(p, t2|byte(r>>6))
p = append(p, tx | byte(r) & maskx) p = append(p, tx|byte(r)&maskx)
return p return p
case i > maxRune, surrogateMin <= i && i <= surrogateMax: case i > maxRune, surrogateMin <= i && i <= surrogateMax:
r = runeError r = runeError
fallthrough fallthrough
case i <= rune3Max: case i <= rune3Max:
p = append(p, t3 | byte(r >> 12)) p = append(p, t3|byte(r>>12))
p = append(p, tx | byte(r >> 6) & maskx) p = append(p, tx|byte(r>>6)&maskx)
p = append(p, tx | byte(r) & maskx) p = append(p, tx|byte(r)&maskx)
return p return p
default: default:
p = append(p, t4 | byte(r >> 18)) p = append(p, t4|byte(r>>18))
p = append(p, tx | byte(r >> 12) & maskx) p = append(p, tx|byte(r>>12)&maskx)
p = append(p, tx | byte(r >> 6) & maskx) p = append(p, tx|byte(r>>6)&maskx)
p = append(p, tx | byte(r) & maskx) p = append(p, tx|byte(r)&maskx)
return p return p
} }
} }

View File

@ -1,32 +1,38 @@
package jsoniter package jsoniter
import ( import (
"encoding"
"encoding/json"
"fmt" "fmt"
"reflect" "reflect"
"sync/atomic" "sync/atomic"
"unsafe" "unsafe"
"errors"
) )
-/*
-Reflection on type to create decoders, which is then cached
-Reflection on value is avoided as we can, as the reflect.Value itself will allocate, with following exceptions
-1. create instance of new value, for example *int will need a int to be allocated
-2. append to slice, if the existing cap is not enough, allocate will be done using Reflect.New
-3. assignment to map, both key and value will be reflect.Value
-For a simple struct binding, it will be reflect.Value free and allocation free
-*/
+// Decoder is an internal type registered to cache as needed.
+// Don't confuse jsoniter.Decoder with json.Decoder.
+// For json.Decoder's adapter, refer to jsoniter.AdapterDecoder(todo link).
+//
+// Reflection on type to create decoders, which is then cached
+// Reflection on value is avoided as we can, as the reflect.Value itself will allocate, with following exceptions
+// 1. create instance of new value, for example *int will need a int to be allocated
+// 2. append to slice, if the existing cap is not enough, allocate will be done using Reflect.New
+// 3. assignment to map, both key and value will be reflect.Value
+// For a simple struct binding, it will be reflect.Value free and allocation free
type Decoder interface { type Decoder interface {
decode(ptr unsafe.Pointer, iter *Iterator) decode(ptr unsafe.Pointer, iter *Iterator)
} }
// Encoder is an internal type registered to cache as needed.
// Don't confuse jsoniter.Encoder with json.Encoder.
// For json.Encoder's adapter, refer to jsoniter.AdapterEncoder(todo godoc link).
type Encoder interface { type Encoder interface {
isEmpty(ptr unsafe.Pointer) bool
encode(ptr unsafe.Pointer, stream *Stream) encode(ptr unsafe.Pointer, stream *Stream)
encodeInterface(val interface{}, stream *Stream) encodeInterface(val interface{}, stream *Stream)
} }
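As a further hedged illustration of how these Decoder hooks are reached from outside the package, a custom type decoder can be registered in the same style as the UseNumber adapter shown earlier in this diff; the time.Time-as-unix-seconds mapping is invented for demonstration, not built-in behaviour:

```go
package main

import (
	"fmt"
	"time"
	"unsafe"

	"github.com/json-iterator/go"
)

func main() {
	// Decode JSON integers into time.Time as unix seconds.
	jsoniter.RegisterTypeDecoder("time.Time", func(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
		*((*time.Time)(ptr)) = time.Unix(iter.ReadInt64(), 0)
	})

	var ts time.Time
	if err := jsoniter.Unmarshal([]byte(`1496636400`), &ts); err != nil {
		panic(err)
	}
	fmt.Println(ts.UTC())
}
```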
func WriteToStream(val interface{}, stream *Stream, encoder Encoder) { func writeToStream(val interface{}, stream *Stream, encoder Encoder) {
e := (*emptyInterface)(unsafe.Pointer(&val)) e := (*emptyInterface)(unsafe.Pointer(&val))
if reflect.TypeOf(val).Kind() == reflect.Ptr { if reflect.TypeOf(val).Kind() == reflect.Ptr {
encoder.encode(unsafe.Pointer(&e.word), stream) encoder.encode(unsafe.Pointer(&e.word), stream)
@ -37,7 +43,7 @@ func WriteToStream(val interface{}, stream *Stream, encoder Encoder) {
type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator) type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator)
type EncoderFunc func(ptr unsafe.Pointer, stream *Stream) type EncoderFunc func(ptr unsafe.Pointer, stream *Stream)
type ExtensionFunc func(typ reflect.Type, field *reflect.StructField) ([]string, DecoderFunc) type ExtensionFunc func(typ reflect.Type, field *reflect.StructField) ([]string, EncoderFunc, DecoderFunc)
type funcDecoder struct { type funcDecoder struct {
fun DecoderFunc fun DecoderFunc
@ -56,7 +62,11 @@ func (encoder *funcEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *funcEncoder) encodeInterface(val interface{}, stream *Stream) { func (encoder *funcEncoder) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (encoder *funcEncoder) isEmpty(ptr unsafe.Pointer) bool {
return false
} }
var DECODERS unsafe.Pointer var DECODERS unsafe.Pointer
@ -67,7 +77,12 @@ var fieldDecoders map[string]Decoder
var typeEncoders map[string]Encoder var typeEncoders map[string]Encoder
var fieldEncoders map[string]Encoder var fieldEncoders map[string]Encoder
var extensions []ExtensionFunc var extensions []ExtensionFunc
var jsonNumberType reflect.Type
var jsonRawMessageType reflect.Type
var anyType reflect.Type var anyType reflect.Type
var marshalerType reflect.Type
var unmarshalerType reflect.Type
var textUnmarshalerType reflect.Type
func init() { func init() {
typeDecoders = map[string]Decoder{} typeDecoders = map[string]Decoder{}
@ -77,7 +92,12 @@ func init() {
extensions = []ExtensionFunc{} extensions = []ExtensionFunc{}
atomic.StorePointer(&DECODERS, unsafe.Pointer(&map[string]Decoder{})) atomic.StorePointer(&DECODERS, unsafe.Pointer(&map[string]Decoder{}))
atomic.StorePointer(&ENCODERS, unsafe.Pointer(&map[string]Encoder{})) atomic.StorePointer(&ENCODERS, unsafe.Pointer(&map[string]Encoder{}))
jsonNumberType = reflect.TypeOf((*json.Number)(nil)).Elem()
jsonRawMessageType = reflect.TypeOf((*json.RawMessage)(nil)).Elem()
anyType = reflect.TypeOf((*Any)(nil)).Elem() anyType = reflect.TypeOf((*Any)(nil)).Elem()
marshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem()
unmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()
textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
} }
func addDecoderToCache(cacheKey reflect.Type, decoder Decoder) { func addDecoderToCache(cacheKey reflect.Type, decoder Decoder) {
@ -143,10 +163,18 @@ func RegisterExtension(extension ExtensionFunc) {
extensions = append(extensions, extension) extensions = append(extensions, extension)
} }
// CleanDecoders cleans decoders registered // CleanDecoders cleans decoders registered or cached
func CleanDecoders() { func CleanDecoders() {
typeDecoders = map[string]Decoder{} typeDecoders = map[string]Decoder{}
fieldDecoders = map[string]Decoder{} fieldDecoders = map[string]Decoder{}
atomic.StorePointer(&DECODERS, unsafe.Pointer(&map[string]Decoder{}))
}
// CleanEncoders cleans encoders registered or cached
func CleanEncoders() {
typeEncoders = map[string]Encoder{}
fieldEncoders = map[string]Encoder{}
atomic.StorePointer(&ENCODERS, unsafe.Pointer(&map[string]Encoder{}))
} }
 type optionalDecoder struct {
@@ -171,7 +199,6 @@ func (decoder *optionalDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
 }
 type optionalEncoder struct {
-	valueType    reflect.Type
 	valueEncoder Encoder
 }
@@ -184,92 +211,58 @@ func (encoder *optionalEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
 }
 func (encoder *optionalEncoder) encodeInterface(val interface{}, stream *Stream) {
-	WriteToStream(val, stream, encoder)
+	writeToStream(val, stream, encoder)
 }
-type mapDecoder struct {
-	mapType      reflect.Type
-	elemType     reflect.Type
-	elemDecoder  Decoder
-	mapInterface emptyInterface
-}
-func (decoder *mapDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
-	// dark magic to cast unsafe.Pointer back to interface{} using reflect.Type
-	mapInterface := decoder.mapInterface
-	mapInterface.word = ptr
-	realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
-	realVal := reflect.ValueOf(*realInterface).Elem()
-	for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
-		elem := reflect.New(decoder.elemType)
-		decoder.elemDecoder.decode(unsafe.Pointer(elem.Pointer()), iter)
-		// to put into map, we have to use reflection
-		realVal.SetMapIndex(reflect.ValueOf(string([]byte(field))), elem.Elem())
-	}
-}
-type mapEncoder struct {
-	mapType      reflect.Type
-	elemType     reflect.Type
-	elemEncoder  Encoder
-	mapInterface emptyInterface
-}
-func (encoder *mapEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
-	mapInterface := encoder.mapInterface
-	mapInterface.word = ptr
-	realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
-	realVal := reflect.ValueOf(*realInterface)
-	stream.WriteObjectStart()
-	for i, key := range realVal.MapKeys() {
-		if i != 0 {
-			stream.WriteMore()
-		}
-		stream.WriteObjectField(key.String())
-		val := realVal.MapIndex(key).Interface()
-		encoder.elemEncoder.encodeInterface(val, stream)
-	}
-	stream.WriteObjectEnd()
-}
-func (encoder *mapEncoder) encodeInterface(val interface{}, stream *Stream) {
-	WriteToStream(val, stream, encoder)
-}
-type mapInterfaceEncoder struct {
-	mapType      reflect.Type
-	elemType     reflect.Type
-	elemEncoder  Encoder
-	mapInterface emptyInterface
-}
-func (encoder *mapInterfaceEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
-	mapInterface := encoder.mapInterface
-	mapInterface.word = ptr
-	realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
-	realVal := reflect.ValueOf(*realInterface)
-	stream.WriteObjectStart()
-	for i, key := range realVal.MapKeys() {
-		if i != 0 {
-			stream.WriteMore()
-		}
-		stream.WriteObjectField(key.String())
-		val := realVal.MapIndex(key).Interface()
-		encoder.elemEncoder.encode(unsafe.Pointer(&val), stream)
-	}
-	stream.WriteObjectEnd()
-}
-func (encoder *mapInterfaceEncoder) encodeInterface(val interface{}, stream *Stream) {
-	WriteToStream(val, stream, encoder)
-}
+func (encoder *optionalEncoder) isEmpty(ptr unsafe.Pointer) bool {
+	if *((*unsafe.Pointer)(ptr)) == nil {
+		return true
+	} else {
+		return encoder.valueEncoder.isEmpty(*((*unsafe.Pointer)(ptr)))
+	}
+}
+type placeholderEncoder struct {
+	valueEncoder Encoder
+}
+func (encoder *placeholderEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
+	encoder.valueEncoder.encode(ptr, stream)
+}
+func (encoder *placeholderEncoder) encodeInterface(val interface{}, stream *Stream) {
+	writeToStream(val, stream, encoder)
+}
+func (encoder *placeholderEncoder) isEmpty(ptr unsafe.Pointer) bool {
+	return encoder.valueEncoder.isEmpty(ptr)
+}
+type placeholderDecoder struct {
+	valueDecoder Decoder
+}
+func (decoder *placeholderDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
+	decoder.valueDecoder.decode(ptr, iter)
+}
 // emptyInterface is the header for an interface{} value.
 type emptyInterface struct {
-	typ  *struct{}
+	typ  unsafe.Pointer
 	word unsafe.Pointer
 }
+// emptyInterface is the header for an interface with method (not interface{})
+type nonEmptyInterface struct {
+	// see ../runtime/iface.go:/Itab
+	itab *struct {
+		ityp   unsafe.Pointer // static interface type
+		typ    unsafe.Pointer // dynamic concrete type
+		link   unsafe.Pointer
+		bad    int32
+		unused int32
+		fun    [100000]unsafe.Pointer // method table
+	}
+	word unsafe.Pointer
+}
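The emptyInterface / nonEmptyInterface headers above are what let the library move between interface{} values and raw pointers without going through reflect for every value. A minimal, standalone sketch of the same trick (not part of the library; the eface name and the example value are made up here):

package main

import (
	"fmt"
	"unsafe"
)

// eface mirrors the emptyInterface layout above: an interface{} value is a
// (type pointer, data pointer) pair.
type eface struct {
	typ  unsafe.Pointer
	word unsafe.Pointer
}

func main() {
	x := 42
	var i interface{} = &x
	header := (*eface)(unsafe.Pointer(&i)) // reinterpret the interface header
	fmt.Println(*(*int)(header.word))      // prints 42: word is the data pointer (&x here)
}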
@ -291,7 +284,6 @@ func (iter *Iterator) ReadVal(obj interface{}) {
cachedDecoder.decode(e.word, iter) cachedDecoder.decode(e.word, iter)
} }
func (stream *Stream) WriteVal(val interface{}) { func (stream *Stream) WriteVal(val interface{}) {
if nil == val { if nil == val {
stream.WriteNil() stream.WriteNil()
@ -334,6 +326,42 @@ func decoderOfType(typ reflect.Type) (Decoder, error) {
if typeDecoder != nil { if typeDecoder != nil {
return typeDecoder, nil return typeDecoder, nil
} }
if typ.Kind() == reflect.Ptr {
typeDecoder := typeDecoders[typ.Elem().String()]
if typeDecoder != nil {
return &optionalDecoder{typ.Elem(), typeDecoder}, nil
}
}
cacheKey := typ
cachedDecoder := getDecoderFromCache(cacheKey)
if cachedDecoder != nil {
return cachedDecoder, nil
}
placeholder := &placeholderDecoder{}
addDecoderToCache(cacheKey, placeholder)
newDecoder, err := createDecoderOfType(typ)
placeholder.valueDecoder = newDecoder
addDecoderToCache(cacheKey, newDecoder)
return newDecoder, err
}
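The placeholder registered above is what keeps decoder construction from recursing forever on self-referential types: the nested lookup for the same type hits the cache, receives the placeholder, and the placeholder delegates to the real decoder once createDecoderOfType has returned. A small illustrative type that relies on this (not from the library):

type Node struct {
	Value int
	Next  *Node // building Node's decoder needs *Node's, which needs Node's again;
	             // the cached placeholder breaks that cycle
}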
func createDecoderOfType(typ reflect.Type) (Decoder, error) {
if typ.String() == "[]uint8" {
return &base64Codec{}, nil
}
if typ.AssignableTo(jsonRawMessageType) {
return &jsonRawMessageCodec{}, nil
}
if typ.AssignableTo(jsonNumberType) {
return &jsonNumberCodec{}, nil
}
if typ.ConvertibleTo(unmarshalerType) {
templateInterface := reflect.New(typ).Elem().Interface()
return &optionalDecoder{typ, &unmarshalerDecoder{extractInterface(templateInterface)}}, nil
}
if typ.ConvertibleTo(anyType) {
return &anyCodec{}, nil
}
switch typ.Kind() { switch typ.Kind() {
case reflect.String: case reflect.String:
return &stringCodec{}, nil return &stringCodec{}, nil
@ -365,12 +393,12 @@ func decoderOfType(typ reflect.Type) (Decoder, error) {
return &boolCodec{}, nil return &boolCodec{}, nil
case reflect.Interface: case reflect.Interface:
if typ.NumMethod() == 0 { if typ.NumMethod() == 0 {
return &interfaceCodec{}, nil return &emptyInterfaceCodec{}, nil
} else { } else {
return nil, errors.New("unsupportd type: " + typ.String()) return &nonEmptyInterfaceCodec{}, nil
} }
case reflect.Struct: case reflect.Struct:
return prefix(fmt.Sprintf("[%s]", typeName)).addToDecoder(decoderOfStruct(typ)) return prefix(fmt.Sprintf("[%s]", typ.String())).addToDecoder(decoderOfStruct(typ))
case reflect.Slice: case reflect.Slice:
return prefix("[slice]").addToDecoder(decoderOfSlice(typ)) return prefix("[slice]").addToDecoder(decoderOfSlice(typ))
case reflect.Map: case reflect.Map:
@ -383,15 +411,49 @@ func decoderOfType(typ reflect.Type) (Decoder, error) {
} }
func encoderOfType(typ reflect.Type) (Encoder, error) { func encoderOfType(typ reflect.Type) (Encoder, error) {
if typ.ConvertibleTo(anyType) {
return &anyCodec{}, nil
}
typeName := typ.String() typeName := typ.String()
typeEncoder := typeEncoders[typeName] typeEncoder := typeEncoders[typeName]
if typeEncoder != nil { if typeEncoder != nil {
return typeEncoder, nil return typeEncoder, nil
} }
switch typ.Kind() { if typ.Kind() == reflect.Ptr {
typeEncoder := typeEncoders[typ.Elem().String()]
if typeEncoder != nil {
return &optionalEncoder{typeEncoder}, nil
}
}
cacheKey := typ
cachedEncoder := getEncoderFromCache(cacheKey)
if cachedEncoder != nil {
return cachedEncoder, nil
}
placeholder := &placeholderEncoder{}
addEncoderToCache(cacheKey, placeholder)
newEncoder, err := createEncoderOfType(typ)
placeholder.valueEncoder = newEncoder
addEncoderToCache(cacheKey, newEncoder)
return newEncoder, err
}
func createEncoderOfType(typ reflect.Type) (Encoder, error) {
if typ.String() == "[]uint8" {
return &base64Codec{}, nil
}
if typ.AssignableTo(jsonRawMessageType) {
return &jsonRawMessageCodec{}, nil
}
if typ.AssignableTo(jsonNumberType) {
return &jsonNumberCodec{}, nil
}
if typ.ConvertibleTo(marshalerType) {
templateInterface := reflect.New(typ).Elem().Interface()
return &marshalerEncoder{extractInterface(templateInterface)}, nil
}
if typ.ConvertibleTo(anyType) {
return &anyCodec{}, nil
}
kind := typ.Kind()
switch kind {
case reflect.String: case reflect.String:
return &stringCodec{}, nil return &stringCodec{}, nil
case reflect.Int: case reflect.Int:
@ -421,9 +483,13 @@ func encoderOfType(typ reflect.Type) (Encoder, error) {
case reflect.Bool: case reflect.Bool:
return &boolCodec{}, nil return &boolCodec{}, nil
case reflect.Interface: case reflect.Interface:
return &interfaceCodec{}, nil if typ.NumMethod() == 0 {
return &emptyInterfaceCodec{}, nil
} else {
return &nonEmptyInterfaceCodec{}, nil
}
case reflect.Struct: case reflect.Struct:
return prefix(fmt.Sprintf("[%s]", typeName)).addToEncoder(encoderOfStruct(typ)) return prefix(fmt.Sprintf("[%s]", typ.String())).addToEncoder(encoderOfStruct(typ))
case reflect.Slice: case reflect.Slice:
return prefix("[slice]").addToEncoder(encoderOfSlice(typ)) return prefix("[slice]").addToEncoder(encoderOfSlice(typ))
case reflect.Map: case reflect.Map:
@ -450,7 +516,7 @@ func encoderOfOptional(typ reflect.Type) (Encoder, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
return &optionalEncoder{elemType, decoder}, nil return &optionalEncoder{decoder}, nil
} }
func decoderOfMap(typ reflect.Type) (Decoder, error) { func decoderOfMap(typ reflect.Type) (Decoder, error) {
@ -459,7 +525,11 @@ func decoderOfMap(typ reflect.Type) (Decoder, error) {
return nil, err return nil, err
} }
mapInterface := reflect.New(typ).Interface() mapInterface := reflect.New(typ).Interface()
return &mapDecoder{typ, typ.Elem(), decoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil return &mapDecoder{typ, typ.Key(), typ.Elem(), decoder, extractInterface(mapInterface)}, nil
}
func extractInterface(val interface{}) emptyInterface {
return *((*emptyInterface)(unsafe.Pointer(&val)))
} }
func encoderOfMap(typ reflect.Type) (Encoder, error) { func encoderOfMap(typ reflect.Type) (Encoder, error) {
@ -469,9 +539,5 @@ func encoderOfMap(typ reflect.Type) (Encoder, error) {
return nil, err return nil, err
} }
mapInterface := reflect.New(typ).Elem().Interface() mapInterface := reflect.New(typ).Elem().Interface()
if elemType.Kind() == reflect.Interface && elemType.NumMethod() == 0 { return &mapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
return &mapInterfaceEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
} else {
return &mapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
}
} }


@ -1,10 +1,10 @@
package jsoniter package jsoniter
import ( import (
"unsafe"
"reflect"
"io"
"fmt" "fmt"
"io"
"reflect"
"unsafe"
) )
func decoderOfSlice(typ reflect.Type) (Decoder, error) { func decoderOfSlice(typ reflect.Type) (Decoder, error) {
@ -20,6 +20,9 @@ func encoderOfSlice(typ reflect.Type) (Encoder, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
if typ.Elem().Kind() == reflect.Map {
encoder = &optionalEncoder{encoder}
}
return &sliceEncoder{typ, typ.Elem(), encoder}, nil return &sliceEncoder{typ, typ.Elem(), encoder}, nil
} }
@ -50,7 +53,12 @@ func (encoder *sliceEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *sliceEncoder) encodeInterface(val interface{}, stream *Stream) { func (encoder *sliceEncoder) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (encoder *sliceEncoder) isEmpty(ptr unsafe.Pointer) bool {
slice := (*sliceHeader)(ptr)
return slice.Len == 0
} }
type sliceDecoder struct { type sliceDecoder struct {
@ -80,30 +88,30 @@ func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
return return
} }
offset := uintptr(0) offset := uintptr(0)
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter) decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
if !iter.ReadArray() { if !iter.ReadArray() {
slice.Len = 1 slice.Len = 1
return return
} }
offset += decoder.elemType.Size() offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter) decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
if !iter.ReadArray() { if !iter.ReadArray() {
slice.Len = 2 slice.Len = 2
return return
} }
offset += decoder.elemType.Size() offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter) decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
if !iter.ReadArray() { if !iter.ReadArray() {
slice.Len = 3 slice.Len = 3
return return
} }
offset += decoder.elemType.Size() offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter) decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
slice.Len = 4 slice.Len = 4
for iter.ReadArray() { for iter.ReadArray() {
growOne(slice, decoder.sliceType, decoder.elemType) growOne(slice, decoder.sliceType, decoder.elemType)
offset += decoder.elemType.Size() offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter) decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
} }
} }

feature_reflect_map.go (new file)

@ -0,0 +1,138 @@
package jsoniter
import (
"encoding"
"encoding/json"
"reflect"
"strconv"
"unsafe"
)
type mapDecoder struct {
mapType reflect.Type
keyType reflect.Type
elemType reflect.Type
elemDecoder Decoder
mapInterface emptyInterface
}
func (decoder *mapDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
// dark magic to cast unsafe.Pointer back to interface{} using reflect.Type
mapInterface := decoder.mapInterface
mapInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
realVal := reflect.ValueOf(*realInterface).Elem()
if realVal.IsNil() {
realVal.Set(reflect.MakeMap(realVal.Type()))
}
iter.ReadMapCB(func(iter *Iterator, keyStr string) bool {
elem := reflect.New(decoder.elemType)
decoder.elemDecoder.decode(unsafe.Pointer(elem.Pointer()), iter)
// to put into map, we have to use reflection
keyType := decoder.keyType
switch {
case keyType.Kind() == reflect.String:
realVal.SetMapIndex(reflect.ValueOf(keyStr), elem.Elem())
return true
case keyType.Implements(textUnmarshalerType):
textUnmarshaler := reflect.New(keyType.Elem()).Interface().(encoding.TextUnmarshaler)
err := textUnmarshaler.UnmarshalText([]byte(keyStr))
if err != nil {
iter.reportError("read map key as TextUnmarshaler", err.Error())
return false
}
realVal.SetMapIndex(reflect.ValueOf(textUnmarshaler), elem.Elem())
return true
default:
switch keyType.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
n, err := strconv.ParseInt(keyStr, 10, 64)
if err != nil || reflect.Zero(keyType).OverflowInt(n) {
iter.reportError("read map key as int64", "read int64 failed")
return false
}
realVal.SetMapIndex(reflect.ValueOf(n).Convert(keyType), elem.Elem())
return true
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
n, err := strconv.ParseUint(keyStr, 10, 64)
if err != nil || reflect.Zero(keyType).OverflowUint(n) {
iter.reportError("read map key as uint64", "read uint64 failed")
return false
}
realVal.SetMapIndex(reflect.ValueOf(n).Convert(keyType), elem.Elem())
return true
}
}
iter.reportError("read map key", "unexpected map key type "+keyType.String())
return true
})
}
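A usage sketch of the key handling above, assuming the package-level Marshal/Unmarshal adapter: numeric keys arrive as JSON strings and are parsed back into the map's key type, so the expected result below follows from the ParseInt branch.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	val := map[int]string{}
	err := jsoniter.Unmarshal([]byte(`{"1":"a","2":"b"}`), &val)
	fmt.Println(val, err) // expected: map[1:a 2:b] <nil>
}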
type mapEncoder struct {
mapType reflect.Type
elemType reflect.Type
elemEncoder Encoder
mapInterface emptyInterface
}
func (encoder *mapEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
mapInterface := encoder.mapInterface
mapInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
realVal := reflect.ValueOf(*realInterface)
stream.WriteObjectStart()
for i, key := range realVal.MapKeys() {
if i != 0 {
stream.WriteMore()
}
encodeMapKey(key, stream)
stream.writeByte(':')
val := realVal.MapIndex(key).Interface()
encoder.elemEncoder.encodeInterface(val, stream)
}
stream.WriteObjectEnd()
}
func encodeMapKey(key reflect.Value, stream *Stream) {
if key.Kind() == reflect.String {
stream.WriteString(key.String())
return
}
if tm, ok := key.Interface().(encoding.TextMarshaler); ok {
buf, err := tm.MarshalText()
if err != nil {
stream.Error = err
return
}
stream.writeByte('"')
stream.Write(buf)
stream.writeByte('"')
return
}
switch key.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
stream.writeByte('"')
stream.WriteInt64(key.Int())
stream.writeByte('"')
return
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
stream.writeByte('"')
stream.WriteUint64(key.Uint())
stream.writeByte('"')
return
}
stream.Error = &json.UnsupportedTypeError{key.Type()}
}
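And the mirror image on the encoding side: non-string keys are written between quotes so the output stays valid JSON. A sketch, again assuming the package-level adapter:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	out, err := jsoniter.Marshal(map[uint64]bool{7: true})
	fmt.Println(string(out), err) // expected: {"7":true} <nil>
}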
func (encoder *mapEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
}
func (encoder *mapEncoder) isEmpty(ptr unsafe.Pointer) bool {
mapInterface := encoder.mapInterface
mapInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
realVal := reflect.ValueOf(*realInterface)
return realVal.Len() == 0
}


@ -1,6 +1,8 @@
package jsoniter package jsoniter
import ( import (
"encoding/base64"
"encoding/json"
"unsafe" "unsafe"
) )
@ -12,11 +14,16 @@ func (codec *stringCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
} }
func (codec *stringCodec) encode(ptr unsafe.Pointer, stream *Stream) { func (codec *stringCodec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteString(*((*string)(ptr))) str := *((*string)(ptr))
stream.WriteString(str)
} }
func (encoder *stringCodec) encodeInterface(val interface{}, stream *Stream) { func (encoder *stringCodec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (codec *stringCodec) isEmpty(ptr unsafe.Pointer) bool {
return *((*string)(ptr)) == ""
} }
type intCodec struct { type intCodec struct {
@ -31,7 +38,11 @@ func (codec *intCodec) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *intCodec) encodeInterface(val interface{}, stream *Stream) { func (encoder *intCodec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (codec *intCodec) isEmpty(ptr unsafe.Pointer) bool {
return *((*int)(ptr)) == 0
} }
type int8Codec struct { type int8Codec struct {
@ -46,7 +57,11 @@ func (codec *int8Codec) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *int8Codec) encodeInterface(val interface{}, stream *Stream) { func (encoder *int8Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (codec *int8Codec) isEmpty(ptr unsafe.Pointer) bool {
return *((*int8)(ptr)) == 0
} }
type int16Codec struct { type int16Codec struct {
@ -61,7 +76,11 @@ func (codec *int16Codec) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *int16Codec) encodeInterface(val interface{}, stream *Stream) { func (encoder *int16Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (codec *int16Codec) isEmpty(ptr unsafe.Pointer) bool {
return *((*int16)(ptr)) == 0
} }
type int32Codec struct { type int32Codec struct {
@ -76,7 +95,11 @@ func (codec *int32Codec) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *int32Codec) encodeInterface(val interface{}, stream *Stream) { func (encoder *int32Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (codec *int32Codec) isEmpty(ptr unsafe.Pointer) bool {
return *((*int32)(ptr)) == 0
} }
type int64Codec struct { type int64Codec struct {
@ -91,7 +114,11 @@ func (codec *int64Codec) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *int64Codec) encodeInterface(val interface{}, stream *Stream) { func (encoder *int64Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (codec *int64Codec) isEmpty(ptr unsafe.Pointer) bool {
return *((*int64)(ptr)) == 0
} }
type uintCodec struct { type uintCodec struct {
@ -106,7 +133,11 @@ func (codec *uintCodec) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *uintCodec) encodeInterface(val interface{}, stream *Stream) { func (encoder *uintCodec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (codec *uintCodec) isEmpty(ptr unsafe.Pointer) bool {
return *((*uint)(ptr)) == 0
} }
type uint8Codec struct { type uint8Codec struct {
@ -121,7 +152,11 @@ func (codec *uint8Codec) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *uint8Codec) encodeInterface(val interface{}, stream *Stream) { func (encoder *uint8Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (codec *uint8Codec) isEmpty(ptr unsafe.Pointer) bool {
return *((*uint8)(ptr)) == 0
} }
type uint16Codec struct { type uint16Codec struct {
@ -136,7 +171,11 @@ func (codec *uint16Codec) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *uint16Codec) encodeInterface(val interface{}, stream *Stream) { func (encoder *uint16Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (codec *uint16Codec) isEmpty(ptr unsafe.Pointer) bool {
return *((*uint16)(ptr)) == 0
} }
type uint32Codec struct { type uint32Codec struct {
@ -151,7 +190,11 @@ func (codec *uint32Codec) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *uint32Codec) encodeInterface(val interface{}, stream *Stream) { func (encoder *uint32Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (codec *uint32Codec) isEmpty(ptr unsafe.Pointer) bool {
return *((*uint32)(ptr)) == 0
} }
type uint64Codec struct { type uint64Codec struct {
@ -166,7 +209,11 @@ func (codec *uint64Codec) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *uint64Codec) encodeInterface(val interface{}, stream *Stream) { func (encoder *uint64Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (codec *uint64Codec) isEmpty(ptr unsafe.Pointer) bool {
return *((*uint64)(ptr)) == 0
} }
type float32Codec struct { type float32Codec struct {
@ -181,7 +228,11 @@ func (codec *float32Codec) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *float32Codec) encodeInterface(val interface{}, stream *Stream) { func (encoder *float32Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (codec *float32Codec) isEmpty(ptr unsafe.Pointer) bool {
return *((*float32)(ptr)) == 0
} }
type float64Codec struct { type float64Codec struct {
@ -196,7 +247,11 @@ func (codec *float64Codec) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *float64Codec) encodeInterface(val interface{}, stream *Stream) { func (encoder *float64Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (codec *float64Codec) isEmpty(ptr unsafe.Pointer) bool {
return *((*float64)(ptr)) == 0
} }
type boolCodec struct { type boolCodec struct {
@ -211,24 +266,67 @@ func (codec *boolCodec) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *boolCodec) encodeInterface(val interface{}, stream *Stream) { func (encoder *boolCodec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
} }
type interfaceCodec struct { func (codec *boolCodec) isEmpty(ptr unsafe.Pointer) bool {
return !(*((*bool)(ptr)))
} }
func (codec *interfaceCodec) decode(ptr unsafe.Pointer, iter *Iterator) { type emptyInterfaceCodec struct {
}
func (codec *emptyInterfaceCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*interface{})(ptr)) = iter.Read() *((*interface{})(ptr)) = iter.Read()
} }
func (codec *interfaceCodec) encode(ptr unsafe.Pointer, stream *Stream) { func (codec *emptyInterfaceCodec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteVal(*((*interface{})(ptr))) stream.WriteVal(*((*interface{})(ptr)))
} }
func (encoder *interfaceCodec) encodeInterface(val interface{}, stream *Stream) { func (encoder *emptyInterfaceCodec) encodeInterface(val interface{}, stream *Stream) {
stream.WriteVal(val) stream.WriteVal(val)
} }
func (codec *emptyInterfaceCodec) isEmpty(ptr unsafe.Pointer) bool {
return ptr == nil
}
type nonEmptyInterfaceCodec struct {
}
func (codec *nonEmptyInterfaceCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
nonEmptyInterface := (*nonEmptyInterface)(ptr)
if nonEmptyInterface.itab == nil {
iter.reportError("read non-empty interface", "do not know which concrete type to decode to")
return
}
var i interface{}
e := (*emptyInterface)(unsafe.Pointer(&i))
e.typ = nonEmptyInterface.itab.typ
e.word = nonEmptyInterface.word
iter.ReadVal(&i)
nonEmptyInterface.word = e.word
}
func (codec *nonEmptyInterfaceCodec) encode(ptr unsafe.Pointer, stream *Stream) {
nonEmptyInterface := (*nonEmptyInterface)(ptr)
var i interface{}
e := (*emptyInterface)(unsafe.Pointer(&i))
e.typ = nonEmptyInterface.itab.typ
e.word = nonEmptyInterface.word
stream.WriteVal(i)
}
func (encoder *nonEmptyInterfaceCodec) encodeInterface(val interface{}, stream *Stream) {
stream.WriteVal(val)
}
func (codec *nonEmptyInterfaceCodec) isEmpty(ptr unsafe.Pointer) bool {
nonEmptyInterface := (*nonEmptyInterface)(ptr)
return nonEmptyInterface.word == nil
}
type anyCodec struct { type anyCodec struct {
} }
@ -244,6 +342,91 @@ func (encoder *anyCodec) encodeInterface(val interface{}, stream *Stream) {
(val.(Any)).WriteTo(stream) (val.(Any)).WriteTo(stream)
} }
func (encoder *anyCodec) isEmpty(ptr unsafe.Pointer) bool {
return (*((*Any)(ptr))).Size() == 0
}
type jsonNumberCodec struct {
}
func (codec *jsonNumberCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString()))
}
func (codec *jsonNumberCodec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteRaw(string(*((*json.Number)(ptr))))
}
func (encoder *jsonNumberCodec) encodeInterface(val interface{}, stream *Stream) {
stream.WriteRaw(string(val.(json.Number)))
}
func (encoder *jsonNumberCodec) isEmpty(ptr unsafe.Pointer) bool {
return len(*((*json.Number)(ptr))) == 0
}
type jsonRawMessageCodec struct {
}
func (codec *jsonRawMessageCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*json.RawMessage)(ptr)) = json.RawMessage(iter.SkipAndReturnBytes())
}
func (codec *jsonRawMessageCodec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
}
func (encoder *jsonRawMessageCodec) encodeInterface(val interface{}, stream *Stream) {
stream.WriteRaw(string(val.(json.RawMessage)))
}
func (encoder *jsonRawMessageCodec) isEmpty(ptr unsafe.Pointer) bool {
return len(*((*json.RawMessage)(ptr))) == 0
}
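Both codecs above exist for stdlib compatibility: json.Number round-trips as the raw digits, and json.RawMessage is copied through verbatim rather than re-encoded. A sketch of the RawMessage case (the wrapper type and field name are made up):

package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type wrapper struct {
	Payload json.RawMessage `json:"payload"`
}

func main() {
	out, _ := jsoniter.Marshal(wrapper{Payload: json.RawMessage(`{"x":1}`)})
	fmt.Println(string(out)) // expected: {"payload":{"x":1}}
}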
type base64Codec struct {
}
func (codec *base64Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
encoding := base64.StdEncoding
src := iter.SkipAndReturnBytes()
src = src[1 : len(src)-1]
decodedLen := encoding.DecodedLen(len(src))
dst := make([]byte, decodedLen)
_, err := encoding.Decode(dst, src)
if err != nil {
iter.reportError("decode base64", err.Error())
} else {
*((*[]byte)(ptr)) = dst
}
}
func (codec *base64Codec) encode(ptr unsafe.Pointer, stream *Stream) {
encoding := base64.StdEncoding
stream.writeByte('"')
src := *((*[]byte)(ptr))
toGrow := encoding.EncodedLen(len(src))
stream.ensure(toGrow)
encoding.Encode(stream.buf[stream.n:], src)
stream.n += toGrow
stream.writeByte('"')
}
func (encoder *base64Codec) encodeInterface(val interface{}, stream *Stream) {
encoding := base64.StdEncoding
stream.writeByte('"')
src := val.([]byte)
toGrow := encoding.EncodedLen(len(src))
stream.ensure(toGrow)
encoding.Encode(stream.buf[stream.n:], src)
stream.n += toGrow
stream.writeByte('"')
}
func (encoder *base64Codec) isEmpty(ptr unsafe.Pointer) bool {
return len(*((*[]byte)(ptr))) == 0
}
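base64Codec gives []byte the same treatment encoding/json does: the bytes are written as a single quoted standard-base64 string and decoded back from one. For example (a sketch, assuming the package-level adapter):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	out, _ := jsoniter.Marshal([]byte("hi"))
	fmt.Println(string(out)) // expected: "aGk="
}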
type stringNumberDecoder struct { type stringNumberDecoder struct {
elemDecoder Decoder elemDecoder Decoder
} }
@ -264,3 +447,52 @@ func (decoder *stringNumberDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
return return
} }
} }
type marshalerEncoder struct {
templateInterface emptyInterface
}
func (encoder *marshalerEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
templateInterface := encoder.templateInterface
templateInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&templateInterface))
marshaler := (*realInterface).(json.Marshaler)
bytes, err := marshaler.MarshalJSON()
if err != nil {
stream.Error = err
} else {
stream.Write(bytes)
}
}
func (encoder *marshalerEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
}
func (encoder *marshalerEncoder) isEmpty(ptr unsafe.Pointer) bool {
templateInterface := encoder.templateInterface
templateInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&templateInterface))
marshaler := (*realInterface).(json.Marshaler)
bytes, err := marshaler.MarshalJSON()
if err != nil {
return true
} else {
return len(bytes) > 0
}
}
type unmarshalerDecoder struct {
templateInterface emptyInterface
}
func (decoder *unmarshalerDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
templateInterface := decoder.templateInterface
templateInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&templateInterface))
unmarshaler := (*realInterface).(json.Unmarshaler)
bytes := iter.SkipAndReturnBytes()
err := unmarshaler.UnmarshalJSON(bytes)
if err != nil {
iter.reportError("unmarshaler", err.Error())
}
}
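marshalerEncoder and unmarshalerDecoder route types that implement json.Marshaler / json.Unmarshaler through their own methods instead of the reflection codecs. A sketch with a hypothetical type:

package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

// upperString is a made-up example type implementing json.Marshaler.
type upperString string

func (u upperString) MarshalJSON() ([]byte, error) {
	return []byte(`"` + strings.ToUpper(string(u)) + `"`), nil
}

func main() {
	out, _ := jsoniter.Marshal(upperString("abc"))
	fmt.Println(string(out)) // expected: "ABC"
}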


@ -1,71 +1,86 @@
package jsoniter package jsoniter
import ( import (
"io"
"fmt" "fmt"
"io"
"reflect" "reflect"
"unsafe"
"strings" "strings"
"unicode"
"unsafe"
) )
func encoderOfStruct(typ reflect.Type) (Encoder, error) { func encoderOfStruct(typ reflect.Type) (Encoder, error) {
structEncoder_ := &structEncoder{} structEncoder_ := &structEncoder{}
for i := 0; i < typ.NumField(); i++ { fields := map[string]*structFieldEncoder{}
field := typ.Field(i) for _, field := range listStructFields(typ) {
var fieldNames []string fieldEncoderKey := fmt.Sprintf("%s/%s", typ.String(), field.Name)
var extensionProvidedFieldNames []string
for _, extension := range extensions { for _, extension := range extensions {
alternativeFieldNames, _ := extension(typ, &field) alternativeFieldNames, fun, _ := extension(typ, field)
if alternativeFieldNames != nil { if alternativeFieldNames != nil {
fieldNames = alternativeFieldNames extensionProvidedFieldNames = alternativeFieldNames
}
if fun != nil {
fieldEncoders[fieldEncoderKey] = &funcEncoder{fun}
} }
} }
tagParts := strings.Split(field.Tag.Get("json"), ",") tagParts := strings.Split(field.Tag.Get("json"), ",")
// if fieldNames set by extension, use theirs, otherwise try tags // if fieldNames set by extension, use theirs, otherwise try tags
if fieldNames == nil { fieldNames := calcFieldNames(field.Name, tagParts[0], extensionProvidedFieldNames)
/// tagParts[0] always present, even if no tags omitempty := false
switch tagParts[0] { for _, tagPart := range tagParts {
case "": if tagPart == "omitempty" {
fieldNames = []string{field.Name} omitempty = true
case "-":
fieldNames = []string{}
default:
fieldNames = []string{tagParts[0]}
} }
} }
encoder, err := encoderOfType(field.Type) encoder := fieldEncoders[fieldEncoderKey]
if err != nil { var err error
return prefix(fmt.Sprintf("{%s}", field.Name)).addToEncoder(encoder, err) if encoder == nil && len(fieldNames) > 0 {
} encoder, err = encoderOfType(field.Type)
// map is stored as pointer in the struct if err != nil {
// but if struct only has one map, it is inlined return prefix(fmt.Sprintf("{%s}", field.Name)).addToEncoder(encoder, err)
if field.Type.Kind() == reflect.Map && typ.NumField() > 1 { }
encoder = &optionalEncoder{field.Type, encoder} // map is stored as pointer in the struct
if field.Type.Kind() == reflect.Map {
encoder = &optionalEncoder{encoder}
}
} }
for _, fieldName := range fieldNames { for _, fieldName := range fieldNames {
if structEncoder_.firstField == nil { fields[fieldName] = &structFieldEncoder{field, fieldName, encoder, omitempty}
structEncoder_.firstField = &structFieldEncoder{&field, fieldName, encoder}
} else {
structEncoder_.fields = append(structEncoder_.fields, &structFieldEncoder{&field, fieldName, encoder})
}
} }
} }
if structEncoder_.firstField == nil { if len(fields) == 0 {
return &emptyStructEncoder{}, nil return &emptyStructEncoder{}, nil
} }
for _, field := range fields {
structEncoder_.fields = append(structEncoder_.fields, field)
}
return structEncoder_, nil return structEncoder_, nil
} }
func listStructFields(typ reflect.Type) []*reflect.StructField {
fields := []*reflect.StructField{}
for i := 0; i < typ.NumField(); i++ {
field := typ.Field(i)
if field.Anonymous {
fields = append(fields, listStructFields(field.Type)...)
} else {
fields = append(fields, &field)
}
}
return fields
}
func decoderOfStruct(typ reflect.Type) (Decoder, error) { func decoderOfStruct(typ reflect.Type) (Decoder, error) {
fields := map[string]*structFieldDecoder{} fields := map[string]*structFieldDecoder{}
for i := 0; i < typ.NumField(); i++ { for i := 0; i < typ.NumField(); i++ {
field := typ.Field(i) field := typ.Field(i)
fieldDecoderKey := fmt.Sprintf("%s/%s", typ.String(), field.Name) fieldDecoderKey := fmt.Sprintf("%s/%s", typ.String(), field.Name)
var fieldNames []string var extensionProviedFieldNames []string
for _, extension := range extensions { for _, extension := range extensions {
alternativeFieldNames, fun := extension(typ, &field) alternativeFieldNames, _, fun := extension(typ, &field)
if alternativeFieldNames != nil { if alternativeFieldNames != nil {
fieldNames = alternativeFieldNames extensionProviedFieldNames = alternativeFieldNames
} }
if fun != nil { if fun != nil {
fieldDecoders[fieldDecoderKey] = &funcDecoder{fun} fieldDecoders[fieldDecoderKey] = &funcDecoder{fun}
@ -73,19 +88,8 @@ func decoderOfStruct(typ reflect.Type) (Decoder, error) {
} }
decoder := fieldDecoders[fieldDecoderKey] decoder := fieldDecoders[fieldDecoderKey]
tagParts := strings.Split(field.Tag.Get("json"), ",") tagParts := strings.Split(field.Tag.Get("json"), ",")
// if fieldNames set by extension, use theirs, otherwise try tags fieldNames := calcFieldNames(field.Name, tagParts[0], extensionProviedFieldNames)
if fieldNames == nil { if decoder == nil && len(fieldNames) > 0 {
/// tagParts[0] always present, even if no tags
switch tagParts[0] {
case "":
fieldNames = []string{field.Name}
case "-":
fieldNames = []string{}
default:
fieldNames = []string{tagParts[0]}
}
}
if decoder == nil {
var err error var err error
decoder, err = decoderOfType(field.Type) decoder, err = decoderOfType(field.Type)
if err != nil { if err != nil {
@ -102,9 +106,39 @@ func decoderOfStruct(typ reflect.Type) (Decoder, error) {
return createStructDecoder(typ, fields) return createStructDecoder(typ, fields)
} }
func calcFieldNames(originalFieldName string, tagProvidedFieldName string, extensionProvidedFieldNames []string) []string {
// tag => extension => exported? => original
isNotExported := unicode.IsLower(rune(originalFieldName[0]))
var fieldNames []string
/// tagParts[0] always present, even if no tags
switch tagProvidedFieldName {
case "":
if extensionProvidedFieldNames != nil {
fieldNames = extensionProvidedFieldNames
} else {
if isNotExported {
fieldNames = []string{}
} else {
fieldNames = []string{originalFieldName}
}
}
case "-":
fieldNames = []string{}
default:
fieldNames = []string{tagProvidedFieldName}
}
return fieldNames
}
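calcFieldNames centralizes the naming precedence for both the encoder and the decoder: an explicit json tag wins, then an extension-provided name, then the exported Go name; "-" drops the field, and unexported names are skipped unless an extension supplies one. An illustrative struct (hypothetical field names):

type user struct {
	ID     int    `json:"id"` // tag wins: encoded as "id"
	Name   string             // no tag: falls back to the exported name "Name"
	secret string             // unexported, no extension: skipped
	Tmp    string `json:"-"`  // explicit "-": skipped
}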
func EnableUnexportedStructFieldsSupport() {
RegisterExtension(func(type_ reflect.Type, field *reflect.StructField) ([]string, EncoderFunc, DecoderFunc) {
return []string{field.Name}, nil, nil
})
}
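EnableUnexportedStructFieldsSupport is simply an extension that hands every field's Go name back, which makes unexported fields visible to the reflection encoder and decoder (their values are reached through unsafe offsets, so no setters are needed). A usage sketch; the output order may vary because fields are collected into a map:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type config struct {
	host string
	port int
}

func main() {
	jsoniter.EnableUnexportedStructFieldsSupport()
	out, _ := jsoniter.Marshal(config{host: "localhost", port: 8080})
	fmt.Println(string(out)) // expected (field order may vary): {"host":"localhost","port":8080}
}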
func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) (Decoder, error) { func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) (Decoder, error) {
knownHash := map[int32]struct{}{ knownHash := map[int32]struct{}{
0: struct{}{}, 0: {},
} }
switch len(fields) { switch len(fields) {
case 0: case 0:
@ -591,11 +625,11 @@ func (decoder *oneFieldStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator)
} }
type twoFieldsStructDecoder struct { type twoFieldsStructDecoder struct {
typ reflect.Type typ reflect.Type
fieldHash1 int32 fieldHash1 int32
fieldDecoder1 *structFieldDecoder fieldDecoder1 *structFieldDecoder
fieldHash2 int32 fieldHash2 int32
fieldDecoder2 *structFieldDecoder fieldDecoder2 *structFieldDecoder
} }
func (decoder *twoFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) { func (decoder *twoFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
@ -621,13 +655,13 @@ func (decoder *twoFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator
} }
type threeFieldsStructDecoder struct { type threeFieldsStructDecoder struct {
typ reflect.Type typ reflect.Type
fieldHash1 int32 fieldHash1 int32
fieldDecoder1 *structFieldDecoder fieldDecoder1 *structFieldDecoder
fieldHash2 int32 fieldHash2 int32
fieldDecoder2 *structFieldDecoder fieldDecoder2 *structFieldDecoder
fieldHash3 int32 fieldHash3 int32
fieldDecoder3 *structFieldDecoder fieldDecoder3 *structFieldDecoder
} }
func (decoder *threeFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) { func (decoder *threeFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
@ -655,15 +689,15 @@ func (decoder *threeFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterat
} }
type fourFieldsStructDecoder struct { type fourFieldsStructDecoder struct {
typ reflect.Type typ reflect.Type
fieldHash1 int32 fieldHash1 int32
fieldDecoder1 *structFieldDecoder fieldDecoder1 *structFieldDecoder
fieldHash2 int32 fieldHash2 int32
fieldDecoder2 *structFieldDecoder fieldDecoder2 *structFieldDecoder
fieldHash3 int32 fieldHash3 int32
fieldDecoder3 *structFieldDecoder fieldDecoder3 *structFieldDecoder
fieldHash4 int32 fieldHash4 int32
fieldDecoder4 *structFieldDecoder fieldDecoder4 *structFieldDecoder
} }
func (decoder *fourFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) { func (decoder *fourFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
@ -693,17 +727,17 @@ func (decoder *fourFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterato
} }
type fiveFieldsStructDecoder struct { type fiveFieldsStructDecoder struct {
typ reflect.Type typ reflect.Type
fieldHash1 int32 fieldHash1 int32
fieldDecoder1 *structFieldDecoder fieldDecoder1 *structFieldDecoder
fieldHash2 int32 fieldHash2 int32
fieldDecoder2 *structFieldDecoder fieldDecoder2 *structFieldDecoder
fieldHash3 int32 fieldHash3 int32
fieldDecoder3 *structFieldDecoder fieldDecoder3 *structFieldDecoder
fieldHash4 int32 fieldHash4 int32
fieldDecoder4 *structFieldDecoder fieldDecoder4 *structFieldDecoder
fieldHash5 int32 fieldHash5 int32
fieldDecoder5 *structFieldDecoder fieldDecoder5 *structFieldDecoder
} }
func (decoder *fiveFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) { func (decoder *fiveFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
@ -735,19 +769,19 @@ func (decoder *fiveFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterato
} }
type sixFieldsStructDecoder struct { type sixFieldsStructDecoder struct {
typ reflect.Type typ reflect.Type
fieldHash1 int32 fieldHash1 int32
fieldDecoder1 *structFieldDecoder fieldDecoder1 *structFieldDecoder
fieldHash2 int32 fieldHash2 int32
fieldDecoder2 *structFieldDecoder fieldDecoder2 *structFieldDecoder
fieldHash3 int32 fieldHash3 int32
fieldDecoder3 *structFieldDecoder fieldDecoder3 *structFieldDecoder
fieldHash4 int32 fieldHash4 int32
fieldDecoder4 *structFieldDecoder fieldDecoder4 *structFieldDecoder
fieldHash5 int32 fieldHash5 int32
fieldDecoder5 *structFieldDecoder fieldDecoder5 *structFieldDecoder
fieldHash6 int32 fieldHash6 int32
fieldDecoder6 *structFieldDecoder fieldDecoder6 *structFieldDecoder
} }
func (decoder *sixFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) { func (decoder *sixFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
@ -781,21 +815,21 @@ func (decoder *sixFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator
} }
type sevenFieldsStructDecoder struct { type sevenFieldsStructDecoder struct {
typ reflect.Type typ reflect.Type
fieldHash1 int32 fieldHash1 int32
fieldDecoder1 *structFieldDecoder fieldDecoder1 *structFieldDecoder
fieldHash2 int32 fieldHash2 int32
fieldDecoder2 *structFieldDecoder fieldDecoder2 *structFieldDecoder
fieldHash3 int32 fieldHash3 int32
fieldDecoder3 *structFieldDecoder fieldDecoder3 *structFieldDecoder
fieldHash4 int32 fieldHash4 int32
fieldDecoder4 *structFieldDecoder fieldDecoder4 *structFieldDecoder
fieldHash5 int32 fieldHash5 int32
fieldDecoder5 *structFieldDecoder fieldDecoder5 *structFieldDecoder
fieldHash6 int32 fieldHash6 int32
fieldDecoder6 *structFieldDecoder fieldDecoder6 *structFieldDecoder
fieldHash7 int32 fieldHash7 int32
fieldDecoder7 *structFieldDecoder fieldDecoder7 *structFieldDecoder
} }
func (decoder *sevenFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) { func (decoder *sevenFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
@ -831,23 +865,23 @@ func (decoder *sevenFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterat
} }
type eightFieldsStructDecoder struct { type eightFieldsStructDecoder struct {
typ reflect.Type typ reflect.Type
fieldHash1 int32 fieldHash1 int32
fieldDecoder1 *structFieldDecoder fieldDecoder1 *structFieldDecoder
fieldHash2 int32 fieldHash2 int32
fieldDecoder2 *structFieldDecoder fieldDecoder2 *structFieldDecoder
fieldHash3 int32 fieldHash3 int32
fieldDecoder3 *structFieldDecoder fieldDecoder3 *structFieldDecoder
fieldHash4 int32 fieldHash4 int32
fieldDecoder4 *structFieldDecoder fieldDecoder4 *structFieldDecoder
fieldHash5 int32 fieldHash5 int32
fieldDecoder5 *structFieldDecoder fieldDecoder5 *structFieldDecoder
fieldHash6 int32 fieldHash6 int32
fieldDecoder6 *structFieldDecoder fieldDecoder6 *structFieldDecoder
fieldHash7 int32 fieldHash7 int32
fieldDecoder7 *structFieldDecoder fieldDecoder7 *structFieldDecoder
fieldHash8 int32 fieldHash8 int32
fieldDecoder8 *structFieldDecoder fieldDecoder8 *structFieldDecoder
} }
func (decoder *eightFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) { func (decoder *eightFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
@ -885,25 +919,25 @@ func (decoder *eightFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterat
} }
type nineFieldsStructDecoder struct { type nineFieldsStructDecoder struct {
typ reflect.Type typ reflect.Type
fieldHash1 int32 fieldHash1 int32
fieldDecoder1 *structFieldDecoder fieldDecoder1 *structFieldDecoder
fieldHash2 int32 fieldHash2 int32
fieldDecoder2 *structFieldDecoder fieldDecoder2 *structFieldDecoder
fieldHash3 int32 fieldHash3 int32
fieldDecoder3 *structFieldDecoder fieldDecoder3 *structFieldDecoder
fieldHash4 int32 fieldHash4 int32
fieldDecoder4 *structFieldDecoder fieldDecoder4 *structFieldDecoder
fieldHash5 int32 fieldHash5 int32
fieldDecoder5 *structFieldDecoder fieldDecoder5 *structFieldDecoder
fieldHash6 int32 fieldHash6 int32
fieldDecoder6 *structFieldDecoder fieldDecoder6 *structFieldDecoder
fieldHash7 int32 fieldHash7 int32
fieldDecoder7 *structFieldDecoder fieldDecoder7 *structFieldDecoder
fieldHash8 int32 fieldHash8 int32
fieldDecoder8 *structFieldDecoder fieldDecoder8 *structFieldDecoder
fieldHash9 int32 fieldHash9 int32
fieldDecoder9 *structFieldDecoder fieldDecoder9 *structFieldDecoder
} }
func (decoder *nineFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) { func (decoder *nineFieldsStructDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
@ -1021,6 +1055,7 @@ type structFieldEncoder struct {
field *reflect.StructField field *reflect.StructField
fieldName string fieldName string
fieldEncoder Encoder fieldEncoder Encoder
omitempty bool
} }
func (encoder *structFieldEncoder) encode(ptr unsafe.Pointer, stream *Stream) { func (encoder *structFieldEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
@ -1033,29 +1068,63 @@ func (encoder *structFieldEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *structFieldEncoder) encodeInterface(val interface{}, stream *Stream) { func (encoder *structFieldEncoder) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
} }
func (encoder *structFieldEncoder) isEmpty(ptr unsafe.Pointer) bool {
fieldPtr := uintptr(ptr) + encoder.field.Offset
return encoder.fieldEncoder.isEmpty(unsafe.Pointer(fieldPtr))
}
type structEncoder struct { type structEncoder struct {
firstField *structFieldEncoder
fields []*structFieldEncoder fields []*structFieldEncoder
} }
func (encoder *structEncoder) encode(ptr unsafe.Pointer, stream *Stream) { func (encoder *structEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteObjectStart() stream.WriteObjectStart()
encoder.firstField.encode(ptr, stream) isNotFirst := false
for _, field := range encoder.fields { for _, field := range encoder.fields {
stream.WriteMore() if field.omitempty && field.isEmpty(ptr) {
continue
}
if isNotFirst {
stream.WriteMore()
}
field.encode(ptr, stream) field.encode(ptr, stream)
isNotFirst = true
} }
stream.WriteObjectEnd() stream.WriteObjectEnd()
} }
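With isEmpty wired through every encoder, the omitempty tag now behaves as in encoding/json: empty fields are skipped and the comma bookkeeping is handled by the isNotFirst flag above. A sketch:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type profile struct {
	Name string `json:"name"`
	Bio  string `json:"bio,omitempty"` // dropped when the field's isEmpty reports true
}

func main() {
	out, _ := jsoniter.Marshal(profile{Name: "ann"})
	fmt.Println(string(out)) // expected: {"name":"ann"}
}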
func (encoder *structEncoder) encodeInterface(val interface{}, stream *Stream) { func (encoder *structEncoder) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) var encoderToUse Encoder
encoderToUse = encoder
if len(encoder.fields) == 1 {
firstEncoder := encoder.fields[0].fieldEncoder
firstEncoderName := reflect.TypeOf(firstEncoder).String()
// interface{} has inline optimization for this case
if firstEncoderName == "*jsoniter.optionalEncoder" {
encoderToUse = &structEncoder{
fields: []*structFieldEncoder{{
field: encoder.fields[0].field,
fieldName: encoder.fields[0].fieldName,
fieldEncoder: firstEncoder.(*optionalEncoder).valueEncoder,
omitempty: encoder.fields[0].omitempty,
}},
}
}
}
writeToStream(val, stream, encoderToUse)
} }
func (encoder *structEncoder) isEmpty(ptr unsafe.Pointer) bool {
for _, field := range encoder.fields {
if !field.isEmpty(ptr) {
return false
}
}
return true
}
type emptyStructEncoder struct { type emptyStructEncoder struct {
} }
@ -1065,5 +1134,9 @@ func (encoder *emptyStructEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
} }
func (encoder *emptyStructEncoder) encodeInterface(val interface{}, stream *Stream) { func (encoder *emptyStructEncoder) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder) writeToStream(val, stream, encoder)
}
func (encoder *emptyStructEncoder) isEmpty(ptr unsafe.Pointer) bool {
return true
} }


@ -32,24 +32,32 @@ func (b *Stream) Buffered() int {
return b.n return b.n
} }
func (b *Stream) Buffer() []byte {
return b.buf[:b.n]
}
// Write writes the contents of p into the buffer. // Write writes the contents of p into the buffer.
// It returns the number of bytes written. // It returns the number of bytes written.
// If nn < len(p), it also returns an error explaining // If nn < len(p), it also returns an error explaining
// why the write is short. // why the write is short.
func (b *Stream) Write(p []byte) (nn int, err error) { func (b *Stream) Write(p []byte) (nn int, err error) {
for len(p) > b.Available() && b.Error == nil { for len(p) > b.Available() && b.Error == nil {
var n int if b.out == nil {
if b.Buffered() == 0 { b.growAtLeast(len(p))
// Large write, empty buffer.
// Write directly from p to avoid copy.
n, b.Error = b.out.Write(p)
} else { } else {
n = copy(b.buf[b.n:], p) var n int
b.n += n if b.Buffered() == 0 {
b.Flush() // Large write, empty buffer.
// Write directly from p to avoid copy.
n, b.Error = b.out.Write(p)
} else {
n = copy(b.buf[b.n:], p)
b.n += n
b.Flush()
}
nn += n
p = p[n:]
} }
nn += n
p = p[n:]
} }
if b.Error != nil { if b.Error != nil {
return nn, b.Error return nn, b.Error
@ -60,14 +68,13 @@ func (b *Stream) Write(p []byte) (nn int, err error) {
return nn, nil return nn, nil
} }
// WriteByte writes a single byte. // WriteByte writes a single byte.
func (b *Stream) writeByte(c byte) { func (b *Stream) writeByte(c byte) {
if b.Error != nil { if b.Error != nil {
return return
} }
if b.Available() <= 0 && b.Flush() != nil { if b.Available() < 1 {
return b.growAtLeast(1)
} }
b.buf[b.n] = c b.buf[b.n] = c
b.n++ b.n++
@ -77,11 +84,11 @@ func (b *Stream) writeTwoBytes(c1 byte, c2 byte) {
if b.Error != nil { if b.Error != nil {
return return
} }
if b.Available() <= 1 && b.Flush() != nil { if b.Available() < 2 {
return b.growAtLeast(2)
} }
b.buf[b.n] = c1 b.buf[b.n] = c1
b.buf[b.n + 1] = c2 b.buf[b.n+1] = c2
b.n += 2 b.n += 2
} }
@ -89,12 +96,12 @@ func (b *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) {
if b.Error != nil { if b.Error != nil {
return return
} }
if b.Available() <= 2 && b.Flush() != nil { if b.Available() < 3 {
return b.growAtLeast(3)
} }
b.buf[b.n] = c1 b.buf[b.n] = c1
b.buf[b.n + 1] = c2 b.buf[b.n+1] = c2
b.buf[b.n + 2] = c3 b.buf[b.n+2] = c3
b.n += 3 b.n += 3
} }
@ -102,13 +109,13 @@ func (b *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) {
if b.Error != nil { if b.Error != nil {
return return
} }
if b.Available() <= 3 && b.Flush() != nil { if b.Available() < 4 {
return b.growAtLeast(4)
} }
b.buf[b.n] = c1 b.buf[b.n] = c1
b.buf[b.n + 1] = c2 b.buf[b.n+1] = c2
b.buf[b.n + 2] = c3 b.buf[b.n+2] = c3
b.buf[b.n + 3] = c4 b.buf[b.n+3] = c4
b.n += 4 b.n += 4
} }
@ -116,19 +123,22 @@ func (b *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) {
if b.Error != nil { if b.Error != nil {
return return
} }
if b.Available() <= 3 && b.Flush() != nil { if b.Available() < 5 {
return b.growAtLeast(5)
} }
b.buf[b.n] = c1 b.buf[b.n] = c1
b.buf[b.n + 1] = c2 b.buf[b.n+1] = c2
b.buf[b.n + 2] = c3 b.buf[b.n+2] = c3
b.buf[b.n + 3] = c4 b.buf[b.n+3] = c4
b.buf[b.n + 4] = c5 b.buf[b.n+4] = c5
b.n += 5 b.n += 5
} }
// Flush writes any buffered data to the underlying io.Writer. // Flush writes any buffered data to the underlying io.Writer.
func (b *Stream) Flush() error { func (b *Stream) Flush() error {
if b.out == nil {
return nil
}
if b.Error != nil { if b.Error != nil {
return b.Error return b.Error
} }
@ -141,7 +151,7 @@ func (b *Stream) Flush() error {
} }
if err != nil { if err != nil {
if n > 0 && n < b.n { if n > 0 && n < b.n {
copy(b.buf[0:b.n - n], b.buf[n:b.n]) copy(b.buf[0:b.n-n], b.buf[n:b.n])
} }
b.n -= n b.n -= n
b.Error = err b.Error = err
@ -151,13 +161,28 @@ func (b *Stream) Flush() error {
return nil return nil
} }
func (b *Stream) WriteRaw(s string) { func (b *Stream) ensure(minimal int) {
for len(s) > b.Available() && b.Error == nil { available := b.Available()
n := copy(b.buf[b.n:], s) if available < minimal {
b.n += n if b.n > 1024 {
s = s[n:] b.Flush()
b.Flush() }
b.growAtLeast(minimal)
} }
}
func (b *Stream) growAtLeast(minimal int) {
toGrow := len(b.buf)
if toGrow < minimal {
toGrow = minimal
}
newBuf := make([]byte, len(b.buf)+toGrow)
copy(newBuf, b.Buffer())
b.buf = newBuf
}
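As a worked example of the growth policy above (numbers illustrative): with a 512-byte buffer and n == 500, ensure(32) sees only 12 bytes available; n is not above 1024, so nothing is flushed, and growAtLeast(32) allocates len(buf) + max(len(buf), 32) = 1024 bytes and copies the 500 buffered bytes across. In other words, the buffer roughly doubles each time it runs out, and flushing to the underlying writer only happens for already-large buffers or when Flush is called explicitly.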
func (b *Stream) WriteRaw(s string) {
b.ensure(len(s))
if b.Error != nil { if b.Error != nil {
return return
} }
@ -166,18 +191,13 @@ func (b *Stream) WriteRaw(s string) {
} }
func (stream *Stream) WriteString(s string) { func (stream *Stream) WriteString(s string) {
stream.ensure(32)
valLen := len(s) valLen := len(s)
toWriteLen := valLen toWriteLen := valLen
bufLengthMinusTwo := len(stream.buf) - 2 // make room for the quotes bufLengthMinusTwo := len(stream.buf) - 2 // make room for the quotes
if stream.n + toWriteLen > bufLengthMinusTwo { if stream.n+toWriteLen > bufLengthMinusTwo {
toWriteLen = bufLengthMinusTwo - stream.n toWriteLen = bufLengthMinusTwo - stream.n
} }
if toWriteLen < 0 {
stream.Flush()
if stream.n + toWriteLen > bufLengthMinusTwo {
toWriteLen = bufLengthMinusTwo - stream.n
}
}
n := stream.n n := stream.n
stream.buf[n] = '"' stream.buf[n] = '"'
n++ n++
@ -189,7 +209,7 @@ func (stream *Stream) WriteString(s string) {
stream.buf[n] = c stream.buf[n] = c
n++ n++
} else { } else {
break; break
} }
} }
if i == valLen { if i == valLen {
@ -200,14 +220,14 @@ func (stream *Stream) WriteString(s string) {
} }
stream.n = n stream.n = n
// for the remaining parts, we process them char by char // for the remaining parts, we process them char by char
stream.writeStringSlowPath(s, i, valLen); stream.writeStringSlowPath(s, i, valLen)
stream.writeByte('"') stream.writeByte('"')
} }
func (stream *Stream) writeStringSlowPath(s string, i int, valLen int) { func (stream *Stream) writeStringSlowPath(s string, i int, valLen int) {
for ; i < valLen; i++ { for ; i < valLen; i++ {
c := s[i] c := s[i]
switch (c) { switch c {
case '"': case '"':
stream.writeTwoBytes('\\', '"') stream.writeTwoBytes('\\', '"')
case '\\': case '\\':
@ -223,7 +243,7 @@ func (stream *Stream) writeStringSlowPath(s string, i int, valLen int) {
case '\t': case '\t':
stream.writeTwoBytes('\\', 't') stream.writeTwoBytes('\\', 't')
default: default:
stream.writeByte(c); stream.writeByte(c)
} }
} }
} }
@ -293,21 +313,14 @@ func (stream *Stream) WriteArrayEnd() {
} }
func (stream *Stream) writeIndention(delta int) { func (stream *Stream) writeIndention(delta int) {
if (stream.indention == 0) { if stream.indention == 0 {
return return
} }
stream.writeByte('\n') stream.writeByte('\n')
toWrite := stream.indention - delta toWrite := stream.indention - delta
i := 0 stream.ensure(toWrite)
for { for i := 0; i < toWrite && stream.n < len(stream.buf); i++ {
for ; i < toWrite && stream.n < len(stream.buf); i++ { stream.buf[stream.n] = ' '
stream.buf[stream.n] = ' ' stream.n++
stream.n ++
}
if i == toWrite {
break;
} else {
stream.Flush()
}
} }
} }


@ -2,6 +2,7 @@ package jsoniter
import ( import (
"strconv" "strconv"
"unsafe"
) )
var POW10 []uint64 var POW10 []uint64
@ -11,61 +12,79 @@ func init() {
} }
func (stream *Stream) WriteFloat32(val float32) { func (stream *Stream) WriteFloat32(val float32) {
stream.WriteRaw(strconv.FormatFloat(float64(val), 'f', -1, 32))
}
func (stream *Stream) WriteFloat32Lossy(val float32) {
if val < 0 { if val < 0 {
stream.writeByte('-') stream.writeByte('-')
val = -val val = -val
} }
if val > 0x4ffffff { if val > 0x4ffffff {
stream.WriteRaw(strconv.FormatFloat(float64(val), 'f', -1, 32)); stream.WriteRaw(strconv.FormatFloat(float64(val), 'f', -1, 32))
return return
} }
precision := 6 precision := 6
exp := uint64(1000000) // 6 exp := uint64(1000000) // 6
lval := uint64(float64(val) * float64(exp) + 0.5) lval := uint64(float64(val)*float64(exp) + 0.5)
stream.WriteUint64(lval / exp) stream.WriteUint64(lval / exp)
fval := lval % exp fval := lval % exp
if fval == 0 { if fval == 0 {
return return
} }
stream.writeByte('.') stream.writeByte('.')
if stream.Available() < 10 { stream.ensure(10)
stream.Flush()
}
for p := precision - 1; p > 0 && fval < POW10[p]; p-- { for p := precision - 1; p > 0 && fval < POW10[p]; p-- {
stream.writeByte('0') stream.writeByte('0')
} }
stream.WriteUint64(fval); stream.WriteUint64(fval)
for stream.buf[stream.n - 1] == '0' { for stream.buf[stream.n-1] == '0' {
stream.n--; stream.n--
} }
} }
func (stream *Stream) WriteFloat64(val float64) { func (stream *Stream) WriteFloat64(val float64) {
stream.WriteRaw(strconv.FormatFloat(float64(val), 'f', -1, 64))
}
func (stream *Stream) WriteFloat64Lossy(val float64) {
if val < 0 { if val < 0 {
stream.writeByte('-') stream.writeByte('-')
val = -val val = -val
} }
if val > 0x4ffffff { if val > 0x4ffffff {
stream.WriteRaw(strconv.FormatFloat(val, 'f', -1, 64)); stream.WriteRaw(strconv.FormatFloat(val, 'f', -1, 64))
return return
} }
precision := 6 precision := 6
exp := uint64(1000000) // 6 exp := uint64(1000000) // 6
lval := uint64(val * float64(exp) + 0.5) lval := uint64(val*float64(exp) + 0.5)
stream.WriteUint64(lval / exp) stream.WriteUint64(lval / exp)
fval := lval % exp fval := lval % exp
if fval == 0 { if fval == 0 {
return return
} }
stream.writeByte('.') stream.writeByte('.')
if stream.Available() < 10 { stream.ensure(10)
stream.Flush()
}
for p := precision - 1; p > 0 && fval < POW10[p]; p-- { for p := precision - 1; p > 0 && fval < POW10[p]; p-- {
stream.writeByte('0') stream.writeByte('0')
} }
stream.WriteUint64(fval); stream.WriteUint64(fval)
for stream.buf[stream.n - 1] == '0' { for stream.buf[stream.n-1] == '0' {
stream.n--; stream.n--
} }
} }
// EnableLossyFloatMarshalling keeps 10**(-6) precision
// for float variables for better performance.
func EnableLossyFloatMarshalling() {
// for better performance
RegisterTypeEncoder("float32", func(ptr unsafe.Pointer, stream *Stream) {
val := *((*float32)(ptr))
stream.WriteFloat32Lossy(val)
})
RegisterTypeEncoder("float64", func(ptr unsafe.Pointer, stream *Stream) {
val := *((*float64)(ptr))
stream.WriteFloat64Lossy(val)
})
}
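A usage sketch of the lossy mode; the exact digits below follow from the 1e-6 rounding in WriteFloat64Lossy, so treat them as expected rather than guaranteed:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	jsoniter.EnableLossyFloatMarshalling()
	out, _ := jsoniter.Marshal(0.123456789)
	fmt.Println(string(out)) // expected: 0.123457 — six fractional digits, trailing zeros trimmed
}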


@ -1,46 +1,11 @@
package jsoniter package jsoniter
var digits []uint8
var digitTens []uint8
var digitOnes []uint8
var DIGITS []uint32 var DIGITS []uint32
func init() { func init() {
digits = []uint8{
'0', '1', '2', '3', '4', '5',
'6', '7', '8', '9', 'a', 'b',
'c', 'd', 'e', 'f', 'g', 'h',
'i', 'j', 'k', 'l', 'm', 'n',
'o', 'p', 'q', 'r', 's', 't',
'u', 'v', 'w', 'x', 'y', 'z',
}
digitTens = []uint8{
'0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
'1', '1', '1', '1', '1', '1', '1', '1', '1', '1',
'2', '2', '2', '2', '2', '2', '2', '2', '2', '2',
'3', '3', '3', '3', '3', '3', '3', '3', '3', '3',
'4', '4', '4', '4', '4', '4', '4', '4', '4', '4',
'5', '5', '5', '5', '5', '5', '5', '5', '5', '5',
'6', '6', '6', '6', '6', '6', '6', '6', '6', '6',
'7', '7', '7', '7', '7', '7', '7', '7', '7', '7',
'8', '8', '8', '8', '8', '8', '8', '8', '8', '8',
'9', '9', '9', '9', '9', '9', '9', '9', '9', '9',
}
digitOnes = []uint8{
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
}
DIGITS = make([]uint32, 1000) DIGITS = make([]uint32, 1000)
for i := uint32(0); i < 1000; i++ { for i := uint32(0); i < 1000; i++ {
DIGITS[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i % 10 + '0'; DIGITS[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0'
if i < 10 { if i < 10 {
DIGITS[i] += 2 << 24 DIGITS[i] += 2 << 24
} else if i < 100 { } else if i < 100 {
@ -67,24 +32,20 @@ func writeFirstBuf(buf []byte, v uint32, n int) int {
func writeBuf(buf []byte, v uint32, n int) { func writeBuf(buf []byte, v uint32, n int) {
buf[n] = byte(v >> 16) buf[n] = byte(v >> 16)
buf[n + 1] = byte(v >> 8) buf[n+1] = byte(v >> 8)
buf[n + 2] = byte(v) buf[n+2] = byte(v)
} }
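For reference, the DIGITS packing consumed by writeFirstBuf and writeBuf stores the three ASCII digits of i in the low three bytes, with a small count in the top byte recording how many leading digit bytes to skip (this reading of the top byte, and the 1<<24 branch for i < 100, are inferred; writeFirstBuf's body is not shown here). For example:

// DIGITS[7]   == 0x02303037 // '0','0','7' with 2 leading bytes to skip -> "7"
// DIGITS[42]  == 0x01303432 // '0','4','2' with 1 leading byte to skip  -> "42"
// DIGITS[123] == 0x00313233 // '1','2','3', nothing to skip             -> "123"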
func (stream *Stream) WriteUint8(val uint8) { func (stream *Stream) WriteUint8(val uint8) {
if stream.Available() < 3 { stream.ensure(3)
stream.Flush()
}
stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n) stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n)
} }
func (stream *Stream) WriteInt8(nval int8) { func (stream *Stream) WriteInt8(nval int8) {
if stream.Available() < 4 { stream.ensure(4)
stream.Flush()
}
n := stream.n n := stream.n
var val uint8 var val uint8
if (nval < 0) { if nval < 0 {
val = uint8(-nval) val = uint8(-nval)
stream.buf[n] = '-' stream.buf[n] = '-'
n++ n++
@ -95,15 +56,13 @@ func (stream *Stream) WriteInt8(nval int8) {
} }
func (stream *Stream) WriteUint16(val uint16) { func (stream *Stream) WriteUint16(val uint16) {
if stream.Available() < 5 { stream.ensure(5)
stream.Flush()
}
q1 := val / 1000 q1 := val / 1000
if q1 == 0 { if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n) stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n)
return return
} }
r1 := val - q1 * 1000; r1 := val - q1*1000
n := writeFirstBuf(stream.buf, DIGITS[q1], stream.n) n := writeFirstBuf(stream.buf, DIGITS[q1], stream.n)
writeBuf(stream.buf, DIGITS[r1], n) writeBuf(stream.buf, DIGITS[r1], n)
stream.n = n + 3 stream.n = n + 3
@ -111,12 +70,10 @@ func (stream *Stream) WriteUint16(val uint16) {
} }
func (stream *Stream) WriteInt16(nval int16) { func (stream *Stream) WriteInt16(nval int16) {
if stream.Available() < 6 { stream.ensure(6)
stream.Flush()
}
n := stream.n n := stream.n
var val uint16 var val uint16
if (nval < 0) { if nval < 0 {
val = uint16(-nval) val = uint16(-nval)
stream.buf[n] = '-' stream.buf[n] = '-'
n++ n++
@ -128,7 +85,7 @@ func (stream *Stream) WriteInt16(nval int16) {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n) stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return return
} }
r1 := val - q1 * 1000; r1 := val - q1*1000
n = writeFirstBuf(stream.buf, DIGITS[q1], n) n = writeFirstBuf(stream.buf, DIGITS[q1], n)
writeBuf(stream.buf, DIGITS[r1], n) writeBuf(stream.buf, DIGITS[r1], n)
stream.n = n + 3 stream.n = n + 3
@ -136,16 +93,14 @@ func (stream *Stream) WriteInt16(nval int16) {
} }
func (stream *Stream) WriteUint32(val uint32) { func (stream *Stream) WriteUint32(val uint32) {
if stream.Available() < 10 { stream.ensure(10)
stream.Flush()
}
n := stream.n n := stream.n
q1 := val / 1000 q1 := val / 1000
if q1 == 0 { if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n) stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return return
} }
r1 := val - q1 * 1000; r1 := val - q1*1000
q2 := q1 / 1000 q2 := q1 / 1000
if q2 == 0 { if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n) n := writeFirstBuf(stream.buf, DIGITS[q1], n)
@ -153,29 +108,27 @@ func (stream *Stream) WriteUint32(val uint32) {
stream.n = n + 3 stream.n = n + 3
return return
} }
r2 := q1 - q2 * 1000 r2 := q1 - q2*1000
q3 := q2 / 1000 q3 := q2 / 1000
if q3 == 0 { if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n) n = writeFirstBuf(stream.buf, DIGITS[q2], n)
} else { } else {
r3 := q2 - q3 * 1000 r3 := q2 - q3*1000
stream.buf[n] = byte(q3 + '0') stream.buf[n] = byte(q3 + '0')
n++ n++
writeBuf(stream.buf, DIGITS[r3], n) writeBuf(stream.buf, DIGITS[r3], n)
n += 3 n += 3
} }
writeBuf(stream.buf, DIGITS[r2], n) writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3) writeBuf(stream.buf, DIGITS[r1], n+3)
stream.n = n + 6 stream.n = n + 6
} }
func (stream *Stream) WriteInt32(nval int32) { func (stream *Stream) WriteInt32(nval int32) {
if stream.Available() < 11 { stream.ensure(11)
stream.Flush()
}
n := stream.n n := stream.n
var val uint32 var val uint32
if (nval < 0) { if nval < 0 {
val = uint32(-nval) val = uint32(-nval)
stream.buf[n] = '-' stream.buf[n] = '-'
n++ n++
@ -187,7 +140,7 @@ func (stream *Stream) WriteInt32(nval int32) {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n) stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return return
} }
r1 := val - q1 * 1000; r1 := val - q1*1000
q2 := q1 / 1000 q2 := q1 / 1000
if q2 == 0 { if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n) n := writeFirstBuf(stream.buf, DIGITS[q1], n)
@ -195,33 +148,31 @@ func (stream *Stream) WriteInt32(nval int32) {
stream.n = n + 3 stream.n = n + 3
return return
} }
r2 := q1 - q2 * 1000 r2 := q1 - q2*1000
q3 := q2 / 1000 q3 := q2 / 1000
if q3 == 0 { if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n) n = writeFirstBuf(stream.buf, DIGITS[q2], n)
} else { } else {
r3 := q2 - q3 * 1000 r3 := q2 - q3*1000
stream.buf[n] = byte(q3 + '0') stream.buf[n] = byte(q3 + '0')
n++ n++
writeBuf(stream.buf, DIGITS[r3], n) writeBuf(stream.buf, DIGITS[r3], n)
n += 3 n += 3
} }
writeBuf(stream.buf, DIGITS[r2], n) writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3) writeBuf(stream.buf, DIGITS[r1], n+3)
stream.n = n + 6 stream.n = n + 6
} }
func (stream *Stream) WriteUint64(val uint64) { func (stream *Stream) WriteUint64(val uint64) {
if stream.Available() < 20 { stream.ensure(20)
stream.Flush()
}
n := stream.n n := stream.n
q1 := val / 1000 q1 := val / 1000
if q1 == 0 { if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n) stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return return
} }
r1 := val - q1 * 1000; r1 := val - q1*1000
q2 := q1 / 1000 q2 := q1 / 1000
if q2 == 0 { if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n) n := writeFirstBuf(stream.buf, DIGITS[q1], n)
@ -229,61 +180,59 @@ func (stream *Stream) WriteUint64(val uint64) {
stream.n = n + 3 stream.n = n + 3
return return
} }
r2 := q1 - q2 * 1000 r2 := q1 - q2*1000
q3 := q2 / 1000 q3 := q2 / 1000
if q3 == 0 { if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n) n = writeFirstBuf(stream.buf, DIGITS[q2], n)
writeBuf(stream.buf, DIGITS[r2], n) writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3) writeBuf(stream.buf, DIGITS[r1], n+3)
stream.n = n + 6 stream.n = n + 6
return return
} }
r3 := q2 - q3 * 1000 r3 := q2 - q3*1000
q4 := q3 / 1000 q4 := q3 / 1000
if q4 == 0 { if q4 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q3], n) n = writeFirstBuf(stream.buf, DIGITS[q3], n)
writeBuf(stream.buf, DIGITS[r3], n) writeBuf(stream.buf, DIGITS[r3], n)
writeBuf(stream.buf, DIGITS[r2], n + 3) writeBuf(stream.buf, DIGITS[r2], n+3)
writeBuf(stream.buf, DIGITS[r1], n + 6) writeBuf(stream.buf, DIGITS[r1], n+6)
stream.n = n + 9 stream.n = n + 9
return return
} }
r4 := q3 - q4 * 1000 r4 := q3 - q4*1000
q5 := q4 / 1000 q5 := q4 / 1000
if q5 == 0 { if q5 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q4], n) n = writeFirstBuf(stream.buf, DIGITS[q4], n)
writeBuf(stream.buf, DIGITS[r4], n) writeBuf(stream.buf, DIGITS[r4], n)
writeBuf(stream.buf, DIGITS[r3], n + 3) writeBuf(stream.buf, DIGITS[r3], n+3)
writeBuf(stream.buf, DIGITS[r2], n + 6) writeBuf(stream.buf, DIGITS[r2], n+6)
writeBuf(stream.buf, DIGITS[r1], n + 9) writeBuf(stream.buf, DIGITS[r1], n+9)
stream.n = n + 12 stream.n = n + 12
return return
} }
r5 := q4 - q5 * 1000 r5 := q4 - q5*1000
q6 := q5 / 1000 q6 := q5 / 1000
if q6 == 0 { if q6 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q5], n) n = writeFirstBuf(stream.buf, DIGITS[q5], n)
} else { } else {
n = writeFirstBuf(stream.buf, DIGITS[q6], n) n = writeFirstBuf(stream.buf, DIGITS[q6], n)
r6 := q5 - q6 * 1000 r6 := q5 - q6*1000
writeBuf(stream.buf, DIGITS[r6], n) writeBuf(stream.buf, DIGITS[r6], n)
n += 3 n += 3
} }
writeBuf(stream.buf, DIGITS[r5], n) writeBuf(stream.buf, DIGITS[r5], n)
writeBuf(stream.buf, DIGITS[r4], n + 3) writeBuf(stream.buf, DIGITS[r4], n+3)
writeBuf(stream.buf, DIGITS[r3], n + 6) writeBuf(stream.buf, DIGITS[r3], n+6)
writeBuf(stream.buf, DIGITS[r2], n + 9) writeBuf(stream.buf, DIGITS[r2], n+9)
writeBuf(stream.buf, DIGITS[r1], n + 12) writeBuf(stream.buf, DIGITS[r1], n+12)
stream.n = n + 15 stream.n = n + 15
} }
func (stream *Stream) WriteInt64(nval int64) { func (stream *Stream) WriteInt64(nval int64) {
if stream.Available() < 20 { stream.ensure(20)
stream.Flush()
}
n := stream.n n := stream.n
var val uint64 var val uint64
if (nval < 0) { if nval < 0 {
val = uint64(-nval) val = uint64(-nval)
stream.buf[n] = '-' stream.buf[n] = '-'
n++ n++
@ -295,7 +244,7 @@ func (stream *Stream) WriteInt64(nval int64) {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n) stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return return
} }
r1 := val - q1 * 1000; r1 := val - q1*1000
q2 := q1 / 1000 q2 := q1 / 1000
if q2 == 0 { if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n) n := writeFirstBuf(stream.buf, DIGITS[q1], n)
@ -303,52 +252,52 @@ func (stream *Stream) WriteInt64(nval int64) {
stream.n = n + 3 stream.n = n + 3
return return
} }
r2 := q1 - q2 * 1000 r2 := q1 - q2*1000
q3 := q2 / 1000 q3 := q2 / 1000
if q3 == 0 { if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n) n = writeFirstBuf(stream.buf, DIGITS[q2], n)
writeBuf(stream.buf, DIGITS[r2], n) writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3) writeBuf(stream.buf, DIGITS[r1], n+3)
stream.n = n + 6 stream.n = n + 6
return return
} }
r3 := q2 - q3 * 1000 r3 := q2 - q3*1000
q4 := q3 / 1000 q4 := q3 / 1000
if q4 == 0 { if q4 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q3], n) n = writeFirstBuf(stream.buf, DIGITS[q3], n)
writeBuf(stream.buf, DIGITS[r3], n) writeBuf(stream.buf, DIGITS[r3], n)
writeBuf(stream.buf, DIGITS[r2], n + 3) writeBuf(stream.buf, DIGITS[r2], n+3)
writeBuf(stream.buf, DIGITS[r1], n + 6) writeBuf(stream.buf, DIGITS[r1], n+6)
stream.n = n + 9 stream.n = n + 9
return return
} }
r4 := q3 - q4 * 1000 r4 := q3 - q4*1000
q5 := q4 / 1000 q5 := q4 / 1000
if q5 == 0 { if q5 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q4], n) n = writeFirstBuf(stream.buf, DIGITS[q4], n)
writeBuf(stream.buf, DIGITS[r4], n) writeBuf(stream.buf, DIGITS[r4], n)
writeBuf(stream.buf, DIGITS[r3], n + 3) writeBuf(stream.buf, DIGITS[r3], n+3)
writeBuf(stream.buf, DIGITS[r2], n + 6) writeBuf(stream.buf, DIGITS[r2], n+6)
writeBuf(stream.buf, DIGITS[r1], n + 9) writeBuf(stream.buf, DIGITS[r1], n+9)
stream.n = n + 12 stream.n = n + 12
return return
} }
r5 := q4 - q5 * 1000 r5 := q4 - q5*1000
q6 := q5 / 1000 q6 := q5 / 1000
if q6 == 0 { if q6 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q5], n) n = writeFirstBuf(stream.buf, DIGITS[q5], n)
} else { } else {
stream.buf[n] = byte(q6 + '0') stream.buf[n] = byte(q6 + '0')
n++ n++
r6 := q5 - q6 * 1000 r6 := q5 - q6*1000
writeBuf(stream.buf, DIGITS[r6], n) writeBuf(stream.buf, DIGITS[r6], n)
n += 3 n += 3
} }
writeBuf(stream.buf, DIGITS[r5], n) writeBuf(stream.buf, DIGITS[r5], n)
writeBuf(stream.buf, DIGITS[r4], n + 3) writeBuf(stream.buf, DIGITS[r4], n+3)
writeBuf(stream.buf, DIGITS[r3], n + 6) writeBuf(stream.buf, DIGITS[r3], n+6)
writeBuf(stream.buf, DIGITS[r2], n + 9) writeBuf(stream.buf, DIGITS[r2], n+9)
writeBuf(stream.buf, DIGITS[r1], n + 12) writeBuf(stream.buf, DIGITS[r1], n+12)
stream.n = n + 15 stream.n = n + 15
} }
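The integer writers above rely on the DIGITS table built in init(): the low three bytes of each entry hold the ASCII hundreds, tens and ones digits, and the high byte says how many leading bytes writeFirstBuf may skip. A standalone sketch of that packing (the pack helper is illustrative, not part of the library):

package main

import "fmt"

// pack mirrors the DIGITS initialisation above: three ASCII digits in the
// low bytes, and the number of leading bytes to skip in the high byte.
func pack(i uint32) uint32 {
	v := (i/100+'0')<<16 + (i/10%10+'0')<<8 + i%10 + '0'
	if i < 10 {
		v += 2 << 24 // one significant digit: skip two leading bytes
	} else if i < 100 {
		v += 1 << 24 // two significant digits: skip one leading byte
	}
	return v
}

func main() {
	v := pack(7)
	fmt.Printf("%q %q %q, skip %d\n", byte(v>>16), byte(v>>8), byte(v), v>>24)
	// prints: '0' '0' '7', skip 2
}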

jsoniter_adapter_test.go (new file, 60 lines)

@ -0,0 +1,60 @@
package jsoniter
import (
"bytes"
"encoding/json"
"github.com/json-iterator/go/require"
"io/ioutil"
"testing"
)
func Test_new_decoder(t *testing.T) {
should := require.New(t)
decoder1 := json.NewDecoder(bytes.NewBufferString(`[1][2]`))
decoder2 := NewDecoder(bytes.NewBufferString(`[1][2]`))
arr1 := []int{}
should.Nil(decoder1.Decode(&arr1))
should.Equal([]int{1}, arr1)
arr2 := []int{}
should.True(decoder1.More())
buffered, _ := ioutil.ReadAll(decoder1.Buffered())
should.Equal("[2]", string(buffered))
should.Nil(decoder2.Decode(&arr2))
should.Equal([]int{1}, arr2)
should.True(decoder2.More())
buffered, _ = ioutil.ReadAll(decoder2.Buffered())
should.Equal("[2]", string(buffered))
should.Nil(decoder1.Decode(&arr1))
should.Equal([]int{2}, arr1)
should.False(decoder1.More())
should.Nil(decoder2.Decode(&arr2))
should.Equal([]int{2}, arr2)
should.False(decoder2.More())
}
func Test_new_encoder(t *testing.T) {
should := require.New(t)
buf1 := &bytes.Buffer{}
encoder1 := json.NewEncoder(buf1)
encoder1.Encode([]int{1})
should.Equal("[1]\n", buf1.String())
buf2 := &bytes.Buffer{}
encoder2 := NewEncoder(buf2)
encoder2.Encode([]int{1})
should.Equal("[1]", buf2.String())
}
func Test_use_number(t *testing.T) {
should := require.New(t)
decoder1 := json.NewDecoder(bytes.NewBufferString(`123`))
decoder1.UseNumber()
decoder2 := NewDecoder(bytes.NewBufferString(`123`))
decoder2.UseNumber()
var obj1 interface{}
should.Nil(decoder1.Decode(&obj1))
should.Equal(json.Number("123"), obj1)
var obj2 interface{}
should.Nil(decoder2.Decode(&obj2))
should.Equal(json.Number("123"), obj2)
}
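Since the adapter mirrors encoding/json's Decoder, the usual streaming loop carries over unchanged; a small sketch (import path assumed as github.com/json-iterator/go):

package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

// Sketch: decode several concatenated JSON documents from one reader,
// the same behaviour Test_new_decoder checks against encoding/json.
func main() {
	dec := jsoniter.NewDecoder(strings.NewReader(`[1][2][3]`))
	for dec.More() {
		var arr []int
		if err := dec.Decode(&arr); err != nil {
			panic(err)
		}
		fmt.Println(arr) // [1], then [2], then [3]
	}
}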


@ -1,11 +1,11 @@
package jsoniter
import (
"bytes"
"encoding/json"
"github.com/json-iterator/go/require"
"io"
"testing"
)
func Test_empty_array(t *testing.T) { func Test_empty_array(t *testing.T) {
@ -84,7 +84,7 @@ func Test_read_array_with_any_iterator(t *testing.T) {
func Test_wrap_array(t *testing.T) {
should := require.New(t)
any := Wrap([]int{1, 2, 3})
should.Equal("[1,2,3]", any.ToString())
var element Any
var elements []int
@ -93,9 +93,9 @@ func Test_wrap_array(t *testing.T) {
elements = append(elements, element.ToInt())
}
should.Equal([]int{1, 2, 3}, elements)
any = Wrap([]int{1, 2, 3})
should.Equal(3, any.Size())
any = Wrap([]int{1, 2, 3})
should.Equal(2, any.Get(1).ToInt())
}
@ -103,7 +103,7 @@ func Test_array_lazy_any_get(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("[1,[2,3],4]")
should.Nil(err)
should.Equal(3, any.Get(1, 1).ToInt())
should.Equal("[1,[2,3],4]", any.ToString())
}
@ -111,25 +111,25 @@ func Test_array_lazy_any_get_all(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("[[1],[2],[3,4]]")
should.Nil(err)
should.Equal("[1,2,3]", any.Get('*', 0).ToString())
}
func Test_array_wrapper_any_get_all(t *testing.T) {
should := require.New(t)
any := wrapArray([][]int{
{1, 2},
{3, 4},
{5, 6},
})
should.Equal("[1,3,5]", any.Get('*', 0).ToString())
}
func Test_array_lazy_any_get_invalid(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("[]")
should.Nil(err)
should.Equal(Invalid, any.Get(1, 1).ValueType())
should.NotNil(any.Get(1, 1).LastError())
should.Equal(Invalid, any.Get("1").ValueType())
should.NotNil(any.Get("1").LastError())
}
} }
@ -244,13 +244,45 @@ func Test_write_val_empty_array(t *testing.T) {
func Test_write_array_of_interface_in_struct(t *testing.T) { func Test_write_array_of_interface_in_struct(t *testing.T) {
should := require.New(t) should := require.New(t)
type TestObject struct { type TestObject struct {
Field []interface{} Field []interface{}
Field2 string Field2 string
} }
val := TestObject{[]interface{}{1, 2}, ""} val := TestObject{[]interface{}{1, 2}, ""}
str, err := MarshalToString(val) str, err := MarshalToString(val)
should.Nil(err) should.Nil(err)
should.Equal(`{"Field":[1,2],"Field2":""}`, str) should.Contains(str, `"Field":[1,2]`)
should.Contains(str, `"Field2":""`)
}
func Test_json_RawMessage(t *testing.T) {
should := require.New(t)
var data json.RawMessage
should.Nil(Unmarshal([]byte(`[1,2,3]`), &data))
should.Equal(`[1,2,3]`, string(data))
str, err := MarshalToString(data)
should.Nil(err)
should.Equal(`[1,2,3]`, str)
}
func Test_encode_byte_array(t *testing.T) {
should := require.New(t)
bytes, err := json.Marshal([]byte{1, 2, 3})
should.Nil(err)
should.Equal(`"AQID"`, string(bytes))
bytes, err = Marshal([]byte{1, 2, 3})
should.Nil(err)
should.Equal(`"AQID"`, string(bytes))
}
func Test_decode_byte_array(t *testing.T) {
should := require.New(t)
data := []byte{}
err := json.Unmarshal([]byte(`"AQID"`), &data)
should.Nil(err)
should.Equal([]byte{1, 2, 3}, data)
err = Unmarshal([]byte(`"AQID"`), &data)
should.Nil(err)
should.Equal([]byte{1, 2, 3}, data)
} }
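The two byte-array tests above pin down the stdlib-compatible behaviour: a []byte marshals to a base64 string, and "AQID" is simply base64 of the bytes 1, 2, 3. A one-line check of that equivalence:

package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	// "AQID" is the standard base64 encoding of {1, 2, 3}.
	fmt.Println(base64.StdEncoding.EncodeToString([]byte{1, 2, 3})) // AQID
}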
func Benchmark_jsoniter_array(b *testing.B) { func Benchmark_jsoniter_array(b *testing.B) {


@ -1,9 +1,9 @@
package jsoniter
import (
"bytes"
"github.com/json-iterator/go/require"
"testing"
)
func Test_true(t *testing.T) { func Test_true(t *testing.T) {
@ -38,7 +38,6 @@ func Test_write_true_false(t *testing.T) {
should.Equal("truefalse", buf.String()) should.Equal("truefalse", buf.String())
} }
func Test_write_val_bool(t *testing.T) { func Test_write_val_bool(t *testing.T) {
should := require.New(t) should := require.New(t)
buf := &bytes.Buffer{} buf := &bytes.Buffer{}


@ -1,6 +1,8 @@
package jsoniter
import (
"encoding/json"
"github.com/json-iterator/go/require"
"reflect"
"strconv"
"testing"
@ -29,6 +31,42 @@ func Test_customize_type_decoder(t *testing.T) {
} }
} }
func Test_customize_type_encoder(t *testing.T) {
should := require.New(t)
RegisterTypeEncoder("time.Time", func(ptr unsafe.Pointer, stream *Stream) {
t := *((*time.Time)(ptr))
stream.WriteString(t.UTC().Format("2006-01-02 15:04:05"))
})
defer CleanEncoders()
val := time.Unix(0, 0)
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`"1970-01-01 00:00:00"`, str)
}
func Test_customize_byte_array_encoder(t *testing.T) {
CleanEncoders()
should := require.New(t)
RegisterTypeEncoder("[]uint8", func(ptr unsafe.Pointer, stream *Stream) {
t := *((*[]byte)(ptr))
stream.WriteString(string(t))
})
defer CleanEncoders()
val := []byte("abc")
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`"abc"`, str)
}
func Test_customize_float_marshal(t *testing.T) {
should := require.New(t)
EnableLossyFloatMarshalling()
defer CleanEncoders()
str, err := MarshalToString(float32(1.23456789))
should.Nil(err)
should.Equal("1.234568", str)
}
type Tom struct { type Tom struct {
field1 string field1 string
} }
@ -50,20 +88,129 @@ type TestObject1 struct {
} }
func Test_customize_field_by_extension(t *testing.T) { func Test_customize_field_by_extension(t *testing.T) {
RegisterExtension(func(type_ reflect.Type, field *reflect.StructField) ([]string, DecoderFunc) { should := require.New(t)
RegisterExtension(func(type_ reflect.Type, field *reflect.StructField) ([]string, EncoderFunc, DecoderFunc) {
if type_.String() == "jsoniter.TestObject1" && field.Name == "field1" { if type_.String() == "jsoniter.TestObject1" && field.Name == "field1" {
return []string{"field-1"}, func(ptr unsafe.Pointer, iter *Iterator) { encode := func(ptr unsafe.Pointer, stream *Stream) {
str := *((*string)(ptr))
val, _ := strconv.Atoi(str)
stream.WriteInt(val)
}
decode := func(ptr unsafe.Pointer, iter *Iterator) {
*((*string)(ptr)) = strconv.Itoa(iter.ReadInt()) *((*string)(ptr)) = strconv.Itoa(iter.ReadInt())
} }
return []string{"field-1"}, encode, decode
} }
return nil, nil return nil, nil, nil
}) })
obj := TestObject1{} obj := TestObject1{}
err := Unmarshal([]byte(`{"field-1": 100}`), &obj) err := UnmarshalFromString(`{"field-1": 100}`, &obj)
if err != nil { should.Nil(err)
t.Fatal(err) should.Equal("100", obj.field1)
} str, err := MarshalToString(obj)
if obj.field1 != "100" { should.Nil(err)
t.Fatal(obj.field1) should.Equal(`{"field-1":100}`, str)
} }
func Test_unexported_fields(t *testing.T) {
EnableUnexportedStructFieldsSupport()
should := require.New(t)
type TestObject struct {
field1 string
field2 string `json:"field-2"`
}
obj := TestObject{}
obj.field1 = "hello"
should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("hello", obj.field1)
should.Nil(UnmarshalFromString(`{"field1": "world", "field-2": "abc"}`, &obj))
should.Equal("world", obj.field1)
should.Equal("abc", obj.field2)
str, err := MarshalToString(obj)
should.Nil(err)
should.Contains(str, `"field-2":"abc"`)
}
type ObjectImplementedMarshaler int
func (obj *ObjectImplementedMarshaler) MarshalJSON() ([]byte, error) {
return []byte(`"hello"`), nil
}
func Test_marshaler(t *testing.T) {
type TestObject struct {
Field *ObjectImplementedMarshaler
}
should := require.New(t)
val := ObjectImplementedMarshaler(100)
obj := TestObject{&val}
bytes, err := json.Marshal(obj)
should.Nil(err)
should.Equal(`{"Field":"hello"}`, string(bytes))
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{"Field":"hello"}`, str)
}
func Test_marshaler_and_encoder(t *testing.T) {
type TestObject struct {
Field *ObjectImplementedMarshaler
}
should := require.New(t)
RegisterTypeEncoder("jsoniter.ObjectImplementedMarshaler", func(ptr unsafe.Pointer, stream *Stream) {
stream.WriteString("hello from encoder")
})
val := ObjectImplementedMarshaler(100)
obj := TestObject{&val}
bytes, err := json.Marshal(obj)
should.Nil(err)
should.Equal(`{"Field":"hello"}`, string(bytes))
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{"Field":"hello from encoder"}`, str)
}
type ObjectImplementedUnmarshaler int
func (obj *ObjectImplementedUnmarshaler) UnmarshalJSON([]byte) error {
*obj = 100
return nil
}
func Test_unmarshaler(t *testing.T) {
type TestObject struct {
Field *ObjectImplementedUnmarshaler
Field2 string
}
should := require.New(t)
obj := TestObject{}
val := ObjectImplementedUnmarshaler(0)
obj.Field = &val
err := json.Unmarshal([]byte(`{"Field":"hello"}`), &obj)
should.Nil(err)
should.Equal(100, int(*obj.Field))
err = Unmarshal([]byte(`{"Field":"hello"}`), &obj)
should.Nil(err)
should.Equal(100, int(*obj.Field))
}
func Test_unmarshaler_and_decoder(t *testing.T) {
type TestObject struct {
Field *ObjectImplementedUnmarshaler
Field2 string
}
should := require.New(t)
RegisterTypeDecoder("jsoniter.ObjectImplementedUnmarshaler", func(ptr unsafe.Pointer, iter *Iterator) {
*(*ObjectImplementedUnmarshaler)(ptr) = 10
iter.Skip()
})
obj := TestObject{}
val := ObjectImplementedUnmarshaler(0)
obj.Field = &val
err := json.Unmarshal([]byte(`{"Field":"hello"}`), &obj)
should.Nil(err)
should.Equal(100, int(*obj.Field))
err = Unmarshal([]byte(`{"Field":"hello"}`), &obj)
should.Nil(err)
should.Equal(10, int(*obj.Field))
} }
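For symmetry with the time.Time encoder registered in Test_customize_type_encoder, the decoding direction uses RegisterTypeDecoder with the same layout. This is a sketch in the style of the surrounding tests (same package and imports assumed); error handling for time.Parse is elided:

func Test_customize_time_decoder_sketch(t *testing.T) {
	should := require.New(t)
	RegisterTypeDecoder("time.Time", func(ptr unsafe.Pointer, iter *Iterator) {
		// a real decoder should surface the parse error; elided in this sketch
		parsed, _ := time.Parse("2006-01-02 15:04:05", iter.ReadString())
		*((*time.Time)(ptr)) = parsed
	})
	defer CleanDecoders()
	var val time.Time
	should.Nil(UnmarshalFromString(`"1970-01-01 00:00:00"`, &val))
	should.Equal(int64(0), val.Unix())
}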


@ -1,15 +1,18 @@
package jsoniter
import (
"encoding/json"
"fmt"
"github.com/json-iterator/go/require"
"testing"
)
func Test_bind_api_demo(t *testing.T) { func Test_bind_api_demo(t *testing.T) {
iter := ParseString(`[0,1,2,3]`) should := require.New(t)
val := []int{} val := []int{}
iter.ReadVal(&val) err := UnmarshalFromString(`[0,1,2,3] `, &val)
fmt.Println(val[3]) should.Nil(err)
should.Equal([]int{0, 1, 2, 3}, val)
} }
func Test_iterator_api_demo(t *testing.T) { func Test_iterator_api_demo(t *testing.T) {
@ -21,19 +24,63 @@ func Test_iterator_api_demo(t *testing.T) {
fmt.Println(total) fmt.Println(total)
} }
type User struct { type People struct {
userID int Name string
name string Gender string
tags []string Age int
Address string
Mobile string
Country string
Height int
} }
func Test_iterator_and_bind_api(t *testing.T) { func jsoniterMarshal(p *People) error {
iter := ParseString(`[123, {"name": "taowen", "tags": ["crazy", "hacker"]}]`) _, err := Marshal(p)
user := User{} if nil != err {
iter.ReadArray() return err
user.userID = iter.ReadInt() }
iter.ReadArray() return nil
iter.ReadVal(&user) }
iter.ReadArray() // array end func stdMarshal(p *People) error {
fmt.Println(user) _, err := json.Marshal(p)
if nil != err {
return err
}
return nil
}
func BenchmarkJosniterMarshal(b *testing.B) {
var p People
p.Address = "上海市徐汇区漕宝路"
p.Age = 30
p.Country = "中国"
p.Gender = "male"
p.Height = 170
p.Mobile = "18502120533"
p.Name = "Elvin"
b.ReportAllocs()
for i := 0; i < b.N; i++ {
err := jsoniterMarshal(&p)
if nil != err {
b.Error(err)
}
}
}
func BenchmarkStdMarshal(b *testing.B) {
var p People
p.Address = "上海市徐汇区漕宝路"
p.Age = 30
p.Country = "中国"
p.Gender = "male"
p.Height = 170
p.Mobile = "18502120533"
p.Name = "Elvin"
b.ReportAllocs()
for i := 0; i < b.N; i++ {
err := stdMarshal(&p)
if nil != err {
b.Error(err)
}
}
} }


@ -1,9 +1,9 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"io"
"testing"
)
func Test_string_end(t *testing.T) { func Test_string_end(t *testing.T) {


@ -1,14 +1,30 @@
package jsoniter
import (
"bytes"
"encoding/json"
"fmt"
"github.com/json-iterator/go/require"
"strconv"
"testing"
)
func Test_read_big_float(t *testing.T) {
should := require.New(t)
iter := ParseString(`12.3`)
val := iter.ReadBigFloat()
val64, _ := val.Float64()
should.Equal(12.3, val64)
}
func Test_read_big_int(t *testing.T) {
should := require.New(t)
iter := ParseString(`92233720368547758079223372036854775807`)
val := iter.ReadBigInt()
should.NotNil(val)
should.Equal(`92233720368547758079223372036854775807`, val.String())
}
func Test_read_float(t *testing.T) { func Test_read_float(t *testing.T) {
inputs := []string{`1.1`, `1000`, `9223372036854775807`, `12.3`, `-12.3`, `720368.54775807`, `720368.547758075`} inputs := []string{`1.1`, `1000`, `9223372036854775807`, `12.3`, `-12.3`, `720368.54775807`, `720368.547758075`}
for _, input := range inputs { for _, input := range inputs {
@ -30,14 +46,14 @@ func Test_read_float(t *testing.T) {
// streaming // streaming
t.Run(fmt.Sprintf("%v", input), func(t *testing.T) { t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
should := require.New(t) should := require.New(t)
iter := Parse(bytes.NewBufferString(input + ","), 2) iter := Parse(bytes.NewBufferString(input+","), 2)
expected, err := strconv.ParseFloat(input, 32) expected, err := strconv.ParseFloat(input, 32)
should.Nil(err) should.Nil(err)
should.Equal(float32(expected), iter.ReadFloat32()) should.Equal(float32(expected), iter.ReadFloat32())
}) })
t.Run(fmt.Sprintf("%v", input), func(t *testing.T) { t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
should := require.New(t) should := require.New(t)
iter := Parse(bytes.NewBufferString(input + ","), 2) iter := Parse(bytes.NewBufferString(input+","), 2)
expected, err := strconv.ParseFloat(input, 64) expected, err := strconv.ParseFloat(input, 64)
should.Nil(err) should.Nil(err)
should.Equal(expected, iter.ReadFloat64()) should.Equal(expected, iter.ReadFloat64())
@ -69,13 +85,13 @@ func Test_wrap_float(t *testing.T) {
func Test_write_float32(t *testing.T) { func Test_write_float32(t *testing.T) {
vals := []float32{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff, vals := []float32{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff,
-0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001} -0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
for _, val := range vals { for _, val := range vals {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) { t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t) should := require.New(t)
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
stream := NewStream(buf, 4096) stream := NewStream(buf, 4096)
stream.WriteFloat32(val) stream.WriteFloat32Lossy(val)
stream.Flush() stream.Flush()
should.Nil(stream.Error) should.Nil(stream.Error)
should.Equal(strconv.FormatFloat(float64(val), 'f', -1, 32), buf.String()) should.Equal(strconv.FormatFloat(float64(val), 'f', -1, 32), buf.String())
@ -94,7 +110,7 @@ func Test_write_float32(t *testing.T) {
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
stream := NewStream(buf, 10) stream := NewStream(buf, 10)
stream.WriteRaw("abcdefg") stream.WriteRaw("abcdefg")
stream.WriteFloat32(1.123456) stream.WriteFloat32Lossy(1.123456)
stream.Flush() stream.Flush()
should.Nil(stream.Error) should.Nil(stream.Error)
should.Equal("abcdefg1.123456", buf.String()) should.Equal("abcdefg1.123456", buf.String())
@ -102,13 +118,13 @@ func Test_write_float32(t *testing.T) {
func Test_write_float64(t *testing.T) { func Test_write_float64(t *testing.T) {
vals := []float64{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff, vals := []float64{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff,
-0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001} -0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
for _, val := range vals { for _, val := range vals {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) { t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t) should := require.New(t)
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
stream := NewStream(buf, 4096) stream := NewStream(buf, 4096)
stream.WriteFloat64(val) stream.WriteFloat64Lossy(val)
stream.Flush() stream.Flush()
should.Nil(stream.Error) should.Nil(stream.Error)
should.Equal(strconv.FormatFloat(val, 'f', -1, 64), buf.String()) should.Equal(strconv.FormatFloat(val, 'f', -1, 64), buf.String())
@ -127,12 +143,34 @@ func Test_write_float64(t *testing.T) {
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
stream := NewStream(buf, 10) stream := NewStream(buf, 10)
stream.WriteRaw("abcdefg") stream.WriteRaw("abcdefg")
stream.WriteFloat64(1.123456) stream.WriteFloat64Lossy(1.123456)
stream.Flush() stream.Flush()
should.Nil(stream.Error) should.Nil(stream.Error)
should.Equal("abcdefg1.123456", buf.String()) should.Equal("abcdefg1.123456", buf.String())
} }
func Test_read_float64_cursor(t *testing.T) {
should := require.New(t)
iter := ParseString("[1.23456789\n,2,3]")
should.True(iter.ReadArray())
should.Equal(1.23456789, iter.Read())
should.True(iter.ReadArray())
should.Equal(float64(2), iter.Read())
}
func Test_read_float_scientific(t *testing.T) {
should := require.New(t)
var obj interface{}
should.Nil(UnmarshalFromString(`1e1`, &obj))
should.Equal(float64(10), obj)
should.Nil(json.Unmarshal([]byte(`1e1`), &obj))
should.Equal(float64(10), obj)
should.Nil(UnmarshalFromString(`1.0e1`, &obj))
should.Equal(float64(10), obj)
should.Nil(json.Unmarshal([]byte(`1.0e1`), &obj))
should.Equal(float64(10), obj)
}
func Benchmark_jsoniter_float(b *testing.B) { func Benchmark_jsoniter_float(b *testing.B) {
b.ReportAllocs() b.ReportAllocs()
input := []byte(`1.1123,`) input := []byte(`1.1123,`)


@ -3,12 +3,12 @@ package jsoniter
import (
"bytes"
"encoding/json"
"fmt"
"github.com/json-iterator/go/require"
"io"
"io/ioutil"
"strconv"
"testing"
)
func Test_read_uint64_invalid(t *testing.T) { func Test_read_uint64_invalid(t *testing.T) {
@ -421,6 +421,17 @@ func Test_write_val_int_ptr(t *testing.T) {
should.Equal("1001", buf.String()) should.Equal("1001", buf.String())
} }
func Test_json_number(t *testing.T) {
should := require.New(t)
var arr []json.Number
err := Unmarshal([]byte(`[1]`), &arr)
should.Nil(err)
should.Equal(json.Number("1"), arr[0])
str, err := MarshalToString(arr)
should.Nil(err)
should.Equal(`[1]`, str)
}
func Benchmark_jsoniter_encode_int(b *testing.B) { func Benchmark_jsoniter_encode_int(b *testing.B) {
stream := NewStream(ioutil.Discard, 64) stream := NewStream(ioutil.Discard, 64)
for n := 0; n < b.N; n++ { for n := 0; n < b.N; n++ {


@ -1,9 +1,10 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"testing"
"unsafe"
"encoding/json"
)
func Test_write_array_of_interface(t *testing.T) { func Test_write_array_of_interface(t *testing.T) {
@ -16,7 +17,7 @@ func Test_write_array_of_interface(t *testing.T) {
func Test_write_map_of_interface(t *testing.T) { func Test_write_map_of_interface(t *testing.T) {
should := require.New(t) should := require.New(t)
val := map[string]interface{}{"hello":"world"} val := map[string]interface{}{"hello": "world"}
str, err := MarshalToString(val) str, err := MarshalToString(val)
should.Nil(err) should.Nil(err)
should.Equal(`{"hello":"world"}`, str) should.Equal(`{"hello":"world"}`, str)
@ -27,7 +28,7 @@ func Test_write_map_of_interface_in_struct(t *testing.T) {
Field map[string]interface{} Field map[string]interface{}
} }
should := require.New(t) should := require.New(t)
val := TestObject{map[string]interface{}{"hello":"world"}} val := TestObject{map[string]interface{}{"hello": "world"}}
str, err := MarshalToString(val) str, err := MarshalToString(val)
should.Nil(err) should.Nil(err)
should.Equal(`{"Field":{"hello":"world"}}`, str) should.Equal(`{"Field":{"hello":"world"}}`, str)
@ -35,14 +36,14 @@ func Test_write_map_of_interface_in_struct(t *testing.T) {
func Test_write_map_of_interface_in_struct_with_two_fields(t *testing.T) { func Test_write_map_of_interface_in_struct_with_two_fields(t *testing.T) {
type TestObject struct { type TestObject struct {
Field map[string]interface{} Field map[string]interface{}
Field2 string Field2 string
} }
should := require.New(t) should := require.New(t)
val := TestObject{map[string]interface{}{"hello":"world"}, ""} val := TestObject{map[string]interface{}{"hello": "world"}, ""}
str, err := MarshalToString(val) str, err := MarshalToString(val)
should.Nil(err) should.Nil(err)
should.Equal(`{"Field":{"hello":"world"},"Field2":""}`, str) should.Contains(str, `"Field":{"hello":"world"}`)
} }
type MyInterface interface { type MyInterface interface {
@ -59,7 +60,7 @@ func Test_write_map_of_custom_interface(t *testing.T) {
should := require.New(t) should := require.New(t)
myStr := MyString("world") myStr := MyString("world")
should.Equal("world", myStr.Hello()) should.Equal("world", myStr.Hello())
val := map[string]MyInterface{"hello":myStr} val := map[string]MyInterface{"hello": myStr}
str, err := MarshalToString(val) str, err := MarshalToString(val)
should.Nil(err) should.Nil(err)
should.Equal(`{"hello":"world"}`, str) should.Equal(`{"hello":"world"}`, str)
@ -92,3 +93,63 @@ func Test_read_custom_interface(t *testing.T) {
should.Nil(err) should.Nil(err)
should.Equal("hello", val.Hello()) should.Equal("hello", val.Hello())
} }
func Test_decode_object_contain_empty_interface(t *testing.T) {
type TestObject struct {
Field interface{}
}
should := require.New(t)
obj := TestObject{}
obj.Field = 1024
should.Nil(UnmarshalFromString(`{"Field": "hello"}`, &obj))
should.Equal("hello", obj.Field)
}
func Test_decode_object_contain_non_empty_interface(t *testing.T) {
type TestObject struct {
Field MyInterface
}
should := require.New(t)
obj := TestObject{}
obj.Field = MyString("abc")
should.Nil(UnmarshalFromString(`{"Field": "hello"}`, &obj))
should.Equal(MyString("hello"), obj.Field)
}
func Test_encode_object_contain_empty_interface(t *testing.T) {
type TestObject struct {
Field interface{}
}
should := require.New(t)
obj := TestObject{}
obj.Field = 1024
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{"Field":1024}`, str)
}
func Test_encode_object_contain_non_empty_interface(t *testing.T) {
type TestObject struct {
Field MyInterface
}
should := require.New(t)
obj := TestObject{}
obj.Field = MyString("hello")
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{"Field":"hello"}`, str)
}
func Test_nil_non_empty_interface(t *testing.T) {
CleanEncoders()
CleanDecoders()
type TestObject struct {
Field []MyInterface
}
should := require.New(t)
obj := TestObject{}
b := []byte(`{"Field":["AAA"]}`)
should.NotNil(json.Unmarshal(b, &obj))
should.NotNil(Unmarshal(b, &obj))
}


@ -1,8 +1,10 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"math/big"
"testing"
"encoding/json"
)
func Test_read_map(t *testing.T) { func Test_read_map(t *testing.T) {
@ -40,12 +42,12 @@ func Test_wrap_map(t *testing.T) {
vals[k] = v.ToString() vals[k] = v.ToString()
} }
} }
should.Equal(map[string]string{"Field1":"hello"}, vals) should.Equal(map[string]string{"Field1": "hello"}, vals)
} }
func Test_map_wrapper_any_get_all(t *testing.T) { func Test_map_wrapper_any_get_all(t *testing.T) {
should := require.New(t) should := require.New(t)
any := Wrap(map[string][]int{"Field1": []int{1, 2}}) any := Wrap(map[string][]int{"Field1": {1, 2}})
should.Equal(`{"Field1":1}`, any.Get('*', 0).ToString()) should.Equal(`{"Field1":1}`, any.Get('*', 0).ToString())
} }
@ -56,3 +58,72 @@ func Test_write_val_map(t *testing.T) {
should.Nil(err) should.Nil(err)
should.Equal(`{"1":"2"}`, str) should.Equal(`{"1":"2"}`, str)
} }
func Test_slice_of_map(t *testing.T) {
should := require.New(t)
val := []map[string]string{{"1": "2"}}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`[{"1":"2"}]`, str)
val = []map[string]string{}
should.Nil(UnmarshalFromString(str, &val))
should.Equal("2", val[0]["1"])
}
func Test_encode_int_key_map(t *testing.T) {
should := require.New(t)
val := map[int]string{1: "2"}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"1":"2"}`, str)
}
func Test_decode_int_key_map(t *testing.T) {
should := require.New(t)
var val map[int]string
should.Nil(UnmarshalFromString(`{"1":"2"}`, &val))
should.Equal(map[int]string{1: "2"}, val)
}
func Test_encode_TextMarshaler_key_map(t *testing.T) {
should := require.New(t)
f, _, _ := big.ParseFloat("1", 10, 64, big.ToZero)
val := map[*big.Float]string{f: "2"}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"1":"2"}`, str)
}
func Test_decode_TextMarshaler_key_map(t *testing.T) {
should := require.New(t)
var val map[*big.Float]string
should.Nil(UnmarshalFromString(`{"1":"2"}`, &val))
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"1":"2"}`, str)
}
func Test_map_key_with_escaped_char(t *testing.T) {
type Ttest struct {
Map map[string]string
}
var jsonBytes = []byte(`
{
"Map":{
"k\"ey": "val"
}
}`)
should := require.New(t)
{
var obj Ttest
should.Nil(json.Unmarshal(jsonBytes, &obj))
should.Equal(map[string]string{"k\"ey":"val"}, obj.Map)
}
{
var obj Ttest
should.Nil(Unmarshal(jsonBytes, &obj))
should.Equal(map[string]string{"k\"ey":"val"}, obj.Map)
}
}


@ -1,9 +1,9 @@
package jsoniter
import (
"bytes"
"github.com/json-iterator/go/require"
"testing"
)
func Test_read_null(t *testing.T) { func Test_read_null(t *testing.T) {
@ -39,6 +39,7 @@ func Test_encode_null(t *testing.T) {
} }
func Test_decode_null_object(t *testing.T) { func Test_decode_null_object(t *testing.T) {
should := require.New(t)
iter := ParseString(`[null,"a"]`) iter := ParseString(`[null,"a"]`)
iter.ReadArray() iter.ReadArray()
if iter.ReadObject() != "" { if iter.ReadObject() != "" {
@ -48,6 +49,12 @@ func Test_decode_null_object(t *testing.T) {
if iter.ReadString() != "a" { if iter.ReadString() != "a" {
t.FailNow() t.FailNow()
} }
type TestObject struct {
Field string
}
objs := []TestObject{}
should.Nil(UnmarshalFromString("[null]", &objs))
should.Len(objs, 1)
} }
func Test_decode_null_array(t *testing.T) { func Test_decode_null_array(t *testing.T) {
@ -66,7 +73,7 @@ func Test_decode_null_string(t *testing.T) {
should := require.New(t) should := require.New(t)
iter := ParseString(`[null,"a"]`) iter := ParseString(`[null,"a"]`)
should.True(iter.ReadArray()) should.True(iter.ReadArray())
should.True(iter.ReadNil()) should.Equal("", iter.ReadString())
should.True(iter.ReadArray()) should.True(iter.ReadArray())
should.Equal("a", iter.ReadString()) should.Equal("a", iter.ReadString())
} }


@ -1,10 +1,10 @@
package jsoniter
import (
"bytes"
"encoding/json"
"github.com/json-iterator/go/require"
"testing"
)
func Test_empty_object(t *testing.T) { func Test_empty_object(t *testing.T) {
@ -100,12 +100,15 @@ func Test_object_any_lazy_iterator(t *testing.T) {
should.False(hasNext) should.False(hasNext)
vals[k] = v.ToString() vals[k] = v.ToString()
should.Equal(map[string]string{"a":"b", "c":"d"}, vals) should.Equal(map[string]string{"a": "b", "c": "d"}, vals)
vals = map[string]string{} vals = map[string]string{}
for next, hasNext := any.IterateObject(); hasNext; k, v, hasNext = next() { for next, hasNext := any.IterateObject(); hasNext; {
vals[k] = v.ToString() k, v, hasNext = next()
if v.ValueType() == String {
vals[k] = v.ToString()
}
} }
should.Equal(map[string]string{"a":"b", "c":"d"}, vals) should.Equal(map[string]string{"a": "b", "c": "d"}, vals)
} }
func Test_object_any_with_two_lazy_iterators(t *testing.T) { func Test_object_any_with_two_lazy_iterators(t *testing.T) {
@ -150,7 +153,7 @@ func Test_object_lazy_any_get_all(t *testing.T) {
should := require.New(t) should := require.New(t)
any, err := UnmarshalAnyFromString(`{"a":[0],"b":[1]}`) any, err := UnmarshalAnyFromString(`{"a":[0],"b":[1]}`)
should.Nil(err) should.Nil(err)
should.Equal(`{"a":0,"b":1}`, any.Get('*', 0).ToString()) should.Contains(any.Get('*', 0).ToString(), `"a":0`)
} }
func Test_object_lazy_any_get_invalid(t *testing.T) { func Test_object_lazy_any_get_invalid(t *testing.T) {
@ -191,7 +194,7 @@ func Test_wrap_object(t *testing.T) {
vals[k] = v.ToString() vals[k] = v.ToString()
} }
} }
should.Equal(map[string]string{"Field1":"hello"}, vals) should.Equal(map[string]string{"Field1": "hello"}, vals)
} }
func Test_object_wrapper_any_get_all(t *testing.T) { func Test_object_wrapper_any_get_all(t *testing.T) {
@ -201,7 +204,7 @@ func Test_object_wrapper_any_get_all(t *testing.T) {
Field2 []int Field2 []int
} }
any := Wrap(TestObject{[]int{1, 2}, []int{3, 4}}) any := Wrap(TestObject{[]int{1, 2}, []int{3, 4}})
should.Equal(`{"Field2":3,"Field1":1}`, any.Get('*', 0).ToString()) should.Contains(any.Get('*', 0).ToString(), `"Field2":3`)
} }
func Test_write_object(t *testing.T) { func Test_write_object(t *testing.T) {


@ -1,8 +1,8 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"testing"
)
func Test_encode_optional_int_pointer(t *testing.T) { func Test_encode_optional_int_pointer(t *testing.T) {
@ -41,5 +41,6 @@ func Test_encode_struct_with_optional_field(t *testing.T) {
obj.field2 = &world obj.field2 = &world
str, err := MarshalToString(obj) str, err := MarshalToString(obj)
should.Nil(err) should.Nil(err)
should.Equal(`{"field1":null,"field2":"world"}`, str) should.Contains(str, `"field1":null`)
should.Contains(str, `"field2":"world"`)
} }


@ -1,8 +1,8 @@
package jsoniter
import (
"fmt"
"testing"
)
func Test_reflect_str(t *testing.T) { func Test_reflect_str(t *testing.T) {


@ -1,113 +1,114 @@
package jsoniter
import (
"bytes"
"github.com/json-iterator/go/require"
"testing"
)
func Test_decode_one_field_struct(t *testing.T) { func Test_decode_one_field_struct(t *testing.T) {
should := require.New(t) should := require.New(t)
type TestObject struct { type TestObject struct {
field1 string Field1 string
} }
obj := TestObject{} obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj)) should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.field1) should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"field1": "hello"}`, &obj)) should.Nil(UnmarshalFromString(`{"field1": "hello"}`, &obj))
should.Equal("hello", obj.field1) should.Equal("hello", obj.Field1)
} }
func Test_decode_two_fields_struct(t *testing.T) { func Test_decode_two_fields_struct(t *testing.T) {
should := require.New(t) should := require.New(t)
type TestObject struct { type TestObject struct {
field1 string Field1 string
field2 string Field2 string
} }
obj := TestObject{} obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj)) should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.field1) should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"field1": "a", "field2": "b"}`, &obj)) should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b"}`, &obj))
should.Equal("a", obj.field1) should.Equal("a", obj.Field1)
should.Equal("b", obj.field2) should.Equal("b", obj.Field2)
} }
func Test_decode_three_fields_struct(t *testing.T) { func Test_decode_three_fields_struct(t *testing.T) {
should := require.New(t) should := require.New(t)
type TestObject struct { type TestObject struct {
field1 string Field1 string
field2 string Field2 string
field3 string Field3 string
} }
obj := TestObject{} obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj)) should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.field1) should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"field1": "a", "field2": "b", "field3": "c"}`, &obj)) should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b", "Field3": "c"}`, &obj))
should.Equal("a", obj.field1) should.Equal("a", obj.Field1)
should.Equal("b", obj.field2) should.Equal("b", obj.Field2)
should.Equal("c", obj.field3) should.Equal("c", obj.Field3)
} }
func Test_decode_four_fields_struct(t *testing.T) { func Test_decode_four_fields_struct(t *testing.T) {
should := require.New(t) should := require.New(t)
type TestObject struct { type TestObject struct {
field1 string Field1 string
field2 string Field2 string
field3 string Field3 string
field4 string Field4 string
} }
obj := TestObject{} obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj)) should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.field1) should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"field1": "a", "field2": "b", "field3": "c", "field4": "d"}`, &obj)) should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b", "Field3": "c", "Field4": "d"}`, &obj))
should.Equal("a", obj.field1) should.Equal("a", obj.Field1)
should.Equal("b", obj.field2) should.Equal("b", obj.Field2)
should.Equal("c", obj.field3) should.Equal("c", obj.Field3)
should.Equal("d", obj.field4) should.Equal("d", obj.Field4)
} }
func Test_decode_five_fields_struct(t *testing.T) { func Test_decode_five_fields_struct(t *testing.T) {
should := require.New(t) should := require.New(t)
type TestObject struct { type TestObject struct {
field1 string Field1 string
field2 string Field2 string
field3 string Field3 string
field4 string Field4 string
field5 string Field5 string
} }
obj := TestObject{} obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj)) should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.field1) should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"field1": "a", "field2": "b", "field3": "c", "field4": "d", "field5": "e"}`, &obj)) should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b", "Field3": "c", "Field4": "d", "Field5": "e"}`, &obj))
should.Equal("a", obj.field1) should.Equal("a", obj.Field1)
should.Equal("b", obj.field2) should.Equal("b", obj.Field2)
should.Equal("c", obj.field3) should.Equal("c", obj.Field3)
should.Equal("d", obj.field4) should.Equal("d", obj.Field4)
should.Equal("e", obj.field5) should.Equal("e", obj.Field5)
} }
func Test_decode_ten_fields_struct(t *testing.T) { func Test_decode_ten_fields_struct(t *testing.T) {
should := require.New(t) should := require.New(t)
type TestObject struct { type TestObject struct {
field1 string Field1 string
field2 string Field2 string
field3 string Field3 string
field4 string Field4 string
field5 string Field5 string
field6 string Field6 string
field7 string Field7 string
field8 string Field8 string
field9 string Field9 string
field10 string Field10 string
} }
obj := TestObject{} obj := TestObject{}
should.Nil(UnmarshalFromString(`{}`, &obj)) should.Nil(UnmarshalFromString(`{}`, &obj))
should.Equal("", obj.field1) should.Equal("", obj.Field1)
should.Nil(UnmarshalFromString(`{"field1": "a", "field2": "b", "field3": "c", "field4": "d", "field5": "e"}`, &obj)) should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "b", "Field3": "c", "Field4": "d", "Field5": "e"}`, &obj))
should.Equal("a", obj.field1) should.Equal("a", obj.Field1)
should.Equal("b", obj.field2) should.Equal("b", obj.Field2)
should.Equal("c", obj.field3) should.Equal("c", obj.Field3)
should.Equal("d", obj.field4) should.Equal("d", obj.Field4)
should.Equal("e", obj.field5) should.Equal("e", obj.Field5)
} }
func Test_decode_struct_field_with_tag(t *testing.T) { func Test_decode_struct_field_with_tag(t *testing.T) {
@ -144,3 +145,97 @@ func Test_write_val_one_field_struct(t *testing.T) {
should.Nil(err) should.Nil(err)
should.Equal(`{"field-1":"hello"}`, str) should.Equal(`{"field-1":"hello"}`, str)
} }
func Test_mixed(t *testing.T) {
should := require.New(t)
type AA struct {
ID int `json:"id"`
Payload map[string]interface{} `json:"payload"`
buf *bytes.Buffer `json:"-"`
}
aa := AA{}
err := UnmarshalFromString(` {"id":1, "payload":{"account":"123","password":"456"}}`, &aa)
should.Nil(err)
should.Equal(1, aa.ID)
should.Equal("123", aa.Payload["account"])
}
func Test_omit_empty(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string `json:"field-1,omitempty"`
Field2 string `json:"field-2,omitempty"`
Field3 string `json:"field-3,omitempty"`
}
obj := TestObject{}
obj.Field2 = "hello"
str, err := MarshalToString(&obj)
should.Nil(err)
should.Equal(`{"field-2":"hello"}`, str)
}
func Test_any_within_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 Any
Field2 Any
}
obj := TestObject{}
err := UnmarshalFromString(`{"Field1": "hello", "Field2": [1,2,3]}`, &obj)
should.Nil(err)
should.Equal("hello", obj.Field1.ToString())
should.Equal("[1,2,3]", obj.Field2.ToString())
}
func Test_recursive_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
Me *TestObject
}
obj := TestObject{}
str, err := MarshalToString(obj)
should.Nil(err)
should.Contains(str, `"Field1":""`)
should.Contains(str, `"Me":null`)
err = UnmarshalFromString(str, &obj)
should.Nil(err)
}
func Test_one_field_struct(t *testing.T) {
should := require.New(t)
type YetYetAnotherObject struct {
Field string
}
type YetAnotherObject struct {
Field *YetYetAnotherObject
}
type AnotherObject struct {
Field *YetAnotherObject
}
type TestObject struct {
Me *AnotherObject
}
obj := TestObject{&AnotherObject{&YetAnotherObject{&YetYetAnotherObject{"abc"}}}}
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{"Me":{"Field":{"Field":{"Field":"abc"}}}}`, str)
str, err = MarshalToString(&obj)
should.Nil(err)
should.Equal(`{"Me":{"Field":{"Field":{"Field":"abc"}}}}`, str)
}
func Test_anonymous_struct_marshal(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field string
}
str, err := MarshalToString(struct {
TestObject
Field int
}{
Field: 100,
})
should.Nil(err)
should.Equal(`{"Field":100}`, str)
}


@ -3,9 +3,9 @@ package jsoniter
import (
"encoding/json"
"fmt"
"github.com/json-iterator/go/require"
"testing"
"unsafe"
)
func Test_decode_slice(t *testing.T) { func Test_decode_slice(t *testing.T) {

jsoniter_stream_test.go (new file, 54 lines)

@ -0,0 +1,54 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"testing"
)
func Test_writeByte_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 1)
stream.writeByte('1')
should.Equal("1", string(stream.Buffer()))
should.Equal(1, len(stream.buf))
stream.writeByte('2')
should.Equal("12", string(stream.Buffer()))
should.Equal(2, len(stream.buf))
stream.writeThreeBytes('3', '4', '5')
should.Equal("12345", string(stream.Buffer()))
}
func Test_writeBytes_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 1)
stream.Write([]byte{'1', '2'})
should.Equal("12", string(stream.Buffer()))
should.Equal(3, len(stream.buf))
stream.Write([]byte{'3', '4', '5', '6', '7'})
should.Equal("1234567", string(stream.Buffer()))
should.Equal(8, len(stream.buf))
}
func Test_writeIndention_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 1)
stream.IndentionStep = 2
stream.WriteVal([]int{1, 2, 3})
should.Equal("[\n 1,\n 2,\n 3\n]", string(stream.Buffer()))
}
func Test_writeRaw_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 1)
stream.WriteRaw("123")
should.Nil(stream.Error)
should.Equal("123", string(stream.Buffer()))
}
func Test_writeString_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 0)
stream.WriteString("123")
should.Nil(stream.Error)
should.Equal(`"123"`, string(stream.Buffer()))
}
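A side effect worth noting from the growth tests above: a Stream built with a nil writer never flushes, so Buffer() doubles as an in-memory marshal target. A sketch in the same style (the behaviour with a nil writer is an assumption drawn from the tests above):

func Test_stream_as_in_memory_buffer_sketch(t *testing.T) {
	should := require.New(t)
	stream := NewStream(nil, 32)
	stream.WriteVal(map[string]int{"a": 1})
	should.Nil(stream.Error)
	should.Equal(`{"a":1}`, string(stream.Buffer()))
}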


@ -3,15 +3,15 @@ package jsoniter
import (
"bytes"
"encoding/json"
"fmt"
"github.com/json-iterator/go/require"
"testing"
)
func Test_read_normal_string(t *testing.T) { func Test_read_normal_string(t *testing.T) {
cases := map[string]string{ cases := map[string]string{
`"0123456789012345678901234567890123456789"`: `0123456789012345678901234567890123456789`, `"0123456789012345678901234567890123456789"`: `0123456789012345678901234567890123456789`,
`""`: ``, `""`: ``,
`"hello"`: `hello`, `"hello"`: `hello`,
} }
for input, output := range cases { for input, output := range cases {
@ -40,8 +40,8 @@ func Test_read_normal_string(t *testing.T) {
func Test_read_exotic_string(t *testing.T) { func Test_read_exotic_string(t *testing.T) {
cases := map[string]string{ cases := map[string]string{
`"hel\"lo"`: `hel"lo`, `"hel\"lo"`: `hel"lo`,
`"hel\nlo"`: "hel\nlo", `"hel\nlo"`: "hel\nlo",
`"\u4e2d\u6587"`: "中文", `"\u4e2d\u6587"`: "中文",
`"\ud83d\udc4a"`: "\xf0\x9f\x91\x8a", // surrogate `"\ud83d\udc4a"`: "\xf0\x9f\x91\x8a", // surrogate
} }
@ -105,6 +105,13 @@ func Test_write_val_string(t *testing.T) {
should.Equal(`"hello"`, buf.String()) should.Equal(`"hello"`, buf.String())
} }
func Test_decode_slash(t *testing.T) {
should := require.New(t)
var obj interface{}
should.NotNil(json.Unmarshal([]byte("\\"), &obj))
should.NotNil(UnmarshalFromString("\\", &obj))
}
func Benchmark_jsoniter_unicode(b *testing.B) { func Benchmark_jsoniter_unicode(b *testing.B) {
for n := 0; n < b.N; n++ { for n := 0; n < b.N; n++ {
iter := ParseString(`"\ud83d\udc4a"`) iter := ParseString(`"\ud83d\udc4a"`)