mirror of https://github.com/json-iterator/go.git synced 2025-06-15 22:50:24 +02:00

49 Commits

Author SHA1 Message Date
787edc95b0 Revert "WIP: Tests to compare against stdlib" 2017-06-12 15:07:52 +08:00
6e5817b773 Merge pull request #55 from thockin/output_tests
WIP: Tests to compare against stdlib
2017-06-12 01:47:50 -05:00
7480e41836 Add output tests for maps of builtins
This tests for exact stdlib compatibility.
2017-06-11 21:09:56 -07:00
9215b3c508 Add output tests for builtin types
This fuzzes a type, marshals it with stdlib and json-iterator, compares,
then unmarshals with stdlib and json-iterator and compares.  This is
checking for literal, byte-for-byte compatibility.

In every case the test is exactly the same.

It also includes benchmark functions to compare stdlib vs json-iterator.

This depends on a couple PRs to be merged in gofuzz.
2017-06-11 21:04:59 -07:00
64e500f3c8 Merge branch 'master' of https://github.com/json-iterator/go 2017-06-12 10:13:22 +08:00
3307ce3ba2 #50 map key unlike object field, can contain escaped char 2017-06-12 10:13:13 +08:00
6f50f15678 decoder/encoder;float precision doc 2017-06-11 16:30:31 +08:00
cee09816e3 decoder/encoder;float precision doc 2017-06-11 16:28:31 +08:00
cdbad22d22 test more package description 2017-06-11 15:35:45 +08:00
b0c9f047e2 test more than one package description 2017-06-11 15:32:58 +08:00
6bd13c2948 Merge branch 'master' of https://github.com/json-iterator/go 2017-06-09 17:06:38 +08:00
84ad508437 #48 should return error if concrete type unknown 2017-06-09 17:06:27 +08:00
4f909776cf Merge pull request #49 from zhaitianduo/master
Use jsoniter instead of json in example
2017-06-09 03:32:31 -05:00
962c470806 fix unused import 2017-06-09 16:28:20 +08:00
46d443fbad use jsoniter for example 2017-06-09 16:25:58 +08:00
2608d40f2a example unmarshal 2017-06-08 12:08:47 +08:00
3cf822853f example unmarshal 2017-06-08 12:07:03 +08:00
26708bccc9 report error when string end not found 2017-06-08 09:46:19 +08:00
d75b539bad add test for scientific float 2017-06-07 21:34:56 +08:00
cfffa29c8a gofmt 2017-06-06 23:27:00 +08:00
925df245d3 good enough indent implementation 2017-06-06 23:18:37 +08:00
962a8cd303 #40 support UseNumber 2017-06-06 23:15:15 +08:00
6509ba05df Merge pull request #41 from 1046102779/master
When parsing, return an error if the output parameter is not a pointer type, instead of letting the program crash
2017-06-06 10:03:06 -05:00
579dbf3c1d Merge pull request #42 from 1046102779/patch-1
Change floatDigits to intDigits
2017-06-06 10:02:26 -05:00
aa5181db67 Change floatDigits to intDigits 2017-06-06 21:08:04 +08:00
67be6df2b1 Update feature_adapter.go 2017-06-06 20:01:43 +08:00
0f5379494a unmarshal failed return non-pointer error 2017-06-06 19:36:33 +08:00
d09e2419ba update benchmark 2017-06-06 16:55:32 +08:00
e1a71f6ba1 update benchmark 2017-06-06 16:54:26 +08:00
dcb78991c4 flush when buffer is large enough 2017-06-06 14:16:54 +08:00
9e8238cdc6 remove unused file 2017-06-06 12:41:13 +08:00
a4e5abf492 support []byte; marshal without copy 2017-06-06 09:44:56 +08:00
3979955e69 support TextMarshaler as map key 2017-06-06 00:09:33 +08:00
5fd09f0e02 remove mapInterfaceEncoder 2017-06-05 23:56:37 +08:00
af4982b22c support decode int key map 2017-06-05 23:53:48 +08:00
29dc1d407d write map with int key 2017-06-05 23:01:00 +08:00
5b27aaa62c update test 2017-06-05 22:10:01 +08:00
106636a191 update test 2017-06-05 22:08:28 +08:00
f50c4cfbbe Merge branch 'master' of https://github.com/json-iterator/go 2017-06-05 22:05:02 +08:00
87149ae489 add simple marshal benchmark 2017-06-05 22:04:52 +08:00
c0a4ad72e1 example test 2017-06-05 20:37:08 +08:00
404c0ee44b Decoder doc 2017-06-05 19:57:20 +08:00
10c1506f87 link test 2017-06-05 19:38:34 +08:00
9a43fe6468 adapter api comment 2017-06-05 19:31:30 +08:00
95e03f2937 Marshal comment 2017-06-05 19:19:46 +08:00
4406ed9e62 Marshal comment 2017-06-05 19:18:12 +08:00
ff027701f5 Marshal comment 2017-06-05 19:15:56 +08:00
c69b61f879 Marshal comment 2017-06-05 19:14:40 +08:00
d97f5db769 Marshal comment 2017-06-05 19:11:16 +08:00
46 changed files with 923 additions and 465 deletions

.idea/libraries/Go_SDK.xml (generated, new file, 10 lines)
View File

@ -0,0 +1,10 @@
<component name="libraryTable">
<library name="Go SDK">
<CLASSES>
<root url="file:///usr/local/go/src" />
</CLASSES>
<SOURCES>
<root url="file:///usr/local/go/src" />
</SOURCES>
</library>
</component>

View File

@ -2,6 +2,23 @@
jsoniter (json-iterator) is fast and flexible JSON parser available in [Java](https://github.com/json-iterator/java) and [Go](https://github.com/json-iterator/go)
# Benchmark
![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png)
Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go
Raw Result (easyjson requires static code generation)
| | ns/op | allocation bytes | allocation times |
| --- | --- | --- | --- |
| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
# Usage
100% compatibility with standard lib
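Since the compatibility claim amounts to swapping the import, a minimal sketch of the drop-in usage (relying only on the Marshal adapter added later in this change set) could look like:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go" // drop-in for "encoding/json"; the package name is jsoniter
)

func main() {
	// Marshal mirrors encoding/json's signature, so callers only change the import.
	out, err := jsoniter.Marshal(map[string]int{"a": 1})
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(string(out)) // {"a":1}
}
```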

example_test.go (new file, 47 lines)
View File

@ -0,0 +1,47 @@
package jsoniter_test
import (
"fmt"
"os"
"github.com/json-iterator/go"
)
func ExampleMarshal() {
type ColorGroup struct {
ID int
Name string
Colors []string
}
group := ColorGroup{
ID: 1,
Name: "Reds",
Colors: []string{"Crimson", "Red", "Ruby", "Maroon"},
}
b, err := jsoniter.Marshal(group)
if err != nil {
fmt.Println("error:", err)
}
os.Stdout.Write(b)
// Output:
// {"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]}
}
func ExampleUnmarshal() {
var jsonBlob = []byte(`[
{"Name": "Platypus", "Order": "Monotremata"},
{"Name": "Quoll", "Order": "Dasyuromorphia"}
]`)
type Animal struct {
Name string
Order string
}
var animals []Animal
err := jsoniter.Unmarshal(jsonBlob, &animals)
if err != nil {
fmt.Println("error:", err)
}
fmt.Printf("%+v", animals)
// Output:
// [{Name:Platypus Order:Monotremata} {Name:Quoll Order:Dasyuromorphia}]
}

View File

@ -1,14 +1,37 @@
// Package jsoniter implements encoding and decoding of JSON as defined in
// RFC 4627 and provides interfaces with identical syntax of standard lib encoding/json.
// Converting from encoding/json to jsoniter is no more than replacing the package with jsoniter
// and variable type declarations (if any).
// jsoniter interfaces gives 100% compatibility with code using standard lib.
//
// "JSON and Go"
// (https://golang.org/doc/articles/json_and_go.html)
// gives a description of how Marshal/Unmarshal operate
// between arbitrary or predefined json objects and bytes,
// and it applies to jsoniter.Marshal/Unmarshal as well.
package jsoniter package jsoniter
import ( import (
"io"
"bytes" "bytes"
"encoding/json"
"errors"
"io"
"reflect"
"unsafe"
) )
// Unmarshal adapts to json/encoding APIs // Unmarshal adapts to json/encoding Unmarshal API
//
// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v.
// Refer to https://godoc.org/encoding/json#Unmarshal for more information
func Unmarshal(data []byte, v interface{}) error { func Unmarshal(data []byte, v interface{}) error {
data = data[:lastNotSpacePos(data)] data = data[:lastNotSpacePos(data)]
iter := ParseBytes(data) iter := ParseBytes(data)
typ := reflect.TypeOf(v)
if typ.Kind() != reflect.Ptr {
// return non-pointer error
return errors.New("the second param must be ptr type")
}
iter.ReadVal(v) iter.ReadVal(v)
if iter.head == iter.tail { if iter.head == iter.tail {
iter.loadMore() iter.loadMore()
@ -22,6 +45,7 @@ func Unmarshal(data []byte, v interface{}) error {
return iter.Error return iter.Error
} }
// UnmarshalAny adapts to
func UnmarshalAny(data []byte) (Any, error) { func UnmarshalAny(data []byte) (Any, error) {
data = data[:lastNotSpacePos(data)] data = data[:lastNotSpacePos(data)]
iter := ParseBytes(data) iter := ParseBytes(data)
@ -81,15 +105,17 @@ func UnmarshalAnyFromString(str string) (Any, error) {
return nil, iter.Error return nil, iter.Error
} }
// Marshal adapts to json/encoding Marshal API
//
// Marshal returns the JSON encoding of v, adapts to json/encoding Marshal API
// Refer to https://godoc.org/encoding/json#Marshal for more information
func Marshal(v interface{}) ([]byte, error) { func Marshal(v interface{}) ([]byte, error) {
buf := &bytes.Buffer{} stream := NewStream(nil, 256)
stream := NewStream(buf, 512)
stream.WriteVal(v) stream.WriteVal(v)
stream.Flush()
if stream.Error != nil { if stream.Error != nil {
return nil, stream.Error return nil, stream.Error
} }
return buf.Bytes(), nil return stream.Buffer(), nil
} }
func MarshalToString(v interface{}) (string, error) { func MarshalToString(v interface{}) (string, error) {
@ -100,11 +126,19 @@ func MarshalToString(v interface{}) (string, error) {
return string(buf), nil return string(buf), nil
} }
// NewDecoder adapts to json/stream NewDecoder API.
//
// NewDecoder returns a new decoder that reads from r.
//
// Instead of a json/encoding Decoder, an AdaptedDecoder is returned
// Refer to https://godoc.org/encoding/json#NewDecoder for more information
func NewDecoder(reader io.Reader) *AdaptedDecoder { func NewDecoder(reader io.Reader) *AdaptedDecoder {
iter := Parse(reader, 512) iter := Parse(reader, 512)
return &AdaptedDecoder{iter} return &AdaptedDecoder{iter}
} }
// AdaptedDecoder reads and decodes JSON values from an input stream.
// AdaptedDecoder provides identical APIs with json/stream Decoder (Token() and UseNumber() are in progress)
type AdaptedDecoder struct { type AdaptedDecoder struct {
iter *Iterator iter *Iterator
} }
@ -127,6 +161,16 @@ func (adapter *AdaptedDecoder) Buffered() io.Reader {
return bytes.NewReader(remaining) return bytes.NewReader(remaining)
} }
func (decoder *AdaptedDecoder) UseNumber() {
RegisterTypeDecoder("interface {}", func(ptr unsafe.Pointer, iter *Iterator) {
if iter.WhatIsNext() == Number {
*((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
} else {
*((*interface{})(ptr)) = iter.Read()
}
})
}
func NewEncoder(writer io.Writer) *AdaptedEncoder { func NewEncoder(writer io.Writer) *AdaptedEncoder {
stream := NewStream(writer, 512) stream := NewStream(writer, 512)
return &AdaptedEncoder{stream} return &AdaptedEncoder{stream}
@ -143,5 +187,5 @@ func (adapter *AdaptedEncoder) Encode(val interface{}) error {
} }
func (adapter *AdaptedEncoder) SetIndent(prefix, indent string) { func (adapter *AdaptedEncoder) SetIndent(prefix, indent string) {
// not implemented yet adapter.stream.IndentionStep = len(indent)
} }
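Two behavioral changes in this adapter are worth calling out: Unmarshal now rejects non-pointer targets up front, and the adapted decoder gains UseNumber, which decodes JSON numbers into json.Number. A hedged usage sketch based on the code in this file:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	// Passing a non-pointer now fails fast with a descriptive error.
	var n int
	if err := jsoniter.Unmarshal([]byte(`1`), n); err != nil {
		fmt.Println(err) // "the second param must be ptr type"
	}

	// UseNumber keeps numeric precision by decoding into json.Number.
	// Note: as implemented above it registers a package-level interface{}
	// decoder, so it affects subsequent decodes globally.
	dec := jsoniter.NewDecoder(bytes.NewBufferString(`123`))
	dec.UseNumber()
	var v interface{}
	if err := dec.Decode(&v); err == nil {
		fmt.Printf("%T %v\n", v, v) // json.Number 123
	}
}
```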

View File

@ -1,9 +1,9 @@
package jsoniter package jsoniter
import ( import (
"unsafe"
"fmt" "fmt"
"reflect" "reflect"
"unsafe"
) )
type arrayLazyAny struct { type arrayLazyAny struct {
@ -44,7 +44,7 @@ func (any *arrayLazyAny) fillCacheUntil(target int) Any {
return any.cache[target] return any.cache[target]
} }
iter := any.Parse() iter := any.Parse()
if (len(any.remaining) == len(any.buf)) { if len(any.remaining) == len(any.buf) {
iter.head++ iter.head++
c := iter.nextToken() c := iter.nextToken()
if c != ']' { if c != ']' {

View File

@ -2,8 +2,8 @@ package jsoniter
import ( import (
"io" "io"
"unsafe"
"strconv" "strconv"
"unsafe"
) )
type float64LazyAny struct { type float64LazyAny struct {

View File

@ -2,8 +2,8 @@ package jsoniter
import ( import (
"io" "io"
"unsafe"
"strconv" "strconv"
"unsafe"
) )
type int64LazyAny struct { type int64LazyAny struct {

View File

@ -1,9 +1,9 @@
package jsoniter package jsoniter
import ( import (
"unsafe"
"fmt" "fmt"
"reflect" "reflect"
"unsafe"
) )
type objectLazyAny struct { type objectLazyAny struct {

View File

@ -146,7 +146,6 @@ func (any *stringAny) Parse() *Iterator {
return nil return nil
} }
func (any *stringAny) ValueType() ValueType { func (any *stringAny) ValueType() ValueType {
return String return String
} }

View File

@ -1,12 +1,11 @@
package jsoniter package jsoniter
import ( import (
"io"
"strconv" "strconv"
"unsafe" "unsafe"
"io"
) )
type uint64LazyAny struct { type uint64LazyAny struct {
baseAny baseAny
buf []byte buf []byte

View File

@ -1,3 +1,9 @@
//
// Besides, jsoniter.Iterator provides a different set of interfaces
// iterating given bytes/string/reader
// and yielding parsed elements one by one.
// This set of interfaces reads input as required and gives
// better performance.
package jsoniter package jsoniter
import ( import (
@ -276,4 +282,3 @@ func (iter *Iterator) ReadBase64() (ret []byte) {
} }
return ret[:n] return ret[:n]
} }
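As a concrete illustration of "yielding parsed elements one by one", here is a small sketch of the iterator API; it assumes the exported ReadArray and ReadInt helpers (ReadInt is not visible in this diff excerpt):

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	// The iterator pulls one element at a time instead of binding the whole document.
	iter := jsoniter.ParseString(`[1, 2, 3]`)
	total := 0
	for iter.ReadArray() {
		total += iter.ReadInt()
	}
	fmt.Println(total) // 6
}
```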

View File

@ -23,7 +23,6 @@ func (iter *Iterator) ReadArray() (ret bool) {
} }
} }
func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) { func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
c := iter.nextToken() c := iter.nextToken()
if c == '[' { if c == '[' {

View File

@ -2,12 +2,13 @@ package jsoniter
import ( import (
"io" "io"
"math/big"
"strconv" "strconv"
"unsafe" "unsafe"
"math/big"
) )
var floatDigits []int8 var floatDigits []int8
const invalidCharForNumber = int8(-1) const invalidCharForNumber = int8(-1)
const endOfNumber = int8(-2) const endOfNumber = int8(-2)
const dotInNumber = int8(-3) const dotInNumber = int8(-3)
@ -91,14 +92,14 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
if value > uint64SafeToMultiple10 { if value > uint64SafeToMultiple10 {
return iter.readFloat32SlowPath() return iter.readFloat32SlowPath()
} }
value = (value << 3) + (value << 1) + uint64(ind); // value = value * 10 + ind; value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
} }
if c == '.' { if c == '.' {
i++ i++
decimalPlaces := 0; decimalPlaces := 0
for ; i < iter.tail; i++ { for ; i < iter.tail; i++ {
c = iter.buf[i] c = iter.buf[i]
ind := floatDigits[c]; ind := floatDigits[c]
switch ind { switch ind {
case endOfNumber: case endOfNumber:
if decimalPlaces > 0 && decimalPlaces < len(POW10) { if decimalPlaces > 0 && decimalPlaces < len(POW10) {
@ -194,14 +195,14 @@ func (iter *Iterator) readPositiveFloat64() (ret float64) {
if value > uint64SafeToMultiple10 { if value > uint64SafeToMultiple10 {
return iter.readFloat64SlowPath() return iter.readFloat64SlowPath()
} }
value = (value << 3) + (value << 1) + uint64(ind); // value = value * 10 + ind; value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
} }
if c == '.' { if c == '.' {
i++ i++
decimalPlaces := 0; decimalPlaces := 0
for ; i < iter.tail; i++ { for ; i < iter.tail; i++ {
c = iter.buf[i] c = iter.buf[i]
ind := floatDigits[c]; ind := floatDigits[c]
switch ind { switch ind {
case endOfNumber: case endOfNumber:
if decimalPlaces > 0 && decimalPlaces < len(POW10) { if decimalPlaces > 0 && decimalPlaces < len(POW10) {

View File

@ -17,7 +17,7 @@ const uint8Max = uint32(0xffff)
func init() { func init() {
intDigits = make([]int8, 256) intDigits = make([]int8, 256)
for i := 0; i < len(floatDigits); i++ { for i := 0; i < len(intDigits); i++ {
intDigits[i] = invalidCharForNumber intDigits[i] = invalidCharForNumber
} }
for i := int8('0'); i <= int8('9'); i++ { for i := int8('0'); i <= int8('9'); i++ {
@ -194,7 +194,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
} }
value = (value << 3) + (value << 1) + uint32(ind) value = (value << 3) + (value << 1) + uint32(ind)
} }
if (!iter.loadMore()) { if !iter.loadMore() {
return value return value
} }
} }
@ -252,7 +252,7 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
} }
value = (value << 3) + (value << 1) + uint64(ind) value = (value << 3) + (value << 1) + uint64(ind)
} }
if (!iter.loadMore()) { if !iter.loadMore() {
return value return value
} }
} }

View File

@ -84,7 +84,7 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
return false return false
} }
for iter.nextToken() == ',' { for iter.nextToken() == ',' {
field := string(iter.readObjectFieldAsBytes()) field = string(iter.readObjectFieldAsBytes())
if !callback(iter, field) { if !callback(iter, field) {
return false return false
} }
@ -105,6 +105,46 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
return false return false
} }
func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
c := iter.nextToken()
if c == '{' {
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field := iter.ReadString()
if iter.nextToken() != ':' {
iter.reportError("ReadMapCB", "expect : after object field")
return false
}
if !callback(iter, field) {
return false
}
for iter.nextToken() == ',' {
field = iter.ReadString()
if iter.nextToken() != ':' {
iter.reportError("ReadMapCB", "expect : after object field")
return false
}
if !callback(iter, field) {
return false
}
}
return true
}
if c == '}' {
return true
}
iter.reportError("ReadMapCB", `expect " after }`)
return false
}
if c == 'n' {
iter.skipFixedBytes(3)
return true // null
}
iter.reportError("ReadMapCB", `expect { or n`)
return false
}
func (iter *Iterator) readObjectStart() bool { func (iter *Iterator) readObjectStart() bool {
c := iter.nextToken() c := iter.nextToken()
if c == '{' { if c == '{' {
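ReadMapCB is the map counterpart of ReadObjectCB; map keys go through ReadString, so escaped characters inside keys (the #50 fix in the commit log) survive. A hedged sketch of driving it directly:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	// The key "k\"ey" contains an escaped quote, which ReadMapCB handles via ReadString.
	iter := jsoniter.ParseString(`{"k\"ey": 1, "other": 2}`)
	keys := []string{}
	iter.ReadMapCB(func(it *jsoniter.Iterator, key string) bool {
		it.Skip() // the callback must consume the value; only the keys are collected here
		keys = append(keys, key)
		return true // returning false would stop iteration
	})
	fmt.Println(keys) // [k"ey other]
}
```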

View File

@ -29,7 +29,6 @@ func (iter *Iterator) ReadBool() (ret bool) {
return return
} }
func (iter *Iterator) SkipAndReturnBytes() []byte { func (iter *Iterator) SkipAndReturnBytes() []byte {
if iter.reader != nil { if iter.reader != nil {
panic("reader input does not support this api") panic("reader input does not support this api")
@ -40,7 +39,6 @@ func (iter *Iterator) SkipAndReturnBytes() []byte {
return iter.buf[before:after] return iter.buf[before:after]
} }
// Skip skips a json object and positions to relatively the next json object // Skip skips a json object and positions to relatively the next json object
func (iter *Iterator) Skip() { func (iter *Iterator) Skip() {
c := iter.nextToken() c := iter.nextToken()
@ -204,15 +202,15 @@ func (iter *Iterator) skipUntilBreak() {
} }
func (iter *Iterator) skipFixedBytes(n int) { func (iter *Iterator) skipFixedBytes(n int) {
iter.head += n; iter.head += n
if (iter.head >= iter.tail) { if iter.head >= iter.tail {
more := iter.head - iter.tail; more := iter.head - iter.tail
if !iter.loadMore() { if !iter.loadMore() {
if more > 0 { if more > 0 {
iter.reportError("skipFixedBytes", "unexpected end"); iter.reportError("skipFixedBytes", "unexpected end")
} }
return return
} }
iter.head += more; iter.head += more
} }
} }

View File

@ -92,6 +92,7 @@ func (iter *Iterator) readStringSlowPath() (ret string) {
str = append(str, c) str = append(str, c)
} }
} }
iter.reportError("ReadString", "unexpected end of input")
return return
} }

View File

@ -1,26 +1,31 @@
package jsoniter package jsoniter
import ( import (
"encoding"
"encoding/json"
"fmt" "fmt"
"reflect" "reflect"
"sync/atomic" "sync/atomic"
"unsafe" "unsafe"
"encoding/json"
) )
/* // Decoder is an internal type registered to cache as needed.
Reflection on type to create decoders, which is then cached // Don't confuse jsoniter.Decoder with json.Decoder.
Reflection on value is avoided as we can, as the reflect.Value itself will allocate, with following exceptions // For json.Decoder's adapter, refer to jsoniter.AdapterDecoder(todo link).
1. create instance of new value, for example *int will need a int to be allocated //
2. append to slice, if the existing cap is not enough, allocate will be done using Reflect.New // Reflection on type to create decoders, which is then cached
3. assignment to map, both key and value will be reflect.Value // Reflection on value is avoided as we can, as the reflect.Value itself will allocate, with following exceptions
For a simple struct binding, it will be reflect.Value free and allocation free // 1. create instance of new value, for example *int will need a int to be allocated
*/ // 2. append to slice, if the existing cap is not enough, allocate will be done using Reflect.New
// 3. assignment to map, both key and value will be reflect.Value
// For a simple struct binding, it will be reflect.Value free and allocation free
type Decoder interface { type Decoder interface {
decode(ptr unsafe.Pointer, iter *Iterator) decode(ptr unsafe.Pointer, iter *Iterator)
} }
// Encoder is an internal type registered to cache as needed.
// Don't confuse jsoniter.Encoder with json.Encoder.
// For json.Encoder's adapter, refer to jsoniter.AdapterEncoder(todo godoc link).
type Encoder interface { type Encoder interface {
isEmpty(ptr unsafe.Pointer) bool isEmpty(ptr unsafe.Pointer) bool
encode(ptr unsafe.Pointer, stream *Stream) encode(ptr unsafe.Pointer, stream *Stream)
@ -77,6 +82,7 @@ var jsonRawMessageType reflect.Type
var anyType reflect.Type var anyType reflect.Type
var marshalerType reflect.Type var marshalerType reflect.Type
var unmarshalerType reflect.Type var unmarshalerType reflect.Type
var textUnmarshalerType reflect.Type
func init() { func init() {
typeDecoders = map[string]Decoder{} typeDecoders = map[string]Decoder{}
@ -91,6 +97,7 @@ func init() {
anyType = reflect.TypeOf((*Any)(nil)).Elem() anyType = reflect.TypeOf((*Any)(nil)).Elem()
marshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem() marshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem()
unmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem() unmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()
textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
} }
func addDecoderToCache(cacheKey reflect.Type, decoder Decoder) { func addDecoderToCache(cacheKey reflect.Type, decoder Decoder) {
@ -156,15 +163,18 @@ func RegisterExtension(extension ExtensionFunc) {
extensions = append(extensions, extension) extensions = append(extensions, extension)
} }
// CleanDecoders cleans decoders registered // CleanDecoders cleans decoders registered or cached
func CleanDecoders() { func CleanDecoders() {
typeDecoders = map[string]Decoder{} typeDecoders = map[string]Decoder{}
fieldDecoders = map[string]Decoder{} fieldDecoders = map[string]Decoder{}
atomic.StorePointer(&DECODERS, unsafe.Pointer(&map[string]Decoder{}))
} }
// CleanEncoders cleans encoders registered or cached
func CleanEncoders() { func CleanEncoders() {
typeEncoders = map[string]Encoder{} typeEncoders = map[string]Encoder{}
fieldEncoders = map[string]Encoder{} fieldEncoders = map[string]Encoder{}
atomic.StorePointer(&ENCODERS, unsafe.Pointer(&map[string]Encoder{}))
} }
type optionalDecoder struct { type optionalDecoder struct {
@ -274,7 +284,6 @@ func (iter *Iterator) ReadVal(obj interface{}) {
cachedDecoder.decode(e.word, iter) cachedDecoder.decode(e.word, iter)
} }
func (stream *Stream) WriteVal(val interface{}) { func (stream *Stream) WriteVal(val interface{}) {
if nil == val { if nil == val {
stream.WriteNil() stream.WriteNil()
@ -337,6 +346,9 @@ func decoderOfType(typ reflect.Type) (Decoder, error) {
} }
func createDecoderOfType(typ reflect.Type) (Decoder, error) { func createDecoderOfType(typ reflect.Type) (Decoder, error) {
if typ.String() == "[]uint8" {
return &base64Codec{}, nil
}
if typ.AssignableTo(jsonRawMessageType) { if typ.AssignableTo(jsonRawMessageType) {
return &jsonRawMessageCodec{}, nil return &jsonRawMessageCodec{}, nil
} }
@ -424,6 +436,9 @@ func encoderOfType(typ reflect.Type) (Encoder, error) {
} }
func createEncoderOfType(typ reflect.Type) (Encoder, error) { func createEncoderOfType(typ reflect.Type) (Encoder, error) {
if typ.String() == "[]uint8" {
return &base64Codec{}, nil
}
if typ.AssignableTo(jsonRawMessageType) { if typ.AssignableTo(jsonRawMessageType) {
return &jsonRawMessageCodec{}, nil return &jsonRawMessageCodec{}, nil
} }
@ -510,7 +525,7 @@ func decoderOfMap(typ reflect.Type) (Decoder, error) {
return nil, err return nil, err
} }
mapInterface := reflect.New(typ).Interface() mapInterface := reflect.New(typ).Interface()
return &mapDecoder{typ, typ.Elem(), decoder, extractInterface(mapInterface)}, nil return &mapDecoder{typ, typ.Key(), typ.Elem(), decoder, extractInterface(mapInterface)}, nil
} }
func extractInterface(val interface{}) emptyInterface { func extractInterface(val interface{}) emptyInterface {
@ -524,9 +539,5 @@ func encoderOfMap(typ reflect.Type) (Encoder, error) {
return nil, err return nil, err
} }
mapInterface := reflect.New(typ).Elem().Interface() mapInterface := reflect.New(typ).Elem().Interface()
if elemType.Kind() == reflect.Interface && elemType.NumMethod() == 0 {
return &mapInterfaceEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
} else {
return &mapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil return &mapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
} }
}
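CleanDecoders and CleanEncoders now also reset the atomic codec caches, which matters once a type has been (de)serialized and its codec cached; the customize test later in this change calls CleanEncoders() before registering its own []uint8 encoder for exactly this reason. A hedged sketch (the hex encoder is purely illustrative):

```go
package main

import (
	"fmt"
	"unsafe"

	"github.com/json-iterator/go"
)

func main() {
	// Clear registered and cached encoders so the override takes effect
	// even if []uint8 was already encoded (and therefore cached) once.
	jsoniter.CleanEncoders()
	jsoniter.RegisterTypeEncoder("[]uint8", func(ptr unsafe.Pointer, stream *jsoniter.Stream) {
		data := *((*[]byte)(ptr))
		stream.WriteString(fmt.Sprintf("%x", data)) // illustrative: hex instead of the default base64
	})
	out, _ := jsoniter.MarshalToString([]byte{1, 2, 3})
	fmt.Println(out) // "010203"
}
```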

View File

@ -1,10 +1,10 @@
package jsoniter package jsoniter
import ( import (
"unsafe"
"reflect"
"io"
"fmt" "fmt"
"io"
"reflect"
"unsafe"
) )
func decoderOfSlice(typ reflect.Type) (Decoder, error) { func decoderOfSlice(typ reflect.Type) (Decoder, error) {

View File

@ -1,12 +1,16 @@
package jsoniter package jsoniter
import ( import (
"unsafe" "encoding"
"encoding/json"
"reflect" "reflect"
"strconv"
"unsafe"
) )
type mapDecoder struct { type mapDecoder struct {
mapType reflect.Type mapType reflect.Type
keyType reflect.Type
elemType reflect.Type elemType reflect.Type
elemDecoder Decoder elemDecoder Decoder
mapInterface emptyInterface mapInterface emptyInterface
@ -21,12 +25,47 @@ func (decoder *mapDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
if realVal.IsNil() { if realVal.IsNil() {
realVal.Set(reflect.MakeMap(realVal.Type())) realVal.Set(reflect.MakeMap(realVal.Type()))
} }
for field := iter.ReadObject(); field != ""; field = iter.ReadObject() { iter.ReadMapCB(func(iter *Iterator, keyStr string) bool {
elem := reflect.New(decoder.elemType) elem := reflect.New(decoder.elemType)
decoder.elemDecoder.decode(unsafe.Pointer(elem.Pointer()), iter) decoder.elemDecoder.decode(unsafe.Pointer(elem.Pointer()), iter)
// to put into map, we have to use reflection // to put into map, we have to use reflection
realVal.SetMapIndex(reflect.ValueOf(string([]byte(field))), elem.Elem()) keyType := decoder.keyType
switch {
case keyType.Kind() == reflect.String:
realVal.SetMapIndex(reflect.ValueOf(keyStr), elem.Elem())
return true
case keyType.Implements(textUnmarshalerType):
textUnmarshaler := reflect.New(keyType.Elem()).Interface().(encoding.TextUnmarshaler)
err := textUnmarshaler.UnmarshalText([]byte(keyStr))
if err != nil {
iter.reportError("read map key as TextUnmarshaler", err.Error())
return false
} }
realVal.SetMapIndex(reflect.ValueOf(textUnmarshaler), elem.Elem())
return true
default:
switch keyType.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
n, err := strconv.ParseInt(keyStr, 10, 64)
if err != nil || reflect.Zero(keyType).OverflowInt(n) {
iter.reportError("read map key as int64", "read int64 failed")
return false
}
realVal.SetMapIndex(reflect.ValueOf(n).Convert(keyType), elem.Elem())
return true
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
n, err := strconv.ParseUint(keyStr, 10, 64)
if err != nil || reflect.Zero(keyType).OverflowUint(n) {
iter.reportError("read map key as uint64", "read uint64 failed")
return false
}
realVal.SetMapIndex(reflect.ValueOf(n).Convert(keyType), elem.Elem())
return true
}
}
iter.reportError("read map key", "unexpected map key type "+keyType.String())
return true
})
} }
type mapEncoder struct { type mapEncoder struct {
@ -47,13 +86,45 @@ func (encoder *mapEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
if i != 0 { if i != 0 {
stream.WriteMore() stream.WriteMore()
} }
stream.WriteObjectField(key.String()) encodeMapKey(key, stream)
stream.writeByte(':')
val := realVal.MapIndex(key).Interface() val := realVal.MapIndex(key).Interface()
encoder.elemEncoder.encodeInterface(val, stream) encoder.elemEncoder.encodeInterface(val, stream)
} }
stream.WriteObjectEnd() stream.WriteObjectEnd()
} }
func encodeMapKey(key reflect.Value, stream *Stream) {
if key.Kind() == reflect.String {
stream.WriteString(key.String())
return
}
if tm, ok := key.Interface().(encoding.TextMarshaler); ok {
buf, err := tm.MarshalText()
if err != nil {
stream.Error = err
return
}
stream.writeByte('"')
stream.Write(buf)
stream.writeByte('"')
return
}
switch key.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
stream.writeByte('"')
stream.WriteInt64(key.Int())
stream.writeByte('"')
return
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
stream.writeByte('"')
stream.WriteUint64(key.Uint())
stream.writeByte('"')
return
}
stream.Error = &json.UnsupportedTypeError{key.Type()}
}
func (encoder *mapEncoder) encodeInterface(val interface{}, stream *Stream) { func (encoder *mapEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder) writeToStream(val, stream, encoder)
} }
@ -65,41 +136,3 @@ func (encoder *mapEncoder) isEmpty(ptr unsafe.Pointer) bool {
realVal := reflect.ValueOf(*realInterface) realVal := reflect.ValueOf(*realInterface)
return realVal.Len() == 0 return realVal.Len() == 0
} }
type mapInterfaceEncoder struct {
mapType reflect.Type
elemType reflect.Type
elemEncoder Encoder
mapInterface emptyInterface
}
func (encoder *mapInterfaceEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
mapInterface := encoder.mapInterface
mapInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
realVal := reflect.ValueOf(*realInterface)
stream.WriteObjectStart()
for i, key := range realVal.MapKeys() {
if i != 0 {
stream.WriteMore()
}
stream.WriteObjectField(key.String())
val := realVal.MapIndex(key).Interface()
encoder.elemEncoder.encode(unsafe.Pointer(&val), stream)
}
stream.WriteObjectEnd()
}
func (encoder *mapInterfaceEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
}
func (encoder *mapInterfaceEncoder) isEmpty(ptr unsafe.Pointer) bool {
mapInterface := encoder.mapInterface
mapInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
realVal := reflect.ValueOf(*realInterface)
return realVal.Len() == 0
}
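With the key-type switch above, map keys are no longer limited to plain strings: integer kinds are quoted on encode and parsed back via strconv on decode, and keys implementing encoding.TextMarshaler/TextUnmarshaler round-trip through their text form. Mirroring the map tests added later in this change:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	// Integer keys are written as quoted strings, matching encoding/json.
	out, _ := jsoniter.MarshalToString(map[int]string{1: "2"})
	fmt.Println(out) // {"1":"2"}

	// ...and parsed back on decode.
	var decoded map[int]string
	_ = jsoniter.UnmarshalFromString(`{"1":"2"}`, &decoded)
	fmt.Println(decoded[1]) // 2
}
```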

View File

@ -1,8 +1,9 @@
package jsoniter package jsoniter
import ( import (
"unsafe" "encoding/base64"
"encoding/json" "encoding/json"
"unsafe"
) )
type stringCodec struct { type stringCodec struct {
@ -296,6 +297,10 @@ type nonEmptyInterfaceCodec struct {
func (codec *nonEmptyInterfaceCodec) decode(ptr unsafe.Pointer, iter *Iterator) { func (codec *nonEmptyInterfaceCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
nonEmptyInterface := (*nonEmptyInterface)(ptr) nonEmptyInterface := (*nonEmptyInterface)(ptr)
if nonEmptyInterface.itab == nil {
iter.reportError("read non-empty interface", "do not know which concrete type to decode to")
return
}
var i interface{} var i interface{}
e := (*emptyInterface)(unsafe.Pointer(&i)) e := (*emptyInterface)(unsafe.Pointer(&i))
e.typ = nonEmptyInterface.itab.typ e.typ = nonEmptyInterface.itab.typ
@ -379,6 +384,49 @@ func (encoder *jsonRawMessageCodec) isEmpty(ptr unsafe.Pointer) bool {
return len(*((*json.RawMessage)(ptr))) == 0 return len(*((*json.RawMessage)(ptr))) == 0
} }
type base64Codec struct {
}
func (codec *base64Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
encoding := base64.StdEncoding
src := iter.SkipAndReturnBytes()
src = src[1 : len(src)-1]
decodedLen := encoding.DecodedLen(len(src))
dst := make([]byte, decodedLen)
_, err := encoding.Decode(dst, src)
if err != nil {
iter.reportError("decode base64", err.Error())
} else {
*((*[]byte)(ptr)) = dst
}
}
func (codec *base64Codec) encode(ptr unsafe.Pointer, stream *Stream) {
encoding := base64.StdEncoding
stream.writeByte('"')
src := *((*[]byte)(ptr))
toGrow := encoding.EncodedLen(len(src))
stream.ensure(toGrow)
encoding.Encode(stream.buf[stream.n:], src)
stream.n += toGrow
stream.writeByte('"')
}
func (encoder *base64Codec) encodeInterface(val interface{}, stream *Stream) {
encoding := base64.StdEncoding
stream.writeByte('"')
src := val.([]byte)
toGrow := encoding.EncodedLen(len(src))
stream.ensure(toGrow)
encoding.Encode(stream.buf[stream.n:], src)
stream.n += toGrow
stream.writeByte('"')
}
func (encoder *base64Codec) isEmpty(ptr unsafe.Pointer) bool {
return len(*((*[]byte)(ptr))) == 0
}
type stringNumberDecoder struct { type stringNumberDecoder struct {
elemDecoder Decoder elemDecoder Decoder
} }
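The new base64Codec gives []byte the same representation as encoding/json, a base64 string, encoded directly into the stream buffer (the "marshal without copy" commit above). For example, matching the byte-array tests added below:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	// []byte round-trips as a base64 string, just like encoding/json.
	out, _ := jsoniter.Marshal([]byte{1, 2, 3})
	fmt.Println(string(out)) // "AQID"

	var data []byte
	_ = jsoniter.Unmarshal([]byte(`"AQID"`), &data)
	fmt.Println(data) // [1 2 3]
}
```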

View File

@ -1,12 +1,12 @@
package jsoniter package jsoniter
import ( import (
"io"
"fmt" "fmt"
"io"
"reflect" "reflect"
"unsafe"
"strings" "strings"
"unicode" "unicode"
"unsafe"
) )
func encoderOfStruct(typ reflect.Type) (Encoder, error) { func encoderOfStruct(typ reflect.Type) (Encoder, error) {
@ -138,7 +138,7 @@ func EnableUnexportedStructFieldsSupport() {
func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) (Decoder, error) { func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) (Decoder, error) {
knownHash := map[int32]struct{}{ knownHash := map[int32]struct{}{
0: struct{}{}, 0: {},
} }
switch len(fields) { switch len(fields) {
case 0: case 0:

View File

@ -32,12 +32,19 @@ func (b *Stream) Buffered() int {
return b.n return b.n
} }
func (b *Stream) Buffer() []byte {
return b.buf[:b.n]
}
// Write writes the contents of p into the buffer. // Write writes the contents of p into the buffer.
// It returns the number of bytes written. // It returns the number of bytes written.
// If nn < len(p), it also returns an error explaining // If nn < len(p), it also returns an error explaining
// why the write is short. // why the write is short.
func (b *Stream) Write(p []byte) (nn int, err error) { func (b *Stream) Write(p []byte) (nn int, err error) {
for len(p) > b.Available() && b.Error == nil { for len(p) > b.Available() && b.Error == nil {
if b.out == nil {
b.growAtLeast(len(p))
} else {
var n int var n int
if b.Buffered() == 0 { if b.Buffered() == 0 {
// Large write, empty buffer. // Large write, empty buffer.
@ -51,6 +58,7 @@ func (b *Stream) Write(p []byte) (nn int, err error) {
nn += n nn += n
p = p[n:] p = p[n:]
} }
}
if b.Error != nil { if b.Error != nil {
return nn, b.Error return nn, b.Error
} }
@ -60,14 +68,13 @@ func (b *Stream) Write(p []byte) (nn int, err error) {
return nn, nil return nn, nil
} }
// WriteByte writes a single byte. // WriteByte writes a single byte.
func (b *Stream) writeByte(c byte) { func (b *Stream) writeByte(c byte) {
if b.Error != nil { if b.Error != nil {
return return
} }
if b.Available() <= 0 && b.Flush() != nil { if b.Available() < 1 {
return b.growAtLeast(1)
} }
b.buf[b.n] = c b.buf[b.n] = c
b.n++ b.n++
@ -77,8 +84,8 @@ func (b *Stream) writeTwoBytes(c1 byte, c2 byte) {
if b.Error != nil { if b.Error != nil {
return return
} }
if b.Available() <= 1 && b.Flush() != nil { if b.Available() < 2 {
return b.growAtLeast(2)
} }
b.buf[b.n] = c1 b.buf[b.n] = c1
b.buf[b.n+1] = c2 b.buf[b.n+1] = c2
@ -89,8 +96,8 @@ func (b *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) {
if b.Error != nil { if b.Error != nil {
return return
} }
if b.Available() <= 2 && b.Flush() != nil { if b.Available() < 3 {
return b.growAtLeast(3)
} }
b.buf[b.n] = c1 b.buf[b.n] = c1
b.buf[b.n+1] = c2 b.buf[b.n+1] = c2
@ -102,8 +109,8 @@ func (b *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) {
if b.Error != nil { if b.Error != nil {
return return
} }
if b.Available() <= 3 && b.Flush() != nil { if b.Available() < 4 {
return b.growAtLeast(4)
} }
b.buf[b.n] = c1 b.buf[b.n] = c1
b.buf[b.n+1] = c2 b.buf[b.n+1] = c2
@ -116,8 +123,8 @@ func (b *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) {
if b.Error != nil { if b.Error != nil {
return return
} }
if b.Available() <= 3 && b.Flush() != nil { if b.Available() < 5 {
return b.growAtLeast(5)
} }
b.buf[b.n] = c1 b.buf[b.n] = c1
b.buf[b.n+1] = c2 b.buf[b.n+1] = c2
@ -129,6 +136,9 @@ func (b *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) {
// Flush writes any buffered data to the underlying io.Writer. // Flush writes any buffered data to the underlying io.Writer.
func (b *Stream) Flush() error { func (b *Stream) Flush() error {
if b.out == nil {
return nil
}
if b.Error != nil { if b.Error != nil {
return b.Error return b.Error
} }
@ -151,13 +161,28 @@ func (b *Stream) Flush() error {
return nil return nil
} }
func (b *Stream) WriteRaw(s string) { func (b *Stream) ensure(minimal int) {
for len(s) > b.Available() && b.Error == nil { available := b.Available()
n := copy(b.buf[b.n:], s) if available < minimal {
b.n += n if b.n > 1024 {
s = s[n:]
b.Flush() b.Flush()
} }
b.growAtLeast(minimal)
}
}
func (b *Stream) growAtLeast(minimal int) {
toGrow := len(b.buf)
if toGrow < minimal {
toGrow = minimal
}
newBuf := make([]byte, len(b.buf)+toGrow)
copy(newBuf, b.Buffer())
b.buf = newBuf
}
func (b *Stream) WriteRaw(s string) {
b.ensure(len(s))
if b.Error != nil { if b.Error != nil {
return return
} }
@ -166,18 +191,13 @@ func (b *Stream) WriteRaw(s string) {
} }
func (stream *Stream) WriteString(s string) { func (stream *Stream) WriteString(s string) {
stream.ensure(32)
valLen := len(s) valLen := len(s)
toWriteLen := valLen toWriteLen := valLen
bufLengthMinusTwo := len(stream.buf) - 2 // make room for the quotes bufLengthMinusTwo := len(stream.buf) - 2 // make room for the quotes
if stream.n+toWriteLen > bufLengthMinusTwo { if stream.n+toWriteLen > bufLengthMinusTwo {
toWriteLen = bufLengthMinusTwo - stream.n toWriteLen = bufLengthMinusTwo - stream.n
} }
if toWriteLen < 0 {
stream.Flush()
if stream.n + toWriteLen > bufLengthMinusTwo {
toWriteLen = bufLengthMinusTwo - stream.n
}
}
n := stream.n n := stream.n
stream.buf[n] = '"' stream.buf[n] = '"'
n++ n++
@ -189,7 +209,7 @@ func (stream *Stream) WriteString(s string) {
stream.buf[n] = c stream.buf[n] = c
n++ n++
} else { } else {
break; break
} }
} }
if i == valLen { if i == valLen {
@ -200,14 +220,14 @@ func (stream *Stream) WriteString(s string) {
} }
stream.n = n stream.n = n
// for the remaining parts, we process them char by char // for the remaining parts, we process them char by char
stream.writeStringSlowPath(s, i, valLen); stream.writeStringSlowPath(s, i, valLen)
stream.writeByte('"') stream.writeByte('"')
} }
func (stream *Stream) writeStringSlowPath(s string, i int, valLen int) { func (stream *Stream) writeStringSlowPath(s string, i int, valLen int) {
for ; i < valLen; i++ { for ; i < valLen; i++ {
c := s[i] c := s[i]
switch (c) { switch c {
case '"': case '"':
stream.writeTwoBytes('\\', '"') stream.writeTwoBytes('\\', '"')
case '\\': case '\\':
@ -223,7 +243,7 @@ func (stream *Stream) writeStringSlowPath(s string, i int, valLen int) {
case '\t': case '\t':
stream.writeTwoBytes('\\', 't') stream.writeTwoBytes('\\', 't')
default: default:
stream.writeByte(c); stream.writeByte(c)
} }
} }
} }
@ -293,21 +313,14 @@ func (stream *Stream) WriteArrayEnd() {
} }
func (stream *Stream) writeIndention(delta int) { func (stream *Stream) writeIndention(delta int) {
if (stream.indention == 0) { if stream.indention == 0 {
return return
} }
stream.writeByte('\n') stream.writeByte('\n')
toWrite := stream.indention - delta toWrite := stream.indention - delta
i := 0 stream.ensure(toWrite)
for { for i := 0; i < toWrite && stream.n < len(stream.buf); i++ {
for ; i < toWrite && stream.n < len(stream.buf); i++ {
stream.buf[stream.n] = ' ' stream.buf[stream.n] = ' '
stream.n++ stream.n++
} }
if i == toWrite {
break;
} else {
stream.Flush()
}
}
} }
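The stream now distinguishes between having an io.Writer and not: with out == nil it never flushes, and ensure/growAtLeast simply enlarge the in-memory buffer, which is how the new Marshal builds its output (NewStream(nil, 256) plus Buffer()). A hedged sketch of the nil-writer mode:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	// With a nil writer, writes grow the in-memory buffer instead of flushing,
	// and Buffer() exposes everything written so far.
	stream := jsoniter.NewStream(nil, 1) // deliberately tiny initial buffer
	stream.WriteVal([]int{1, 2, 3})
	if stream.Error == nil {
		fmt.Println(string(stream.Buffer())) // [1,2,3]
	}
}
```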

View File

@ -21,7 +21,7 @@ func (stream *Stream) WriteFloat32Lossy(val float32) {
val = -val val = -val
} }
if val > 0x4ffffff { if val > 0x4ffffff {
stream.WriteRaw(strconv.FormatFloat(float64(val), 'f', -1, 32)); stream.WriteRaw(strconv.FormatFloat(float64(val), 'f', -1, 32))
return return
} }
precision := 6 precision := 6
@ -33,9 +33,7 @@ func (stream *Stream) WriteFloat32Lossy(val float32) {
return return
} }
stream.writeByte('.') stream.writeByte('.')
if stream.Available() < 10 { stream.ensure(10)
stream.Flush()
}
for p := precision - 1; p > 0 && fval < POW10[p]; p-- { for p := precision - 1; p > 0 && fval < POW10[p]; p-- {
stream.writeByte('0') stream.writeByte('0')
} }
@ -55,7 +53,7 @@ func (stream *Stream) WriteFloat64Lossy(val float64) {
val = -val val = -val
} }
if val > 0x4ffffff { if val > 0x4ffffff {
stream.WriteRaw(strconv.FormatFloat(val, 'f', -1, 64)); stream.WriteRaw(strconv.FormatFloat(val, 'f', -1, 64))
return return
} }
precision := 6 precision := 6
@ -67,9 +65,7 @@ func (stream *Stream) WriteFloat64Lossy(val float64) {
return return
} }
stream.writeByte('.') stream.writeByte('.')
if stream.Available() < 10 { stream.ensure(10)
stream.Flush()
}
for p := precision - 1; p > 0 && fval < POW10[p]; p-- { for p := precision - 1; p > 0 && fval < POW10[p]; p-- {
stream.writeByte('0') stream.writeByte('0')
} }
@ -79,6 +75,8 @@ func (stream *Stream) WriteFloat64Lossy(val float64) {
} }
} }
// EnableLossyFloatMarshalling keeps 10**(-6) precision
// for float variables for better performance.
func EnableLossyFloatMarshalling() { func EnableLossyFloatMarshalling() {
// for better performance // for better performance
RegisterTypeEncoder("float32", func(ptr unsafe.Pointer, stream *Stream) { RegisterTypeEncoder("float32", func(ptr unsafe.Pointer, stream *Stream) {
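EnableLossyFloatMarshalling trades exact formatting for speed by capping floats at six fractional digits, the 10^-6 precision mentioned in the new doc comment. A hedged sketch; the exact digits depend on the truncation above:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	// Registers faster float32/float64 encoders globally; output keeps at
	// most six fractional digits rather than the shortest exact form.
	jsoniter.EnableLossyFloatMarshalling()
	out, _ := jsoniter.MarshalToString(1.2345678)
	fmt.Println(out) // approximately "1.234567" (capped at 10^-6 precision)
}
```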

View File

@ -1,46 +1,11 @@
package jsoniter package jsoniter
var digits []uint8
var digitTens []uint8
var digitOnes []uint8
var DIGITS []uint32 var DIGITS []uint32
func init() { func init() {
digits = []uint8{
'0', '1', '2', '3', '4', '5',
'6', '7', '8', '9', 'a', 'b',
'c', 'd', 'e', 'f', 'g', 'h',
'i', 'j', 'k', 'l', 'm', 'n',
'o', 'p', 'q', 'r', 's', 't',
'u', 'v', 'w', 'x', 'y', 'z',
}
digitTens = []uint8{
'0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
'1', '1', '1', '1', '1', '1', '1', '1', '1', '1',
'2', '2', '2', '2', '2', '2', '2', '2', '2', '2',
'3', '3', '3', '3', '3', '3', '3', '3', '3', '3',
'4', '4', '4', '4', '4', '4', '4', '4', '4', '4',
'5', '5', '5', '5', '5', '5', '5', '5', '5', '5',
'6', '6', '6', '6', '6', '6', '6', '6', '6', '6',
'7', '7', '7', '7', '7', '7', '7', '7', '7', '7',
'8', '8', '8', '8', '8', '8', '8', '8', '8', '8',
'9', '9', '9', '9', '9', '9', '9', '9', '9', '9',
}
digitOnes = []uint8{
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
}
DIGITS = make([]uint32, 1000) DIGITS = make([]uint32, 1000)
for i := uint32(0); i < 1000; i++ { for i := uint32(0); i < 1000; i++ {
DIGITS[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i % 10 + '0'; DIGITS[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0'
if i < 10 { if i < 10 {
DIGITS[i] += 2 << 24 DIGITS[i] += 2 << 24
} else if i < 100 { } else if i < 100 {
@ -72,19 +37,15 @@ func writeBuf(buf []byte, v uint32, n int) {
} }
func (stream *Stream) WriteUint8(val uint8) { func (stream *Stream) WriteUint8(val uint8) {
if stream.Available() < 3 { stream.ensure(3)
stream.Flush()
}
stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n) stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n)
} }
func (stream *Stream) WriteInt8(nval int8) { func (stream *Stream) WriteInt8(nval int8) {
if stream.Available() < 4 { stream.ensure(4)
stream.Flush()
}
n := stream.n n := stream.n
var val uint8 var val uint8
if (nval < 0) { if nval < 0 {
val = uint8(-nval) val = uint8(-nval)
stream.buf[n] = '-' stream.buf[n] = '-'
n++ n++
@ -95,15 +56,13 @@ func (stream *Stream) WriteInt8(nval int8) {
} }
func (stream *Stream) WriteUint16(val uint16) { func (stream *Stream) WriteUint16(val uint16) {
if stream.Available() < 5 { stream.ensure(5)
stream.Flush()
}
q1 := val / 1000 q1 := val / 1000
if q1 == 0 { if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n) stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n)
return return
} }
r1 := val - q1 * 1000; r1 := val - q1*1000
n := writeFirstBuf(stream.buf, DIGITS[q1], stream.n) n := writeFirstBuf(stream.buf, DIGITS[q1], stream.n)
writeBuf(stream.buf, DIGITS[r1], n) writeBuf(stream.buf, DIGITS[r1], n)
stream.n = n + 3 stream.n = n + 3
@ -111,12 +70,10 @@ func (stream *Stream) WriteUint16(val uint16) {
} }
func (stream *Stream) WriteInt16(nval int16) { func (stream *Stream) WriteInt16(nval int16) {
if stream.Available() < 6 { stream.ensure(6)
stream.Flush()
}
n := stream.n n := stream.n
var val uint16 var val uint16
if (nval < 0) { if nval < 0 {
val = uint16(-nval) val = uint16(-nval)
stream.buf[n] = '-' stream.buf[n] = '-'
n++ n++
@ -128,7 +85,7 @@ func (stream *Stream) WriteInt16(nval int16) {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n) stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return return
} }
r1 := val - q1 * 1000; r1 := val - q1*1000
n = writeFirstBuf(stream.buf, DIGITS[q1], n) n = writeFirstBuf(stream.buf, DIGITS[q1], n)
writeBuf(stream.buf, DIGITS[r1], n) writeBuf(stream.buf, DIGITS[r1], n)
stream.n = n + 3 stream.n = n + 3
@ -136,16 +93,14 @@ func (stream *Stream) WriteInt16(nval int16) {
} }
func (stream *Stream) WriteUint32(val uint32) { func (stream *Stream) WriteUint32(val uint32) {
if stream.Available() < 10 { stream.ensure(10)
stream.Flush()
}
n := stream.n n := stream.n
q1 := val / 1000 q1 := val / 1000
if q1 == 0 { if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n) stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return return
} }
r1 := val - q1 * 1000; r1 := val - q1*1000
q2 := q1 / 1000 q2 := q1 / 1000
if q2 == 0 { if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n) n := writeFirstBuf(stream.buf, DIGITS[q1], n)
@ -170,12 +125,10 @@ func (stream *Stream) WriteUint32(val uint32) {
} }
func (stream *Stream) WriteInt32(nval int32) { func (stream *Stream) WriteInt32(nval int32) {
if stream.Available() < 11 { stream.ensure(11)
stream.Flush()
}
n := stream.n n := stream.n
var val uint32 var val uint32
if (nval < 0) { if nval < 0 {
val = uint32(-nval) val = uint32(-nval)
stream.buf[n] = '-' stream.buf[n] = '-'
n++ n++
@ -187,7 +140,7 @@ func (stream *Stream) WriteInt32(nval int32) {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n) stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return return
} }
r1 := val - q1 * 1000; r1 := val - q1*1000
q2 := q1 / 1000 q2 := q1 / 1000
if q2 == 0 { if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n) n := writeFirstBuf(stream.buf, DIGITS[q1], n)
@ -212,16 +165,14 @@ func (stream *Stream) WriteInt32(nval int32) {
} }
func (stream *Stream) WriteUint64(val uint64) { func (stream *Stream) WriteUint64(val uint64) {
if stream.Available() < 20 { stream.ensure(20)
stream.Flush()
}
n := stream.n n := stream.n
q1 := val / 1000 q1 := val / 1000
if q1 == 0 { if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n) stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return return
} }
r1 := val - q1 * 1000; r1 := val - q1*1000
q2 := q1 / 1000 q2 := q1 / 1000
if q2 == 0 { if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n) n := writeFirstBuf(stream.buf, DIGITS[q1], n)
@ -278,12 +229,10 @@ func (stream *Stream) WriteUint64(val uint64) {
} }
func (stream *Stream) WriteInt64(nval int64) { func (stream *Stream) WriteInt64(nval int64) {
if stream.Available() < 20 { stream.ensure(20)
stream.Flush()
}
n := stream.n n := stream.n
var val uint64 var val uint64
if (nval < 0) { if nval < 0 {
val = uint64(-nval) val = uint64(-nval)
stream.buf[n] = '-' stream.buf[n] = '-'
n++ n++
@ -295,7 +244,7 @@ func (stream *Stream) WriteInt64(nval int64) {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n) stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return return
} }
r1 := val - q1 * 1000; r1 := val - q1*1000
q2 := q1 / 1000 q2 := q1 / 1000
if q2 == 0 { if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n) n := writeFirstBuf(stream.buf, DIGITS[q1], n)

View File

@ -1,11 +1,11 @@
package jsoniter package jsoniter
import ( import (
"testing"
"github.com/json-iterator/go/require"
"encoding/json"
"bytes" "bytes"
"encoding/json"
"github.com/json-iterator/go/require"
"io/ioutil" "io/ioutil"
"testing"
) )
func Test_new_decoder(t *testing.T) { func Test_new_decoder(t *testing.T) {
@ -44,3 +44,17 @@ func Test_new_encoder(t *testing.T) {
encoder2.Encode([]int{1}) encoder2.Encode([]int{1})
should.Equal("[1]", buf2.String()) should.Equal("[1]", buf2.String())
} }
func Test_use_number(t *testing.T) {
should := require.New(t)
decoder1 := json.NewDecoder(bytes.NewBufferString(`123`))
decoder1.UseNumber()
decoder2 := NewDecoder(bytes.NewBufferString(`123`))
decoder2.UseNumber()
var obj1 interface{}
should.Nil(decoder1.Decode(&obj1))
should.Equal(json.Number("123"), obj1)
var obj2 interface{}
should.Nil(decoder2.Decode(&obj2))
should.Equal(json.Number("123"), obj2)
}

View File

@ -1,11 +1,11 @@
package jsoniter package jsoniter
import ( import (
"encoding/json"
"testing"
"github.com/json-iterator/go/require"
"bytes" "bytes"
"encoding/json"
"github.com/json-iterator/go/require"
"io" "io"
"testing"
) )
func Test_empty_array(t *testing.T) { func Test_empty_array(t *testing.T) {
@ -117,9 +117,9 @@ func Test_array_lazy_any_get_all(t *testing.T) {
func Test_array_wrapper_any_get_all(t *testing.T) { func Test_array_wrapper_any_get_all(t *testing.T) {
should := require.New(t) should := require.New(t)
any := wrapArray([][]int{ any := wrapArray([][]int{
[]int{1, 2}, {1, 2},
[]int{3, 4}, {3, 4},
[]int{5, 6}, {5, 6},
}) })
should.Equal("[1,3,5]", any.Get('*', 0).ToString()) should.Equal("[1,3,5]", any.Get('*', 0).ToString())
} }
@ -264,6 +264,27 @@ func Test_json_RawMessage(t *testing.T) {
should.Equal(`[1,2,3]`, str) should.Equal(`[1,2,3]`, str)
} }
func Test_encode_byte_array(t *testing.T) {
should := require.New(t)
bytes, err := json.Marshal([]byte{1, 2, 3})
should.Nil(err)
should.Equal(`"AQID"`, string(bytes))
bytes, err = Marshal([]byte{1, 2, 3})
should.Nil(err)
should.Equal(`"AQID"`, string(bytes))
}
func Test_decode_byte_array(t *testing.T) {
should := require.New(t)
data := []byte{}
err := json.Unmarshal([]byte(`"AQID"`), &data)
should.Nil(err)
should.Equal([]byte{1, 2, 3}, data)
err = Unmarshal([]byte(`"AQID"`), &data)
should.Nil(err)
should.Equal([]byte{1, 2, 3}, data)
}
func Benchmark_jsoniter_array(b *testing.B) { func Benchmark_jsoniter_array(b *testing.B) {
b.ReportAllocs() b.ReportAllocs()
input := []byte(`[1,2,3,4,5,6,7,8,9]`) input := []byte(`[1,2,3,4,5,6,7,8,9]`)

View File

@ -1,9 +1,9 @@
package jsoniter package jsoniter
import ( import (
"testing"
"bytes" "bytes"
"github.com/json-iterator/go/require" "github.com/json-iterator/go/require"
"testing"
) )
func Test_true(t *testing.T) { func Test_true(t *testing.T) {
@ -38,7 +38,6 @@ func Test_write_true_false(t *testing.T) {
should.Equal("truefalse", buf.String()) should.Equal("truefalse", buf.String())
} }
func Test_write_val_bool(t *testing.T) { func Test_write_val_bool(t *testing.T) {
should := require.New(t) should := require.New(t)
buf := &bytes.Buffer{} buf := &bytes.Buffer{}

View File

@ -1,13 +1,13 @@
package jsoniter package jsoniter
import ( import (
"encoding/json"
"github.com/json-iterator/go/require"
"reflect" "reflect"
"strconv" "strconv"
"testing" "testing"
"time" "time"
"unsafe" "unsafe"
"github.com/json-iterator/go/require"
"encoding/json"
) )
func Test_customize_type_decoder(t *testing.T) { func Test_customize_type_decoder(t *testing.T) {
@ -45,6 +45,7 @@ func Test_customize_type_encoder(t *testing.T) {
} }
func Test_customize_byte_array_encoder(t *testing.T) { func Test_customize_byte_array_encoder(t *testing.T) {
CleanEncoders()
should := require.New(t) should := require.New(t)
RegisterTypeEncoder("[]uint8", func(ptr unsafe.Pointer, stream *Stream) { RegisterTypeEncoder("[]uint8", func(ptr unsafe.Pointer, stream *Stream) {
t := *((*[]byte)(ptr)) t := *((*[]byte)(ptr))

View File

@ -1,9 +1,10 @@
package jsoniter package jsoniter
import ( import (
"encoding/json"
"fmt" "fmt"
"testing"
"github.com/json-iterator/go/require" "github.com/json-iterator/go/require"
"testing"
) )
func Test_bind_api_demo(t *testing.T) { func Test_bind_api_demo(t *testing.T) {
@ -22,3 +23,64 @@ func Test_iterator_api_demo(t *testing.T) {
} }
fmt.Println(total) fmt.Println(total)
} }
type People struct {
Name string
Gender string
Age int
Address string
Mobile string
Country string
Height int
}
func jsoniterMarshal(p *People) error {
_, err := Marshal(p)
if nil != err {
return err
}
return nil
}
func stdMarshal(p *People) error {
_, err := json.Marshal(p)
if nil != err {
return err
}
return nil
}
func BenchmarkJosniterMarshal(b *testing.B) {
var p People
p.Address = "上海市徐汇区漕宝路"
p.Age = 30
p.Country = "中国"
p.Gender = "male"
p.Height = 170
p.Mobile = "18502120533"
p.Name = "Elvin"
b.ReportAllocs()
for i := 0; i < b.N; i++ {
err := jsoniterMarshal(&p)
if nil != err {
b.Error(err)
}
}
}
func BenchmarkStdMarshal(b *testing.B) {
var p People
p.Address = "上海市徐汇区漕宝路"
p.Age = 30
p.Country = "中国"
p.Gender = "male"
p.Height = 170
p.Mobile = "18502120533"
p.Name = "Elvin"
b.ReportAllocs()
for i := 0; i < b.N; i++ {
err := stdMarshal(&p)
if nil != err {
b.Error(err)
}
}
}

View File

@ -1,9 +1,9 @@
package jsoniter package jsoniter
import ( import (
"github.com/json-iterator/go/require"
"io" "io"
"testing" "testing"
"github.com/json-iterator/go/require"
) )
func Test_string_end(t *testing.T) { func Test_string_end(t *testing.T) {

View File

@ -1,12 +1,12 @@
package jsoniter package jsoniter
import ( import (
"bytes"
"encoding/json" "encoding/json"
"fmt" "fmt"
"testing"
"github.com/json-iterator/go/require" "github.com/json-iterator/go/require"
"bytes"
"strconv" "strconv"
"testing"
) )
func Test_read_big_float(t *testing.T) { func Test_read_big_float(t *testing.T) {
@ -158,6 +158,19 @@ func Test_read_float64_cursor(t *testing.T) {
should.Equal(float64(2), iter.Read()) should.Equal(float64(2), iter.Read())
} }
func Test_read_float_scientific(t *testing.T) {
should := require.New(t)
var obj interface{}
should.Nil(UnmarshalFromString(`1e1`, &obj))
should.Equal(float64(10), obj)
should.Nil(json.Unmarshal([]byte(`1e1`), &obj))
should.Equal(float64(10), obj)
should.Nil(UnmarshalFromString(`1.0e1`, &obj))
should.Equal(float64(10), obj)
should.Nil(json.Unmarshal([]byte(`1.0e1`), &obj))
should.Equal(float64(10), obj)
}
func Benchmark_jsoniter_float(b *testing.B) { func Benchmark_jsoniter_float(b *testing.B) {
b.ReportAllocs() b.ReportAllocs()
input := []byte(`1.1123,`) input := []byte(`1.1123,`)

View File

@ -3,12 +3,12 @@ package jsoniter
import ( import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"testing"
"github.com/json-iterator/go/require"
"fmt" "fmt"
"strconv" "github.com/json-iterator/go/require"
"io/ioutil"
"io" "io"
"io/ioutil"
"strconv"
"testing"
) )
func Test_read_uint64_invalid(t *testing.T) { func Test_read_uint64_invalid(t *testing.T) {

View File

@ -1,9 +1,10 @@
package jsoniter package jsoniter
import ( import (
"testing"
"github.com/json-iterator/go/require" "github.com/json-iterator/go/require"
"testing"
"unsafe" "unsafe"
"encoding/json"
) )
func Test_write_array_of_interface(t *testing.T) { func Test_write_array_of_interface(t *testing.T) {
@ -138,3 +139,17 @@ func Test_encode_object_contain_non_empty_interface(t *testing.T) {
should.Nil(err) should.Nil(err)
should.Equal(`{"Field":"hello"}`, str) should.Equal(`{"Field":"hello"}`, str)
} }
func Test_nil_non_empty_interface(t *testing.T) {
CleanEncoders()
CleanDecoders()
type TestObject struct {
Field []MyInterface
}
should := require.New(t)
obj := TestObject{}
b := []byte(`{"Field":["AAA"]}`)
should.NotNil(json.Unmarshal(b, &obj))
should.NotNil(Unmarshal(b, &obj))
}

View File

@ -1,8 +1,10 @@
package jsoniter package jsoniter
import ( import (
"testing"
"github.com/json-iterator/go/require" "github.com/json-iterator/go/require"
"math/big"
"testing"
"encoding/json"
) )
func Test_read_map(t *testing.T) { func Test_read_map(t *testing.T) {
@ -45,7 +47,7 @@ func Test_wrap_map(t *testing.T) {
func Test_map_wrapper_any_get_all(t *testing.T) { func Test_map_wrapper_any_get_all(t *testing.T) {
should := require.New(t) should := require.New(t)
any := Wrap(map[string][]int{"Field1": []int{1, 2}}) any := Wrap(map[string][]int{"Field1": {1, 2}})
should.Equal(`{"Field1":1}`, any.Get('*', 0).ToString()) should.Equal(`{"Field1":1}`, any.Get('*', 0).ToString())
} }
@ -67,3 +69,61 @@ func Test_slice_of_map(t *testing.T) {
should.Nil(UnmarshalFromString(str, &val)) should.Nil(UnmarshalFromString(str, &val))
should.Equal("2", val[0]["1"]) should.Equal("2", val[0]["1"])
} }
func Test_encode_int_key_map(t *testing.T) {
should := require.New(t)
val := map[int]string{1: "2"}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"1":"2"}`, str)
}
func Test_decode_int_key_map(t *testing.T) {
should := require.New(t)
var val map[int]string
should.Nil(UnmarshalFromString(`{"1":"2"}`, &val))
should.Equal(map[int]string{1: "2"}, val)
}
func Test_encode_TextMarshaler_key_map(t *testing.T) {
should := require.New(t)
f, _, _ := big.ParseFloat("1", 10, 64, big.ToZero)
val := map[*big.Float]string{f: "2"}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"1":"2"}`, str)
}
func Test_decode_TextMarshaler_key_map(t *testing.T) {
should := require.New(t)
var val map[*big.Float]string
should.Nil(UnmarshalFromString(`{"1":"2"}`, &val))
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"1":"2"}`, str)
}
func Test_map_key_with_escaped_char(t *testing.T) {
type Ttest struct {
Map map[string]string
}
var jsonBytes = []byte(`
{
"Map":{
"k\"ey": "val"
}
}`)
should := require.New(t)
{
var obj Ttest
should.Nil(json.Unmarshal(jsonBytes, &obj))
should.Equal(map[string]string{"k\"ey":"val"}, obj.Map)
}
{
var obj Ttest
should.Nil(Unmarshal(jsonBytes, &obj))
should.Equal(map[string]string{"k\"ey":"val"}, obj.Map)
}
}

View File

@ -1,9 +1,9 @@
package jsoniter package jsoniter
import ( import (
"testing"
"github.com/json-iterator/go/require"
"bytes" "bytes"
"github.com/json-iterator/go/require"
"testing"
) )
func Test_read_null(t *testing.T) { func Test_read_null(t *testing.T) {

View File

@ -1,10 +1,10 @@
package jsoniter package jsoniter
import ( import (
"encoding/json"
"testing"
"github.com/json-iterator/go/require"
"bytes" "bytes"
"encoding/json"
"github.com/json-iterator/go/require"
"testing"
) )
func Test_empty_object(t *testing.T) { func Test_empty_object(t *testing.T) {

View File

@ -1,8 +1,8 @@
package jsoniter package jsoniter
import ( import (
"testing"
"github.com/json-iterator/go/require" "github.com/json-iterator/go/require"
"testing"
) )
func Test_encode_optional_int_pointer(t *testing.T) { func Test_encode_optional_int_pointer(t *testing.T) {

View File

@ -1,8 +1,8 @@
package jsoniter package jsoniter
import ( import (
"testing"
"fmt" "fmt"
"testing"
) )
func Test_reflect_str(t *testing.T) { func Test_reflect_str(t *testing.T) {

View File

@ -1,9 +1,9 @@
package jsoniter package jsoniter
import ( import (
"testing"
"github.com/json-iterator/go/require"
"bytes" "bytes"
"github.com/json-iterator/go/require"
"testing"
) )
func Test_decode_one_field_struct(t *testing.T) { func Test_decode_one_field_struct(t *testing.T) {

View File

@ -3,9 +3,9 @@ package jsoniter
import ( import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/json-iterator/go/require"
"testing" "testing"
"unsafe" "unsafe"
"github.com/json-iterator/go/require"
) )
func Test_decode_slice(t *testing.T) { func Test_decode_slice(t *testing.T) {

jsoniter_stream_test.go (new file, 54 lines)
View File

@ -0,0 +1,54 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"testing"
)
func Test_writeByte_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 1)
stream.writeByte('1')
should.Equal("1", string(stream.Buffer()))
should.Equal(1, len(stream.buf))
stream.writeByte('2')
should.Equal("12", string(stream.Buffer()))
should.Equal(2, len(stream.buf))
stream.writeThreeBytes('3', '4', '5')
should.Equal("12345", string(stream.Buffer()))
}
func Test_writeBytes_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 1)
stream.Write([]byte{'1', '2'})
should.Equal("12", string(stream.Buffer()))
should.Equal(3, len(stream.buf))
stream.Write([]byte{'3', '4', '5', '6', '7'})
should.Equal("1234567", string(stream.Buffer()))
should.Equal(8, len(stream.buf))
}
func Test_writeIndention_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 1)
stream.IndentionStep = 2
stream.WriteVal([]int{1, 2, 3})
should.Equal("[\n 1,\n 2,\n 3\n]", string(stream.Buffer()))
}
func Test_writeRaw_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 1)
stream.WriteRaw("123")
should.Nil(stream.Error)
should.Equal("123", string(stream.Buffer()))
}
func Test_writeString_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 0)
stream.WriteString("123")
should.Nil(stream.Error)
should.Equal(`"123"`, string(stream.Buffer()))
}

View File

@ -3,9 +3,9 @@ package jsoniter
import ( import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"testing"
"github.com/json-iterator/go/require"
"fmt" "fmt"
"github.com/json-iterator/go/require"
"testing"
) )
func Test_read_normal_string(t *testing.T) { func Test_read_normal_string(t *testing.T) {
@ -105,6 +105,13 @@ func Test_write_val_string(t *testing.T) {
should.Equal(`"hello"`, buf.String()) should.Equal(`"hello"`, buf.String())
} }
func Test_decode_slash(t *testing.T) {
should := require.New(t)
var obj interface{}
should.NotNil(json.Unmarshal([]byte("\\"), &obj))
should.NotNil(UnmarshalFromString("\\", &obj))
}
func Benchmark_jsoniter_unicode(b *testing.B) { func Benchmark_jsoniter_unicode(b *testing.B) {
for n := 0; n < b.N; n++ { for n := 0; n < b.N; n++ {
iter := ParseString(`"\ud83d\udc4a"`) iter := ParseString(`"\ud83d\udc4a"`)