mirror of https://github.com/json-iterator/go.git synced 2025-06-15 22:50:24 +02:00

49 Commits

Author SHA1 Message Date
787edc95b0 Revert "WIP: Tests to compare against stdlib" 2017-06-12 15:07:52 +08:00
6e5817b773 Merge pull request #55 from thockin/output_tests
WIP: Tests to compare against stdlib
2017-06-12 01:47:50 -05:00
7480e41836 Add output tests for maps of builtins
This tests for exact stdlib compatibility.
2017-06-11 21:09:56 -07:00
9215b3c508 Add output tests for builtin types
This fuzzes a type, marshals it with stdlib and json-iterator, compares,
then unmarshals with stdlib and json-iterator and compares.  This is
checking for literal, byte-for-byte compatibility.

In every case the test is exactly the same.

It also include benchmark functions to compare stdlib vs json-iterator.

This depends on a couple PRs to be merged in gofuzz.
2017-06-11 21:04:59 -07:00
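A minimal sketch of the byte-for-byte comparison approach this commit describes, assuming github.com/google/gofuzz and an invented helper name; each concrete output test would call something like this with its own type:

```go
package output_tests

import (
	"bytes"
	"encoding/json"
	"reflect"
	"testing"

	"github.com/google/gofuzz"
	"github.com/json-iterator/go"
)

// roundTripCheck fuzzes a value, marshals it with encoding/json and jsoniter,
// compares the bytes, then unmarshals both outputs and compares the results.
// (Hypothetical helper; the real output tests repeat this pattern per type.)
func roundTripCheck(t *testing.T, before, afterStd, afterIter interface{}) {
	fz := fuzz.New()
	for i := 0; i < 100; i++ {
		fz.Fuzz(before)

		stdBytes, stdErr := json.Marshal(before)
		iterBytes, iterErr := jsoniter.Marshal(before)
		if stdErr != nil || iterErr != nil {
			t.Fatalf("marshal errors: std=%v jsoniter=%v", stdErr, iterErr)
		}
		if !bytes.Equal(stdBytes, iterBytes) {
			t.Fatalf("marshal mismatch:\n std: %s\niter: %s", stdBytes, iterBytes)
		}

		if err := json.Unmarshal(stdBytes, afterStd); err != nil {
			t.Fatal(err)
		}
		if err := jsoniter.Unmarshal(stdBytes, afterIter); err != nil {
			t.Fatal(err)
		}
		if !reflect.DeepEqual(afterStd, afterIter) {
			t.Fatalf("unmarshal mismatch: %+v vs %+v", afterStd, afterIter)
		}
	}
}
```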
64e500f3c8 Merge branch 'master' of https://github.com/json-iterator/go 2017-06-12 10:13:22 +08:00
3307ce3ba2 #50 map key unlike object field, can contain escaped char 2017-06-12 10:13:13 +08:00
6f50f15678 decoder/encoder;float precision doc 2017-06-11 16:30:31 +08:00
cee09816e3 decoder/encoder;float precision doc 2017-06-11 16:28:31 +08:00
cdbad22d22 test more package description 2017-06-11 15:35:45 +08:00
b0c9f047e2 test more than one package description 2017-06-11 15:32:58 +08:00
6bd13c2948 Merge branch 'master' of https://github.com/json-iterator/go 2017-06-09 17:06:38 +08:00
84ad508437 #48 should return error if concrete type unknown 2017-06-09 17:06:27 +08:00
4f909776cf Merge pull request #49 from zhaitianduo/master
Use jsoniter instead of json in example
2017-06-09 03:32:31 -05:00
962c470806 fix unused import 2017-06-09 16:28:20 +08:00
46d443fbad use jsoniter for example 2017-06-09 16:25:58 +08:00
2608d40f2a example unmarshal 2017-06-08 12:08:47 +08:00
3cf822853f example unmarshal 2017-06-08 12:07:03 +08:00
26708bccc9 report error when string end not found 2017-06-08 09:46:19 +08:00
d75b539bad add test for scientific float 2017-06-07 21:34:56 +08:00
cfffa29c8a gofmt 2017-06-06 23:27:00 +08:00
925df245d3 good enough indent implementation 2017-06-06 23:18:37 +08:00
962a8cd303 #40 support UseNumber 2017-06-06 23:15:15 +08:00
6509ba05df Merge pull request #41 from 1046102779/master
When parsing, report an error directly if the output parameter is not a pointer type, to avoid crashing the program
2017-06-06 10:03:06 -05:00
579dbf3c1d Merge pull request #42 from 1046102779/patch-1
Change floatDigits to intDigits
2017-06-06 10:02:26 -05:00
aa5181db67 Change floatDigits to intDigits 2017-06-06 21:08:04 +08:00
67be6df2b1 Update feature_adapter.go 2017-06-06 20:01:43 +08:00
0f5379494a unmarshal failed return non-pointer error 2017-06-06 19:36:33 +08:00
d09e2419ba update benchmark 2017-06-06 16:55:32 +08:00
e1a71f6ba1 update benchmark 2017-06-06 16:54:26 +08:00
dcb78991c4 flush when buffer is large enough 2017-06-06 14:16:54 +08:00
9e8238cdc6 remove unused file 2017-06-06 12:41:13 +08:00
a4e5abf492 support []byte; marshal without copy 2017-06-06 09:44:56 +08:00
3979955e69 support TextMarshaler as map key 2017-06-06 00:09:33 +08:00
5fd09f0e02 remove mapInterfaceEncoder 2017-06-05 23:56:37 +08:00
af4982b22c support decode int key map 2017-06-05 23:53:48 +08:00
29dc1d407d write map with int key 2017-06-05 23:01:00 +08:00
5b27aaa62c update test 2017-06-05 22:10:01 +08:00
106636a191 update test 2017-06-05 22:08:28 +08:00
f50c4cfbbe Merge branch 'master' of https://github.com/json-iterator/go 2017-06-05 22:05:02 +08:00
87149ae489 add simple marshal benchmark 2017-06-05 22:04:52 +08:00
c0a4ad72e1 example test 2017-06-05 20:37:08 +08:00
404c0ee44b Decoder doc 2017-06-05 19:57:20 +08:00
10c1506f87 link test 2017-06-05 19:38:34 +08:00
9a43fe6468 adapter api comment 2017-06-05 19:31:30 +08:00
95e03f2937 Marshal comment 2017-06-05 19:19:46 +08:00
4406ed9e62 Marshal comment 2017-06-05 19:18:12 +08:00
ff027701f5 Marshal comment 2017-06-05 19:15:56 +08:00
c69b61f879 Marshal comment 2017-06-05 19:14:40 +08:00
d97f5db769 Marshal comment 2017-06-05 19:11:16 +08:00
46 changed files with 923 additions and 465 deletions

.idea/libraries/Go_SDK.xml generated Normal file
View File

@ -0,0 +1,10 @@
<component name="libraryTable">
<library name="Go SDK">
<CLASSES>
<root url="file:///usr/local/go/src" />
</CLASSES>
<SOURCES>
<root url="file:///usr/local/go/src" />
</SOURCES>
</library>
</component>

View File

@ -2,6 +2,23 @@
jsoniter (json-iterator) is a fast and flexible JSON parser available in [Java](https://github.com/json-iterator/java) and [Go](https://github.com/json-iterator/go)
# Benchmark
![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png)
Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go
Raw Result (easyjson requires static code generation)
| | ns/op | allocation bytes | allocation times |
| --- | --- | --- | --- |
| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
# Usage
100% compatibility with standard lib
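To make the drop-in claim concrete, a small hedged sketch (type and values invented) where only the import differs from an equivalent encoding/json program:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go" // drop-in replacement for "encoding/json"
)

type Item struct {
	Name  string
	Price int
}

func main() {
	// Same signatures as the standard library's Marshal/Unmarshal.
	data, err := jsoniter.Marshal(Item{Name: "book", Price: 9})
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(string(data)) // {"Name":"book","Price":9}

	var item Item
	if err := jsoniter.Unmarshal(data, &item); err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Printf("%+v\n", item) // {Name:book Price:9}
}
```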

View File

@ -1014,4 +1014,4 @@ func typeAndKind(v interface{}) (reflect.Type, reflect.Kind) {
k = t.Kind()
}
return t, k
}
}

example_test.go Normal file
View File

@ -0,0 +1,47 @@
package jsoniter_test
import (
"fmt"
"os"
"github.com/json-iterator/go"
)
func ExampleMarshal() {
type ColorGroup struct {
ID int
Name string
Colors []string
}
group := ColorGroup{
ID: 1,
Name: "Reds",
Colors: []string{"Crimson", "Red", "Ruby", "Maroon"},
}
b, err := jsoniter.Marshal(group)
if err != nil {
fmt.Println("error:", err)
}
os.Stdout.Write(b)
// Output:
// {"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]}
}
func ExampleUnmarshal() {
var jsonBlob = []byte(`[
{"Name": "Platypus", "Order": "Monotremata"},
{"Name": "Quoll", "Order": "Dasyuromorphia"}
]`)
type Animal struct {
Name string
Order string
}
var animals []Animal
err := jsoniter.Unmarshal(jsonBlob, &animals)
if err != nil {
fmt.Println("error:", err)
}
fmt.Printf("%+v", animals)
// Output:
// [{Name:Platypus Order:Monotremata} {Name:Quoll Order:Dasyuromorphia}]
}

View File

@ -1,14 +1,37 @@
// Package jsoniter implements encoding and decoding of JSON as defined in
// RFC 4627 and provides an API whose syntax is identical to that of the standard library's encoding/json.
// Converting from encoding/json to jsoniter requires no more than replacing the imported package
// and variable type declarations (if any).
// The jsoniter interfaces give 100% compatibility with code using the standard library.
//
// "JSON and Go"
// (https://golang.org/doc/articles/json_and_go.html)
// gives a description of how Marshal/Unmarshal operate
// between arbitrary or predefined json objects and bytes,
// and it applies to jsoniter.Marshal/Unmarshal as well.
package jsoniter
import (
"io"
"bytes"
"encoding/json"
"errors"
"io"
"reflect"
"unsafe"
)
// Unmarshal adapts to encoding/json APIs
// Unmarshal adapts to the encoding/json Unmarshal API
//
// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v.
// Refer to https://godoc.org/encoding/json#Unmarshal for more information
func Unmarshal(data []byte, v interface{}) error {
data = data[:lastNotSpacePos(data)]
iter := ParseBytes(data)
typ := reflect.TypeOf(v)
if typ.Kind() != reflect.Ptr {
// return non-pointer error
return errors.New("the second param must be ptr type")
}
iter.ReadVal(v)
if iter.head == iter.tail {
iter.loadMore()
@ -22,6 +45,7 @@ func Unmarshal(data []byte, v interface{}) error {
return iter.Error
}
// UnmarshalAny adapts to the encoding/json Unmarshal API but returns an Any instead of binding into a struct
func UnmarshalAny(data []byte) (Any, error) {
data = data[:lastNotSpacePos(data)]
iter := ParseBytes(data)
@ -81,15 +105,17 @@ func UnmarshalAnyFromString(str string) (Any, error) {
return nil, iter.Error
}
// Marshal adapts to the encoding/json Marshal API
//
// Marshal returns the JSON encoding of v, adapting to the encoding/json Marshal API.
// Refer to https://godoc.org/encoding/json#Marshal for more information
func Marshal(v interface{}) ([]byte, error) {
buf := &bytes.Buffer{}
stream := NewStream(buf, 512)
stream := NewStream(nil, 256)
stream.WriteVal(v)
stream.Flush()
if stream.Error != nil {
return nil, stream.Error
}
return buf.Bytes(), nil
return stream.Buffer(), nil
}
func MarshalToString(v interface{}) (string, error) {
@ -100,11 +126,19 @@ func MarshalToString(v interface{}) (string, error) {
return string(buf), nil
}
// NewDecoder adapts to the encoding/json NewDecoder API.
//
// NewDecoder returns a new decoder that reads from r.
//
// Instead of an encoding/json Decoder, an AdaptedDecoder is returned
// Refer to https://godoc.org/encoding/json#NewDecoder for more information
func NewDecoder(reader io.Reader) *AdaptedDecoder {
iter := Parse(reader, 512)
return &AdaptedDecoder{iter}
}
// AdaptedDecoder reads and decodes JSON values from an input stream.
// AdaptedDecoder provides the same API as encoding/json's Decoder (Token() and UseNumber() are in progress)
type AdaptedDecoder struct {
iter *Iterator
}
@ -127,6 +161,16 @@ func (adapter *AdaptedDecoder) Buffered() io.Reader {
return bytes.NewReader(remaining)
}
func (decoder *AdaptedDecoder) UseNumber() {
RegisterTypeDecoder("interface {}", func(ptr unsafe.Pointer, iter *Iterator) {
if iter.WhatIsNext() == Number {
*((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
} else {
*((*interface{})(ptr)) = iter.Read()
}
})
}
func NewEncoder(writer io.Writer) *AdaptedEncoder {
stream := NewStream(writer, 512)
return &AdaptedEncoder{stream}
@ -143,5 +187,5 @@ func (adapter *AdaptedEncoder) Encode(val interface{}) error {
}
func (adapter *AdaptedEncoder) SetIndent(prefix, indent string) {
// not implemented yet
}
adapter.stream.IndentionStep = len(indent)
}
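Two caller-visible effects of the adapter changes above, shown as a hedged sketch (type and values invented): Unmarshal now reports an error for non-pointer targets, and SetIndent feeds the stream's indention step.

```go
package main

import (
	"fmt"
	"os"

	"github.com/json-iterator/go"
)

type Animal struct {
	Name string
}

func main() {
	// Passing a non-pointer returns an error instead of crashing.
	var a Animal
	err := jsoniter.Unmarshal([]byte(`{"Name":"Quoll"}`), a) // note: a, not &a
	fmt.Println(err) // the second param must be ptr type

	// SetIndent sets the stream's IndentionStep from len(indent); the prefix is ignored.
	enc := jsoniter.NewEncoder(os.Stdout)
	enc.SetIndent("", "  ")
	_ = enc.Encode(Animal{Name: "Platypus"})
}
```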

View File

@ -1,9 +1,9 @@
package jsoniter
import (
"unsafe"
"fmt"
"reflect"
"unsafe"
)
type arrayLazyAny struct {
@ -44,7 +44,7 @@ func (any *arrayLazyAny) fillCacheUntil(target int) Any {
return any.cache[target]
}
iter := any.Parse()
if (len(any.remaining) == len(any.buf)) {
if len(any.remaining) == len(any.buf) {
iter.head++
c := iter.nextToken()
if c != ']' {
@ -337,9 +337,9 @@ func (any *arrayLazyAny) GetInterface() interface{} {
type arrayAny struct {
baseAny
err error
cache []Any
val reflect.Value
err error
cache []Any
val reflect.Value
}
func wrapArray(val interface{}) *arrayAny {
@ -536,4 +536,4 @@ func (any *arrayAny) WriteTo(stream *Stream) {
func (any *arrayAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}
}

View File

@ -2,15 +2,15 @@ package jsoniter
import (
"io"
"unsafe"
"strconv"
"unsafe"
)
type float64LazyAny struct {
baseAny
buf []byte
iter *Iterator
err error
buf []byte
iter *Iterator
err error
cache float64
}
@ -163,4 +163,4 @@ func (any *floatAny) WriteTo(stream *Stream) {
func (any *floatAny) GetInterface() interface{} {
return any.val
}
}

View File

@ -67,4 +67,4 @@ func (any *int32Any) Parse() *Iterator {
func (any *int32Any) GetInterface() interface{} {
return any.val
}
}

View File

@ -2,8 +2,8 @@ package jsoniter
import (
"io"
"unsafe"
"strconv"
"unsafe"
)
type int64LazyAny struct {
@ -163,4 +163,4 @@ func (any *int64Any) Parse() *Iterator {
func (any *int64Any) GetInterface() interface{} {
return any.val
}
}

View File

@ -62,4 +62,4 @@ func (any *nilAny) Parse() *Iterator {
func (any *nilAny) GetInterface() interface{} {
return nil
}
}

View File

@ -1,9 +1,9 @@
package jsoniter
import (
"unsafe"
"fmt"
"reflect"
"unsafe"
)
type objectLazyAny struct {

View File

@ -5,7 +5,7 @@ import (
"strconv"
)
type stringLazyAny struct{
type stringLazyAny struct {
baseAny
buf []byte
iter *Iterator
@ -136,9 +136,9 @@ func (any *stringLazyAny) GetInterface() interface{} {
return any.cache
}
type stringAny struct{
type stringAny struct {
baseAny
err error
err error
val string
}
@ -146,7 +146,6 @@ func (any *stringAny) Parse() *Iterator {
return nil
}
func (any *stringAny) ValueType() ValueType {
return String
}
@ -228,4 +227,4 @@ func (any *stringAny) WriteTo(stream *Stream) {
func (any *stringAny) GetInterface() interface{} {
return any.val
}
}

View File

@ -1,12 +1,11 @@
package jsoniter
import (
"io"
"strconv"
"unsafe"
"io"
)
type uint64LazyAny struct {
baseAny
buf []byte
@ -164,4 +163,4 @@ func (any *uint64Any) Parse() *Iterator {
func (any *uint64Any) GetInterface() interface{} {
return any.val
}
}

View File

@ -1,3 +1,9 @@
//
// Besides, jsoniter.Iterator provides a different set of interfaces
// for iterating over the given bytes/string/reader
// and yielding parsed elements one by one.
// This set of interfaces reads input only as required and gives
// better performance.
package jsoniter
import (
@ -276,4 +282,3 @@ func (iter *Iterator) ReadBase64() (ret []byte) {
}
return ret[:n]
}
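A sketch of the pull-style Iterator API the comment above describes, with an invented payload; each Read* call consumes just enough input to yield the next element:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	input := []byte(`{"user": "ruby", "scores": [3, 7, 12]}`)
	iter := jsoniter.ParseBytes(input)

	total := 0
	// ReadObject yields field names one by one; an empty string marks the end.
	for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
		switch field {
		case "user":
			fmt.Println("user:", iter.ReadString())
		case "scores":
			// ReadArray reports whether another element follows.
			for iter.ReadArray() {
				total += int(iter.ReadInt64())
			}
		default:
			iter.Skip()
		}
	}
	fmt.Println("total:", total) // total: 22
}
```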

View File

@ -18,12 +18,11 @@ func (iter *Iterator) ReadArray() (ret bool) {
case ',':
return true
default:
iter.reportError("ReadArray", "expect [ or , or ] or n, but found: " + string([]byte{c}))
iter.reportError("ReadArray", "expect [ or , or ] or n, but found: "+string([]byte{c}))
return
}
}
func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
c := iter.nextToken()
if c == '[' {
@ -46,6 +45,6 @@ func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
iter.skipFixedBytes(3)
return true // null
}
iter.reportError("ReadArrayCB", "expect [ or n, but found: " + string([]byte{c}))
iter.reportError("ReadArrayCB", "expect [ or n, but found: "+string([]byte{c}))
return false
}
}

View File

@ -2,12 +2,13 @@ package jsoniter
import (
"io"
"math/big"
"strconv"
"unsafe"
"math/big"
)
var floatDigits []int8
const invalidCharForNumber = int8(-1)
const endOfNumber = int8(-2)
const dotInNumber = int8(-3)
@ -75,7 +76,7 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
value := uint64(0)
c := byte(' ')
i := iter.head
non_decimal_loop:
non_decimal_loop:
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c]
@ -91,14 +92,14 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
if value > uint64SafeToMultiple10 {
return iter.readFloat32SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind); // value = value * 10 + ind;
value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
}
if c == '.' {
i++
decimalPlaces := 0;
decimalPlaces := 0
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c];
ind := floatDigits[c]
switch ind {
case endOfNumber:
if decimalPlaces > 0 && decimalPlaces < len(POW10) {
@ -106,7 +107,7 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
return float32(float64(value) / float64(POW10[decimalPlaces]))
}
// too many decimal places
return iter.readFloat32SlowPath()
return iter.readFloat32SlowPath()
case invalidCharForNumber:
fallthrough
case dotInNumber:
@ -125,7 +126,7 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
func (iter *Iterator) readNumberAsString() (ret string) {
strBuf := [16]byte{}
str := strBuf[0:0]
load_loop:
load_loop:
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
@ -178,7 +179,7 @@ func (iter *Iterator) readPositiveFloat64() (ret float64) {
value := uint64(0)
c := byte(' ')
i := iter.head
non_decimal_loop:
non_decimal_loop:
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c]
@ -194,14 +195,14 @@ func (iter *Iterator) readPositiveFloat64() (ret float64) {
if value > uint64SafeToMultiple10 {
return iter.readFloat64SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind); // value = value * 10 + ind;
value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
}
if c == '.' {
i++
decimalPlaces := 0;
decimalPlaces := 0
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c];
ind := floatDigits[c]
switch ind {
case endOfNumber:
if decimalPlaces > 0 && decimalPlaces < len(POW10) {

View File

@ -6,8 +6,8 @@ import (
var intDigits []int8
const uint32SafeToMultiply10 = uint32(0xffffffff) / 10 - 1
const uint64SafeToMultiple10 = uint64(0xffffffffffffffff) / 10 - 1
const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1
const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1
const int64Max = uint64(0x7fffffffffffffff)
const int32Max = uint32(0x7fffffff)
const int16Max = uint32(0x7fff)
@ -17,7 +17,7 @@ const uint8Max = uint32(0xffff)
func init() {
intDigits = make([]int8, 256)
for i := 0; i < len(floatDigits); i++ {
for i := 0; i < len(intDigits); i++ {
intDigits[i] = invalidCharForNumber
}
for i := int8('0'); i <= int8('9'); i++ {
@ -37,15 +37,15 @@ func (iter *Iterator) ReadInt8() (ret int8) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > int8Max + 1 {
iter.reportError("ReadInt8", "overflow: " + strconv.FormatInt(int64(val), 10))
if val > int8Max+1 {
iter.reportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return -int8(val)
} else {
val := iter.readUint32(c)
if val > int8Max {
iter.reportError("ReadInt8", "overflow: " + strconv.FormatInt(int64(val), 10))
iter.reportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return int8(val)
@ -55,7 +55,7 @@ func (iter *Iterator) ReadInt8() (ret int8) {
func (iter *Iterator) ReadUint8() (ret uint8) {
val := iter.readUint32(iter.nextToken())
if val > uint8Max {
iter.reportError("ReadUint8", "overflow: " + strconv.FormatInt(int64(val), 10))
iter.reportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return uint8(val)
@ -65,15 +65,15 @@ func (iter *Iterator) ReadInt16() (ret int16) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > int16Max + 1 {
iter.reportError("ReadInt16", "overflow: " + strconv.FormatInt(int64(val), 10))
if val > int16Max+1 {
iter.reportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return -int16(val)
} else {
val := iter.readUint32(c)
if val > int16Max {
iter.reportError("ReadInt16", "overflow: " + strconv.FormatInt(int64(val), 10))
iter.reportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return int16(val)
@ -83,7 +83,7 @@ func (iter *Iterator) ReadInt16() (ret int16) {
func (iter *Iterator) ReadUint16() (ret uint16) {
val := iter.readUint32(iter.nextToken())
if val > uint16Max {
iter.reportError("ReadUint16", "overflow: " + strconv.FormatInt(int64(val), 10))
iter.reportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return uint16(val)
@ -93,15 +93,15 @@ func (iter *Iterator) ReadInt32() (ret int32) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > int32Max + 1 {
iter.reportError("ReadInt32", "overflow: " + strconv.FormatInt(int64(val), 10))
if val > int32Max+1 {
iter.reportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return -int32(val)
} else {
val := iter.readUint32(c)
if val > int32Max {
iter.reportError("ReadInt32", "overflow: " + strconv.FormatInt(int64(val), 10))
iter.reportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return int32(val)
@ -118,11 +118,11 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
return 0 // single zero
}
if ind == invalidCharForNumber {
iter.reportError("readUint32", "unexpected character: " + string([]byte{byte(ind)}))
iter.reportError("readUint32", "unexpected character: "+string([]byte{byte(ind)}))
return
}
value := uint32(ind)
if iter.tail - iter.head > 10 {
if iter.tail-iter.head > 10 {
i := iter.head
ind2 := intDigits[iter.buf[i]]
if ind2 == invalidCharForNumber {
@ -133,7 +133,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind3 := intDigits[iter.buf[i]]
if ind3 == invalidCharForNumber {
iter.head = i
return value * 10 + uint32(ind2)
return value*10 + uint32(ind2)
}
//iter.head = i + 1
//value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
@ -141,35 +141,35 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind4 := intDigits[iter.buf[i]]
if ind4 == invalidCharForNumber {
iter.head = i
return value * 100 + uint32(ind2) * 10 + uint32(ind3)
return value*100 + uint32(ind2)*10 + uint32(ind3)
}
i++
ind5 := intDigits[iter.buf[i]]
if ind5 == invalidCharForNumber {
iter.head = i
return value * 1000 + uint32(ind2) * 100 + uint32(ind3) * 10 + uint32(ind4)
return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4)
}
i++
ind6 := intDigits[iter.buf[i]]
if ind6 == invalidCharForNumber {
iter.head = i
return value * 10000 + uint32(ind2) * 1000 + uint32(ind3) * 100 + uint32(ind4) * 10 + uint32(ind5)
return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5)
}
i++
ind7 := intDigits[iter.buf[i]]
if ind7 == invalidCharForNumber {
iter.head = i
return value * 100000 + uint32(ind2) * 10000 + uint32(ind3) * 1000 + uint32(ind4) * 100 + uint32(ind5) * 10 + uint32(ind6)
return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6)
}
i++
ind8 := intDigits[iter.buf[i]]
if ind8 == invalidCharForNumber {
iter.head = i
return value * 1000000 + uint32(ind2) * 100000 + uint32(ind3) * 10000 + uint32(ind4) * 1000 + uint32(ind5) * 100 + uint32(ind6) * 10 + uint32(ind7)
return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7)
}
i++
ind9 := intDigits[iter.buf[i]]
value = value * 10000000 + uint32(ind2) * 1000000 + uint32(ind3) * 100000 + uint32(ind4) * 10000 + uint32(ind5) * 1000 + uint32(ind6) * 100 + uint32(ind7) * 10 + uint32(ind8)
value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8)
iter.head = i
if ind9 == invalidCharForNumber {
return value
@ -194,7 +194,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
}
value = (value << 3) + (value << 1) + uint32(ind)
}
if (!iter.loadMore()) {
if !iter.loadMore() {
return value
}
}
@ -204,15 +204,15 @@ func (iter *Iterator) ReadInt64() (ret int64) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint64(iter.readByte())
if val > int64Max + 1 {
iter.reportError("ReadInt64", "overflow: " + strconv.FormatUint(uint64(val), 10))
if val > int64Max+1 {
iter.reportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
return
}
return -int64(val)
} else {
val := iter.readUint64(c)
if val > int64Max {
iter.reportError("ReadInt64", "overflow: " + strconv.FormatUint(uint64(val), 10))
iter.reportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
return
}
return int64(val)
@ -229,7 +229,7 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
return 0 // single zero
}
if ind == invalidCharForNumber {
iter.reportError("readUint64", "unexpected character: " + string([]byte{byte(ind)}))
iter.reportError("readUint64", "unexpected character: "+string([]byte{byte(ind)}))
return
}
value := uint64(ind)
@ -252,7 +252,7 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
}
value = (value << 3) + (value << 1) + uint64(ind)
}
if (!iter.loadMore()) {
if !iter.loadMore() {
return value
}
}
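The uint64SafeToMultiple10 threshold above guards the fast path against overflow before each value = value*10 + digit step; a standalone illustration of the same check (constant name copied from the source, input invented):

```go
package main

import "fmt"

const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1

// accumulate mirrors the fast-path idea: bail out before value*10+digit
// could overflow, letting the caller fall back to a slow path.
func accumulate(digits []uint64) (value uint64, ok bool) {
	for _, d := range digits {
		if value > uint64SafeToMultiple10 {
			return 0, false // would overflow: switch to a slow path
		}
		value = (value << 3) + (value << 1) + d // value = value*10 + d
	}
	return value, true
}

func main() {
	fmt.Println(accumulate([]uint64{1, 2, 3, 4, 5})) // 12345 true
	twentyOneNines := make([]uint64, 21)
	for i := range twentyOneNines {
		twentyOneNines[i] = 9
	}
	fmt.Println(accumulate(twentyOneNines)) // 0 false (too large for uint64)
}
```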

View File

@ -44,10 +44,10 @@ func (iter *Iterator) readFieldHash() int32 {
b += 'a' - 'A'
}
if b == '"' {
iter.head = i+1
iter.head = i + 1
c = iter.nextToken()
if c != ':' {
iter.reportError("readFieldHash", `expect :, but found ` + string([]byte{c}))
iter.reportError("readFieldHash", `expect :, but found `+string([]byte{c}))
}
return int32(hash)
}
@ -60,7 +60,7 @@ func (iter *Iterator) readFieldHash() int32 {
}
}
}
iter.reportError("readFieldHash", `expect ", but found ` + string([]byte{c}))
iter.reportError("readFieldHash", `expect ", but found `+string([]byte{c}))
return 0
}
@ -84,7 +84,7 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
return false
}
for iter.nextToken() == ',' {
field := string(iter.readObjectFieldAsBytes())
field = string(iter.readObjectFieldAsBytes())
if !callback(iter, field) {
return false
}
@ -105,6 +105,46 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
return false
}
func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
c := iter.nextToken()
if c == '{' {
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field := iter.ReadString()
if iter.nextToken() != ':' {
iter.reportError("ReadMapCB", "expect : after object field")
return false
}
if !callback(iter, field) {
return false
}
for iter.nextToken() == ',' {
field = iter.ReadString()
if iter.nextToken() != ':' {
iter.reportError("ReadMapCB", "expect : after object field")
return false
}
if !callback(iter, field) {
return false
}
}
return true
}
if c == '}' {
return true
}
iter.reportError("ReadMapCB", `expect " after }`)
return false
}
if c == 'n' {
iter.skipFixedBytes(3)
return true // null
}
iter.reportError("ReadMapCB", `expect { or n`)
return false
}
func (iter *Iterator) readObjectStart() bool {
c := iter.nextToken()
if c == '{' {
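A sketch of driving the new ReadMapCB callback directly (input invented); returning false from the callback stops the iteration early:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseBytes([]byte(`{"apples": 3, "pears": 5}`))
	counts := map[string]int{}

	// The callback receives each key and is expected to consume the value.
	ok := iter.ReadMapCB(func(it *jsoniter.Iterator, key string) bool {
		counts[key] = int(it.ReadInt64())
		return true
	})
	fmt.Println(ok, counts["apples"], counts["pears"]) // true 3 5
}
```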

View File

@ -29,7 +29,6 @@ func (iter *Iterator) ReadBool() (ret bool) {
return
}
func (iter *Iterator) SkipAndReturnBytes() []byte {
if iter.reader != nil {
panic("reader input does not support this api")
@ -40,7 +39,6 @@ func (iter *Iterator) SkipAndReturnBytes() []byte {
return iter.buf[before:after]
}
// Skip skips a json object and positions to relatively the next json object
func (iter *Iterator) Skip() {
c := iter.nextToken()
@ -204,15 +202,15 @@ func (iter *Iterator) skipUntilBreak() {
}
func (iter *Iterator) skipFixedBytes(n int) {
iter.head += n;
if (iter.head >= iter.tail) {
more := iter.head - iter.tail;
iter.head += n
if iter.head >= iter.tail {
more := iter.head - iter.tail
if !iter.loadMore() {
if more > 0 {
iter.reportError("skipFixedBytes", "unexpected end");
iter.reportError("skipFixedBytes", "unexpected end")
}
return
}
iter.head += more;
iter.head += more
}
}
}

View File

@ -7,7 +7,7 @@ import (
func (iter *Iterator) ReadString() (ret string) {
c := iter.nextToken()
if c == '"' {
for i := iter.head ; i < iter.tail; i++ {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
if c == '"' {
ret = string(iter.buf[iter.head:i])
@ -92,6 +92,7 @@ func (iter *Iterator) readStringSlowPath() (ret string) {
str = append(str, c)
}
}
iter.reportError("ReadString", "unexpected end of input")
return
}
@ -103,13 +104,13 @@ func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
// for: field name, base64, number
if iter.buf[i] == '"' {
// fast path: reuse the underlying buffer
ret = iter.buf[iter.head : i]
ret = iter.buf[iter.head:i]
iter.head = i + 1
return ret
}
}
readLen := iter.tail - iter.head
copied := make([]byte, readLen, readLen * 2)
copied := make([]byte, readLen, readLen*2)
copy(copied, iter.buf[iter.head:iter.tail])
iter.head = iter.tail
for iter.Error == nil {
@ -132,11 +133,11 @@ func (iter *Iterator) readU4() (ret rune) {
return
}
if c >= '0' && c <= '9' {
ret = ret * 16 + rune(c - '0')
ret = ret*16 + rune(c-'0')
} else if c >= 'a' && c <= 'f' {
ret = ret * 16 + rune(c - 'a' + 10)
ret = ret*16 + rune(c-'a'+10)
} else if c >= 'A' && c <= 'F' {
ret = ret * 16 + rune(c - 'A' + 10)
ret = ret*16 + rune(c-'A'+10)
} else {
iter.reportError("readU4", "expects 0~9 or a~f")
return
@ -158,14 +159,14 @@ const (
mask3 = 0x0F // 0000 1111
mask4 = 0x07 // 0000 0111
rune1Max = 1 << 7 - 1
rune2Max = 1 << 11 - 1
rune3Max = 1 << 16 - 1
rune1Max = 1<<7 - 1
rune2Max = 1<<11 - 1
rune3Max = 1<<16 - 1
surrogateMin = 0xD800
surrogateMax = 0xDFFF
maxRune = '\U0010FFFF' // Maximum valid Unicode code point.
maxRune = '\U0010FFFF' // Maximum valid Unicode code point.
runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character"
)
@ -176,22 +177,22 @@ func appendRune(p []byte, r rune) []byte {
p = append(p, byte(r))
return p
case i <= rune2Max:
p = append(p, t2 | byte(r >> 6))
p = append(p, tx | byte(r) & maskx)
p = append(p, t2|byte(r>>6))
p = append(p, tx|byte(r)&maskx)
return p
case i > maxRune, surrogateMin <= i && i <= surrogateMax:
r = runeError
fallthrough
case i <= rune3Max:
p = append(p, t3 | byte(r >> 12))
p = append(p, tx | byte(r >> 6) & maskx)
p = append(p, tx | byte(r) & maskx)
p = append(p, t3|byte(r>>12))
p = append(p, tx|byte(r>>6)&maskx)
p = append(p, tx|byte(r)&maskx)
return p
default:
p = append(p, t4 | byte(r >> 18))
p = append(p, tx | byte(r >> 12) & maskx)
p = append(p, tx | byte(r >> 6) & maskx)
p = append(p, tx | byte(r) & maskx)
p = append(p, t4|byte(r>>18))
p = append(p, tx|byte(r>>12)&maskx)
p = append(p, tx|byte(r>>6)&maskx)
p = append(p, tx|byte(r)&maskx)
return p
}
}

View File

@ -1,26 +1,31 @@
package jsoniter
import (
"encoding"
"encoding/json"
"fmt"
"reflect"
"sync/atomic"
"unsafe"
"encoding/json"
)
/*
Reflection on type to create decoders, which is then cached
Reflection on value is avoided as we can, as the reflect.Value itself will allocate, with following exceptions
1. create instance of new value, for example *int will need a int to be allocated
2. append to slice, if the existing cap is not enough, allocate will be done using Reflect.New
3. assignment to map, both key and value will be reflect.Value
For a simple struct binding, it will be reflect.Value free and allocation free
*/
// Decoder is an internal type registered to the cache as needed.
// Don't confuse jsoniter.Decoder with json.Decoder.
// For json.Decoder's adapter, refer to jsoniter.AdaptedDecoder (todo link).
//
// Reflection on the type is used to create decoders, which are then cached.
// Reflection on values is avoided where possible, since reflect.Value itself allocates, with the following exceptions:
// 1. creating an instance of a new value; for example, *int needs an int to be allocated
// 2. appending to a slice when the existing cap is not enough, which allocates via reflect.New
// 3. assignment to a map, where both key and value will be reflect.Value
// For a simple struct binding, decoding is reflect.Value free and allocation free.
type Decoder interface {
decode(ptr unsafe.Pointer, iter *Iterator)
}
// Encoder is an internal type registered to the cache as needed.
// Don't confuse jsoniter.Encoder with json.Encoder.
// For json.Encoder's adapter, refer to jsoniter.AdaptedEncoder (todo godoc link).
type Encoder interface {
isEmpty(ptr unsafe.Pointer) bool
encode(ptr unsafe.Pointer, stream *Stream)
@ -77,6 +82,7 @@ var jsonRawMessageType reflect.Type
var anyType reflect.Type
var marshalerType reflect.Type
var unmarshalerType reflect.Type
var textUnmarshalerType reflect.Type
func init() {
typeDecoders = map[string]Decoder{}
@ -91,6 +97,7 @@ func init() {
anyType = reflect.TypeOf((*Any)(nil)).Elem()
marshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem()
unmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()
textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
}
func addDecoderToCache(cacheKey reflect.Type, decoder Decoder) {
@ -156,15 +163,18 @@ func RegisterExtension(extension ExtensionFunc) {
extensions = append(extensions, extension)
}
// CleanDecoders cleans decoders registered
// CleanDecoders cleans decoders registered or cached
func CleanDecoders() {
typeDecoders = map[string]Decoder{}
fieldDecoders = map[string]Decoder{}
atomic.StorePointer(&DECODERS, unsafe.Pointer(&map[string]Decoder{}))
}
// CleanEncoders cleans encoders registered or cached
func CleanEncoders() {
typeEncoders = map[string]Encoder{}
fieldEncoders = map[string]Encoder{}
atomic.StorePointer(&ENCODERS, unsafe.Pointer(&map[string]Encoder{}))
}
type optionalDecoder struct {
@ -274,7 +284,6 @@ func (iter *Iterator) ReadVal(obj interface{}) {
cachedDecoder.decode(e.word, iter)
}
func (stream *Stream) WriteVal(val interface{}) {
if nil == val {
stream.WriteNil()
@ -320,7 +329,7 @@ func decoderOfType(typ reflect.Type) (Decoder, error) {
if typ.Kind() == reflect.Ptr {
typeDecoder := typeDecoders[typ.Elem().String()]
if typeDecoder != nil {
return &optionalDecoder{typ.Elem(),typeDecoder}, nil
return &optionalDecoder{typ.Elem(), typeDecoder}, nil
}
}
cacheKey := typ
@ -337,6 +346,9 @@ func decoderOfType(typ reflect.Type) (Decoder, error) {
}
func createDecoderOfType(typ reflect.Type) (Decoder, error) {
if typ.String() == "[]uint8" {
return &base64Codec{}, nil
}
if typ.AssignableTo(jsonRawMessageType) {
return &jsonRawMessageCodec{}, nil
}
@ -424,6 +436,9 @@ func encoderOfType(typ reflect.Type) (Encoder, error) {
}
func createEncoderOfType(typ reflect.Type) (Encoder, error) {
if typ.String() == "[]uint8" {
return &base64Codec{}, nil
}
if typ.AssignableTo(jsonRawMessageType) {
return &jsonRawMessageCodec{}, nil
}
@ -501,7 +516,7 @@ func encoderOfOptional(typ reflect.Type) (Encoder, error) {
if err != nil {
return nil, err
}
return &optionalEncoder{ decoder}, nil
return &optionalEncoder{decoder}, nil
}
func decoderOfMap(typ reflect.Type) (Decoder, error) {
@ -510,7 +525,7 @@ func decoderOfMap(typ reflect.Type) (Decoder, error) {
return nil, err
}
mapInterface := reflect.New(typ).Interface()
return &mapDecoder{typ, typ.Elem(), decoder, extractInterface(mapInterface)}, nil
return &mapDecoder{typ, typ.Key(), typ.Elem(), decoder, extractInterface(mapInterface)}, nil
}
func extractInterface(val interface{}) emptyInterface {
@ -524,9 +539,5 @@ func encoderOfMap(typ reflect.Type) (Encoder, error) {
return nil, err
}
mapInterface := reflect.New(typ).Elem().Interface()
if elemType.Kind() == reflect.Interface && elemType.NumMethod() == 0 {
return &mapInterfaceEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
} else {
return &mapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
}
return &mapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
}
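The typeDecoders/typeEncoders maps consulted above are filled through RegisterTypeDecoder/RegisterTypeEncoder; a hedged sketch with an invented Celsius type (the registration key is the type's String() form, assumed here to be "main.Celsius"):

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
	"unsafe"

	"github.com/json-iterator/go"
)

// Celsius is an invented example type stored in JSON as strings like "21.5C".
type Celsius float64

func main() {
	jsoniter.RegisterTypeDecoder("main.Celsius", func(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
		s := strings.TrimSuffix(iter.ReadString(), "C")
		v, _ := strconv.ParseFloat(s, 64)
		*((*Celsius)(ptr)) = Celsius(v)
	})

	var temp Celsius
	if err := jsoniter.Unmarshal([]byte(`"21.5C"`), &temp); err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(temp) // 21.5
}
```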

View File

@ -1,10 +1,10 @@
package jsoniter
import (
"unsafe"
"reflect"
"io"
"fmt"
"io"
"reflect"
"unsafe"
)
func decoderOfSlice(typ reflect.Type) (Decoder, error) {
@ -21,7 +21,7 @@ func encoderOfSlice(typ reflect.Type) (Encoder, error) {
return nil, err
}
if typ.Elem().Kind() == reflect.Map {
encoder = &optionalEncoder{ encoder}
encoder = &optionalEncoder{encoder}
}
return &sliceEncoder{typ, typ.Elem(), encoder}, nil
}
@ -88,30 +88,30 @@ func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
return
}
offset := uintptr(0)
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
if !iter.ReadArray() {
slice.Len = 1
return
}
offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
if !iter.ReadArray() {
slice.Len = 2
return
}
offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
if !iter.ReadArray() {
slice.Len = 3
return
}
offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
slice.Len = 4
for iter.ReadArray() {
growOne(slice, decoder.sliceType, decoder.elemType)
offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
}
}
@ -155,4 +155,4 @@ func reuseSlice(slice *sliceHeader, sliceType reflect.Type, expectedCap int) {
dst := unsafe.Pointer(reflect.MakeSlice(sliceType, 0, expectedCap).Pointer())
slice.Cap = expectedCap
slice.Data = dst
}
}

View File

@ -1,12 +1,16 @@
package jsoniter
import (
"unsafe"
"encoding"
"encoding/json"
"reflect"
"strconv"
"unsafe"
)
type mapDecoder struct {
mapType reflect.Type
keyType reflect.Type
elemType reflect.Type
elemDecoder Decoder
mapInterface emptyInterface
@ -21,12 +25,47 @@ func (decoder *mapDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
if realVal.IsNil() {
realVal.Set(reflect.MakeMap(realVal.Type()))
}
for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
iter.ReadMapCB(func(iter *Iterator, keyStr string) bool {
elem := reflect.New(decoder.elemType)
decoder.elemDecoder.decode(unsafe.Pointer(elem.Pointer()), iter)
// to put into map, we have to use reflection
realVal.SetMapIndex(reflect.ValueOf(string([]byte(field))), elem.Elem())
}
keyType := decoder.keyType
switch {
case keyType.Kind() == reflect.String:
realVal.SetMapIndex(reflect.ValueOf(keyStr), elem.Elem())
return true
case keyType.Implements(textUnmarshalerType):
textUnmarshaler := reflect.New(keyType.Elem()).Interface().(encoding.TextUnmarshaler)
err := textUnmarshaler.UnmarshalText([]byte(keyStr))
if err != nil {
iter.reportError("read map key as TextUnmarshaler", err.Error())
return false
}
realVal.SetMapIndex(reflect.ValueOf(textUnmarshaler), elem.Elem())
return true
default:
switch keyType.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
n, err := strconv.ParseInt(keyStr, 10, 64)
if err != nil || reflect.Zero(keyType).OverflowInt(n) {
iter.reportError("read map key as int64", "read int64 failed")
return false
}
realVal.SetMapIndex(reflect.ValueOf(n).Convert(keyType), elem.Elem())
return true
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
n, err := strconv.ParseUint(keyStr, 10, 64)
if err != nil || reflect.Zero(keyType).OverflowUint(n) {
iter.reportError("read map key as uint64", "read uint64 failed")
return false
}
realVal.SetMapIndex(reflect.ValueOf(n).Convert(keyType), elem.Elem())
return true
}
}
iter.reportError("read map key", "unexpected map key type "+keyType.String())
return true
})
}
type mapEncoder struct {
@ -47,13 +86,45 @@ func (encoder *mapEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
if i != 0 {
stream.WriteMore()
}
stream.WriteObjectField(key.String())
encodeMapKey(key, stream)
stream.writeByte(':')
val := realVal.MapIndex(key).Interface()
encoder.elemEncoder.encodeInterface(val, stream)
}
stream.WriteObjectEnd()
}
func encodeMapKey(key reflect.Value, stream *Stream) {
if key.Kind() == reflect.String {
stream.WriteString(key.String())
return
}
if tm, ok := key.Interface().(encoding.TextMarshaler); ok {
buf, err := tm.MarshalText()
if err != nil {
stream.Error = err
return
}
stream.writeByte('"')
stream.Write(buf)
stream.writeByte('"')
return
}
switch key.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
stream.writeByte('"')
stream.WriteInt64(key.Int())
stream.writeByte('"')
return
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
stream.writeByte('"')
stream.WriteUint64(key.Uint())
stream.writeByte('"')
return
}
stream.Error = &json.UnsupportedTypeError{key.Type()}
}
func (encoder *mapEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
}
@ -65,41 +136,3 @@ func (encoder *mapEncoder) isEmpty(ptr unsafe.Pointer) bool {
realVal := reflect.ValueOf(*realInterface)
return realVal.Len() == 0
}
type mapInterfaceEncoder struct {
mapType reflect.Type
elemType reflect.Type
elemEncoder Encoder
mapInterface emptyInterface
}
func (encoder *mapInterfaceEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
mapInterface := encoder.mapInterface
mapInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
realVal := reflect.ValueOf(*realInterface)
stream.WriteObjectStart()
for i, key := range realVal.MapKeys() {
if i != 0 {
stream.WriteMore()
}
stream.WriteObjectField(key.String())
val := realVal.MapIndex(key).Interface()
encoder.elemEncoder.encode(unsafe.Pointer(&val), stream)
}
stream.WriteObjectEnd()
}
func (encoder *mapInterfaceEncoder) encodeInterface(val interface{}, stream *Stream) {
writeToStream(val, stream, encoder)
}
func (encoder *mapInterfaceEncoder) isEmpty(ptr unsafe.Pointer) bool {
mapInterface := encoder.mapInterface
mapInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
realVal := reflect.ValueOf(*realInterface)
return realVal.Len() == 0
}
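From the caller's side, the new map-key handling means integer-keyed maps round-trip the same way they do with encoding/json; a quick hedged sketch (values invented):

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	// Integer keys are written as quoted strings.
	out, err := jsoniter.Marshal(map[int]string{1: "one"})
	fmt.Println(string(out), err) // {"1":"one"} <nil>

	// ...and parsed back through strconv in the map decoder.
	var back map[int]string
	err = jsoniter.Unmarshal([]byte(`{"1":"one","2":"two"}`), &back)
	fmt.Println(back[1], back[2], err) // one two <nil>
}
```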

View File

@ -1,8 +1,9 @@
package jsoniter
import (
"unsafe"
"encoding/base64"
"encoding/json"
"unsafe"
)
type stringCodec struct {
@ -296,6 +297,10 @@ type nonEmptyInterfaceCodec struct {
func (codec *nonEmptyInterfaceCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
nonEmptyInterface := (*nonEmptyInterface)(ptr)
if nonEmptyInterface.itab == nil {
iter.reportError("read non-empty interface", "do not know which concrete type to decode to")
return
}
var i interface{}
e := (*emptyInterface)(unsafe.Pointer(&i))
e.typ = nonEmptyInterface.itab.typ
@ -379,6 +384,49 @@ func (encoder *jsonRawMessageCodec) isEmpty(ptr unsafe.Pointer) bool {
return len(*((*json.RawMessage)(ptr))) == 0
}
type base64Codec struct {
}
func (codec *base64Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
encoding := base64.StdEncoding
src := iter.SkipAndReturnBytes()
src = src[1 : len(src)-1]
decodedLen := encoding.DecodedLen(len(src))
dst := make([]byte, decodedLen)
_, err := encoding.Decode(dst, src)
if err != nil {
iter.reportError("decode base64", err.Error())
} else {
*((*[]byte)(ptr)) = dst
}
}
func (codec *base64Codec) encode(ptr unsafe.Pointer, stream *Stream) {
encoding := base64.StdEncoding
stream.writeByte('"')
src := *((*[]byte)(ptr))
toGrow := encoding.EncodedLen(len(src))
stream.ensure(toGrow)
encoding.Encode(stream.buf[stream.n:], src)
stream.n += toGrow
stream.writeByte('"')
}
func (encoder *base64Codec) encodeInterface(val interface{}, stream *Stream) {
encoding := base64.StdEncoding
stream.writeByte('"')
src := val.([]byte)
toGrow := encoding.EncodedLen(len(src))
stream.ensure(toGrow)
encoding.Encode(stream.buf[stream.n:], src)
stream.n += toGrow
stream.writeByte('"')
}
func (encoder *base64Codec) isEmpty(ptr unsafe.Pointer) bool {
return len(*((*[]byte)(ptr))) == 0
}
type stringNumberDecoder struct {
elemDecoder Decoder
}
@ -447,4 +495,4 @@ func (decoder *unmarshalerDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
if err != nil {
iter.reportError("unmarshaler", err.Error())
}
}
}
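With the new base64Codec, a []byte value marshals to a base64 JSON string, matching encoding/json; a brief sketch:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

func main() {
	// []byte is encoded as a base64 string, as the standard library does.
	out, err := jsoniter.Marshal([]byte("hello"))
	fmt.Println(string(out), err) // "aGVsbG8=" <nil>
}
```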

View File

@ -1,12 +1,12 @@
package jsoniter
import (
"io"
"fmt"
"io"
"reflect"
"unsafe"
"strings"
"unicode"
"unsafe"
)
func encoderOfStruct(typ reflect.Type) (Encoder, error) {
@ -47,7 +47,7 @@ func encoderOfStruct(typ reflect.Type) (Encoder, error) {
}
for _, fieldName := range fieldNames {
fields[fieldName] = &structFieldEncoder{field, fieldName, encoder, omitempty}
}
}
}
if len(fields) == 0 {
return &emptyStructEncoder{}, nil
@ -138,7 +138,7 @@ func EnableUnexportedStructFieldsSupport() {
func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) (Decoder, error) {
knownHash := map[int32]struct{}{
0: struct{}{},
0: {},
}
switch len(fields) {
case 0:
@ -203,7 +203,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &threeFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3}, nil
case 4:
var fieldName1 int32
var fieldName2 int32
@ -236,8 +236,8 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &fourFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4}, nil
case 5:
var fieldName1 int32
var fieldName2 int32
@ -275,8 +275,8 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &fiveFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5}, nil
case 6:
var fieldName1 int32
var fieldName2 int32
@ -319,8 +319,8 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &sixFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6}, nil
case 7:
var fieldName1 int32
var fieldName2 int32
@ -368,9 +368,9 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &sevenFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7}, nil
case 8:
var fieldName1 int32
var fieldName2 int32
@ -423,9 +423,9 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &eightFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8}, nil
case 9:
var fieldName1 int32
var fieldName2 int32
@ -483,9 +483,9 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &nineFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9}, nil
case 10:
var fieldName1 int32
var fieldName2 int32
@ -548,10 +548,10 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &tenFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9,
fieldName10, fieldDecoder10}, nil
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9,
fieldName10, fieldDecoder10}, nil
}
return &generalStructDecoder{typ, fields}, nil
}

View File

@ -32,24 +32,32 @@ func (b *Stream) Buffered() int {
return b.n
}
func (b *Stream) Buffer() []byte {
return b.buf[:b.n]
}
// Write writes the contents of p into the buffer.
// It returns the number of bytes written.
// If nn < len(p), it also returns an error explaining
// why the write is short.
func (b *Stream) Write(p []byte) (nn int, err error) {
for len(p) > b.Available() && b.Error == nil {
var n int
if b.Buffered() == 0 {
// Large write, empty buffer.
// Write directly from p to avoid copy.
n, b.Error = b.out.Write(p)
if b.out == nil {
b.growAtLeast(len(p))
} else {
n = copy(b.buf[b.n:], p)
b.n += n
b.Flush()
var n int
if b.Buffered() == 0 {
// Large write, empty buffer.
// Write directly from p to avoid copy.
n, b.Error = b.out.Write(p)
} else {
n = copy(b.buf[b.n:], p)
b.n += n
b.Flush()
}
nn += n
p = p[n:]
}
nn += n
p = p[n:]
}
if b.Error != nil {
return nn, b.Error
@ -60,14 +68,13 @@ func (b *Stream) Write(p []byte) (nn int, err error) {
return nn, nil
}
// WriteByte writes a single byte.
func (b *Stream) writeByte(c byte) {
if b.Error != nil {
return
}
if b.Available() <= 0 && b.Flush() != nil {
return
if b.Available() < 1 {
b.growAtLeast(1)
}
b.buf[b.n] = c
b.n++
@ -77,11 +84,11 @@ func (b *Stream) writeTwoBytes(c1 byte, c2 byte) {
if b.Error != nil {
return
}
if b.Available() <= 1 && b.Flush() != nil {
return
if b.Available() < 2 {
b.growAtLeast(2)
}
b.buf[b.n] = c1
b.buf[b.n + 1] = c2
b.buf[b.n+1] = c2
b.n += 2
}
@ -89,12 +96,12 @@ func (b *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) {
if b.Error != nil {
return
}
if b.Available() <= 2 && b.Flush() != nil {
return
if b.Available() < 3 {
b.growAtLeast(3)
}
b.buf[b.n] = c1
b.buf[b.n + 1] = c2
b.buf[b.n + 2] = c3
b.buf[b.n+1] = c2
b.buf[b.n+2] = c3
b.n += 3
}
@ -102,13 +109,13 @@ func (b *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) {
if b.Error != nil {
return
}
if b.Available() <= 3 && b.Flush() != nil {
return
if b.Available() < 4 {
b.growAtLeast(4)
}
b.buf[b.n] = c1
b.buf[b.n + 1] = c2
b.buf[b.n + 2] = c3
b.buf[b.n + 3] = c4
b.buf[b.n+1] = c2
b.buf[b.n+2] = c3
b.buf[b.n+3] = c4
b.n += 4
}
@ -116,19 +123,22 @@ func (b *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) {
if b.Error != nil {
return
}
if b.Available() <= 3 && b.Flush() != nil {
return
if b.Available() < 5 {
b.growAtLeast(5)
}
b.buf[b.n] = c1
b.buf[b.n + 1] = c2
b.buf[b.n + 2] = c3
b.buf[b.n + 3] = c4
b.buf[b.n + 4] = c5
b.buf[b.n+1] = c2
b.buf[b.n+2] = c3
b.buf[b.n+3] = c4
b.buf[b.n+4] = c5
b.n += 5
}
// Flush writes any buffered data to the underlying io.Writer.
func (b *Stream) Flush() error {
if b.out == nil {
return nil
}
if b.Error != nil {
return b.Error
}
@ -141,7 +151,7 @@ func (b *Stream) Flush() error {
}
if err != nil {
if n > 0 && n < b.n {
copy(b.buf[0:b.n - n], b.buf[n:b.n])
copy(b.buf[0:b.n-n], b.buf[n:b.n])
}
b.n -= n
b.Error = err
@ -151,13 +161,28 @@ func (b *Stream) Flush() error {
return nil
}
func (b *Stream) WriteRaw(s string) {
for len(s) > b.Available() && b.Error == nil {
n := copy(b.buf[b.n:], s)
b.n += n
s = s[n:]
b.Flush()
func (b *Stream) ensure(minimal int) {
available := b.Available()
if available < minimal {
if b.n > 1024 {
b.Flush()
}
b.growAtLeast(minimal)
}
}
func (b *Stream) growAtLeast(minimal int) {
toGrow := len(b.buf)
if toGrow < minimal {
toGrow = minimal
}
newBuf := make([]byte, len(b.buf)+toGrow)
copy(newBuf, b.Buffer())
b.buf = newBuf
}
func (b *Stream) WriteRaw(s string) {
b.ensure(len(s))
if b.Error != nil {
return
}
@ -166,18 +191,13 @@ func (b *Stream) WriteRaw(s string) {
}
func (stream *Stream) WriteString(s string) {
stream.ensure(32)
valLen := len(s)
toWriteLen := valLen
bufLengthMinusTwo := len(stream.buf) - 2 // make room for the quotes
if stream.n + toWriteLen > bufLengthMinusTwo {
if stream.n+toWriteLen > bufLengthMinusTwo {
toWriteLen = bufLengthMinusTwo - stream.n
}
if toWriteLen < 0 {
stream.Flush()
if stream.n + toWriteLen > bufLengthMinusTwo {
toWriteLen = bufLengthMinusTwo - stream.n
}
}
n := stream.n
stream.buf[n] = '"'
n++
@ -189,7 +209,7 @@ func (stream *Stream) WriteString(s string) {
stream.buf[n] = c
n++
} else {
break;
break
}
}
if i == valLen {
@ -200,14 +220,14 @@ func (stream *Stream) WriteString(s string) {
}
stream.n = n
// for the remaining parts, we process them char by char
stream.writeStringSlowPath(s, i, valLen);
stream.writeStringSlowPath(s, i, valLen)
stream.writeByte('"')
}
func (stream *Stream) writeStringSlowPath(s string, i int, valLen int) {
for ; i < valLen; i++ {
c := s[i]
switch (c) {
switch c {
case '"':
stream.writeTwoBytes('\\', '"')
case '\\':
@ -223,7 +243,7 @@ func (stream *Stream) writeStringSlowPath(s string, i int, valLen int) {
case '\t':
stream.writeTwoBytes('\\', 't')
default:
stream.writeByte(c);
stream.writeByte(c)
}
}
}
@ -293,21 +313,14 @@ func (stream *Stream) WriteArrayEnd() {
}
func (stream *Stream) writeIndention(delta int) {
if (stream.indention == 0) {
if stream.indention == 0 {
return
}
stream.writeByte('\n')
toWrite := stream.indention - delta
i := 0
for {
for ; i < toWrite && stream.n < len(stream.buf); i++ {
stream.buf[stream.n] = ' '
stream.n ++
}
if i == toWrite {
break;
} else {
stream.Flush()
}
stream.ensure(toWrite)
for i := 0; i < toWrite && stream.n < len(stream.buf); i++ {
stream.buf[stream.n] = ' '
stream.n++
}
}
}

View File

@ -21,26 +21,24 @@ func (stream *Stream) WriteFloat32Lossy(val float32) {
val = -val
}
if val > 0x4ffffff {
stream.WriteRaw(strconv.FormatFloat(float64(val), 'f', -1, 32));
stream.WriteRaw(strconv.FormatFloat(float64(val), 'f', -1, 32))
return
}
precision := 6
exp := uint64(1000000) // 6
lval := uint64(float64(val) * float64(exp) + 0.5)
lval := uint64(float64(val)*float64(exp) + 0.5)
stream.WriteUint64(lval / exp)
fval := lval % exp
if fval == 0 {
return
}
stream.writeByte('.')
if stream.Available() < 10 {
stream.Flush()
}
stream.ensure(10)
for p := precision - 1; p > 0 && fval < POW10[p]; p-- {
stream.writeByte('0')
}
stream.WriteUint64(fval)
for stream.buf[stream.n - 1] == '0' {
for stream.buf[stream.n-1] == '0' {
stream.n--
}
}
@ -55,30 +53,30 @@ func (stream *Stream) WriteFloat64Lossy(val float64) {
val = -val
}
if val > 0x4ffffff {
stream.WriteRaw(strconv.FormatFloat(val, 'f', -1, 64));
stream.WriteRaw(strconv.FormatFloat(val, 'f', -1, 64))
return
}
precision := 6
exp := uint64(1000000) // 6
lval := uint64(val * float64(exp) + 0.5)
lval := uint64(val*float64(exp) + 0.5)
stream.WriteUint64(lval / exp)
fval := lval % exp
if fval == 0 {
return
}
stream.writeByte('.')
if stream.Available() < 10 {
stream.Flush()
}
stream.ensure(10)
for p := precision - 1; p > 0 && fval < POW10[p]; p-- {
stream.writeByte('0')
}
stream.WriteUint64(fval)
for stream.buf[stream.n - 1] == '0' {
for stream.buf[stream.n-1] == '0' {
stream.n--
}
}
// EnableLossyFloatMarshalling keeps 10**(-6) precision
// for float variables for better performance.
func EnableLossyFloatMarshalling() {
// for better performance
RegisterTypeEncoder("float32", func(ptr unsafe.Pointer, stream *Stream) {
@ -89,4 +87,4 @@ func EnableLossyFloatMarshalling() {
val := *((*float64)(ptr))
stream.WriteFloat64Lossy(val)
})
}
}
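Usage sketch for the lossy float mode documented above (sample struct invented); note the registration is global, so it affects all subsequent marshalling:

```go
package main

import (
	"fmt"

	"github.com/json-iterator/go"
)

type Point struct {
	X float64
}

func main() {
	// Keep at most 6 decimal places in exchange for faster formatting.
	jsoniter.EnableLossyFloatMarshalling()

	out, err := jsoniter.Marshal(Point{X: 3.14159265358979})
	fmt.Println(string(out), err) // {"X":3.141593} <nil>
}
```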

View File

@ -1,46 +1,11 @@
package jsoniter
var digits []uint8
var digitTens []uint8
var digitOnes []uint8
var DIGITS []uint32
func init() {
digits = []uint8{
'0', '1', '2', '3', '4', '5',
'6', '7', '8', '9', 'a', 'b',
'c', 'd', 'e', 'f', 'g', 'h',
'i', 'j', 'k', 'l', 'm', 'n',
'o', 'p', 'q', 'r', 's', 't',
'u', 'v', 'w', 'x', 'y', 'z',
}
digitTens = []uint8{
'0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
'1', '1', '1', '1', '1', '1', '1', '1', '1', '1',
'2', '2', '2', '2', '2', '2', '2', '2', '2', '2',
'3', '3', '3', '3', '3', '3', '3', '3', '3', '3',
'4', '4', '4', '4', '4', '4', '4', '4', '4', '4',
'5', '5', '5', '5', '5', '5', '5', '5', '5', '5',
'6', '6', '6', '6', '6', '6', '6', '6', '6', '6',
'7', '7', '7', '7', '7', '7', '7', '7', '7', '7',
'8', '8', '8', '8', '8', '8', '8', '8', '8', '8',
'9', '9', '9', '9', '9', '9', '9', '9', '9', '9',
}
digitOnes = []uint8{
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
}
DIGITS = make([]uint32, 1000)
for i := uint32(0); i < 1000; i++ {
DIGITS[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i % 10 + '0';
DIGITS[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0'
if i < 10 {
DIGITS[i] += 2 << 24
} else if i < 100 {
@ -67,24 +32,20 @@ func writeFirstBuf(buf []byte, v uint32, n int) int {
func writeBuf(buf []byte, v uint32, n int) {
buf[n] = byte(v >> 16)
buf[n + 1] = byte(v >> 8)
buf[n + 2] = byte(v)
buf[n+1] = byte(v >> 8)
buf[n+2] = byte(v)
}
func (stream *Stream) WriteUint8(val uint8) {
if stream.Available() < 3 {
stream.Flush()
}
stream.ensure(3)
stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n)
}
func (stream *Stream) WriteInt8(nval int8) {
if stream.Available() < 4 {
stream.Flush()
}
stream.ensure(4)
n := stream.n
var val uint8
if (nval < 0) {
if nval < 0 {
val = uint8(-nval)
stream.buf[n] = '-'
n++
@ -95,15 +56,13 @@ func (stream *Stream) WriteInt8(nval int8) {
}
func (stream *Stream) WriteUint16(val uint16) {
if stream.Available() < 5 {
stream.Flush()
}
stream.ensure(5)
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n)
return
}
r1 := val - q1 * 1000;
r1 := val - q1*1000
n := writeFirstBuf(stream.buf, DIGITS[q1], stream.n)
writeBuf(stream.buf, DIGITS[r1], n)
stream.n = n + 3
@ -111,12 +70,10 @@ func (stream *Stream) WriteUint16(val uint16) {
}
func (stream *Stream) WriteInt16(nval int16) {
if stream.Available() < 6 {
stream.Flush()
}
stream.ensure(6)
n := stream.n
var val uint16
if (nval < 0) {
if nval < 0 {
val = uint16(-nval)
stream.buf[n] = '-'
n++
@ -128,7 +85,7 @@ func (stream *Stream) WriteInt16(nval int16) {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return
}
r1 := val - q1 * 1000;
r1 := val - q1*1000
n = writeFirstBuf(stream.buf, DIGITS[q1], n)
writeBuf(stream.buf, DIGITS[r1], n)
stream.n = n + 3
@ -136,16 +93,14 @@ func (stream *Stream) WriteInt16(nval int16) {
}
func (stream *Stream) WriteUint32(val uint32) {
if stream.Available() < 10 {
stream.Flush()
}
stream.ensure(10)
n := stream.n
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return
}
r1 := val - q1 * 1000;
r1 := val - q1*1000
q2 := q1 / 1000
if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n)
@ -153,29 +108,27 @@ func (stream *Stream) WriteUint32(val uint32) {
stream.n = n + 3
return
}
r2 := q1 - q2 * 1000
r2 := q1 - q2*1000
q3 := q2 / 1000
if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n)
} else {
r3 := q2 - q3 * 1000
r3 := q2 - q3*1000
stream.buf[n] = byte(q3 + '0')
n++
writeBuf(stream.buf, DIGITS[r3], n)
n += 3
}
writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3)
writeBuf(stream.buf, DIGITS[r1], n+3)
stream.n = n + 6
}
func (stream *Stream) WriteInt32(nval int32) {
if stream.Available() < 11 {
stream.Flush()
}
stream.ensure(11)
n := stream.n
var val uint32
if (nval < 0) {
if nval < 0 {
val = uint32(-nval)
stream.buf[n] = '-'
n++
@ -187,7 +140,7 @@ func (stream *Stream) WriteInt32(nval int32) {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return
}
r1 := val - q1 * 1000;
r1 := val - q1*1000
q2 := q1 / 1000
if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n)
@ -195,33 +148,31 @@ func (stream *Stream) WriteInt32(nval int32) {
stream.n = n + 3
return
}
r2 := q1 - q2 * 1000
r2 := q1 - q2*1000
q3 := q2 / 1000
if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n)
} else {
r3 := q2 - q3 * 1000
r3 := q2 - q3*1000
stream.buf[n] = byte(q3 + '0')
n++
writeBuf(stream.buf, DIGITS[r3], n)
n += 3
}
writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3)
writeBuf(stream.buf, DIGITS[r1], n+3)
stream.n = n + 6
}
func (stream *Stream) WriteUint64(val uint64) {
if stream.Available() < 20 {
stream.Flush()
}
stream.ensure(20)
n := stream.n
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return
}
r1 := val - q1 * 1000;
r1 := val - q1*1000
q2 := q1 / 1000
if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n)
@ -229,61 +180,59 @@ func (stream *Stream) WriteUint64(val uint64) {
stream.n = n + 3
return
}
r2 := q1 - q2 * 1000
r2 := q1 - q2*1000
q3 := q2 / 1000
if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n)
writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3)
writeBuf(stream.buf, DIGITS[r1], n+3)
stream.n = n + 6
return
}
r3 := q2 - q3 * 1000
r3 := q2 - q3*1000
q4 := q3 / 1000
if q4 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q3], n)
writeBuf(stream.buf, DIGITS[r3], n)
writeBuf(stream.buf, DIGITS[r2], n + 3)
writeBuf(stream.buf, DIGITS[r1], n + 6)
writeBuf(stream.buf, DIGITS[r2], n+3)
writeBuf(stream.buf, DIGITS[r1], n+6)
stream.n = n + 9
return
}
r4 := q3 - q4 * 1000
r4 := q3 - q4*1000
q5 := q4 / 1000
if q5 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q4], n)
writeBuf(stream.buf, DIGITS[r4], n)
writeBuf(stream.buf, DIGITS[r3], n + 3)
writeBuf(stream.buf, DIGITS[r2], n + 6)
writeBuf(stream.buf, DIGITS[r1], n + 9)
writeBuf(stream.buf, DIGITS[r3], n+3)
writeBuf(stream.buf, DIGITS[r2], n+6)
writeBuf(stream.buf, DIGITS[r1], n+9)
stream.n = n + 12
return
}
r5 := q4 - q5 * 1000
r5 := q4 - q5*1000
q6 := q5 / 1000
if q6 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q5], n)
} else {
n = writeFirstBuf(stream.buf, DIGITS[q6], n)
r6 := q5 - q6 * 1000
r6 := q5 - q6*1000
writeBuf(stream.buf, DIGITS[r6], n)
n += 3
}
writeBuf(stream.buf, DIGITS[r5], n)
writeBuf(stream.buf, DIGITS[r4], n + 3)
writeBuf(stream.buf, DIGITS[r3], n + 6)
writeBuf(stream.buf, DIGITS[r2], n + 9)
writeBuf(stream.buf, DIGITS[r1], n + 12)
writeBuf(stream.buf, DIGITS[r4], n+3)
writeBuf(stream.buf, DIGITS[r3], n+6)
writeBuf(stream.buf, DIGITS[r2], n+9)
writeBuf(stream.buf, DIGITS[r1], n+12)
stream.n = n + 15
}
func (stream *Stream) WriteInt64(nval int64) {
if stream.Available() < 20 {
stream.Flush()
}
stream.ensure(20)
n := stream.n
var val uint64
if (nval < 0) {
if nval < 0 {
val = uint64(-nval)
stream.buf[n] = '-'
n++
@ -295,7 +244,7 @@ func (stream *Stream) WriteInt64(nval int64) {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return
}
r1 := val - q1 * 1000;
r1 := val - q1*1000
q2 := q1 / 1000
if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n)
@ -303,52 +252,52 @@ func (stream *Stream) WriteInt64(nval int64) {
stream.n = n + 3
return
}
r2 := q1 - q2 * 1000
r2 := q1 - q2*1000
q3 := q2 / 1000
if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n)
writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3)
writeBuf(stream.buf, DIGITS[r1], n+3)
stream.n = n + 6
return
}
r3 := q2 - q3 * 1000
r3 := q2 - q3*1000
q4 := q3 / 1000
if q4 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q3], n)
writeBuf(stream.buf, DIGITS[r3], n)
writeBuf(stream.buf, DIGITS[r2], n + 3)
writeBuf(stream.buf, DIGITS[r1], n + 6)
writeBuf(stream.buf, DIGITS[r2], n+3)
writeBuf(stream.buf, DIGITS[r1], n+6)
stream.n = n + 9
return
}
r4 := q3 - q4 * 1000
r4 := q3 - q4*1000
q5 := q4 / 1000
if q5 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q4], n)
writeBuf(stream.buf, DIGITS[r4], n)
writeBuf(stream.buf, DIGITS[r3], n + 3)
writeBuf(stream.buf, DIGITS[r2], n + 6)
writeBuf(stream.buf, DIGITS[r1], n + 9)
writeBuf(stream.buf, DIGITS[r3], n+3)
writeBuf(stream.buf, DIGITS[r2], n+6)
writeBuf(stream.buf, DIGITS[r1], n+9)
stream.n = n + 12
return
}
r5 := q4 - q5 * 1000
r5 := q4 - q5*1000
q6 := q5 / 1000
if q6 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q5], n)
} else {
stream.buf[n] = byte(q6 + '0')
n++
r6 := q5 - q6 * 1000
r6 := q5 - q6*1000
writeBuf(stream.buf, DIGITS[r6], n)
n += 3
}
writeBuf(stream.buf, DIGITS[r5], n)
writeBuf(stream.buf, DIGITS[r4], n + 3)
writeBuf(stream.buf, DIGITS[r3], n + 6)
writeBuf(stream.buf, DIGITS[r2], n + 9)
writeBuf(stream.buf, DIGITS[r1], n + 12)
writeBuf(stream.buf, DIGITS[r4], n+3)
writeBuf(stream.buf, DIGITS[r3], n+6)
writeBuf(stream.buf, DIGITS[r2], n+9)
writeBuf(stream.buf, DIGITS[r1], n+12)
stream.n = n + 15
}
@ -358,4 +307,4 @@ func (stream *Stream) WriteInt(val int) {
func (stream *Stream) WriteUint(val uint) {
stream.WriteUint64(uint64(val))
}
}
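The DIGITS table initialised above packs each three-digit group into one uint32: the low three bytes hold the ASCII characters of the hundreds, tens and ones digits, and the high byte records how many leading '0' bytes writeFirstBuf may skip for short numbers. A self-contained sketch of that packing (the i < 100 branch is an assumption, since that part of the init hunk is elided above):

package main

import "fmt"

// pack mirrors the DIGITS initialisation shown in the diff.
func pack(i uint32) uint32 {
	v := ((i/100 + '0') << 16) + ((i/10%10 + '0') << 8) + i%10 + '0'
	if i < 10 {
		v += 2 << 24 // one significant digit: skip two leading '0' bytes
	} else if i < 100 {
		v += 1 << 24 // assumed: two significant digits, skip one leading '0' byte
	}
	return v
}

func main() {
	v := pack(45)
	fmt.Printf("bytes %c%c%c, skip %d\n", byte(v>>16), byte(v>>8), byte(v), v>>24)
	// Prints: bytes 045, skip 1 -> the value is emitted as "45".
}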

@ -1,11 +1,11 @@
package jsoniter
import (
"testing"
"github.com/json-iterator/go/require"
"encoding/json"
"bytes"
"encoding/json"
"github.com/json-iterator/go/require"
"io/ioutil"
"testing"
)
func Test_new_decoder(t *testing.T) {
@ -43,4 +43,18 @@ func Test_new_encoder(t *testing.T) {
encoder2 := NewEncoder(buf2)
encoder2.Encode([]int{1})
should.Equal("[1]", buf2.String())
}
}
func Test_use_number(t *testing.T) {
should := require.New(t)
decoder1 := json.NewDecoder(bytes.NewBufferString(`123`))
decoder1.UseNumber()
decoder2 := NewDecoder(bytes.NewBufferString(`123`))
decoder2.UseNumber()
var obj1 interface{}
should.Nil(decoder1.Decode(&obj1))
should.Equal(json.Number("123"), obj1)
var obj2 interface{}
should.Nil(decoder2.Decode(&obj2))
should.Equal(json.Number("123"), obj2)
}
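The UseNumber test above mirrors encoding/json. The practical reason to enable it: numbers decoded into interface{} otherwise become float64, which cannot represent integers above 2^53 exactly. A sketch under that assumption (output comments are expectations, not verified against this exact revision):

package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var v interface{}

	// Default decoding: the literal becomes a float64 and is rounded.
	if err := jsoniter.UnmarshalFromString(`9007199254740993`, &v); err != nil {
		panic(err)
	}
	fmt.Printf("%T %v\n", v, v) // expected: float64 9.007199254740992e+15

	// With UseNumber the literal is preserved as json.Number.
	dec := jsoniter.NewDecoder(strings.NewReader(`9007199254740993`))
	dec.UseNumber()
	if err := dec.Decode(&v); err != nil {
		panic(err)
	}
	fmt.Printf("%T %v\n", v, v) // expected: json.Number 9007199254740993
}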

@ -1,11 +1,11 @@
package jsoniter
import (
"encoding/json"
"testing"
"github.com/json-iterator/go/require"
"bytes"
"encoding/json"
"github.com/json-iterator/go/require"
"io"
"testing"
)
func Test_empty_array(t *testing.T) {
@ -84,7 +84,7 @@ func Test_read_array_with_any_iterator(t *testing.T) {
func Test_wrap_array(t *testing.T) {
should := require.New(t)
any := Wrap([]int{1,2,3})
any := Wrap([]int{1, 2, 3})
should.Equal("[1,2,3]", any.ToString())
var element Any
var elements []int
@ -93,9 +93,9 @@ func Test_wrap_array(t *testing.T) {
elements = append(elements, element.ToInt())
}
should.Equal([]int{1, 2, 3}, elements)
any = Wrap([]int{1,2,3})
any = Wrap([]int{1, 2, 3})
should.Equal(3, any.Size())
any = Wrap([]int{1,2,3})
any = Wrap([]int{1, 2, 3})
should.Equal(2, any.Get(1).ToInt())
}
@ -103,7 +103,7 @@ func Test_array_lazy_any_get(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("[1,[2,3],4]")
should.Nil(err)
should.Equal(3, any.Get(1,1).ToInt())
should.Equal(3, any.Get(1, 1).ToInt())
should.Equal("[1,[2,3],4]", any.ToString())
}
@ -111,25 +111,25 @@ func Test_array_lazy_any_get_all(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("[[1],[2],[3,4]]")
should.Nil(err)
should.Equal("[1,2,3]", any.Get('*',0).ToString())
should.Equal("[1,2,3]", any.Get('*', 0).ToString())
}
func Test_array_wrapper_any_get_all(t *testing.T) {
should := require.New(t)
any := wrapArray([][]int{
[]int{1, 2},
[]int{3, 4},
[]int{5, 6},
{1, 2},
{3, 4},
{5, 6},
})
should.Equal("[1,3,5]", any.Get('*',0).ToString())
should.Equal("[1,3,5]", any.Get('*', 0).ToString())
}
func Test_array_lazy_any_get_invalid(t *testing.T) {
should := require.New(t)
any, err := UnmarshalAnyFromString("[]")
should.Nil(err)
should.Equal(Invalid, any.Get(1,1).ValueType())
should.NotNil(any.Get(1,1).LastError())
should.Equal(Invalid, any.Get(1, 1).ValueType())
should.NotNil(any.Get(1, 1).LastError())
should.Equal(Invalid, any.Get("1").ValueType())
should.NotNil(any.Get("1").LastError())
}
@ -244,7 +244,7 @@ func Test_write_val_empty_array(t *testing.T) {
func Test_write_array_of_interface_in_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field []interface{}
Field []interface{}
Field2 string
}
val := TestObject{[]interface{}{1, 2}, ""}
@ -264,6 +264,27 @@ func Test_json_RawMessage(t *testing.T) {
should.Equal(`[1,2,3]`, str)
}
func Test_encode_byte_array(t *testing.T) {
should := require.New(t)
bytes, err := json.Marshal([]byte{1, 2, 3})
should.Nil(err)
should.Equal(`"AQID"`, string(bytes))
bytes, err = Marshal([]byte{1, 2, 3})
should.Nil(err)
should.Equal(`"AQID"`, string(bytes))
}
func Test_decode_byte_array(t *testing.T) {
should := require.New(t)
data := []byte{}
err := json.Unmarshal([]byte(`"AQID"`), &data)
should.Nil(err)
should.Equal([]byte{1, 2, 3}, data)
err = Unmarshal([]byte(`"AQID"`), &data)
should.Nil(err)
should.Equal([]byte{1, 2, 3}, data)
}
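The "AQID" literal in the two byte-array tests above is simply the standard base64 encoding of the bytes {1, 2, 3}; []byte values round-trip through base64 strings, the same convention encoding/json uses:

package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	// 0x01 0x02 0x03 encodes to "AQID", matching the expected JSON string.
	fmt.Println(base64.StdEncoding.EncodeToString([]byte{1, 2, 3}))
}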
func Benchmark_jsoniter_array(b *testing.B) {
b.ReportAllocs()
input := []byte(`[1,2,3,4,5,6,7,8,9]`)

@ -1,9 +1,9 @@
package jsoniter
import (
"testing"
"bytes"
"github.com/json-iterator/go/require"
"testing"
)
func Test_true(t *testing.T) {
@ -38,7 +38,6 @@ func Test_write_true_false(t *testing.T) {
should.Equal("truefalse", buf.String())
}
func Test_write_val_bool(t *testing.T) {
should := require.New(t)
buf := &bytes.Buffer{}
@ -47,4 +46,4 @@ func Test_write_val_bool(t *testing.T) {
stream.Flush()
should.Nil(stream.Error)
should.Equal("true", buf.String())
}
}

@ -1,13 +1,13 @@
package jsoniter
import (
"encoding/json"
"github.com/json-iterator/go/require"
"reflect"
"strconv"
"testing"
"time"
"unsafe"
"github.com/json-iterator/go/require"
"encoding/json"
)
func Test_customize_type_decoder(t *testing.T) {
@ -45,6 +45,7 @@ func Test_customize_type_encoder(t *testing.T) {
}
func Test_customize_byte_array_encoder(t *testing.T) {
CleanEncoders()
should := require.New(t)
RegisterTypeEncoder("[]uint8", func(ptr unsafe.Pointer, stream *Stream) {
t := *((*[]byte)(ptr))
@ -178,7 +179,7 @@ func (obj *ObjectImplementedUnmarshaler) UnmarshalJSON([]byte) error {
func Test_unmarshaler(t *testing.T) {
type TestObject struct {
Field *ObjectImplementedUnmarshaler
Field *ObjectImplementedUnmarshaler
Field2 string
}
should := require.New(t)
@ -195,7 +196,7 @@ func Test_unmarshaler(t *testing.T) {
func Test_unmarshaler_and_decoder(t *testing.T) {
type TestObject struct {
Field *ObjectImplementedUnmarshaler
Field *ObjectImplementedUnmarshaler
Field2 string
}
should := require.New(t)
@ -212,4 +213,4 @@ func Test_unmarshaler_and_decoder(t *testing.T) {
err = Unmarshal([]byte(`{"Field":"hello"}`), &obj)
should.Nil(err)
should.Equal(10, int(*obj.Field))
}
}

@ -1,9 +1,10 @@
package jsoniter
import (
"encoding/json"
"fmt"
"testing"
"github.com/json-iterator/go/require"
"testing"
)
func Test_bind_api_demo(t *testing.T) {
@ -22,3 +23,64 @@ func Test_iterator_api_demo(t *testing.T) {
}
fmt.Println(total)
}
type People struct {
Name string
Gender string
Age int
Address string
Mobile string
Country string
Height int
}
func jsoniterMarshal(p *People) error {
_, err := Marshal(p)
if nil != err {
return err
}
return nil
}
func stdMarshal(p *People) error {
_, err := json.Marshal(p)
if nil != err {
return err
}
return nil
}
func BenchmarkJosniterMarshal(b *testing.B) {
var p People
p.Address = "上海市徐汇区漕宝路"
p.Age = 30
p.Country = "中国"
p.Gender = "male"
p.Height = 170
p.Mobile = "18502120533"
p.Name = "Elvin"
b.ReportAllocs()
for i := 0; i < b.N; i++ {
err := jsoniterMarshal(&p)
if nil != err {
b.Error(err)
}
}
}
func BenchmarkStdMarshal(b *testing.B) {
var p People
p.Address = "上海市徐汇区漕宝路"
p.Age = 30
p.Country = "中国"
p.Gender = "male"
p.Height = 170
p.Mobile = "18502120533"
p.Name = "Elvin"
b.ReportAllocs()
for i := 0; i < b.N; i++ {
err := stdMarshal(&p)
if nil != err {
b.Error(err)
}
}
}
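Both benchmarks above exercise the same People value, so they can be compared directly with go test -bench Marshal -run NONE; allocation figures are reported because each benchmark calls b.ReportAllocs().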

@ -1,9 +1,9 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"io"
"testing"
"github.com/json-iterator/go/require"
)
func Test_string_end(t *testing.T) {

@ -1,12 +1,12 @@
package jsoniter
import (
"bytes"
"encoding/json"
"fmt"
"testing"
"github.com/json-iterator/go/require"
"bytes"
"strconv"
"testing"
)
func Test_read_big_float(t *testing.T) {
@ -46,14 +46,14 @@ func Test_read_float(t *testing.T) {
// streaming
t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
should := require.New(t)
iter := Parse(bytes.NewBufferString(input + ","), 2)
iter := Parse(bytes.NewBufferString(input+","), 2)
expected, err := strconv.ParseFloat(input, 32)
should.Nil(err)
should.Equal(float32(expected), iter.ReadFloat32())
})
t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
should := require.New(t)
iter := Parse(bytes.NewBufferString(input + ","), 2)
iter := Parse(bytes.NewBufferString(input+","), 2)
expected, err := strconv.ParseFloat(input, 64)
should.Nil(err)
should.Equal(expected, iter.ReadFloat64())
@ -85,7 +85,7 @@ func Test_wrap_float(t *testing.T) {
func Test_write_float32(t *testing.T) {
vals := []float32{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff,
-0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
-0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
for _, val := range vals {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
@ -118,7 +118,7 @@ func Test_write_float32(t *testing.T) {
func Test_write_float64(t *testing.T) {
vals := []float64{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff,
-0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
-0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
for _, val := range vals {
t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
should := require.New(t)
@ -158,6 +158,19 @@ func Test_read_float64_cursor(t *testing.T) {
should.Equal(float64(2), iter.Read())
}
func Test_read_float_scientific(t *testing.T) {
should := require.New(t)
var obj interface{}
should.Nil(UnmarshalFromString(`1e1`, &obj))
should.Equal(float64(10), obj)
should.Nil(json.Unmarshal([]byte(`1e1`), &obj))
should.Equal(float64(10), obj)
should.Nil(UnmarshalFromString(`1.0e1`, &obj))
should.Equal(float64(10), obj)
should.Nil(json.Unmarshal([]byte(`1.0e1`), &obj))
should.Equal(float64(10), obj)
}
func Benchmark_jsoniter_float(b *testing.B) {
b.ReportAllocs()
input := []byte(`1.1123,`)

@ -3,12 +3,12 @@ package jsoniter
import (
"bytes"
"encoding/json"
"testing"
"github.com/json-iterator/go/require"
"fmt"
"strconv"
"io/ioutil"
"github.com/json-iterator/go/require"
"io"
"io/ioutil"
"strconv"
"testing"
)
func Test_read_uint64_invalid(t *testing.T) {

@ -1,9 +1,10 @@
package jsoniter
import (
"testing"
"github.com/json-iterator/go/require"
"testing"
"unsafe"
"encoding/json"
)
func Test_write_array_of_interface(t *testing.T) {
@ -16,7 +17,7 @@ func Test_write_array_of_interface(t *testing.T) {
func Test_write_map_of_interface(t *testing.T) {
should := require.New(t)
val := map[string]interface{}{"hello":"world"}
val := map[string]interface{}{"hello": "world"}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"hello":"world"}`, str)
@ -27,7 +28,7 @@ func Test_write_map_of_interface_in_struct(t *testing.T) {
Field map[string]interface{}
}
should := require.New(t)
val := TestObject{map[string]interface{}{"hello":"world"}}
val := TestObject{map[string]interface{}{"hello": "world"}}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"Field":{"hello":"world"}}`, str)
@ -35,11 +36,11 @@ func Test_write_map_of_interface_in_struct(t *testing.T) {
func Test_write_map_of_interface_in_struct_with_two_fields(t *testing.T) {
type TestObject struct {
Field map[string]interface{}
Field map[string]interface{}
Field2 string
}
should := require.New(t)
val := TestObject{map[string]interface{}{"hello":"world"}, ""}
val := TestObject{map[string]interface{}{"hello": "world"}, ""}
str, err := MarshalToString(val)
should.Nil(err)
should.Contains(str, `"Field":{"hello":"world"}`)
@ -59,7 +60,7 @@ func Test_write_map_of_custom_interface(t *testing.T) {
should := require.New(t)
myStr := MyString("world")
should.Equal("world", myStr.Hello())
val := map[string]MyInterface{"hello":myStr}
val := map[string]MyInterface{"hello": myStr}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"hello":"world"}`, str)
@ -137,4 +138,18 @@ func Test_encode_object_contain_non_empty_interface(t *testing.T) {
str, err := MarshalToString(obj)
should.Nil(err)
should.Equal(`{"Field":"hello"}`, str)
}
func Test_nil_non_empty_interface(t *testing.T) {
CleanEncoders()
CleanDecoders()
type TestObject struct {
Field []MyInterface
}
should := require.New(t)
obj := TestObject{}
b := []byte(`{"Field":["AAA"]}`)
should.NotNil(json.Unmarshal(b, &obj))
should.NotNil(Unmarshal(b, &obj))
}

@ -1,8 +1,10 @@
package jsoniter
import (
"testing"
"github.com/json-iterator/go/require"
"math/big"
"testing"
"encoding/json"
)
func Test_read_map(t *testing.T) {
@ -40,12 +42,12 @@ func Test_wrap_map(t *testing.T) {
vals[k] = v.ToString()
}
}
should.Equal(map[string]string{"Field1":"hello"}, vals)
should.Equal(map[string]string{"Field1": "hello"}, vals)
}
func Test_map_wrapper_any_get_all(t *testing.T) {
should := require.New(t)
any := Wrap(map[string][]int{"Field1": []int{1, 2}})
any := Wrap(map[string][]int{"Field1": {1, 2}})
should.Equal(`{"Field1":1}`, any.Get('*', 0).ToString())
}
@ -66,4 +68,62 @@ func Test_slice_of_map(t *testing.T) {
val = []map[string]string{}
should.Nil(UnmarshalFromString(str, &val))
should.Equal("2", val[0]["1"])
}
}
func Test_encode_int_key_map(t *testing.T) {
should := require.New(t)
val := map[int]string{1: "2"}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"1":"2"}`, str)
}
func Test_decode_int_key_map(t *testing.T) {
should := require.New(t)
var val map[int]string
should.Nil(UnmarshalFromString(`{"1":"2"}`, &val))
should.Equal(map[int]string{1: "2"}, val)
}
func Test_encode_TextMarshaler_key_map(t *testing.T) {
should := require.New(t)
f, _, _ := big.ParseFloat("1", 10, 64, big.ToZero)
val := map[*big.Float]string{f: "2"}
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"1":"2"}`, str)
}
func Test_decode_TextMarshaler_key_map(t *testing.T) {
should := require.New(t)
var val map[*big.Float]string
should.Nil(UnmarshalFromString(`{"1":"2"}`, &val))
str, err := MarshalToString(val)
should.Nil(err)
should.Equal(`{"1":"2"}`, str)
}
func Test_map_key_with_escaped_char(t *testing.T) {
type Ttest struct {
Map map[string]string
}
var jsonBytes = []byte(`
{
"Map":{
"k\"ey": "val"
}
}`)
should := require.New(t)
{
var obj Ttest
should.Nil(json.Unmarshal(jsonBytes, &obj))
should.Equal(map[string]string{"k\"ey":"val"}, obj.Map)
}
{
var obj Ttest
should.Nil(Unmarshal(jsonBytes, &obj))
should.Equal(map[string]string{"k\"ey":"val"}, obj.Map)
}
}

@ -1,9 +1,9 @@
package jsoniter
import (
"testing"
"github.com/json-iterator/go/require"
"bytes"
"github.com/json-iterator/go/require"
"testing"
)
func Test_read_null(t *testing.T) {

@ -1,10 +1,10 @@
package jsoniter
import (
"encoding/json"
"testing"
"github.com/json-iterator/go/require"
"bytes"
"encoding/json"
"github.com/json-iterator/go/require"
"testing"
)
func Test_empty_object(t *testing.T) {
@ -100,7 +100,7 @@ func Test_object_any_lazy_iterator(t *testing.T) {
should.False(hasNext)
vals[k] = v.ToString()
should.Equal(map[string]string{"a":"b", "c":"d"}, vals)
should.Equal(map[string]string{"a": "b", "c": "d"}, vals)
vals = map[string]string{}
for next, hasNext := any.IterateObject(); hasNext; {
k, v, hasNext = next()
@ -108,7 +108,7 @@ func Test_object_any_lazy_iterator(t *testing.T) {
vals[k] = v.ToString()
}
}
should.Equal(map[string]string{"a":"b", "c":"d"}, vals)
should.Equal(map[string]string{"a": "b", "c": "d"}, vals)
}
func Test_object_any_with_two_lazy_iterators(t *testing.T) {
@ -194,7 +194,7 @@ func Test_wrap_object(t *testing.T) {
vals[k] = v.ToString()
}
}
should.Equal(map[string]string{"Field1":"hello"}, vals)
should.Equal(map[string]string{"Field1": "hello"}, vals)
}
func Test_object_wrapper_any_get_all(t *testing.T) {

@ -1,8 +1,8 @@
package jsoniter
import (
"testing"
"github.com/json-iterator/go/require"
"testing"
)
func Test_encode_optional_int_pointer(t *testing.T) {
@ -43,4 +43,4 @@ func Test_encode_struct_with_optional_field(t *testing.T) {
should.Nil(err)
should.Contains(str, `"field1":null`)
should.Contains(str, `"field2":"world"`)
}
}

@ -1,8 +1,8 @@
package jsoniter
import (
"testing"
"fmt"
"testing"
)
func Test_reflect_str(t *testing.T) {
@ -151,4 +151,4 @@ func Test_reflect_bool(t *testing.T) {
fmt.Println(iter.Error)
t.Fatal(val)
}
}
}

@ -1,9 +1,9 @@
package jsoniter
import (
"testing"
"github.com/json-iterator/go/require"
"bytes"
"github.com/json-iterator/go/require"
"testing"
)
func Test_decode_one_field_struct(t *testing.T) {
@ -149,9 +149,9 @@ func Test_write_val_one_field_struct(t *testing.T) {
func Test_mixed(t *testing.T) {
should := require.New(t)
type AA struct {
ID int `json:"id"`
ID int `json:"id"`
Payload map[string]interface{} `json:"payload"`
buf *bytes.Buffer `json:"-"`
buf *bytes.Buffer `json:"-"`
}
aa := AA{}
err := UnmarshalFromString(` {"id":1, "payload":{"account":"123","password":"456"}}`, &aa)
@ -230,7 +230,7 @@ func Test_anonymous_struct_marshal(t *testing.T) {
type TestObject struct {
Field string
}
str, err := MarshalToString(struct{
str, err := MarshalToString(struct {
TestObject
Field int
}{
@ -238,4 +238,4 @@ func Test_anonymous_struct_marshal(t *testing.T) {
})
should.Nil(err)
should.Equal(`{"Field":100}`, str)
}
}

@ -3,9 +3,9 @@ package jsoniter
import (
"encoding/json"
"fmt"
"github.com/json-iterator/go/require"
"testing"
"unsafe"
"github.com/json-iterator/go/require"
)
func Test_decode_slice(t *testing.T) {

jsoniter_stream_test.go (new file, 54 lines)

@ -0,0 +1,54 @@
package jsoniter
import (
"github.com/json-iterator/go/require"
"testing"
)
func Test_writeByte_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 1)
stream.writeByte('1')
should.Equal("1", string(stream.Buffer()))
should.Equal(1, len(stream.buf))
stream.writeByte('2')
should.Equal("12", string(stream.Buffer()))
should.Equal(2, len(stream.buf))
stream.writeThreeBytes('3', '4', '5')
should.Equal("12345", string(stream.Buffer()))
}
func Test_writeBytes_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 1)
stream.Write([]byte{'1', '2'})
should.Equal("12", string(stream.Buffer()))
should.Equal(3, len(stream.buf))
stream.Write([]byte{'3', '4', '5', '6', '7'})
should.Equal("1234567", string(stream.Buffer()))
should.Equal(8, len(stream.buf))
}
func Test_writeIndention_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 1)
stream.IndentionStep = 2
stream.WriteVal([]int{1, 2, 3})
should.Equal("[\n 1,\n 2,\n 3\n]", string(stream.Buffer()))
}
func Test_writeRaw_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 1)
stream.WriteRaw("123")
should.Nil(stream.Error)
should.Equal("123", string(stream.Buffer()))
}
func Test_writeString_should_grow_buffer(t *testing.T) {
should := require.New(t)
stream := NewStream(nil, 0)
stream.WriteString("123")
should.Nil(stream.Error)
should.Equal(`"123"`, string(stream.Buffer()))
}

@ -3,15 +3,15 @@ package jsoniter
import (
"bytes"
"encoding/json"
"testing"
"github.com/json-iterator/go/require"
"fmt"
"github.com/json-iterator/go/require"
"testing"
)
func Test_read_normal_string(t *testing.T) {
cases := map[string]string{
`"0123456789012345678901234567890123456789"`: `0123456789012345678901234567890123456789`,
`""`: ``,
`""`: ``,
`"hello"`: `hello`,
}
for input, output := range cases {
@ -40,8 +40,8 @@ func Test_read_normal_string(t *testing.T) {
func Test_read_exotic_string(t *testing.T) {
cases := map[string]string{
`"hel\"lo"`: `hel"lo`,
`"hel\nlo"`: "hel\nlo",
`"hel\"lo"`: `hel"lo`,
`"hel\nlo"`: "hel\nlo",
`"\u4e2d\u6587"`: "中文",
`"\ud83d\udc4a"`: "\xf0\x9f\x91\x8a", // surrogate
}
@ -105,6 +105,13 @@ func Test_write_val_string(t *testing.T) {
should.Equal(`"hello"`, buf.String())
}
func Test_decode_slash(t *testing.T) {
should := require.New(t)
var obj interface{}
should.NotNil(json.Unmarshal([]byte("\\"), &obj))
should.NotNil(UnmarshalFromString("\\", &obj))
}
func Benchmark_jsoniter_unicode(b *testing.B) {
for n := 0; n < b.N; n++ {
iter := ParseString(`"\ud83d\udc4a"`)