mirror of https://github.com/json-iterator/go.git synced 2025-06-15 22:50:24 +02:00

41 Commits
1.0.3 ... 1.0.5

Author SHA1 Message Date
28452fcdec cow cache is not same, as map read will modify the underlying map. use sync.Map for 1.9 and above, and mutex if sync.Map not available 2018-01-28 17:00:11 +08:00
ea8c33040f fix #228 2018-01-27 16:25:48 +08:00
358cfc3929 Merge branch 'master' of https://github.com/json-iterator/go 2018-01-25 14:48:02 +08:00
c39a632e65 fix #227, fix empty json.Number 2018-01-25 14:47:50 +08:00
e31252f2e2 Merge pull request #225 from mgood/empty-array-fix
Fix encoding 0-length arrays
2018-01-23 23:31:07 +08:00
807e4a8b20 Optimize 0-length array case
Instead of checking the array length in encode, this can be checked up
front in `encoderOfArray` since the array type has a fixed length
determined at compile time. So return an `emptyArrayEncoder` that simply
writes an empty array to the stream.
2018-01-22 14:03:50 -08:00
e78b7e89b6 Merge branch 'master' of https://github.com/json-iterator/go 2018-01-21 20:59:32 +08:00
945d1aaa19 fix #140 uintptr will not lock the address from gc 2018-01-21 20:59:18 +08:00
ba3857729b Fix encoding 0-length arrays
The array encoder assumed that arrays had at least one value, so it
would serialize them with a zero-value for the array, such as `[0]`.

This adds a test to reproduce the issue, and updates the encoder to
write an empty array if the length is 0.
2018-01-16 11:02:03 -08:00
c3ed5e85e0 Merge pull request #222 from neverlee/mydev
Add an OnlyTaggedField option
2018-01-09 18:30:25 +08:00
c27f6f9350 config: add OnlyTaggedField config, only process tagged fields in struct 2018-01-09 17:29:47 +08:00
0ab880662f fix #219 should check real value for empty instead of just the pointer for nested field 2018-01-07 13:57:46 +08:00
6dad2de6cc fix build 2018-01-04 17:18:16 +08:00
11c1cce0d8 fix #217 when input is null, non-decodable type should not be considered as error, to be compatible with stdlib 2018-01-04 16:19:26 +08:00
96fcb84835 fix #215 lazy load more 2017-12-23 10:52:17 +08:00
e7a8aea845 Merge branch 'master' of https://github.com/json-iterator/go 2017-12-21 22:18:40 +08:00
60a9df5ebc fix #214 report EOF like stdlib 2017-12-21 22:18:28 +08:00
7b060ec866 Merge pull request #210 from coocood/master
add ReadNumber for Iterator.
2017-12-18 08:22:47 +09:00
25f147f530 add ReadNumber for Iterator. 2017-12-17 16:44:04 +08:00
a9b9c73b4d fix #207 delay unsupported type error reporting 2017-12-15 10:13:11 +08:00
e0df39fda2 fix #206, do not allow nil pointer as unmarshal input 2017-12-14 17:18:05 +08:00
13f86432b8 do not use defer() in read int 2017-12-12 18:52:41 +08:00
d2a7335211 fix #202 #203 #204 map encoder not properly initialized 2017-12-08 21:18:59 +08:00
b2a706d14b reverse last commit, need a better fix 2017-12-08 06:15:49 +08:00
23078876c5 fix #203 consider MarshalJSON as non empty 2017-12-07 23:20:43 +08:00
051434fab7 fix #198, use dep for vendoring 2017-11-30 10:42:24 +08:00
be6688fc1a fix #200, do not use symbolic link in the code 2017-11-30 10:34:05 +08:00
ff2b70c1db support config level extension 2017-11-23 00:09:35 +08:00
f7279a603e fix out of range 2017-11-15 23:34:21 +08:00
9f088cbcc4 fix #195 when decode float as int, report it clearly 2017-11-15 23:25:12 +08:00
3c0e5762c4 fix #196 do not hard code 1 << 49 2017-11-15 23:15:31 +08:00
d394a135a1 #197 fix place holder encoder to use EncodeInterface, WriteToStream is unsafe when the real encoder is unknown 2017-11-15 22:56:23 +08:00
9fddff05f0 try to fix #194 with larger array 2017-11-11 08:31:44 +08:00
b1b003864e expose OptionalEncoder&OptionalDecoder; add attachment to Stream&Iterator for customized decoder/encoder 2017-11-08 11:41:45 +08:00
aed5a81f09 fix #190 handle empty input 2017-10-31 22:47:02 +08:00
f1258b01aa fix #191 do not always assume the object field is simple string 2017-10-31 22:38:41 +08:00
fbd210edfc Merge pull request #189 from ggaaooppeenngg/compatible-with-map
Fix standard compatibility
2017-10-26 18:39:38 -05:00
640251ab91 Fix standard compatibility
Non-nil but empty map with omitempty should be ignored.

Signed-off-by: Peng Gao <peng.gao.dut@gmail.com>
2017-10-27 01:43:41 +08:00
06b2a7cf1d Merge pull request #188 from ggaaooppeenngg/compatible
Fix standard compatibility
2017-10-26 06:41:01 -05:00
5fffb9b8f7 Fix standard compatibility
Encode has trailing newline at the end.

Signed-off-by: Peng Gao <peng.gao.dut@gmail.com>
2017-10-26 15:15:36 +08:00
7e3b776024 change jsoniter-sloppy to jsoniter_sloppy 2017-10-23 15:03:44 +08:00
41 changed files with 1523 additions and 792 deletions

5
.gitignore vendored

@ -1,3 +1,4 @@
.idea
/vendor
/bug_test.go
/coverage.txt
/profile.out
/.idea

33
Gopkg.lock generated Normal file

@ -0,0 +1,33 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
name = "github.com/davecgh/go-spew"
packages = ["spew"]
revision = "346938d642f2ec3594ed81d874461961cd0faa76"
version = "v1.1.0"
[[projects]]
branch = "master"
name = "github.com/google/gofuzz"
packages = ["."]
revision = "24818f796faf91cd76ec7bddd72458fbced7a6c1"
[[projects]]
name = "github.com/pmezard/go-difflib"
packages = ["difflib"]
revision = "792786c7400a136282c1664665ae0a8db921c6c2"
version = "v1.0.0"
[[projects]]
name = "github.com/stretchr/testify"
packages = ["assert","require"]
revision = "69483b4bd14f5845b5a1e55bca19e954e827f1d0"
version = "v1.1.4"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "f8b7cf3941d3792cbbd570bb53c093adaf774334d1162c651565c97a58dc9d09"
solver-name = "gps-cdcl"
solver-version = 1

33
Gopkg.toml Normal file

@ -0,0 +1,33 @@
# Gopkg.toml example
#
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
# name = "github.com/user/project"
# version = "1.0.0"
#
# [[constraint]]
# name = "github.com/user/project2"
# branch = "dev"
# source = "github.com/myfork/project2"
#
# [[override]]
# name = "github.com/x/y"
# version = "2.4.0"
[[constraint]]
name = "github.com/davecgh/go-spew"
version = "1.1.0"
[[constraint]]
branch = "master"
name = "github.com/google/gofuzz"
[[constraint]]
name = "github.com/stretchr/testify"
version = "1.1.4"

12
build.sh Executable file

@ -0,0 +1,12 @@
#!/bin/bash
set -e
set -x
if [ ! -d /tmp/build-golang/src/github.com/json-iterator ]; then
mkdir -p /tmp/build-golang/src/github.com/json-iterator
ln -s $PWD /tmp/build-golang/src/github.com/json-iterator/go
fi
export GOPATH=/tmp/build-golang
go get -u github.com/golang/dep/cmd/dep
cd /tmp/build-golang/src/github.com/json-iterator/go
exec $GOPATH/bin/dep ensure -update

40
compatible_test.go Normal file

@ -0,0 +1,40 @@
package jsoniter
import (
"bytes"
"encoding/json"
"testing"
"github.com/stretchr/testify/require"
)
// Standard Encoder has trailing newline.
func TestEncoderHasTrailingNewline(t *testing.T) {
should := require.New(t)
var buf, stdbuf bytes.Buffer
enc := ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
enc.Encode(1)
stdenc := json.NewEncoder(&stdbuf)
stdenc.Encode(1)
should.Equal(stdbuf.Bytes(), buf.Bytes())
}
// Non-nil but empty map should be ignored.
func TestOmitempty(t *testing.T) {
o := struct {
A string `json:"a,omitempty"`
B string `json:"b,omitempty"`
Annotations map[string]string `json:"annotations,omitempty"`
}{
A: "a",
B: "b",
Annotations: map[string]string{},
}
should := require.New(t)
var buf, stdbuf bytes.Buffer
enc := ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
enc.Encode(o)
stdenc := json.NewEncoder(&stdbuf)
stdenc.Encode(o)
should.Equal(string(stdbuf.Bytes()), string(buf.Bytes()))
}

View File

@ -71,6 +71,11 @@ type Decoder struct {
// Decode decode JSON into interface{}
func (adapter *Decoder) Decode(obj interface{}) error {
if adapter.iter.head == adapter.iter.tail && adapter.iter.reader != nil {
if !adapter.iter.loadMore() {
return io.EOF
}
}
adapter.iter.ReadVal(obj)
err := adapter.iter.Error
if err == io.EOF {
@ -110,6 +115,7 @@ type Encoder struct {
// Encode encode interface{} as JSON to io.Writer
func (adapter *Encoder) Encode(val interface{}) error {
adapter.stream.WriteVal(val)
adapter.stream.WriteRaw("\n")
adapter.stream.Flush()
return adapter.stream.Error
}
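Taken together, the two adapter changes above make the stream API match encoding/json for empty input and trailing newlines. A minimal usage sketch, not part of the diff, assuming the exported ConfigCompatibleWithStandardLibrary API:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Decoding from an empty reader now reports io.EOF, matching encoding/json (fix #214).
	var v interface{}
	dec := jsoniter.ConfigCompatibleWithStandardLibrary.NewDecoder(strings.NewReader(""))
	fmt.Println(dec.Decode(&v) == io.EOF) // true

	// Encode now appends a trailing newline, matching encoding/json (PR #188).
	var buf bytes.Buffer
	enc := jsoniter.ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
	enc.Encode(1)
	fmt.Printf("%q\n", buf.String()) // "1\n"
}
```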

View File

@ -1,6 +1,7 @@
package jsoniter
import (
"errors"
"fmt"
"io"
"reflect"
@ -157,6 +158,8 @@ func (iter *Iterator) readAny() Any {
return iter.readArrayAny()
case '-':
return iter.readNumberAny(false)
case 0:
return &invalidAny{baseAny{}, errors.New("input is empty")}
default:
return iter.readNumberAny(true)
}

View File

@ -1,6 +1,9 @@
package jsoniter
import "unsafe"
import (
"unsafe"
"io"
)
type numberLazyAny struct {
baseAny
@ -29,7 +32,9 @@ func (any *numberLazyAny) ToInt() int {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt()
any.err = iter.Error
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
@ -37,7 +42,9 @@ func (any *numberLazyAny) ToInt32() int32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt32()
any.err = iter.Error
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
@ -45,7 +52,9 @@ func (any *numberLazyAny) ToInt64() int64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt64()
any.err = iter.Error
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
@ -53,7 +62,9 @@ func (any *numberLazyAny) ToUint() uint {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint()
any.err = iter.Error
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
@ -61,7 +72,9 @@ func (any *numberLazyAny) ToUint32() uint32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint32()
any.err = iter.Error
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
@ -69,7 +82,9 @@ func (any *numberLazyAny) ToUint64() uint64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint64()
any.err = iter.Error
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
@ -77,7 +92,9 @@ func (any *numberLazyAny) ToFloat32() float32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadFloat32()
any.err = iter.Error
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
@ -85,7 +102,9 @@ func (any *numberLazyAny) ToFloat64() float64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadFloat64()
any.err = iter.Error
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}

View File

@ -5,31 +5,21 @@ import (
"errors"
"io"
"reflect"
"sync/atomic"
"unsafe"
)
// Config customize how the API should behave.
// The API is created from Config by Froze.
type Config struct {
IndentionStep int
MarshalFloatWith6Digits bool
EscapeHTML bool
SortMapKeys bool
UseNumber bool
TagKey string
ValidateJsonRawMessage bool
}
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
decoderCache unsafe.Pointer
encoderCache unsafe.Pointer
extensions []Extension
streamPool chan *Stream
iteratorPool chan *Iterator
IndentionStep int
MarshalFloatWith6Digits bool
EscapeHTML bool
SortMapKeys bool
UseNumber bool
TagKey string
OnlyTaggedField bool
ValidateJsonRawMessage bool
ObjectFieldMustBeSimpleString bool
}
// API the public interface of this package.
@ -46,6 +36,7 @@ type API interface {
NewEncoder(writer io.Writer) *Encoder
NewDecoder(reader io.Reader) *Decoder
Valid(data []byte) bool
RegisterExtension(extension Extension)
}
// ConfigDefault the default API
@ -62,21 +53,23 @@ var ConfigCompatibleWithStandardLibrary = Config{
// ConfigFastest marshals float with only 6 digits precision
var ConfigFastest = Config{
EscapeHTML: false,
MarshalFloatWith6Digits: true,
EscapeHTML: false,
MarshalFloatWith6Digits: true, // will lose precession
ObjectFieldMustBeSimpleString: true, // do not unescape object field
}.Froze()
// Froze forge API from config
func (cfg Config) Froze() API {
// TODO: cache frozen config
frozenConfig := &frozenConfig{
sortMapKeys: cfg.SortMapKeys,
indentionStep: cfg.IndentionStep,
streamPool: make(chan *Stream, 16),
iteratorPool: make(chan *Iterator, 16),
sortMapKeys: cfg.SortMapKeys,
indentionStep: cfg.IndentionStep,
objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
onlyTaggedField: cfg.OnlyTaggedField,
streamPool: make(chan *Stream, 16),
iteratorPool: make(chan *Iterator, 16),
}
atomic.StorePointer(&frozenConfig.decoderCache, unsafe.Pointer(&map[string]ValDecoder{}))
atomic.StorePointer(&frozenConfig.encoderCache, unsafe.Pointer(&map[string]ValEncoder{}))
frozenConfig.initCache()
if cfg.MarshalFloatWith6Digits {
frozenConfig.marshalFloatWith6Digits()
}
@ -128,7 +121,7 @@ func (cfg *frozenConfig) getTagKey() string {
return tagKey
}
func (cfg *frozenConfig) registerExtension(extension Extension) {
func (cfg *frozenConfig) RegisterExtension(extension Extension) {
cfg.extensions = append(cfg.extensions, extension)
}
@ -190,46 +183,6 @@ func (cfg *frozenConfig) escapeHTML() {
cfg.addEncoderToCache(reflect.TypeOf((*string)(nil)).Elem(), &htmlEscapedStringEncoder{})
}
func (cfg *frozenConfig) addDecoderToCache(cacheKey reflect.Type, decoder ValDecoder) {
done := false
for !done {
ptr := atomic.LoadPointer(&cfg.decoderCache)
cache := *(*map[reflect.Type]ValDecoder)(ptr)
copied := map[reflect.Type]ValDecoder{}
for k, v := range cache {
copied[k] = v
}
copied[cacheKey] = decoder
done = atomic.CompareAndSwapPointer(&cfg.decoderCache, ptr, unsafe.Pointer(&copied))
}
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey reflect.Type, encoder ValEncoder) {
done := false
for !done {
ptr := atomic.LoadPointer(&cfg.encoderCache)
cache := *(*map[reflect.Type]ValEncoder)(ptr)
copied := map[reflect.Type]ValEncoder{}
for k, v := range cache {
copied[k] = v
}
copied[cacheKey] = encoder
done = atomic.CompareAndSwapPointer(&cfg.encoderCache, ptr, unsafe.Pointer(&copied))
}
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey reflect.Type) ValDecoder {
ptr := atomic.LoadPointer(&cfg.decoderCache)
cache := *(*map[reflect.Type]ValDecoder)(ptr)
return cache[cacheKey]
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey reflect.Type) ValEncoder {
ptr := atomic.LoadPointer(&cfg.encoderCache)
cache := *(*map[reflect.Type]ValEncoder)(ptr)
return cache[cacheKey]
}
func (cfg *frozenConfig) cleanDecoders() {
typeDecoders = map[string]ValDecoder{}
fieldDecoders = map[string]ValDecoder{}
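The Config additions above include OnlyTaggedField (PR #222) and a per-config RegisterExtension on the API interface. A short sketch of how OnlyTaggedField is meant to be used; the struct and field names here are illustrative, not from the diff:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	type User struct {
		Name  string `json:"name"` // tagged: serialized
		Debug string // untagged: skipped when OnlyTaggedField is true
	}
	api := jsoniter.Config{OnlyTaggedField: true}.Froze()
	out, _ := api.MarshalToString(User{Name: "a", Debug: "b"})
	fmt.Println(out) // {"name":"a"}
}
```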

View File

@ -0,0 +1,51 @@
//+build go1.9
package jsoniter
import (
"reflect"
"sync"
)
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
objectFieldMustBeSimpleString bool
onlyTaggedField bool
decoderCache sync.Map
encoderCache sync.Map
extensions []Extension
streamPool chan *Stream
iteratorPool chan *Iterator
}
func (cfg *frozenConfig) initCache() {
cfg.decoderCache = sync.Map{}
cfg.encoderCache = sync.Map{}
}
func (cfg *frozenConfig) addDecoderToCache(cacheKey reflect.Type, decoder ValDecoder) {
cfg.decoderCache.Store(cacheKey, decoder)
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey reflect.Type, encoder ValEncoder) {
cfg.encoderCache.Store(cacheKey, encoder)
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey reflect.Type) ValDecoder {
decoder, found := cfg.decoderCache.Load(cacheKey)
if found {
return decoder.(ValDecoder)
}
return nil
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey reflect.Type) ValEncoder {
encoder, found := cfg.encoderCache.Load(cacheKey)
if found {
return encoder.(ValEncoder)
}
return nil
}

View File

@ -0,0 +1,54 @@
//+build !go1.9
package jsoniter
import (
"reflect"
"sync"
)
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
objectFieldMustBeSimpleString bool
onlyTaggedField bool
cacheLock *sync.RWMutex
decoderCache map[reflect.Type]ValDecoder
encoderCache map[reflect.Type]ValEncoder
extensions []Extension
streamPool chan *Stream
iteratorPool chan *Iterator
}
func (cfg *frozenConfig) initCache() {
cfg.cacheLock = &sync.RWMutex{}
cfg.decoderCache = map[reflect.Type]ValDecoder{}
cfg.encoderCache = map[reflect.Type]ValEncoder{}
}
func (cfg *frozenConfig) addDecoderToCache(cacheKey reflect.Type, decoder ValDecoder) {
cfg.cacheLock.Lock()
cfg.decoderCache[cacheKey] = decoder
cfg.cacheLock.Unlock()
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey reflect.Type, encoder ValEncoder) {
cfg.cacheLock.Lock()
cfg.encoderCache[cacheKey] = encoder
cfg.cacheLock.Unlock()
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey reflect.Type) ValDecoder {
cfg.cacheLock.RLock()
decoder, _ := cfg.decoderCache[cacheKey].(ValDecoder)
cfg.cacheLock.RUnlock()
return decoder
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey reflect.Type) ValEncoder {
cfg.cacheLock.RLock()
encoder, _ := cfg.encoderCache[cacheKey].(ValEncoder)
cfg.cacheLock.RUnlock()
return encoder
}

View File

@ -77,6 +77,7 @@ type Iterator struct {
captureStartedAt int
captured []byte
Error error
Attachment interface{} // open for customized decoder
}
// NewIterator creates an empty Iterator instance
@ -167,7 +168,7 @@ func (iter *Iterator) isObjectEnd() bool {
if c == '}' {
return true
}
iter.ReportError("isObjectEnd", "object ended prematurely")
iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c}))
return true
}

View File

@ -1,6 +1,7 @@
package jsoniter
import (
"encoding/json"
"io"
"math/big"
"strconv"
@ -339,3 +340,8 @@ func validateFloat(str string) string {
}
return ""
}
// ReadNumber read json.Number
func (iter *Iterator) ReadNumber() (ret json.Number) {
return json.Number(iter.readNumberAsString())
}
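A brief usage sketch for the new ReadNumber (PR #210); ParseString and ConfigDefault are assumed from the package's existing iterator API and are not part of this diff:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `12.5`)
	n := iter.ReadNumber() // json.Number("12.5") — digits preserved exactly, no float conversion
	fmt.Println(n)
}
```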

View File

@ -115,6 +115,7 @@ func (iter *Iterator) ReadUint32() (ret uint32) {
func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind := intDigits[c]
if ind == 0 {
iter.assertInteger()
return 0 // single zero
}
if ind == invalidCharForNumber {
@ -127,12 +128,14 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind2 := intDigits[iter.buf[i]]
if ind2 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
i++
ind3 := intDigits[iter.buf[i]]
if ind3 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10 + uint32(ind2)
}
//iter.head = i + 1
@ -141,30 +144,35 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind4 := intDigits[iter.buf[i]]
if ind4 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100 + uint32(ind2)*10 + uint32(ind3)
}
i++
ind5 := intDigits[iter.buf[i]]
if ind5 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4)
}
i++
ind6 := intDigits[iter.buf[i]]
if ind6 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5)
}
i++
ind7 := intDigits[iter.buf[i]]
if ind7 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6)
}
i++
ind8 := intDigits[iter.buf[i]]
if ind8 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7)
}
i++
@ -172,6 +180,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8)
iter.head = i
if ind9 == invalidCharForNumber {
iter.assertInteger()
return value
}
}
@ -180,6 +189,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind = intDigits[iter.buf[i]]
if ind == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
if value > uint32SafeToMultiply10 {
@ -194,6 +204,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
value = (value << 3) + (value << 1) + uint32(ind)
}
if !iter.loadMore() {
iter.assertInteger()
return value
}
}
@ -226,6 +237,7 @@ func (iter *Iterator) ReadUint64() uint64 {
func (iter *Iterator) readUint64(c byte) (ret uint64) {
ind := intDigits[c]
if ind == 0 {
iter.assertInteger()
return 0 // single zero
}
if ind == invalidCharForNumber {
@ -233,11 +245,73 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
return
}
value := uint64(ind)
if iter.tail-iter.head > 10 {
i := iter.head
ind2 := intDigits[iter.buf[i]]
if ind2 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
i++
ind3 := intDigits[iter.buf[i]]
if ind3 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10 + uint64(ind2)
}
//iter.head = i + 1
//value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
i++
ind4 := intDigits[iter.buf[i]]
if ind4 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100 + uint64(ind2)*10 + uint64(ind3)
}
i++
ind5 := intDigits[iter.buf[i]]
if ind5 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000 + uint64(ind2)*100 + uint64(ind3)*10 + uint64(ind4)
}
i++
ind6 := intDigits[iter.buf[i]]
if ind6 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10000 + uint64(ind2)*1000 + uint64(ind3)*100 + uint64(ind4)*10 + uint64(ind5)
}
i++
ind7 := intDigits[iter.buf[i]]
if ind7 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100000 + uint64(ind2)*10000 + uint64(ind3)*1000 + uint64(ind4)*100 + uint64(ind5)*10 + uint64(ind6)
}
i++
ind8 := intDigits[iter.buf[i]]
if ind8 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000000 + uint64(ind2)*100000 + uint64(ind3)*10000 + uint64(ind4)*1000 + uint64(ind5)*100 + uint64(ind6)*10 + uint64(ind7)
}
i++
ind9 := intDigits[iter.buf[i]]
value = value*10000000 + uint64(ind2)*1000000 + uint64(ind3)*100000 + uint64(ind4)*10000 + uint64(ind5)*1000 + uint64(ind6)*100 + uint64(ind7)*10 + uint64(ind8)
iter.head = i
if ind9 == invalidCharForNumber {
iter.assertInteger()
return value
}
}
for {
for i := iter.head; i < iter.tail; i++ {
ind = intDigits[iter.buf[i]]
if ind == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
if value > uint64SafeToMultiple10 {
@ -252,7 +326,14 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
value = (value << 3) + (value << 1) + uint64(ind)
}
if !iter.loadMore() {
iter.assertInteger()
return value
}
}
}
func (iter *Iterator) assertInteger() {
if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
iter.ReportError("assertInteger", "can not decode float as int")
}
}
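The user-visible effect of assertInteger (fix #195), sketched with the package-level Unmarshal: decoding a float literal into an integer target now reports a clear error instead of silently truncating.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var n int
	err := jsoniter.Unmarshal([]byte(`3.14`), &n)
	// err is non-nil; its message includes "can not decode float as int" per the diff above.
	fmt.Println(err)
}
```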

View File

@ -19,7 +19,16 @@ func (iter *Iterator) ReadObject() (ret string) {
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
return string(iter.readObjectFieldAsBytes())
if iter.cfg.objectFieldMustBeSimpleString {
return string(iter.readObjectFieldAsBytes())
} else {
field := iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
return field
}
}
if c == '}' {
return "" // end of object
@ -27,7 +36,16 @@ func (iter *Iterator) ReadObject() (ret string) {
iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c}))
return
case ',':
return string(iter.readObjectFieldAsBytes())
if iter.cfg.objectFieldMustBeSimpleString {
return string(iter.readObjectFieldAsBytes())
} else {
field := iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
return field
}
case '}':
return "" // end of object
default:
@ -44,17 +62,34 @@ func (iter *Iterator) readFieldHash() int32 {
for i := iter.head; i < iter.tail; i++ {
// require ascii string and no escape
b := iter.buf[i]
if 'A' <= b && b <= 'Z' {
b += 'a' - 'A'
if !iter.cfg.objectFieldMustBeSimpleString && b == '\\' {
iter.head = i
for _, b := range iter.readStringSlowPath() {
if 'A' <= b && b <= 'Z' {
b += 'a' - 'A'
}
hash ^= int64(b)
hash *= 0x1000193
}
c = iter.nextToken()
if c != ':' {
iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
return 0
}
return int32(hash)
}
if b == '"' {
iter.head = i + 1
c = iter.nextToken()
if c != ':' {
iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
return 0
}
return int32(hash)
}
if 'A' <= b && b <= 'Z' {
b += 'a' - 'A'
}
hash ^= int64(b)
hash *= 0x1000193
}
@ -80,18 +115,38 @@ func calcHash(str string) int32 {
// ReadObjectCB read object with callback, the key is ascii only and field name not copied
func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
c := iter.nextToken()
var fieldBytes []byte
var field string
if c == '{' {
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field := iter.readObjectFieldAsBytes()
if !callback(iter, *(*string)(unsafe.Pointer(&field))) {
if iter.cfg.objectFieldMustBeSimpleString {
fieldBytes = iter.readObjectFieldAsBytes()
field = *(*string)(unsafe.Pointer(&fieldBytes))
} else {
field = iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
}
if !callback(iter, field) {
return false
}
c = iter.nextToken()
for c == ',' {
field = iter.readObjectFieldAsBytes()
if !callback(iter, *(*string)(unsafe.Pointer(&field))) {
if iter.cfg.objectFieldMustBeSimpleString {
fieldBytes = iter.readObjectFieldAsBytes()
field = *(*string)(unsafe.Pointer(&fieldBytes))
} else {
field = iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
}
if !callback(iter, field) {
return false
}
c = iter.nextToken()
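With the default config (ObjectFieldMustBeSimpleString left false), object field names may now contain escapes and are decoded through ReadString/readStringSlowPath (fix #191). A hedged sketch of the behavior; the struct is illustrative:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	type T struct {
		Name string `json:"name"`
	}
	var t T
	// The key "name" is written with an escaped 'm' (\u006d); the default config unescapes it.
	err := jsoniter.Unmarshal([]byte(`{"na\u006de":"x"}`), &t)
	fmt.Println(t.Name, err) // x <nil>
}
```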

View File

@ -1,4 +1,4 @@
//+build jsoniter-sloppy
//+build jsoniter_sloppy
package jsoniter

View File

@ -1,4 +1,4 @@
//+build !jsoniter-sloppy
//+build !jsoniter_sloppy
package jsoniter

View File

@ -28,6 +28,7 @@ func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
func (cfg *frozenConfig) ReturnStream(stream *Stream) {
stream.Error = nil
stream.Attachment = nil
select {
case cfg.streamPool <- stream:
return
@ -48,6 +49,7 @@ func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator {
func (cfg *frozenConfig) ReturnIterator(iter *Iterator) {
iter.Error = nil
iter.Attachment = nil
select {
case cfg.iteratorPool <- iter:
return

File diff suppressed because it is too large

View File

@ -7,23 +7,34 @@ import (
"unsafe"
)
func decoderOfArray(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
decoder, err := decoderOfType(cfg, typ.Elem())
if err != nil {
return nil, err
}
return &arrayDecoder{typ, typ.Elem(), decoder}, nil
func decoderOfArray(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
decoder := decoderOfType(cfg, prefix+"[array]->", typ.Elem())
return &arrayDecoder{typ, typ.Elem(), decoder}
}
func encoderOfArray(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
encoder, err := encoderOfType(cfg, typ.Elem())
if err != nil {
return nil, err
func encoderOfArray(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
if typ.Len() == 0 {
return emptyArrayEncoder{}
}
encoder := encoderOfType(cfg, prefix+"[array]->", typ.Elem())
if typ.Elem().Kind() == reflect.Map {
encoder = &optionalEncoder{encoder}
encoder = &OptionalEncoder{encoder}
}
return &arrayEncoder{typ, typ.Elem(), encoder}, nil
return &arrayEncoder{typ, typ.Elem(), encoder}
}
type emptyArrayEncoder struct{}
func (encoder emptyArrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteEmptyArray()
}
func (encoder emptyArrayEncoder) EncodeInterface(val interface{}, stream *Stream) {
stream.WriteEmptyArray()
}
func (encoder emptyArrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return true
}
type arrayEncoder struct {
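A usage-level sketch of the zero-length array change (PR #225), not part of the diff: a [0]T array now marshals as [] via emptyArrayEncoder, where the old encoder assumed at least one element.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var arr [0]int // length fixed at compile time, so encoderOfArray picks emptyArrayEncoder
	out, _ := jsoniter.Marshal(arr)
	fmt.Println(string(out)) // [] — previously this could be emitted as a zero value such as [0]
}
```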

View File

@ -161,22 +161,31 @@ func RegisterExtension(extension Extension) {
extensions = append(extensions, extension)
}
func getTypeDecoderFromExtension(typ reflect.Type) ValDecoder {
decoder := _getTypeDecoderFromExtension(typ)
func getTypeDecoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValDecoder {
decoder := _getTypeDecoderFromExtension(cfg, typ)
if decoder != nil {
for _, extension := range extensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
for _, extension := range cfg.extensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
}
return decoder
}
func _getTypeDecoderFromExtension(typ reflect.Type) ValDecoder {
func _getTypeDecoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValDecoder {
for _, extension := range extensions {
decoder := extension.CreateDecoder(typ)
if decoder != nil {
return decoder
}
}
for _, extension := range cfg.extensions {
decoder := extension.CreateDecoder(typ)
if decoder != nil {
return decoder
}
}
typeName := typ.String()
decoder := typeDecoders[typeName]
if decoder != nil {
@ -185,29 +194,38 @@ func _getTypeDecoderFromExtension(typ reflect.Type) ValDecoder {
if typ.Kind() == reflect.Ptr {
decoder := typeDecoders[typ.Elem().String()]
if decoder != nil {
return &optionalDecoder{typ.Elem(), decoder}
return &OptionalDecoder{typ.Elem(), decoder}
}
}
return nil
}
func getTypeEncoderFromExtension(typ reflect.Type) ValEncoder {
encoder := _getTypeEncoderFromExtension(typ)
func getTypeEncoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValEncoder {
encoder := _getTypeEncoderFromExtension(cfg, typ)
if encoder != nil {
for _, extension := range extensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
for _, extension := range cfg.extensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
}
return encoder
}
func _getTypeEncoderFromExtension(typ reflect.Type) ValEncoder {
func _getTypeEncoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValEncoder {
for _, extension := range extensions {
encoder := extension.CreateEncoder(typ)
if encoder != nil {
return encoder
}
}
for _, extension := range cfg.extensions {
encoder := extension.CreateEncoder(typ)
if encoder != nil {
return encoder
}
}
typeName := typ.String()
encoder := typeEncoders[typeName]
if encoder != nil {
@ -216,28 +234,28 @@ func _getTypeEncoderFromExtension(typ reflect.Type) ValEncoder {
if typ.Kind() == reflect.Ptr {
encoder := typeEncoders[typ.Elem().String()]
if encoder != nil {
return &optionalEncoder{encoder}
return &OptionalEncoder{encoder}
}
}
return nil
}
func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, error) {
func describeStruct(cfg *frozenConfig, prefix string, typ reflect.Type) *StructDescriptor {
embeddedBindings := []*Binding{}
bindings := []*Binding{}
for i := 0; i < typ.NumField(); i++ {
field := typ.Field(i)
tag := field.Tag.Get(cfg.getTagKey())
tag, hastag := field.Tag.Lookup(cfg.getTagKey())
if cfg.onlyTaggedField && !hastag {
continue
}
tagParts := strings.Split(tag, ",")
if tag == "-" {
continue
}
if field.Anonymous && (tag == "" || tagParts[0] == "") {
if field.Type.Kind() == reflect.Struct {
structDescriptor, err := describeStruct(cfg, field.Type)
if err != nil {
return nil, err
}
structDescriptor := describeStruct(cfg, prefix, field.Type)
for _, binding := range structDescriptor.Fields {
binding.levels = append([]int{i}, binding.levels...)
omitempty := binding.Encoder.(*structFieldEncoder).omitempty
@ -247,16 +265,13 @@ func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, err
}
continue
} else if field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct {
structDescriptor, err := describeStruct(cfg, field.Type.Elem())
if err != nil {
return nil, err
}
structDescriptor := describeStruct(cfg, prefix, field.Type.Elem())
for _, binding := range structDescriptor.Fields {
binding.levels = append([]int{i}, binding.levels...)
omitempty := binding.Encoder.(*structFieldEncoder).omitempty
binding.Encoder = &optionalEncoder{binding.Encoder}
binding.Encoder = &dereferenceEncoder{binding.Encoder}
binding.Encoder = &structFieldEncoder{&field, binding.Encoder, omitempty}
binding.Decoder = &deferenceDecoder{field.Type.Elem(), binding.Decoder}
binding.Decoder = &dereferenceDecoder{field.Type.Elem(), binding.Decoder}
binding.Decoder = &structFieldDecoder{&field, binding.Decoder}
embeddedBindings = append(embeddedBindings, binding)
}
@ -267,22 +282,15 @@ func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, err
fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name)
decoder := fieldDecoders[fieldCacheKey]
if decoder == nil {
var err error
decoder, err = decoderOfType(cfg, field.Type)
if len(fieldNames) > 0 && err != nil {
return nil, err
}
decoder = decoderOfType(cfg, prefix+typ.String()+"."+field.Name+"->", field.Type)
}
encoder := fieldEncoders[fieldCacheKey]
if encoder == nil {
var err error
encoder, err = encoderOfType(cfg, field.Type)
if len(fieldNames) > 0 && err != nil {
return nil, err
}
// map is stored as pointer in the struct
encoder = encoderOfType(cfg, prefix+typ.String()+"."+field.Name+"->", field.Type)
// map is stored as pointer in the struct,
// and treat nil or empty map as empty field
if encoder != nil && field.Type.Kind() == reflect.Map {
encoder = &optionalEncoder{encoder}
encoder = &optionalMapEncoder{encoder}
}
}
binding := &Binding{
@ -295,7 +303,7 @@ func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, err
binding.levels = []int{i}
bindings = append(bindings, binding)
}
return createStructDescriptor(cfg, typ, bindings, embeddedBindings), nil
return createStructDescriptor(cfg, typ, bindings, embeddedBindings)
}
func createStructDescriptor(cfg *frozenConfig, typ reflect.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor {
onePtrEmbedded := false
@ -323,6 +331,9 @@ func createStructDescriptor(cfg *frozenConfig, typ reflect.Type, bindings []*Bin
for _, extension := range extensions {
extension.UpdateStructDescriptor(structDescriptor)
}
for _, extension := range cfg.extensions {
extension.UpdateStructDescriptor(structDescriptor)
}
processTags(structDescriptor, cfg)
// merge normal & embedded bindings & sort with original order
allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...))

View File

@ -9,6 +9,22 @@ import (
"unsafe"
)
func decoderOfMap(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
decoder := decoderOfType(cfg, prefix+"[map]->", typ.Elem())
mapInterface := reflect.New(typ).Interface()
return &mapDecoder{typ, typ.Key(), typ.Elem(), decoder, extractInterface(mapInterface)}
}
func encoderOfMap(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
elemType := typ.Elem()
encoder := encoderOfType(cfg, prefix+"[map]->", elemType)
mapInterface := reflect.New(typ).Elem().Interface()
if cfg.sortMapKeys {
return &sortKeysMapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}
}
return &mapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}
}
type mapDecoder struct {
mapType reflect.Type
keyType reflect.Type
@ -32,7 +48,7 @@ func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
}
iter.ReadMapCB(func(iter *Iterator, keyStr string) bool {
elem := reflect.New(decoder.elemType)
decoder.elemDecoder.Decode(unsafe.Pointer(elem.Pointer()), iter)
decoder.elemDecoder.Decode(extractInterface(elem.Interface()).word, iter)
// to put into map, we have to use reflection
keyType := decoder.keyType
// TODO: remove this from loop

View File

@ -382,6 +382,11 @@ type nonEmptyInterfaceCodec struct {
}
func (codec *nonEmptyInterfaceCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.WhatIsNext() == NilValue {
iter.skipFourBytes('n', 'u', 'l', 'l')
*((*interface{})(ptr)) = nil
return
}
nonEmptyInterface := (*nonEmptyInterface)(ptr)
if nonEmptyInterface.itab == nil {
iter.ReportError("read non-empty interface", "do not know which concrete type to decode to")
@ -453,11 +458,21 @@ func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
}
func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteRaw(string(*((*json.Number)(ptr))))
number := *((*json.Number)(ptr))
if len(number) == 0 {
stream.WriteRaw("0")
} else {
stream.WriteRaw(string(number))
}
}
func (codec *jsonNumberCodec) EncodeInterface(val interface{}, stream *Stream) {
stream.WriteRaw(string(val.(json.Number)))
number := val.(json.Number)
if len(number) == 0 {
stream.WriteRaw("0")
} else {
stream.WriteRaw(string(number))
}
}
func (codec *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
@ -480,11 +495,21 @@ func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
}
func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteRaw(string(*((*Number)(ptr))))
number := *((*Number)(ptr))
if len(number) == 0 {
stream.WriteRaw("0")
} else {
stream.WriteRaw(string(number))
}
}
func (codec *jsoniterNumberCodec) EncodeInterface(val interface{}, stream *Stream) {
stream.WriteRaw(string(val.(Number)))
number := val.(Number)
if len(number) == 0 {
stream.WriteRaw("0")
} else {
stream.WriteRaw(string(number))
}
}
func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
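A sketch of the effect of the json.Number change above (fix #227): a zero-valued json.Number field now encodes as 0, keeping the output valid JSON. The struct name is illustrative:

```go
package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	type Payload struct {
		Num json.Number `json:"num"`
	}
	out, err := jsoniter.Marshal(Payload{}) // Num is left as the empty string ""
	fmt.Println(string(out), err)           // {"num":0} <nil>
}
```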

View File

@ -8,17 +8,14 @@ import (
"unsafe"
)
func encoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
func encoderOfStruct(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
type bindingTo struct {
binding *Binding
toName string
ignored bool
}
orderedBindings := []*bindingTo{}
structDescriptor, err := describeStruct(cfg, typ)
if err != nil {
return nil, err
}
structDescriptor := describeStruct(cfg, prefix, typ)
for _, binding := range structDescriptor.Fields {
for _, toName := range binding.ToNames {
new := &bindingTo{
@ -35,7 +32,7 @@ func encoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
}
}
if len(orderedBindings) == 0 {
return &emptyStructEncoder{}, nil
return &emptyStructEncoder{}
}
finalOrderedFields := []structFieldTo{}
for _, bindingTo := range orderedBindings {
@ -46,7 +43,8 @@ func encoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
})
}
}
return &structEncoder{structDescriptor.onePtrEmbedded, structDescriptor.onePtrOptimization, finalOrderedFields}, nil
return &structEncoder{typ, structDescriptor.onePtrEmbedded,
structDescriptor.onePtrOptimization, finalOrderedFields}
}
func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) {
@ -78,12 +76,9 @@ func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ig
}
}
func decoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
func decoderOfStruct(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
bindings := map[string]*Binding{}
structDescriptor, err := describeStruct(cfg, typ)
if err != nil {
return nil, err
}
structDescriptor := describeStruct(cfg, prefix, typ)
for _, binding := range structDescriptor.Fields {
for _, fromName := range binding.FromNames {
old := bindings[fromName]
@ -131,6 +126,7 @@ func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool {
}
type structEncoder struct {
typ reflect.Type
onePtrEmbedded bool
onePtrOptimization bool
fields []structFieldTo
@ -156,6 +152,9 @@ func (encoder *structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
isNotFirst = true
}
stream.WriteObjectEnd()
if stream.Error != nil && stream.Error != io.EOF {
stream.Error = fmt.Errorf("%v.%s", encoder.typ, stream.Error.Error())
}
}
func (encoder *structEncoder) EncodeInterface(val interface{}, stream *Stream) {

124
feature_reflect_optional.go Normal file

@ -0,0 +1,124 @@
package jsoniter
import (
"reflect"
"unsafe"
)
func decoderOfOptional(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
elemType := typ.Elem()
decoder := decoderOfType(cfg, prefix, elemType)
return &OptionalDecoder{elemType, decoder}
}
func encoderOfOptional(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
elemType := typ.Elem()
elemEncoder := encoderOfType(cfg, prefix, elemType)
encoder := &OptionalEncoder{elemEncoder}
if elemType.Kind() == reflect.Map {
encoder = &OptionalEncoder{encoder}
}
return encoder
}
type OptionalDecoder struct {
ValueType reflect.Type
ValueDecoder ValDecoder
}
func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*unsafe.Pointer)(ptr)) = nil
} else {
if *((*unsafe.Pointer)(ptr)) == nil {
//pointer to null, we have to allocate memory to hold the value
value := reflect.New(decoder.ValueType)
newPtr := extractInterface(value.Interface()).word
decoder.ValueDecoder.Decode(newPtr, iter)
*((*uintptr)(ptr)) = uintptr(newPtr)
} else {
//reuse existing instance
decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
}
}
}
type dereferenceDecoder struct {
// only to deference a pointer
valueType reflect.Type
valueDecoder ValDecoder
}
func (decoder *dereferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if *((*unsafe.Pointer)(ptr)) == nil {
//pointer to null, we have to allocate memory to hold the value
value := reflect.New(decoder.valueType)
newPtr := extractInterface(value.Interface()).word
decoder.valueDecoder.Decode(newPtr, iter)
*((*uintptr)(ptr)) = uintptr(newPtr)
} else {
//reuse existing instance
decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
}
}
type OptionalEncoder struct {
ValueEncoder ValEncoder
}
func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *((*unsafe.Pointer)(ptr)) == nil {
stream.WriteNil()
} else {
encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
}
}
func (encoder *OptionalEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return *((*unsafe.Pointer)(ptr)) == nil
}
type dereferenceEncoder struct {
ValueEncoder ValEncoder
}
func (encoder *dereferenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *((*unsafe.Pointer)(ptr)) == nil {
stream.WriteNil()
} else {
encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
}
}
func (encoder *dereferenceEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *dereferenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.ValueEncoder.IsEmpty(*((*unsafe.Pointer)(ptr)))
}
type optionalMapEncoder struct {
valueEncoder ValEncoder
}
func (encoder *optionalMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *((*unsafe.Pointer)(ptr)) == nil {
stream.WriteNil()
} else {
encoder.valueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
}
}
func (encoder *optionalMapEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *optionalMapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
p := *((*unsafe.Pointer)(ptr))
return p == nil || encoder.valueEncoder.IsEmpty(p)
}

View File

@ -7,23 +7,17 @@ import (
"unsafe"
)
func decoderOfSlice(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
decoder, err := decoderOfType(cfg, typ.Elem())
if err != nil {
return nil, err
}
return &sliceDecoder{typ, typ.Elem(), decoder}, nil
func decoderOfSlice(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
decoder := decoderOfType(cfg, prefix+"[slice]->", typ.Elem())
return &sliceDecoder{typ, typ.Elem(), decoder}
}
func encoderOfSlice(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
encoder, err := encoderOfType(cfg, typ.Elem())
if err != nil {
return nil, err
}
func encoderOfSlice(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
encoder := encoderOfType(cfg, prefix+"[slice]->", typ.Elem())
if typ.Elem().Kind() == reflect.Map {
encoder = &optionalEncoder{encoder}
encoder = &OptionalEncoder{encoder}
}
return &sliceEncoder{typ, typ.Elem(), encoder}, nil
return &sliceEncoder{typ, typ.Elem(), encoder}
}
type sliceEncoder struct {
@ -124,15 +118,14 @@ func growOne(slice *sliceHeader, sliceType reflect.Type, elementType reflect.Typ
}
}
}
newVal := reflect.MakeSlice(sliceType, newLen, newCap)
dst := unsafe.Pointer(newVal.Pointer())
newVal := reflect.MakeSlice(sliceType, newLen, newCap).Interface()
newValPtr := extractInterface(newVal).word
dst := (*sliceHeader)(newValPtr).Data
// copy old array into new array
originalBytesCount := uintptr(slice.Len) * elementType.Size()
srcPtr := (*[1 << 30]byte)(slice.Data)
dstPtr := (*[1 << 30]byte)(dst)
for i := uintptr(0); i < originalBytesCount; i++ {
dstPtr[i] = srcPtr[i]
}
originalBytesCount := slice.Len * int(elementType.Size())
srcSliceHeader := (unsafe.Pointer)(&sliceHeader{slice.Data, originalBytesCount, originalBytesCount})
dstSliceHeader := (unsafe.Pointer)(&sliceHeader{dst, originalBytesCount, originalBytesCount})
copy(*(*[]byte)(dstSliceHeader), *(*[]byte)(srcSliceHeader))
slice.Data = dst
slice.Len = newLen
slice.Cap = newCap
@ -142,8 +135,9 @@ func reuseSlice(slice *sliceHeader, sliceType reflect.Type, expectedCap int) {
if expectedCap <= slice.Cap {
return
}
newVal := reflect.MakeSlice(sliceType, 0, expectedCap)
dst := unsafe.Pointer(newVal.Pointer())
newVal := reflect.MakeSlice(sliceType, 0, expectedCap).Interface()
newValPtr := extractInterface(newVal).word
dst := (*sliceHeader)(newValPtr).Data
slice.Data = dst
slice.Cap = expectedCap
}

View File

@ -8,22 +8,22 @@ import (
"unsafe"
)
func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) (ValDecoder, error) {
func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) ValDecoder {
knownHash := map[int32]struct{}{
0: {},
}
switch len(fields) {
case 0:
return &skipObjectDecoder{typ}, nil
return &skipObjectDecoder{typ}
case 1:
for fieldName, fieldDecoder := range fields {
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
return &oneFieldStructDecoder{typ, fieldHash, fieldDecoder}, nil
return &oneFieldStructDecoder{typ, fieldHash, fieldDecoder}
}
case 2:
var fieldHash1 int32
@ -34,7 +34,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldHash1 == 0 {
@ -45,7 +45,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldDecoder2 = fieldDecoder
}
}
return &twoFieldsStructDecoder{typ, fieldHash1, fieldDecoder1, fieldHash2, fieldDecoder2}, nil
return &twoFieldsStructDecoder{typ, fieldHash1, fieldDecoder1, fieldHash2, fieldDecoder2}
case 3:
var fieldName1 int32
var fieldName2 int32
@ -57,7 +57,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -72,7 +72,9 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &threeFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3}
case 4:
var fieldName1 int32
var fieldName2 int32
@ -86,7 +88,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -104,8 +106,10 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &fourFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4}
case 5:
var fieldName1 int32
var fieldName2 int32
@ -121,7 +125,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -142,8 +146,11 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &fiveFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4,
fieldName5, fieldDecoder5}
case 6:
var fieldName1 int32
var fieldName2 int32
@ -161,7 +168,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -185,8 +192,12 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &sixFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4,
fieldName5, fieldDecoder5,
fieldName6, fieldDecoder6}
case 7:
var fieldName1 int32
var fieldName2 int32
@ -206,7 +217,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -233,9 +244,13 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &sevenFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4,
fieldName5, fieldDecoder5,
fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7}
case 8:
var fieldName1 int32
var fieldName2 int32
@ -257,7 +272,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -287,9 +302,14 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &eightFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4,
fieldName5, fieldDecoder5,
fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7,
fieldName8, fieldDecoder8}
case 9:
var fieldName1 int32
var fieldName2 int32
@ -313,7 +333,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -346,9 +366,15 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &nineFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4,
fieldName5, fieldDecoder5,
fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7,
fieldName8, fieldDecoder8,
fieldName9, fieldDecoder9}
case 10:
var fieldName1 int32
var fieldName2 int32
@ -374,7 +400,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -410,12 +436,18 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &tenFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9,
fieldName10, fieldDecoder10}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4,
fieldName5, fieldDecoder5,
fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7,
fieldName8, fieldDecoder8,
fieldName9, fieldDecoder9,
fieldName10, fieldDecoder10}
}
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
type generalStructDecoder struct {
@ -427,8 +459,18 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
if !iter.readObjectStart() {
return
}
fieldBytes := iter.readObjectFieldAsBytes()
field := *(*string)(unsafe.Pointer(&fieldBytes))
var fieldBytes []byte
var field string
if iter.cfg.objectFieldMustBeSimpleString {
fieldBytes = iter.readObjectFieldAsBytes()
field = *(*string)(unsafe.Pointer(&fieldBytes))
} else {
field = iter.ReadString()
c := iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
}
fieldDecoder := decoder.fields[strings.ToLower(field)]
if fieldDecoder == nil {
iter.Skip()
@ -436,8 +478,16 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
fieldDecoder.Decode(ptr, iter)
}
for iter.nextToken() == ',' {
fieldBytes = iter.readObjectFieldAsBytes()
field = *(*string)(unsafe.Pointer(&fieldBytes))
if iter.cfg.objectFieldMustBeSimpleString {
fieldBytes := iter.readObjectFieldAsBytes()
field = *(*string)(unsafe.Pointer(&fieldBytes))
} else {
field = iter.ReadString()
c := iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
}
fieldDecoder = decoder.fields[strings.ToLower(field)]
if fieldDecoder == nil {
iter.Skip()
@ -446,7 +496,7 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -484,7 +534,7 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -514,7 +564,7 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -548,7 +598,7 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -586,7 +636,7 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -628,7 +678,7 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -674,7 +724,7 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -724,7 +774,7 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -778,7 +828,7 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -836,7 +886,7 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -898,7 +948,7 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}

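The generalStructDecoder changes above branch on iter.cfg.objectFieldMustBeSimpleString: when the flag is set, the field name is taken as the raw bytes between the quotes; otherwise it goes through ReadString plus an explicit ':' check, so escape sequences in the key are resolved before the field lookup. A minimal sketch of how this surfaces through the public API, assuming the exported Config field is ObjectFieldMustBeSimpleString and that a raw, still-escaped key that matches no field is simply skipped:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type T struct {
	Field1 string
}

func main() {
	input := []byte(`{"\u0046ield1":"hello"}`) // "Field1" with the leading 'F' escaped

	var a T
	// Zero-value config: the decoder takes the ReadString path, unescapes the key,
	// and matches Field1.
	_ = jsoniter.Config{}.Froze().Unmarshal(input, &a)
	fmt.Println(a.Field1) // hello

	var b T
	// With the flag set, the raw key "\u0046ield1" matches no field,
	// so the value is skipped and Field1 stays empty.
	_ = jsoniter.Config{ObjectFieldMustBeSimpleString: true}.Froze().Unmarshal(input, &b)
	fmt.Println(b.Field1) // "" (empty)
}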

@ -4,15 +4,16 @@ import (
"io"
)
// Stream is a io.Writer like object, with JSON specific write functions.
// stream is a io.Writer like object, with JSON specific write functions.
// Error is not returned as return value, but stored as Error member on this stream instance.
type Stream struct {
cfg *frozenConfig
out io.Writer
buf []byte
n int
Error error
indention int
cfg *frozenConfig
out io.Writer
buf []byte
n int
Error error
indention int
Attachment interface{} // open for customized encoder
}
// NewStream create new stream instance.

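Besides reflowing the struct fields, the Stream hunk above adds the exported Attachment field, noted as "open for customized encoder". The library does not interpret it; it simply travels with the stream so customized encoders, which receive the *Stream, can pick it up. A hedged usage sketch (the "request-42" value and the idea of a custom encoder reading it are illustrative assumptions; BorrowStream, ReturnStream, WriteVal and Buffer are existing APIs):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	stream := jsoniter.ConfigDefault.BorrowStream(nil)
	defer jsoniter.ConfigDefault.ReturnStream(stream)

	// Not read by jsoniter itself; a customized encoder could inspect it
	// while WriteVal walks the value.
	stream.Attachment = "request-42"

	stream.WriteVal(map[string]int{"a": 1})
	fmt.Println(string(stream.Buffer())) // {"a":1}
}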

@ -22,7 +22,7 @@ func Test_new_encoder(t *testing.T) {
encoder2 := NewEncoder(buf2)
encoder2.SetEscapeHTML(false)
encoder2.Encode([]int{1})
should.Equal("[1]", buf2.String())
should.Equal("[1]\n", buf2.String())
}
func Test_string_encode_with_std_without_html_escape(t *testing.T) {


@ -51,6 +51,7 @@ func Test_read_string_as_any(t *testing.T) {
func Test_wrap_string(t *testing.T) {
should := require.New(t)
any := WrapString("123")
should.Equal(123, any.ToInt())
any := Get([]byte("-32000")).MustBeValid()
should.Equal(-32000, any.ToInt())
should.NoError(any.LastError())
}


@ -109,12 +109,13 @@ func (extension *testExtension) UpdateStructDescriptor(structDescriptor *StructD
func Test_customize_field_by_extension(t *testing.T) {
should := require.New(t)
RegisterExtension(&testExtension{})
cfg := Config{}.Froze()
cfg.RegisterExtension(&testExtension{})
obj := TestObject1{}
err := UnmarshalFromString(`{"field-1": 100}`, &obj)
err := cfg.UnmarshalFromString(`{"field-1": 100}`, &obj)
should.Nil(err)
should.Equal("100", obj.Field1)
str, err := MarshalToString(obj)
str, err := cfg.MarshalToString(obj)
should.Nil(err)
should.Equal(`{"field-1":100}`, str)
}


@ -15,6 +15,15 @@ func Test_encode_fixed_array(t *testing.T) {
should.Equal("[0.1,1]", output)
}
func Test_encode_fixed_array_empty(t *testing.T) {
should := require.New(t)
type FixedArray [0]float64
fixed := FixedArray{}
output, err := MarshalToString(fixed)
should.Nil(err)
should.Equal("[]", output)
}
func Test_encode_fixed_array_of_map(t *testing.T) {
should := require.New(t)
type FixedArray [2]map[string]string


@ -192,6 +192,13 @@ func Test_lossy_float_marshal(t *testing.T) {
should.Equal("0.123457", output)
}
func Test_read_number(t *testing.T) {
should := require.New(t)
iter := ParseString(ConfigDefault, `92233720368547758079223372036854775807`)
val := iter.ReadNumber()
should.Equal(`92233720368547758079223372036854775807`, string(val))
}
func Benchmark_jsoniter_float(b *testing.B) {
b.ReportAllocs()
input := []byte(`1.1123,`)

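Test_read_number above drives the new Iterator.ReadNumber, which returns the numeric literal as a json.Number, so the oversized value in the test survives untouched instead of being truncated through int64 or float64. A minimal usage sketch (the literal is illustrative):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `3.141592653589793238462643383279`)
	num := iter.ReadNumber() // json.Number keeps the exact text
	fmt.Println(num)

	// Convert only when a binary representation is actually needed.
	f, err := num.Float64()
	fmt.Println(f, err)
}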

@ -524,10 +524,22 @@ func Test_null_as_number(t *testing.T) {
err := json.Unmarshal([]byte(`null`), &v1)
should.Nil(err)
should.Equal("", string(v1))
output, err := json.Marshal(v1)
should.NoError(err)
should.Equal("0", string(output))
var v2 Number
err = Unmarshal([]byte(`null`), &v2)
should.Nil(err)
should.Equal("", string(v2))
output, err = Marshal(v2)
should.NoError(err)
should.Equal("0", string(output))
}
func Test_float_as_int(t *testing.T) {
should := require.New(t)
var i int
should.NotNil(Unmarshal([]byte(`1.1`), &i))
}
func Benchmark_jsoniter_encode_int(b *testing.B) {


@ -7,8 +7,24 @@ import (
"unsafe"
"github.com/stretchr/testify/require"
"reflect"
)
func Test_write_empty_interface_via_placeholder(t *testing.T) {
fmt.Println(^uint(0) >> 1)
should := require.New(t)
m := map[uint32]interface{}{1: "hello"}
inf := reflect.ValueOf(m).MapIndex(reflect.ValueOf(uint32(1))).Interface()
encoder := &placeholderEncoder{
cfg: ConfigFastest.(*frozenConfig),
cacheKey: reflect.TypeOf(m).Elem(),
}
stream := ConfigFastest.BorrowStream(nil)
encoderOfType(ConfigFastest.(*frozenConfig), "", reflect.TypeOf(m).Elem())
encoder.EncodeInterface(inf, stream)
should.Equal(`"hello"`, string(stream.Buffer()))
}
func Test_write_array_of_interface(t *testing.T) {
should := require.New(t)
array := []interface{}{"hello"}
@ -392,7 +408,7 @@ func Test_omitempty_nil_nonempty_interface(t *testing.T) {
obj.Field = MyString("hello")
err = UnmarshalFromString(`{"field":null}`, &obj)
should.NoError(err)
should.Equal(nil, obj.Field)
should.Nil(obj.Field)
}
func Test_marshal_nil_marshaler_interface(t *testing.T) {


@ -3,6 +3,7 @@ package jsoniter
import (
"bytes"
"encoding/json"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"io"
"testing"
@ -136,3 +137,64 @@ func Test_valid(t *testing.T) {
should.True(Valid([]byte(`{}`)))
should.False(Valid([]byte(`{`)))
}
func Test_nil_pointer(t *testing.T) {
should := require.New(t)
data := []byte(`{"A":0}`)
type T struct {
X int
}
var obj *T
err := Unmarshal(data, obj)
should.NotNil(err)
}
func Test_func_pointer_type(t *testing.T) {
type TestObject2 struct {
F func()
}
type TestObject1 struct {
Obj *TestObject2
}
t.Run("encode null is valid", func(t *testing.T) {
should := require.New(t)
output, err := json.Marshal(TestObject1{})
should.Nil(err)
should.Equal(`{"Obj":null}`, string(output))
output, err = Marshal(TestObject1{})
should.Nil(err)
should.Equal(`{"Obj":null}`, string(output))
})
t.Run("encode not null is invalid", func(t *testing.T) {
should := require.New(t)
_, err := json.Marshal(TestObject1{Obj: &TestObject2{}})
should.NotNil(err)
_, err = Marshal(TestObject1{Obj: &TestObject2{}})
should.NotNil(err)
})
t.Run("decode null is valid", func(t *testing.T) {
should := require.New(t)
var obj TestObject1
should.Nil(json.Unmarshal([]byte(`{"Obj":{"F": null}}`), &obj))
should.Nil(Unmarshal([]byte(`{"Obj":{"F": null}}`), &obj))
})
t.Run("decode not null is invalid", func(t *testing.T) {
should := require.New(t)
var obj TestObject1
should.NotNil(json.Unmarshal([]byte(`{"Obj":{"F": "hello"}}`), &obj))
should.NotNil(Unmarshal([]byte(`{"Obj":{"F": "hello"}}`), &obj))
})
}
func TestEOF(t *testing.T) {
var s string
err := ConfigCompatibleWithStandardLibrary.NewDecoder(&bytes.Buffer{}).Decode(&s)
assert.Equal(t, io.EOF, err)
}
func TestDecodeErrorType(t *testing.T) {
should := require.New(t)
var err error
should.Nil(Unmarshal([]byte("null"), &err))
should.NotNil(Unmarshal([]byte("123"), &err))
}


@ -158,6 +158,28 @@ func Test_ignore_field_on_not_valid_type(t *testing.T) {
should.Equal(`{"field-1":"hello world"}`, str)
}
func Test_nested_field_omit_empty(t *testing.T) {
should := require.New(t)
type S1 struct {
F1 string `json:",omitempty"`
}
type S2 struct {
*S1
F2 string `json:",omitempty"`
}
s1 := &S1{
//F1: "abc",
}
s2 := &S2{
S1: s1,
F2: "123",
}
str, err := MarshalToString(s2)
should.Nil(err)
should.Equal(`{"F2":"123"}`, str)
}
func Test_recursive_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
@ -328,3 +350,15 @@ func Test_decode_nested(t *testing.T) {
t.Fatal(slice[2])
}
}
func Test_decode_field_with_escape(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
}
var obj TestObject
should.Nil(ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(`{"Field\"1":"hello"}`), &obj))
should.Equal("", obj.Field1)
should.Nil(ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(`{"\u0046ield1":"hello"}`), &obj))
should.Equal("hello", obj.Field1)
}


@ -104,6 +104,11 @@ func Test_skip_and_return_bytes_with_reader(t *testing.T) {
should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(skipped))
}
func Test_skip_empty(t *testing.T) {
should := require.New(t)
should.NotNil(Get([]byte("")).LastError())
}
type TestResp struct {
Code uint64
}


@ -1 +0,0 @@
../number/skip_test.go


@ -0,0 +1,34 @@
package test
import (
"encoding/json"
"errors"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"io"
"testing"
)
func Test_skip(t *testing.T) {
for _, input := range inputs {
t.Run(input, func(t *testing.T) {
should := require.New(t)
var dst typeForTest
stdErr := json.Unmarshal([]byte(input), &dst)
iter := jsoniter.ParseString(jsoniter.ConfigDefault, input)
iter.Skip()
iter.ReadNil() // trigger looking forward
err := iter.Error
if err == io.EOF {
err = nil
} else {
err = errors.New("remaining bytes")
}
if stdErr == nil {
should.Nil(err)
} else {
should.NotNil(err)
}
})
}
}


@ -1 +0,0 @@
../number/skip_test.go


@ -0,0 +1,34 @@
package test
import (
"encoding/json"
"errors"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"io"
"testing"
)
func Test_skip(t *testing.T) {
for _, input := range inputs {
t.Run(input, func(t *testing.T) {
should := require.New(t)
var dst typeForTest
stdErr := json.Unmarshal([]byte(input), &dst)
iter := jsoniter.ParseString(jsoniter.ConfigDefault, input)
iter.Skip()
iter.ReadNil() // trigger looking forward
err := iter.Error
if err == io.EOF {
err = nil
} else {
err = errors.New("remaining bytes")
}
if stdErr == nil {
should.Nil(err)
} else {
should.NotNil(err)
}
})
}
}


@ -1 +0,0 @@
../number/skip_test.go


@ -0,0 +1,34 @@
package test
import (
"encoding/json"
"errors"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"io"
"testing"
)
func Test_skip(t *testing.T) {
for _, input := range inputs {
t.Run(input, func(t *testing.T) {
should := require.New(t)
var dst typeForTest
stdErr := json.Unmarshal([]byte(input), &dst)
iter := jsoniter.ParseString(jsoniter.ConfigDefault, input)
iter.Skip()
iter.ReadNil() // trigger looking forward
err := iter.Error
if err == io.EOF {
err = nil
} else {
err = errors.New("remaining bytes")
}
if stdErr == nil {
should.Nil(err)
} else {
should.NotNil(err)
}
})
}
}