mirror of https://github.com/json-iterator/go.git synced 2025-06-15 22:50:24 +02:00

66 Commits
1.0.1 ... 1.0.5

Author SHA1 Message Date
28452fcdec cow cache is not same, as map read will modify the underlying map. use sync.Map for 1.9 and above, and mutex if sync.Map not available 2018-01-28 17:00:11 +08:00
ea8c33040f fix #228 2018-01-27 16:25:48 +08:00
358cfc3929 Merge branch 'master' of https://github.com/json-iterator/go 2018-01-25 14:48:02 +08:00
c39a632e65 fix #227, fix empty json.Number 2018-01-25 14:47:50 +08:00
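The empty json.Number fix (#227) shows up later in this diff in jsonNumberCodec: an empty Number is written as a literal 0 instead of an empty (invalid) literal. A small illustration of the intended behavior; the expected output is noted as a comment under that assumption:

```go
package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	v := struct {
		N json.Number `json:"n"`
	}{} // N is left as the empty string
	out, err := jsoniter.Marshal(v)
	// Per the jsonNumberCodec change further below, an empty Number should
	// be emitted as 0 rather than as an empty literal.
	fmt.Println(string(out), err) // expected: {"n":0} <nil>
}
```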
e31252f2e2 Merge pull request #225 from mgood/empty-array-fix
Fix encoding 0-length arrays
2018-01-23 23:31:07 +08:00
807e4a8b20 Optimize 0-length array case
Instead of checking the array length in encode, this can be checked up
front in `encoderOfArray` since the array type has a fixed length
determined at compile time. So return an `emptyArrayEncoder` that simply
writes an empty array to the stream.
2018-01-22 14:03:50 -08:00
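A minimal, self-contained sketch of the idea in this commit message; the type and function names are illustrative stand-ins, not the library's internals (the real change appears in the array encoder diff further below):

```go
package main

import (
	"fmt"
	"reflect"
)

// valEncoder is a simplified stand-in for jsoniter's ValEncoder.
type valEncoder interface {
	encode(v reflect.Value) string
}

// emptyArrayEncoder always writes "[]"; no per-call length check is needed
// because a Go array's length is part of its type.
type emptyArrayEncoder struct{}

func (emptyArrayEncoder) encode(reflect.Value) string { return "[]" }

// encoderOfArray decides the empty-array fast path once, when the encoder
// is constructed, as the commit message describes.
func encoderOfArray(typ reflect.Type) valEncoder {
	if typ.Len() == 0 {
		return emptyArrayEncoder{}
	}
	return nil // the real library builds an element-by-element encoder here
}

func main() {
	enc := encoderOfArray(reflect.TypeOf([0]int{}))
	fmt.Println(enc.encode(reflect.ValueOf([0]int{}))) // []
}
```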
e78b7e89b6 Merge branch 'master' of https://github.com/json-iterator/go 2018-01-21 20:59:32 +08:00
945d1aaa19 fix #140 uintptr will not lock the address from gc 2018-01-21 20:59:18 +08:00
ba3857729b Fix encoding 0-length arrays
The array encoder assumed that arrays had at least one value, so it
would serialize them with a zero-value for the array, such as `[0]`.

This adds a test to reproduce the issue, and updates the encoder to
write an empty array if the length is 0.
2018-01-16 11:02:03 -08:00
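For reference, the standard library behavior this fix targets; before the fix, jsoniter emitted `[0]` for the same input:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	var a [0]int
	out, _ := json.Marshal(a)
	fmt.Println(string(out)) // [] (a zero-length array is an empty JSON array)
}
```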
c3ed5e85e0 Merge pull request #222 from neverlee/mydev
Add an OnlyTaggedField option
2018-01-09 18:30:25 +08:00
c27f6f9350 config: add OnlyTaggedField config, only process tagged fields in struct 2018-01-09 17:29:47 +08:00
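An illustration of what the new option is meant to do, assuming the semantics described in the commit message (only fields carrying a json tag are serialized):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type payload struct {
	Tagged   string `json:"tagged"`
	Untagged string // no tag, so it is skipped when OnlyTaggedField is set
}

func main() {
	api := jsoniter.Config{OnlyTaggedField: true}.Froze()
	out, _ := api.Marshal(payload{Tagged: "a", Untagged: "b"})
	fmt.Println(string(out)) // expected: {"tagged":"a"}
}
```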
0ab880662f fix #219 should check real value for empty instead of just the pointer for nested field 2018-01-07 13:57:46 +08:00
6dad2de6cc fix build 2018-01-04 17:18:16 +08:00
11c1cce0d8 fix #217 when input is null, non-decodable type should not be considered as error, to be compatible with stdlib 2018-01-04 16:19:26 +08:00
96fcb84835 fix #215 lazy load more 2017-12-23 10:52:17 +08:00
e7a8aea845 Merge branch 'master' of https://github.com/json-iterator/go 2017-12-21 22:18:40 +08:00
60a9df5ebc fix #214 report EOF like stdlib 2017-12-21 22:18:28 +08:00
7b060ec866 Merge pull request #210 from coocood/master
add ReadNumber for Iterator.
2017-12-18 08:22:47 +09:00
25f147f530 add ReadNumber for Iterator. 2017-12-17 16:44:04 +08:00
a9b9c73b4d fix #207 delay unsupported type error reporting 2017-12-15 10:13:11 +08:00
e0df39fda2 fix #206, do not allow nil pointer as unmarshal input 2017-12-14 17:18:05 +08:00
13f86432b8 do not use defer() in read int 2017-12-12 18:52:41 +08:00
d2a7335211 fix #202 #203 #204 map encoder not properly initialized 2017-12-08 21:18:59 +08:00
b2a706d14b reverse last commit, need a better fix 2017-12-08 06:15:49 +08:00
23078876c5 fix #203 consider MarshalJSON as non empty 2017-12-07 23:20:43 +08:00
051434fab7 fix #198, use dep for vendoring 2017-11-30 10:42:24 +08:00
be6688fc1a fix #200, do not use symbolic link in the code 2017-11-30 10:34:05 +08:00
ff2b70c1db support config level extension 2017-11-23 00:09:35 +08:00
f7279a603e fix out of range 2017-11-15 23:34:21 +08:00
9f088cbcc4 fix #195 when decode float as int, report it clearly 2017-11-15 23:25:12 +08:00
3c0e5762c4 fix #196 do not hard code 1 << 49 2017-11-15 23:15:31 +08:00
d394a135a1 #197 fix place holder encoder to use EncodeInterface, WriteToStream is unsafe when the real encoder is unknown 2017-11-15 22:56:23 +08:00
9fddff05f0 try to fix #194 with larger array 2017-11-11 08:31:44 +08:00
b1b003864e expose OptionalEncoder&OptionalDecoder; add attachment to Stream&Iterator for customized decoder/encoder 2017-11-08 11:41:45 +08:00
aed5a81f09 fix #190 handle empty input 2017-10-31 22:47:02 +08:00
f1258b01aa fix #191 do not always assume the object field is simple string 2017-10-31 22:38:41 +08:00
fbd210edfc Merge pull request #189 from ggaaooppeenngg/compatible-with-map
Fix standard compatibility
2017-10-26 18:39:38 -05:00
640251ab91 Fix standard compatibility
Non-nil but empty map with omitempty should be ignored.

Signed-off-by: Peng Gao <peng.gao.dut@gmail.com>
2017-10-27 01:43:41 +08:00
06b2a7cf1d Merge pull request #188 from ggaaooppeenngg/compatible
Fix standard compatibility
2017-10-26 06:41:01 -05:00
5fffb9b8f7 Fix standard compatibility
Encode has trailing newline at the end.

Signed-off-by: Peng Gao <peng.gao.dut@gmail.com>
2017-10-26 15:15:36 +08:00
7e3b776024 change jsoniter-sloppy to jsoniter_sloppy 2017-10-23 15:03:44 +08:00
6240e1e798 #185 add jsoniter.Valid 2017-10-10 08:57:02 +08:00
0149a5cf4a fix #183 error message not only show expectation, but also the actual value 2017-10-09 08:24:51 +08:00
5068c8baaf #183 limit error message size 2017-10-09 08:16:52 +08:00
16f78601b5 fix #184, support null as number 2017-10-07 09:29:32 +08:00
8f50a91be2 fix #181, support string as json.Number and jsoniter.Number 2017-10-06 18:08:14 +08:00
73c7bc881e fix #180, add missing methods to jsoniter.Number 2017-10-06 17:56:36 +08:00
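The methods added to jsoniter.Number (shown further below in this diff) mirror encoding/json's Number, so usage looks the same:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	n := jsoniter.Number("42")
	i, _ := n.Int64()   // 42
	f, _ := n.Float64() // 42
	fmt.Println(n.String(), i, f)
}
```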
4de15a3a87 Merge pull request #182 from MOZGIII/patch-1
Used writeTwoBytes in Stream.WriteEmptyArray
2017-10-04 02:25:32 -05:00
14b28b2226 Used writeTwoBytes in Stream.WriteEmptyArray 2017-10-03 22:14:36 +03:00
abe3c4016b fix #179 2017-09-26 15:35:55 +08:00
dbb1ef3f63 #177 flush buffer should check available again 2017-09-21 21:04:45 +08:00
46b20bbbec #178 SkipAndReturnBytes should return copy of memory 2017-09-21 20:18:45 +08:00
fdfe0b9a69 Merge branch 'olegshaldybin-skip-unexported-fields' 2017-09-19 10:06:59 +08:00
faa3dcf46a do not report error when field is unexported 2017-09-19 10:06:34 +08:00
1f58120d43 Always skip unexported fields when encoding
Skip creating encoders for unexported fields. They are not participating
in JSON marshaling anyway. This allows using unexported fields of
non-marshalable types in structs.

As a side-effect of this change it's no longer possible to marshal
unexported JSON fields by adding a custom type extension. It seems this
is desired behavior since it matches standard library and jsoniter
already disallows `json:"-"` fields from participating in custom
extensions.

Fixes #174.
2017-09-18 11:02:15 -07:00
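With encoders no longer built for unexported fields, a struct holding an unexported field of a non-marshalable type should marshal cleanly. A sketch of the scenario the commit describes; the expected output is noted as a comment:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type job struct {
	Name string `json:"name"`
	// Unexported and non-marshalable; it never appears in the JSON output,
	// so no encoder has to be created for it.
	done chan struct{}
}

func main() {
	out, err := jsoniter.Marshal(job{Name: "build", done: make(chan struct{})})
	fmt.Println(string(out), err) // expected: {"name":"build"} <nil>
}
```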
6ed27152e0 Update README.md 2017-09-17 16:07:42 +08:00
3c298d8a76 Merge pull request #172 from olegshaldybin/more-stdlib-compat
Improve stdlib compatibility
2017-09-17 03:05:36 -05:00
9f6e5962a9 Improve stdlib compatibility
1. Null values for primitive types no longer clear the original value in
the destination object.

2. Dereference multiple levels of pointers in the destination interface{}
type before unmarshaling into it. This is needed to match stdlib
behavior when working with nested interface{} fields. If the destination
object is a pointer to interface{} then the incoming nil value should
nil out the destination object but keep the reference to that nil value
on its parent object. However if the destination object is an
interface{} value it should set the reference to nil but keep the
original object intact.

3. Correctly handle typed nil decode destinations.
2017-09-16 16:57:51 -07:00
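Point 1 matches the standard library, where a JSON null leaves a non-pointer destination untouched; jsoniter previously zeroed the value. For reference:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	v := struct{ N int }{N: 42}
	// encoding/json: unmarshaling null into a non-pointer, non-interface
	// value has no effect and produces no error.
	_ = json.Unmarshal([]byte(`{"N": null}`), &v)
	fmt.Println(v.N) // 42
}
```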
c463aa12c4 Merge pull request #173 from toffaletti/more-nil-interface-fixes
More nil interface fixes
2017-09-16 18:36:43 -05:00
b5d2607a6d replace should.Equal(nil, err) with should.NoError(err) 2017-09-16 16:30:04 -07:00
48cc4d965a improve test 2017-09-16 16:27:32 -07:00
c59c42fda0 fix decoding of nil non-empty interface 2017-09-16 16:24:55 -07:00
8324374402 add tests for decoding nil interfaces 2017-09-16 16:24:27 -07:00
2017f3866b fix encoding of nil marshaler interface 2017-09-16 16:08:32 -07:00
ddc5af4512 fix encoding of nil non-empty interface 2017-09-16 16:04:36 -07:00
2f7e5c8dd7 add failing tests for nil non-empty interfaces 2017-09-16 16:00:48 -07:00
52 changed files with 2052 additions and 898 deletions

.gitignore (vendored): 5 lines changed

@ -1,3 +1,4 @@
.idea
/vendor
/bug_test.go
/coverage.txt
/profile.out
/.idea

Gopkg.lock (generated, new file): 33 lines changed

@ -0,0 +1,33 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
name = "github.com/davecgh/go-spew"
packages = ["spew"]
revision = "346938d642f2ec3594ed81d874461961cd0faa76"
version = "v1.1.0"
[[projects]]
branch = "master"
name = "github.com/google/gofuzz"
packages = ["."]
revision = "24818f796faf91cd76ec7bddd72458fbced7a6c1"
[[projects]]
name = "github.com/pmezard/go-difflib"
packages = ["difflib"]
revision = "792786c7400a136282c1664665ae0a8db921c6c2"
version = "v1.0.0"
[[projects]]
name = "github.com/stretchr/testify"
packages = ["assert","require"]
revision = "69483b4bd14f5845b5a1e55bca19e954e827f1d0"
version = "v1.1.4"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "f8b7cf3941d3792cbbd570bb53c093adaf774334d1162c651565c97a58dc9d09"
solver-name = "gps-cdcl"
solver-version = 1

Gopkg.toml (new file): 33 lines changed

@ -0,0 +1,33 @@
# Gopkg.toml example
#
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
# name = "github.com/user/project"
# version = "1.0.0"
#
# [[constraint]]
# name = "github.com/user/project2"
# branch = "dev"
# source = "github.com/myfork/project2"
#
# [[override]]
# name = "github.com/x/y"
# version = "2.4.0"
[[constraint]]
name = "github.com/davecgh/go-spew"
version = "1.1.0"
[[constraint]]
branch = "master"
name = "github.com/google/gofuzz"
[[constraint]]
name = "github.com/stretchr/testify"
version = "1.1.4"

View File

@ -44,7 +44,9 @@ with
```go
import "github.com/json-iterator/go"
jsoniter.Marshal(&data)
var json = jsoniter.ConfigCompatibleWithStandardLibrary
json.Marshal(&data)
```
Replace
@ -58,7 +60,9 @@ with
```go
import "github.com/json-iterator/go"
jsoniter.Unmarshal(input, &data)
var json = jsoniter.ConfigCompatibleWithStandardLibrary
json.Unmarshal(input, &data)
```
[More documentation](http://jsoniter.com/migrate-from-go-std.html)
@ -76,5 +80,7 @@ Contributors
* [thockin](https://github.com/thockin)
* [mattn](https://github.com/mattn)
* [cch123](https://github.com/cch123)
* [Oleg Shaldybin](https://github.com/olegshaldybin)
* [Jason Toffaletti](https://github.com/toffaletti)
Report issue or pull request, or email taowen@gmail.com, or [![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby)

build.sh (new executable file): 12 lines changed

@ -0,0 +1,12 @@
#!/bin/bash
set -e
set -x
if [ ! -d /tmp/build-golang/src/github.com/json-iterator ]; then
mkdir -p /tmp/build-golang/src/github.com/json-iterator
ln -s $PWD /tmp/build-golang/src/github.com/json-iterator/go
fi
export GOPATH=/tmp/build-golang
go get -u github.com/golang/dep/cmd/dep
cd /tmp/build-golang/src/github.com/json-iterator/go
exec $GOPATH/bin/dep ensure -update

compatible_test.go (new file): 40 lines changed

@ -0,0 +1,40 @@
package jsoniter
import (
"bytes"
"encoding/json"
"testing"
"github.com/stretchr/testify/require"
)
// Standard Encoder has trailing newline.
func TestEncoderHasTrailingNewline(t *testing.T) {
should := require.New(t)
var buf, stdbuf bytes.Buffer
enc := ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
enc.Encode(1)
stdenc := json.NewEncoder(&stdbuf)
stdenc.Encode(1)
should.Equal(stdbuf.Bytes(), buf.Bytes())
}
// Non-nil but empty map should be ignored.
func TestOmitempty(t *testing.T) {
o := struct {
A string `json:"a,omitempty"`
B string `json:"b,omitempty"`
Annotations map[string]string `json:"annotations,omitempty"`
}{
A: "a",
B: "b",
Annotations: map[string]string{},
}
should := require.New(t)
var buf, stdbuf bytes.Buffer
enc := ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
enc.Encode(o)
stdenc := json.NewEncoder(&stdbuf)
stdenc.Encode(o)
should.Equal(string(stdbuf.Bytes()), string(buf.Bytes()))
}

View File

@ -71,6 +71,11 @@ type Decoder struct {
// Decode decode JSON into interface{}
func (adapter *Decoder) Decode(obj interface{}) error {
if adapter.iter.head == adapter.iter.tail && adapter.iter.reader != nil {
if !adapter.iter.loadMore() {
return io.EOF
}
}
adapter.iter.ReadVal(obj)
err := adapter.iter.Error
if err == io.EOF {
@ -110,6 +115,7 @@ type Encoder struct {
// Encode encode interface{} as JSON to io.Writer
func (adapter *Encoder) Encode(val interface{}) error {
adapter.stream.WriteVal(val)
adapter.stream.WriteRaw("\n")
adapter.stream.Flush()
return adapter.stream.Error
}
@ -125,3 +131,8 @@ func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
config.EscapeHTML = escapeHTML
adapter.stream.cfg = config.Froze().(*frozenConfig)
}
// Valid reports whether data is a valid JSON encoding.
func Valid(data []byte) bool {
return ConfigDefault.Valid(data)
}
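Usage of the new package-level Valid helper added above, as a quick sketch:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	fmt.Println(jsoniter.Valid([]byte(`{"a": 1}`))) // true
	fmt.Println(jsoniter.Valid([]byte(`{"a":`)))    // false
}
```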

View File

@ -1,6 +1,7 @@
package jsoniter
import (
"errors"
"fmt"
"io"
"reflect"
@ -157,6 +158,8 @@ func (iter *Iterator) readAny() Any {
return iter.readArrayAny()
case '-':
return iter.readNumberAny(false)
case 0:
return &invalidAny{baseAny{}, errors.New("input is empty")}
default:
return iter.readNumberAny(true)
}

View File

@ -1,6 +1,9 @@
package jsoniter
import "unsafe"
import (
"unsafe"
"io"
)
type numberLazyAny struct {
baseAny
@ -29,7 +32,9 @@ func (any *numberLazyAny) ToInt() int {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
@ -37,7 +42,9 @@ func (any *numberLazyAny) ToInt32() int32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt32()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
@ -45,7 +52,9 @@ func (any *numberLazyAny) ToInt64() int64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt64()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
@ -53,7 +62,9 @@ func (any *numberLazyAny) ToUint() uint {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
@ -61,7 +72,9 @@ func (any *numberLazyAny) ToUint32() uint32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint32()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
@ -69,7 +82,9 @@ func (any *numberLazyAny) ToUint64() uint64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint64()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
@ -77,7 +92,9 @@ func (any *numberLazyAny) ToFloat32() float32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadFloat32()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
@ -85,7 +102,9 @@ func (any *numberLazyAny) ToFloat64() float64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadFloat64()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}

View File

@ -5,7 +5,6 @@ import (
"errors"
"io"
"reflect"
"sync/atomic"
"unsafe"
)
@ -18,18 +17,9 @@ type Config struct {
SortMapKeys bool
UseNumber bool
TagKey string
OnlyTaggedField bool
ValidateJsonRawMessage bool
}
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
decoderCache unsafe.Pointer
encoderCache unsafe.Pointer
extensions []Extension
streamPool chan *Stream
iteratorPool chan *Iterator
ObjectFieldMustBeSimpleString bool
}
// API the public interface of this package.
@ -45,6 +35,8 @@ type API interface {
Get(data []byte, path ...interface{}) Any
NewEncoder(writer io.Writer) *Encoder
NewDecoder(reader io.Reader) *Decoder
Valid(data []byte) bool
RegisterExtension(extension Extension)
}
// ConfigDefault the default API
@ -62,7 +54,8 @@ var ConfigCompatibleWithStandardLibrary = Config{
// ConfigFastest marshals float with only 6 digits precision
var ConfigFastest = Config{
EscapeHTML: false,
MarshalFloatWith6Digits: true,
MarshalFloatWith6Digits: true, // will lose precession
ObjectFieldMustBeSimpleString: true, // do not unescape object field
}.Froze()
// Froze forge API from config
@ -71,11 +64,12 @@ func (cfg Config) Froze() API {
frozenConfig := &frozenConfig{
sortMapKeys: cfg.SortMapKeys,
indentionStep: cfg.IndentionStep,
objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
onlyTaggedField: cfg.OnlyTaggedField,
streamPool: make(chan *Stream, 16),
iteratorPool: make(chan *Iterator, 16),
}
atomic.StorePointer(&frozenConfig.decoderCache, unsafe.Pointer(&map[string]ValDecoder{}))
atomic.StorePointer(&frozenConfig.encoderCache, unsafe.Pointer(&map[string]ValEncoder{}))
frozenConfig.initCache()
if cfg.MarshalFloatWith6Digits {
frozenConfig.marshalFloatWith6Digits()
}
@ -127,7 +121,7 @@ func (cfg *frozenConfig) getTagKey() string {
return tagKey
}
func (cfg *frozenConfig) registerExtension(extension Extension) {
func (cfg *frozenConfig) RegisterExtension(extension Extension) {
cfg.extensions = append(cfg.extensions, extension)
}
@ -189,46 +183,6 @@ func (cfg *frozenConfig) escapeHTML() {
cfg.addEncoderToCache(reflect.TypeOf((*string)(nil)).Elem(), &htmlEscapedStringEncoder{})
}
func (cfg *frozenConfig) addDecoderToCache(cacheKey reflect.Type, decoder ValDecoder) {
done := false
for !done {
ptr := atomic.LoadPointer(&cfg.decoderCache)
cache := *(*map[reflect.Type]ValDecoder)(ptr)
copied := map[reflect.Type]ValDecoder{}
for k, v := range cache {
copied[k] = v
}
copied[cacheKey] = decoder
done = atomic.CompareAndSwapPointer(&cfg.decoderCache, ptr, unsafe.Pointer(&copied))
}
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey reflect.Type, encoder ValEncoder) {
done := false
for !done {
ptr := atomic.LoadPointer(&cfg.encoderCache)
cache := *(*map[reflect.Type]ValEncoder)(ptr)
copied := map[reflect.Type]ValEncoder{}
for k, v := range cache {
copied[k] = v
}
copied[cacheKey] = encoder
done = atomic.CompareAndSwapPointer(&cfg.encoderCache, ptr, unsafe.Pointer(&copied))
}
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey reflect.Type) ValDecoder {
ptr := atomic.LoadPointer(&cfg.decoderCache)
cache := *(*map[reflect.Type]ValDecoder)(ptr)
return cache[cacheKey]
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey reflect.Type) ValEncoder {
ptr := atomic.LoadPointer(&cfg.encoderCache)
cache := *(*map[reflect.Type]ValEncoder)(ptr)
return cache[cacheKey]
}
func (cfg *frozenConfig) cleanDecoders() {
typeDecoders = map[string]ValDecoder{}
fieldDecoders = map[string]ValDecoder{}
@ -333,3 +287,10 @@ func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder {
iter := Parse(cfg, reader, 512)
return &Decoder{iter}
}
func (cfg *frozenConfig) Valid(data []byte) bool {
iter := cfg.BorrowIterator(data)
defer cfg.ReturnIterator(iter)
iter.Skip()
return iter.Error == nil
}
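With RegisterExtension now part of the API interface, an extension can be scoped to a single frozen config instead of the global registry. A sketch, assuming jsoniter.DummyExtension (a no-op Extension to embed) is available as in later releases:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// tracingExtension embeds the assumed no-op DummyExtension and overrides
// only the hook it cares about.
type tracingExtension struct {
	jsoniter.DummyExtension
}

func (*tracingExtension) UpdateStructDescriptor(sd *jsoniter.StructDescriptor) {
	fmt.Println("describing struct with", len(sd.Fields), "fields")
}

func main() {
	api := jsoniter.Config{}.Froze()
	// Registered on this api only; ConfigDefault is unaffected.
	api.RegisterExtension(&tracingExtension{})
	out, _ := api.Marshal(struct{ A, B int }{1, 2})
	fmt.Println(string(out))
}
```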

View File

@ -0,0 +1,51 @@
//+build go1.9
package jsoniter
import (
"reflect"
"sync"
)
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
objectFieldMustBeSimpleString bool
onlyTaggedField bool
decoderCache sync.Map
encoderCache sync.Map
extensions []Extension
streamPool chan *Stream
iteratorPool chan *Iterator
}
func (cfg *frozenConfig) initCache() {
cfg.decoderCache = sync.Map{}
cfg.encoderCache = sync.Map{}
}
func (cfg *frozenConfig) addDecoderToCache(cacheKey reflect.Type, decoder ValDecoder) {
cfg.decoderCache.Store(cacheKey, decoder)
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey reflect.Type, encoder ValEncoder) {
cfg.encoderCache.Store(cacheKey, encoder)
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey reflect.Type) ValDecoder {
decoder, found := cfg.decoderCache.Load(cacheKey)
if found {
return decoder.(ValDecoder)
}
return nil
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey reflect.Type) ValEncoder {
encoder, found := cfg.encoderCache.Load(cacheKey)
if found {
return encoder.(ValEncoder)
}
return nil
}

View File

@ -0,0 +1,54 @@
//+build !go1.9
package jsoniter
import (
"reflect"
"sync"
)
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
objectFieldMustBeSimpleString bool
onlyTaggedField bool
cacheLock *sync.RWMutex
decoderCache map[reflect.Type]ValDecoder
encoderCache map[reflect.Type]ValEncoder
extensions []Extension
streamPool chan *Stream
iteratorPool chan *Iterator
}
func (cfg *frozenConfig) initCache() {
cfg.cacheLock = &sync.RWMutex{}
cfg.decoderCache = map[reflect.Type]ValDecoder{}
cfg.encoderCache = map[reflect.Type]ValEncoder{}
}
func (cfg *frozenConfig) addDecoderToCache(cacheKey reflect.Type, decoder ValDecoder) {
cfg.cacheLock.Lock()
cfg.decoderCache[cacheKey] = decoder
cfg.cacheLock.Unlock()
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey reflect.Type, encoder ValEncoder) {
cfg.cacheLock.Lock()
cfg.encoderCache[cacheKey] = encoder
cfg.cacheLock.Unlock()
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey reflect.Type) ValDecoder {
cfg.cacheLock.RLock()
decoder, _ := cfg.decoderCache[cacheKey].(ValDecoder)
cfg.cacheLock.RUnlock()
return decoder
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey reflect.Type) ValEncoder {
cfg.cacheLock.RLock()
encoder, _ := cfg.encoderCache[cacheKey].(ValEncoder)
cfg.cacheLock.RUnlock()
return encoder
}

View File

@ -77,6 +77,7 @@ type Iterator struct {
captureStartedAt int
captured []byte
Error error
Attachment interface{} // open for customized decoder
}
// NewIterator creates an empty Iterator instance
@ -167,7 +168,7 @@ func (iter *Iterator) isObjectEnd() bool {
if c == '}' {
return true
}
iter.ReportError("isObjectEnd", "object ended prematurely")
iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c}))
return true
}
@ -200,8 +201,22 @@ func (iter *Iterator) ReportError(operation string, msg string) {
if peekStart < 0 {
peekStart = 0
}
iter.Error = fmt.Errorf("%s: %s, parsing %v ...%s... at %s", operation, msg, iter.head,
string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
peekEnd := iter.head + 10
if peekEnd > iter.tail {
peekEnd = iter.tail
}
parsing := string(iter.buf[peekStart:peekEnd])
contextStart := iter.head - 50
if contextStart < 0 {
contextStart = 0
}
contextEnd := iter.head + 50
if contextEnd > iter.tail {
contextEnd = iter.tail
}
context := string(iter.buf[contextStart:contextEnd])
iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...",
operation, msg, iter.head-peekStart, parsing, context)
}
// CurrentBuffer gets current buffer as string for debugging purpose
@ -210,7 +225,7 @@ func (iter *Iterator) CurrentBuffer() string {
if peekStart < 0 {
peekStart = 0
}
return fmt.Sprintf("parsing %v ...|%s|... at %s", iter.head,
return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head,
string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
}

View File

@ -19,7 +19,7 @@ func (iter *Iterator) ReadArray() (ret bool) {
case ',':
return true
default:
iter.ReportError("ReadArray", "expect [ or , or ] or n, but found: "+string([]byte{c}))
iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c}))
return
}
}
@ -42,7 +42,7 @@ func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
c = iter.nextToken()
}
if c != ']' {
iter.ReportError("ReadArrayCB", "expect ] in the end")
iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
return false
}
return true
@ -53,6 +53,6 @@ func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
iter.skipThreeBytes('u', 'l', 'l')
return true // null
}
iter.ReportError("ReadArrayCB", "expect [ or n, but found: "+string([]byte{c}))
iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c}))
return false
}

View File

@ -1,6 +1,7 @@
package jsoniter
import (
"encoding/json"
"io"
"math/big"
"strconv"
@ -339,3 +340,8 @@ func validateFloat(str string) string {
}
return ""
}
// ReadNumber read json.Number
func (iter *Iterator) ReadNumber() (ret json.Number) {
return json.Number(iter.readNumberAsString())
}
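Usage of the new Iterator.ReadNumber added above, as a quick sketch with the iterator API:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `12.5`)
	n := iter.ReadNumber() // a json.Number holding "12.5"
	fmt.Println(n)
}
```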

View File

@ -115,6 +115,7 @@ func (iter *Iterator) ReadUint32() (ret uint32) {
func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind := intDigits[c]
if ind == 0 {
iter.assertInteger()
return 0 // single zero
}
if ind == invalidCharForNumber {
@ -127,12 +128,14 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind2 := intDigits[iter.buf[i]]
if ind2 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
i++
ind3 := intDigits[iter.buf[i]]
if ind3 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10 + uint32(ind2)
}
//iter.head = i + 1
@ -141,30 +144,35 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind4 := intDigits[iter.buf[i]]
if ind4 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100 + uint32(ind2)*10 + uint32(ind3)
}
i++
ind5 := intDigits[iter.buf[i]]
if ind5 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4)
}
i++
ind6 := intDigits[iter.buf[i]]
if ind6 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5)
}
i++
ind7 := intDigits[iter.buf[i]]
if ind7 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6)
}
i++
ind8 := intDigits[iter.buf[i]]
if ind8 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7)
}
i++
@ -172,6 +180,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8)
iter.head = i
if ind9 == invalidCharForNumber {
iter.assertInteger()
return value
}
}
@ -180,6 +189,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind = intDigits[iter.buf[i]]
if ind == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
if value > uint32SafeToMultiply10 {
@ -194,6 +204,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
value = (value << 3) + (value << 1) + uint32(ind)
}
if !iter.loadMore() {
iter.assertInteger()
return value
}
}
@ -226,6 +237,7 @@ func (iter *Iterator) ReadUint64() uint64 {
func (iter *Iterator) readUint64(c byte) (ret uint64) {
ind := intDigits[c]
if ind == 0 {
iter.assertInteger()
return 0 // single zero
}
if ind == invalidCharForNumber {
@ -233,11 +245,73 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
return
}
value := uint64(ind)
if iter.tail-iter.head > 10 {
i := iter.head
ind2 := intDigits[iter.buf[i]]
if ind2 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
i++
ind3 := intDigits[iter.buf[i]]
if ind3 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10 + uint64(ind2)
}
//iter.head = i + 1
//value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
i++
ind4 := intDigits[iter.buf[i]]
if ind4 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100 + uint64(ind2)*10 + uint64(ind3)
}
i++
ind5 := intDigits[iter.buf[i]]
if ind5 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000 + uint64(ind2)*100 + uint64(ind3)*10 + uint64(ind4)
}
i++
ind6 := intDigits[iter.buf[i]]
if ind6 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10000 + uint64(ind2)*1000 + uint64(ind3)*100 + uint64(ind4)*10 + uint64(ind5)
}
i++
ind7 := intDigits[iter.buf[i]]
if ind7 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100000 + uint64(ind2)*10000 + uint64(ind3)*1000 + uint64(ind4)*100 + uint64(ind5)*10 + uint64(ind6)
}
i++
ind8 := intDigits[iter.buf[i]]
if ind8 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000000 + uint64(ind2)*100000 + uint64(ind3)*10000 + uint64(ind4)*1000 + uint64(ind5)*100 + uint64(ind6)*10 + uint64(ind7)
}
i++
ind9 := intDigits[iter.buf[i]]
value = value*10000000 + uint64(ind2)*1000000 + uint64(ind3)*100000 + uint64(ind4)*10000 + uint64(ind5)*1000 + uint64(ind6)*100 + uint64(ind7)*10 + uint64(ind8)
iter.head = i
if ind9 == invalidCharForNumber {
iter.assertInteger()
return value
}
}
for {
for i := iter.head; i < iter.tail; i++ {
ind = intDigits[iter.buf[i]]
if ind == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
if value > uint64SafeToMultiple10 {
@ -252,7 +326,14 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
value = (value << 3) + (value << 1) + uint64(ind)
}
if !iter.loadMore() {
iter.assertInteger()
return value
}
}
}
func (iter *Iterator) assertInteger() {
if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
iter.ReportError("assertInteger", "can not decode float as int")
}
}
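The assertInteger checks added throughout this file make decoding a float into an integer destination fail loudly (per #195) instead of silently truncating. For example:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var n int
	err := jsoniter.Unmarshal([]byte(`3.14`), &n)
	fmt.Println(err) // expected: an error mentioning "can not decode float as int"
}
```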

View File

@ -19,15 +19,33 @@ func (iter *Iterator) ReadObject() (ret string) {
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
if iter.cfg.objectFieldMustBeSimpleString {
return string(iter.readObjectFieldAsBytes())
} else {
field := iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
return field
}
}
if c == '}' {
return "" // end of object
}
iter.ReportError("ReadObject", `expect " after {`)
iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c}))
return
case ',':
if iter.cfg.objectFieldMustBeSimpleString {
return string(iter.readObjectFieldAsBytes())
} else {
field := iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
return field
}
case '}':
return "" // end of object
default:
@ -44,17 +62,34 @@ func (iter *Iterator) readFieldHash() int32 {
for i := iter.head; i < iter.tail; i++ {
// require ascii string and no escape
b := iter.buf[i]
if !iter.cfg.objectFieldMustBeSimpleString && b == '\\' {
iter.head = i
for _, b := range iter.readStringSlowPath() {
if 'A' <= b && b <= 'Z' {
b += 'a' - 'A'
}
hash ^= int64(b)
hash *= 0x1000193
}
c = iter.nextToken()
if c != ':' {
iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
return 0
}
return int32(hash)
}
if b == '"' {
iter.head = i + 1
c = iter.nextToken()
if c != ':' {
iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
return 0
}
return int32(hash)
}
if 'A' <= b && b <= 'Z' {
b += 'a' - 'A'
}
hash ^= int64(b)
hash *= 0x1000193
}
@ -80,18 +115,38 @@ func calcHash(str string) int32 {
// ReadObjectCB read object with callback, the key is ascii only and field name not copied
func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
c := iter.nextToken()
var fieldBytes []byte
var field string
if c == '{' {
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field := iter.readObjectFieldAsBytes()
if !callback(iter, *(*string)(unsafe.Pointer(&field))) {
if iter.cfg.objectFieldMustBeSimpleString {
fieldBytes = iter.readObjectFieldAsBytes()
field = *(*string)(unsafe.Pointer(&fieldBytes))
} else {
field = iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
}
if !callback(iter, field) {
return false
}
c = iter.nextToken()
for c == ',' {
field = iter.readObjectFieldAsBytes()
if !callback(iter, *(*string)(unsafe.Pointer(&field))) {
if iter.cfg.objectFieldMustBeSimpleString {
fieldBytes = iter.readObjectFieldAsBytes()
field = *(*string)(unsafe.Pointer(&fieldBytes))
} else {
field = iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
}
if !callback(iter, field) {
return false
}
c = iter.nextToken()
@ -105,14 +160,14 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
if c == '}' {
return true
}
iter.ReportError("ReadObjectCB", `expect " after }`)
iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c}))
return false
}
if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
return true // null
}
iter.ReportError("ReadObjectCB", `expect { or n`)
iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c}))
return false
}
@ -125,7 +180,7 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
iter.unreadByte()
field := iter.ReadString()
if iter.nextToken() != ':' {
iter.ReportError("ReadMapCB", "expect : after object field")
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
return false
}
if !callback(iter, field) {
@ -135,7 +190,7 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
for c == ',' {
field = iter.ReadString()
if iter.nextToken() != ':' {
iter.ReportError("ReadMapCB", "expect : after object field")
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
return false
}
if !callback(iter, field) {
@ -152,14 +207,14 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
if c == '}' {
return true
}
iter.ReportError("ReadMapCB", `expect " after }`)
iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
return false
}
if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
return true // null
}
iter.ReportError("ReadMapCB", `expect { or n`)
iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
return false
}
@ -176,7 +231,7 @@ func (iter *Iterator) readObjectStart() bool {
iter.skipThreeBytes('u', 'l', 'l')
return false
}
iter.ReportError("readObjectStart", "expect { or n")
iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c}))
return false
}
@ -192,7 +247,7 @@ func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) {
}
}
if iter.buf[iter.head] != ':' {
iter.ReportError("readObjectFieldAsBytes", "expect : after object field")
iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]}))
return
}
iter.head++

View File

@ -25,7 +25,7 @@ func (iter *Iterator) ReadBool() (ret bool) {
iter.skipFourBytes('a', 'l', 's', 'e')
return false
}
iter.ReportError("ReadBool", "expect t or f")
iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c}))
return
}
@ -59,7 +59,9 @@ func (iter *Iterator) stopCapture() []byte {
iter.captureStartedAt = -1
iter.captured = nil
if len(captured) == 0 {
return remaining
copied := make([]byte, len(remaining))
copy(copied, remaining)
return copied
}
captured = append(captured, remaining...)
return captured

View File

@ -1,4 +1,4 @@
//+build jsoniter-sloppy
//+build jsoniter_sloppy
package jsoniter

View File

@ -1,4 +1,4 @@
//+build !jsoniter-sloppy
//+build !jsoniter_sloppy
package jsoniter

View File

@ -28,7 +28,7 @@ func (iter *Iterator) ReadString() (ret string) {
iter.skipThreeBytes('u', 'l', 'l')
return ""
}
iter.ReportError("ReadString", `expects " or n`)
iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c}))
return
}
@ -139,7 +139,7 @@ func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
}
return copied
}
iter.ReportError("ReadStringAsSlice", `expects " or n`)
iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c}))
return
}
@ -156,7 +156,7 @@ func (iter *Iterator) readU4() (ret rune) {
} else if c >= 'A' && c <= 'F' {
ret = ret*16 + rune(c-'A'+10)
} else {
iter.ReportError("readU4", "expects 0~9 or a~f")
iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c}))
return
}
}

View File

@ -1,9 +1,25 @@
package jsoniter
import "encoding/json"
import (
"encoding/json"
"strconv"
)
type Number string
// String returns the literal text of the number.
func (n Number) String() string { return string(n) }
// Float64 returns the number as a float64.
func (n Number) Float64() (float64, error) {
return strconv.ParseFloat(string(n), 64)
}
// Int64 returns the number as an int64.
func (n Number) Int64() (int64, error) {
return strconv.ParseInt(string(n), 10, 64)
}
func CastJsonNumber(val interface{}) (string, bool) {
switch typedVal := val.(type) {
case json.Number:

View File

@ -28,6 +28,7 @@ func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
func (cfg *frozenConfig) ReturnStream(stream *Stream) {
stream.Error = nil
stream.Attachment = nil
select {
case cfg.streamPool <- stream:
return
@ -48,6 +49,7 @@ func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator {
func (cfg *frozenConfig) ReturnIterator(iter *Iterator) {
iter.Error = nil
iter.Attachment = nil
select {
case cfg.iteratorPool <- iter:
return

File diff suppressed because it is too large.

View File

@ -7,23 +7,34 @@ import (
"unsafe"
)
func decoderOfArray(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
decoder, err := decoderOfType(cfg, typ.Elem())
if err != nil {
return nil, err
}
return &arrayDecoder{typ, typ.Elem(), decoder}, nil
func decoderOfArray(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
decoder := decoderOfType(cfg, prefix+"[array]->", typ.Elem())
return &arrayDecoder{typ, typ.Elem(), decoder}
}
func encoderOfArray(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
encoder, err := encoderOfType(cfg, typ.Elem())
if err != nil {
return nil, err
func encoderOfArray(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
if typ.Len() == 0 {
return emptyArrayEncoder{}
}
encoder := encoderOfType(cfg, prefix+"[array]->", typ.Elem())
if typ.Elem().Kind() == reflect.Map {
encoder = &optionalEncoder{encoder}
encoder = &OptionalEncoder{encoder}
}
return &arrayEncoder{typ, typ.Elem(), encoder}, nil
return &arrayEncoder{typ, typ.Elem(), encoder}
}
type emptyArrayEncoder struct{}
func (encoder emptyArrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteEmptyArray()
}
func (encoder emptyArrayEncoder) EncodeInterface(val interface{}, stream *Stream) {
stream.WriteEmptyArray()
}
func (encoder emptyArrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return true
}
type arrayEncoder struct {

View File

@ -161,22 +161,31 @@ func RegisterExtension(extension Extension) {
extensions = append(extensions, extension)
}
func getTypeDecoderFromExtension(typ reflect.Type) ValDecoder {
decoder := _getTypeDecoderFromExtension(typ)
func getTypeDecoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValDecoder {
decoder := _getTypeDecoderFromExtension(cfg, typ)
if decoder != nil {
for _, extension := range extensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
for _, extension := range cfg.extensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
}
return decoder
}
func _getTypeDecoderFromExtension(typ reflect.Type) ValDecoder {
func _getTypeDecoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValDecoder {
for _, extension := range extensions {
decoder := extension.CreateDecoder(typ)
if decoder != nil {
return decoder
}
}
for _, extension := range cfg.extensions {
decoder := extension.CreateDecoder(typ)
if decoder != nil {
return decoder
}
}
typeName := typ.String()
decoder := typeDecoders[typeName]
if decoder != nil {
@ -185,29 +194,38 @@ func _getTypeDecoderFromExtension(typ reflect.Type) ValDecoder {
if typ.Kind() == reflect.Ptr {
decoder := typeDecoders[typ.Elem().String()]
if decoder != nil {
return &optionalDecoder{typ.Elem(), decoder}
return &OptionalDecoder{typ.Elem(), decoder}
}
}
return nil
}
func getTypeEncoderFromExtension(typ reflect.Type) ValEncoder {
encoder := _getTypeEncoderFromExtension(typ)
func getTypeEncoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValEncoder {
encoder := _getTypeEncoderFromExtension(cfg, typ)
if encoder != nil {
for _, extension := range extensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
for _, extension := range cfg.extensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
}
return encoder
}
func _getTypeEncoderFromExtension(typ reflect.Type) ValEncoder {
func _getTypeEncoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValEncoder {
for _, extension := range extensions {
encoder := extension.CreateEncoder(typ)
if encoder != nil {
return encoder
}
}
for _, extension := range cfg.extensions {
encoder := extension.CreateEncoder(typ)
if encoder != nil {
return encoder
}
}
typeName := typ.String()
encoder := typeEncoders[typeName]
if encoder != nil {
@ -216,28 +234,28 @@ func _getTypeEncoderFromExtension(typ reflect.Type) ValEncoder {
if typ.Kind() == reflect.Ptr {
encoder := typeEncoders[typ.Elem().String()]
if encoder != nil {
return &optionalEncoder{encoder}
return &OptionalEncoder{encoder}
}
}
return nil
}
func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, error) {
func describeStruct(cfg *frozenConfig, prefix string, typ reflect.Type) *StructDescriptor {
embeddedBindings := []*Binding{}
bindings := []*Binding{}
for i := 0; i < typ.NumField(); i++ {
field := typ.Field(i)
tag := field.Tag.Get(cfg.getTagKey())
tag, hastag := field.Tag.Lookup(cfg.getTagKey())
if cfg.onlyTaggedField && !hastag {
continue
}
tagParts := strings.Split(tag, ",")
if tag == "-" {
continue
}
if field.Anonymous && (tag == "" || tagParts[0] == "") {
if field.Type.Kind() == reflect.Struct {
structDescriptor, err := describeStruct(cfg, field.Type)
if err != nil {
return nil, err
}
structDescriptor := describeStruct(cfg, prefix, field.Type)
for _, binding := range structDescriptor.Fields {
binding.levels = append([]int{i}, binding.levels...)
omitempty := binding.Encoder.(*structFieldEncoder).omitempty
@ -247,16 +265,13 @@ func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, err
}
continue
} else if field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct {
structDescriptor, err := describeStruct(cfg, field.Type.Elem())
if err != nil {
return nil, err
}
structDescriptor := describeStruct(cfg, prefix, field.Type.Elem())
for _, binding := range structDescriptor.Fields {
binding.levels = append([]int{i}, binding.levels...)
omitempty := binding.Encoder.(*structFieldEncoder).omitempty
binding.Encoder = &optionalEncoder{binding.Encoder}
binding.Encoder = &dereferenceEncoder{binding.Encoder}
binding.Encoder = &structFieldEncoder{&field, binding.Encoder, omitempty}
binding.Decoder = &deferenceDecoder{field.Type.Elem(), binding.Decoder}
binding.Decoder = &dereferenceDecoder{field.Type.Elem(), binding.Decoder}
binding.Decoder = &structFieldDecoder{&field, binding.Decoder}
embeddedBindings = append(embeddedBindings, binding)
}
@ -267,22 +282,15 @@ func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, err
fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name)
decoder := fieldDecoders[fieldCacheKey]
if decoder == nil {
var err error
decoder, err = decoderOfType(cfg, field.Type)
if err != nil {
return nil, err
}
decoder = decoderOfType(cfg, prefix+typ.String()+"."+field.Name+"->", field.Type)
}
encoder := fieldEncoders[fieldCacheKey]
if encoder == nil {
var err error
encoder, err = encoderOfType(cfg, field.Type)
if err != nil {
return nil, err
}
// map is stored as pointer in the struct
if field.Type.Kind() == reflect.Map {
encoder = &optionalEncoder{encoder}
encoder = encoderOfType(cfg, prefix+typ.String()+"."+field.Name+"->", field.Type)
// map is stored as pointer in the struct,
// and treat nil or empty map as empty field
if encoder != nil && field.Type.Kind() == reflect.Map {
encoder = &optionalMapEncoder{encoder}
}
}
binding := &Binding{
@ -295,7 +303,7 @@ func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, err
binding.levels = []int{i}
bindings = append(bindings, binding)
}
return createStructDescriptor(cfg, typ, bindings, embeddedBindings), nil
return createStructDescriptor(cfg, typ, bindings, embeddedBindings)
}
func createStructDescriptor(cfg *frozenConfig, typ reflect.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor {
onePtrEmbedded := false
@ -323,6 +331,9 @@ func createStructDescriptor(cfg *frozenConfig, typ reflect.Type, bindings []*Bin
for _, extension := range extensions {
extension.UpdateStructDescriptor(structDescriptor)
}
for _, extension := range cfg.extensions {
extension.UpdateStructDescriptor(structDescriptor)
}
processTags(structDescriptor, cfg)
// merge normal & embedded bindings & sort with original order
allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...))

View File

@ -9,6 +9,22 @@ import (
"unsafe"
)
func decoderOfMap(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
decoder := decoderOfType(cfg, prefix+"[map]->", typ.Elem())
mapInterface := reflect.New(typ).Interface()
return &mapDecoder{typ, typ.Key(), typ.Elem(), decoder, extractInterface(mapInterface)}
}
func encoderOfMap(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
elemType := typ.Elem()
encoder := encoderOfType(cfg, prefix+"[map]->", elemType)
mapInterface := reflect.New(typ).Elem().Interface()
if cfg.sortMapKeys {
return &sortKeysMapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}
}
return &mapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}
}
type mapDecoder struct {
mapType reflect.Type
keyType reflect.Type
@ -32,7 +48,7 @@ func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
}
iter.ReadMapCB(func(iter *Iterator, keyStr string) bool {
elem := reflect.New(decoder.elemType)
decoder.elemDecoder.Decode(unsafe.Pointer(elem.Pointer()), iter)
decoder.elemDecoder.Decode(extractInterface(elem.Interface()).word, iter)
// to put into map, we have to use reflection
keyType := decoder.keyType
// TODO: remove this from loop
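The new encoderOfMap above selects sortKeysMapEncoder when SortMapKeys is set, which gives deterministic key order in the output. For example:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	api := jsoniter.Config{SortMapKeys: true}.Froze()
	out, _ := api.Marshal(map[string]int{"b": 2, "a": 1})
	fmt.Println(string(out)) // {"a":1,"b":2}
}
```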

View File

@ -32,12 +32,10 @@ type intCodec struct {
}
func (codec *intCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*int)(ptr)) = 0
return
}
if !iter.ReadNil() {
*((*int)(ptr)) = iter.ReadInt()
}
}
func (codec *intCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt(*((*int)(ptr)))
@ -55,12 +53,10 @@ type uintptrCodec struct {
}
func (codec *uintptrCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*uintptr)(ptr)) = 0
return
}
if !iter.ReadNil() {
*((*uintptr)(ptr)) = uintptr(iter.ReadUint64())
}
}
func (codec *uintptrCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint64(uint64(*((*uintptr)(ptr))))
@ -78,12 +74,10 @@ type int8Codec struct {
}
func (codec *int8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*uint8)(ptr)) = 0
return
}
if !iter.ReadNil() {
*((*int8)(ptr)) = iter.ReadInt8()
}
}
func (codec *int8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt8(*((*int8)(ptr)))
@ -101,12 +95,10 @@ type int16Codec struct {
}
func (codec *int16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*int16)(ptr)) = 0
return
}
if !iter.ReadNil() {
*((*int16)(ptr)) = iter.ReadInt16()
}
}
func (codec *int16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt16(*((*int16)(ptr)))
@ -124,12 +116,10 @@ type int32Codec struct {
}
func (codec *int32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*int32)(ptr)) = 0
return
}
if !iter.ReadNil() {
*((*int32)(ptr)) = iter.ReadInt32()
}
}
func (codec *int32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt32(*((*int32)(ptr)))
@ -147,12 +137,10 @@ type int64Codec struct {
}
func (codec *int64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*int64)(ptr)) = 0
return
}
if !iter.ReadNil() {
*((*int64)(ptr)) = iter.ReadInt64()
}
}
func (codec *int64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt64(*((*int64)(ptr)))
@ -170,11 +158,10 @@ type uintCodec struct {
}
func (codec *uintCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*uint)(ptr)) = 0
if !iter.ReadNil() {
*((*uint)(ptr)) = iter.ReadUint()
return
}
*((*uint)(ptr)) = iter.ReadUint()
}
func (codec *uintCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
@ -193,12 +180,10 @@ type uint8Codec struct {
}
func (codec *uint8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*uint8)(ptr)) = 0
return
}
if !iter.ReadNil() {
*((*uint8)(ptr)) = iter.ReadUint8()
}
}
func (codec *uint8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint8(*((*uint8)(ptr)))
@ -216,12 +201,10 @@ type uint16Codec struct {
}
func (codec *uint16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*uint16)(ptr)) = 0
return
}
if !iter.ReadNil() {
*((*uint16)(ptr)) = iter.ReadUint16()
}
}
func (codec *uint16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint16(*((*uint16)(ptr)))
@ -239,12 +222,10 @@ type uint32Codec struct {
}
func (codec *uint32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*uint32)(ptr)) = 0
return
}
if !iter.ReadNil() {
*((*uint32)(ptr)) = iter.ReadUint32()
}
}
func (codec *uint32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint32(*((*uint32)(ptr)))
@ -262,12 +243,10 @@ type uint64Codec struct {
}
func (codec *uint64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*uint64)(ptr)) = 0
return
}
if !iter.ReadNil() {
*((*uint64)(ptr)) = iter.ReadUint64()
}
}
func (codec *uint64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint64(*((*uint64)(ptr)))
@ -285,12 +264,10 @@ type float32Codec struct {
}
func (codec *float32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*float32)(ptr)) = 0
return
}
if !iter.ReadNil() {
*((*float32)(ptr)) = iter.ReadFloat32()
}
}
func (codec *float32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteFloat32(*((*float32)(ptr)))
@ -308,12 +285,10 @@ type float64Codec struct {
}
func (codec *float64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*float64)(ptr)) = 0
return
}
if !iter.ReadNil() {
*((*float64)(ptr)) = iter.ReadFloat64()
}
}
func (codec *float64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteFloat64(*((*float64)(ptr)))
@ -352,13 +327,39 @@ type emptyInterfaceCodec struct {
}
func (codec *emptyInterfaceCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
existing := *((*interface{})(ptr))
// Checking for both typed and untyped nil pointers.
if existing != nil &&
reflect.TypeOf(existing).Kind() == reflect.Ptr &&
!reflect.ValueOf(existing).IsNil() {
var ptrToExisting interface{}
for {
elem := reflect.ValueOf(existing).Elem()
if elem.Kind() != reflect.Ptr || elem.IsNil() {
break
}
ptrToExisting = existing
existing = elem.Interface()
}
if iter.ReadNil() {
if ptrToExisting != nil {
nilPtr := reflect.Zero(reflect.TypeOf(ptrToExisting).Elem())
reflect.ValueOf(ptrToExisting).Elem().Set(nilPtr)
} else {
*((*interface{})(ptr)) = nil
}
} else {
iter.ReadVal(existing)
}
return
}
existing := *((*interface{})(ptr))
if existing != nil && reflect.TypeOf(existing).Kind() == reflect.Ptr {
iter.ReadVal(existing)
if iter.ReadNil() {
*((*interface{})(ptr)) = nil
} else {
*((*interface{})(ptr)) = iter.Read()
}
@ -381,6 +382,11 @@ type nonEmptyInterfaceCodec struct {
}
func (codec *nonEmptyInterfaceCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.WhatIsNext() == NilValue {
iter.skipFourBytes('n', 'u', 'l', 'l')
*((*interface{})(ptr)) = nil
return
}
nonEmptyInterface := (*nonEmptyInterface)(ptr)
if nonEmptyInterface.itab == nil {
iter.ReportError("read non-empty interface", "do not know which concrete type to decode to")
@ -391,15 +397,20 @@ func (codec *nonEmptyInterfaceCodec) Decode(ptr unsafe.Pointer, iter *Iterator)
e.typ = nonEmptyInterface.itab.typ
e.word = nonEmptyInterface.word
iter.ReadVal(&i)
if e.word == nil {
nonEmptyInterface.itab = nil
}
nonEmptyInterface.word = e.word
}
func (codec *nonEmptyInterfaceCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
nonEmptyInterface := (*nonEmptyInterface)(ptr)
var i interface{}
if nonEmptyInterface.itab != nil {
e := (*emptyInterface)(unsafe.Pointer(&i))
e.typ = nonEmptyInterface.itab.typ
e.word = nonEmptyInterface.word
}
stream.WriteVal(i)
}
@ -435,15 +446,33 @@ type jsonNumberCodec struct {
}
func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
switch iter.WhatIsNext() {
case StringValue:
*((*json.Number)(ptr)) = json.Number(iter.ReadString())
case NilValue:
iter.skipFourBytes('n', 'u', 'l', 'l')
*((*json.Number)(ptr)) = ""
default:
*((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString()))
}
}
func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteRaw(string(*((*json.Number)(ptr))))
number := *((*json.Number)(ptr))
if len(number) == 0 {
stream.WriteRaw("0")
} else {
stream.WriteRaw(string(number))
}
}
func (codec *jsonNumberCodec) EncodeInterface(val interface{}, stream *Stream) {
stream.WriteRaw(string(val.(json.Number)))
number := val.(json.Number)
if len(number) == 0 {
stream.WriteRaw("0")
} else {
stream.WriteRaw(string(number))
}
}
func (codec *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
@ -454,15 +483,33 @@ type jsoniterNumberCodec struct {
}
func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
switch iter.WhatIsNext() {
case StringValue:
*((*Number)(ptr)) = Number(iter.ReadString())
case NilValue:
iter.skipFourBytes('n', 'u', 'l', 'l')
*((*Number)(ptr)) = ""
default:
*((*Number)(ptr)) = Number([]byte(iter.readNumberAsString()))
}
}
func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteRaw(string(*((*Number)(ptr))))
number := *((*Number)(ptr))
if len(number) == 0 {
stream.WriteRaw("0")
} else {
stream.WriteRaw(string(number))
}
}
func (codec *jsoniterNumberCodec) EncodeInterface(val interface{}, stream *Stream) {
stream.WriteRaw(string(val.(Number)))
number := val.(Number)
if len(number) == 0 {
stream.WriteRaw("0")
} else {
stream.WriteRaw(string(number))
}
}
func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
@ -586,7 +633,7 @@ type stringModeNumberDecoder struct {
func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
c := iter.nextToken()
if c != '"' {
iter.ReportError("stringModeNumberDecoder", `expect "`)
iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
return
}
decoder.elemDecoder.Decode(ptr, iter)
@ -595,7 +642,7 @@ func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
}
c = iter.readByte()
if c != '"' {
iter.ReportError("stringModeNumberDecoder", `expect "`)
iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
return
}
}
@ -660,7 +707,11 @@ func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
templateInterface := encoder.templateInterface
templateInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&templateInterface))
marshaler := (*realInterface).(json.Marshaler)
marshaler, ok := (*realInterface).(json.Marshaler)
if !ok {
stream.WriteVal(nil)
return
}
bytes, err := marshaler.MarshalJSON()
if err != nil {

View File

@ -8,17 +8,14 @@ import (
"unsafe"
)
func encoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
func encoderOfStruct(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
type bindingTo struct {
binding *Binding
toName string
ignored bool
}
orderedBindings := []*bindingTo{}
structDescriptor, err := describeStruct(cfg, typ)
if err != nil {
return nil, err
}
structDescriptor := describeStruct(cfg, prefix, typ)
for _, binding := range structDescriptor.Fields {
for _, toName := range binding.ToNames {
new := &bindingTo{
@ -35,7 +32,7 @@ func encoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
}
}
if len(orderedBindings) == 0 {
return &emptyStructEncoder{}, nil
return &emptyStructEncoder{}
}
finalOrderedFields := []structFieldTo{}
for _, bindingTo := range orderedBindings {
@ -46,7 +43,8 @@ func encoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
})
}
}
return &structEncoder{structDescriptor.onePtrEmbedded, structDescriptor.onePtrOptimization, finalOrderedFields}, nil
return &structEncoder{typ, structDescriptor.onePtrEmbedded,
structDescriptor.onePtrOptimization, finalOrderedFields}
}
func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) {
@ -78,12 +76,9 @@ func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ig
}
}
func decoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
func decoderOfStruct(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
bindings := map[string]*Binding{}
structDescriptor, err := describeStruct(cfg, typ)
if err != nil {
return nil, err
}
structDescriptor := describeStruct(cfg, prefix, typ)
for _, binding := range structDescriptor.Fields {
for _, fromName := range binding.FromNames {
old := bindings[fromName]
@ -131,6 +126,7 @@ func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool {
}
type structEncoder struct {
typ reflect.Type
onePtrEmbedded bool
onePtrOptimization bool
fields []structFieldTo
@ -156,6 +152,9 @@ func (encoder *structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
isNotFirst = true
}
stream.WriteObjectEnd()
if stream.Error != nil && stream.Error != io.EOF {
stream.Error = fmt.Errorf("%v.%s", encoder.typ, stream.Error.Error())
}
}
func (encoder *structEncoder) EncodeInterface(val interface{}, stream *Stream) {

feature_reflect_optional.go (new file, 124 lines)

@ -0,0 +1,124 @@
package jsoniter
import (
"reflect"
"unsafe"
)
func decoderOfOptional(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
elemType := typ.Elem()
decoder := decoderOfType(cfg, prefix, elemType)
return &OptionalDecoder{elemType, decoder}
}
func encoderOfOptional(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
elemType := typ.Elem()
elemEncoder := encoderOfType(cfg, prefix, elemType)
encoder := &OptionalEncoder{elemEncoder}
if elemType.Kind() == reflect.Map {
encoder = &OptionalEncoder{encoder}
}
return encoder
}
type OptionalDecoder struct {
ValueType reflect.Type
ValueDecoder ValDecoder
}
func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*unsafe.Pointer)(ptr)) = nil
} else {
if *((*unsafe.Pointer)(ptr)) == nil {
//pointer is nil, we have to allocate memory to hold the value
value := reflect.New(decoder.ValueType)
newPtr := extractInterface(value.Interface()).word
decoder.ValueDecoder.Decode(newPtr, iter)
*((*uintptr)(ptr)) = uintptr(newPtr)
} else {
//reuse existing instance
decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
}
}
}
type dereferenceDecoder struct {
// only to dereference a pointer
valueType reflect.Type
valueDecoder ValDecoder
}
func (decoder *dereferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if *((*unsafe.Pointer)(ptr)) == nil {
//pointer is nil, we have to allocate memory to hold the value
value := reflect.New(decoder.valueType)
newPtr := extractInterface(value.Interface()).word
decoder.valueDecoder.Decode(newPtr, iter)
*((*uintptr)(ptr)) = uintptr(newPtr)
} else {
//reuse existing instance
decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
}
}
type OptionalEncoder struct {
ValueEncoder ValEncoder
}
func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *((*unsafe.Pointer)(ptr)) == nil {
stream.WriteNil()
} else {
encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
}
}
func (encoder *OptionalEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return *((*unsafe.Pointer)(ptr)) == nil
}
type dereferenceEncoder struct {
ValueEncoder ValEncoder
}
func (encoder *dereferenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *((*unsafe.Pointer)(ptr)) == nil {
stream.WriteNil()
} else {
encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
}
}
func (encoder *dereferenceEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *dereferenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.ValueEncoder.IsEmpty(*((*unsafe.Pointer)(ptr)))
}
type optionalMapEncoder struct {
valueEncoder ValEncoder
}
func (encoder *optionalMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *((*unsafe.Pointer)(ptr)) == nil {
stream.WriteNil()
} else {
encoder.valueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
}
}
func (encoder *optionalMapEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *optionalMapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
p := *((*unsafe.Pointer)(ptr))
return p == nil || encoder.valueEncoder.IsEmpty(p)
}
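// Illustrative sketch (not part of the new file above): OptionalDecoder allocates a fresh
// value when the target pointer is nil, reuses the existing pointee when it is non-nil,
// and a JSON null resets the pointer to nil. Shown through the public Unmarshal API with
// made-up types.
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type wrapper struct {
	Payload *int `json:"payload"`
}

func main() {
	var w wrapper
	_ = jsoniter.Unmarshal([]byte(`{"payload": 42}`), &w) // nil pointer: memory is allocated
	fmt.Println(*w.Payload)                               // 42
	_ = jsoniter.Unmarshal([]byte(`{"payload": 7}`), &w)  // non-nil pointer: value reused in place
	fmt.Println(*w.Payload)                               // 7
	_ = jsoniter.Unmarshal([]byte(`{"payload": null}`), &w)
	fmt.Println(w.Payload) // <nil>
}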


@ -7,23 +7,17 @@ import (
"unsafe"
)
func decoderOfSlice(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
decoder, err := decoderOfType(cfg, typ.Elem())
if err != nil {
return nil, err
}
return &sliceDecoder{typ, typ.Elem(), decoder}, nil
func decoderOfSlice(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
decoder := decoderOfType(cfg, prefix+"[slice]->", typ.Elem())
return &sliceDecoder{typ, typ.Elem(), decoder}
}
func encoderOfSlice(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
encoder, err := encoderOfType(cfg, typ.Elem())
if err != nil {
return nil, err
}
func encoderOfSlice(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
encoder := encoderOfType(cfg, prefix+"[slice]->", typ.Elem())
if typ.Elem().Kind() == reflect.Map {
encoder = &optionalEncoder{encoder}
encoder = &OptionalEncoder{encoder}
}
return &sliceEncoder{typ, typ.Elem(), encoder}, nil
return &sliceEncoder{typ, typ.Elem(), encoder}
}
type sliceEncoder struct {
@ -124,15 +118,14 @@ func growOne(slice *sliceHeader, sliceType reflect.Type, elementType reflect.Typ
}
}
}
newVal := reflect.MakeSlice(sliceType, newLen, newCap)
dst := unsafe.Pointer(newVal.Pointer())
newVal := reflect.MakeSlice(sliceType, newLen, newCap).Interface()
newValPtr := extractInterface(newVal).word
dst := (*sliceHeader)(newValPtr).Data
// copy old array into new array
originalBytesCount := uintptr(slice.Len) * elementType.Size()
srcPtr := (*[1 << 30]byte)(slice.Data)
dstPtr := (*[1 << 30]byte)(dst)
for i := uintptr(0); i < originalBytesCount; i++ {
dstPtr[i] = srcPtr[i]
}
originalBytesCount := slice.Len * int(elementType.Size())
srcSliceHeader := (unsafe.Pointer)(&sliceHeader{slice.Data, originalBytesCount, originalBytesCount})
dstSliceHeader := (unsafe.Pointer)(&sliceHeader{dst, originalBytesCount, originalBytesCount})
copy(*(*[]byte)(dstSliceHeader), *(*[]byte)(srcSliceHeader))
slice.Data = dst
slice.Len = newLen
slice.Cap = newCap
@ -142,8 +135,9 @@ func reuseSlice(slice *sliceHeader, sliceType reflect.Type, expectedCap int) {
if expectedCap <= slice.Cap {
return
}
newVal := reflect.MakeSlice(sliceType, 0, expectedCap)
dst := unsafe.Pointer(newVal.Pointer())
newVal := reflect.MakeSlice(sliceType, 0, expectedCap).Interface()
newValPtr := extractInterface(newVal).word
dst := (*sliceHeader)(newValPtr).Data
slice.Data = dst
slice.Cap = expectedCap
}
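// Illustrative sketch (not part of the diff above): growOne now copies the old backing
// array with the builtin copy by viewing both memory regions through byte slice headers,
// instead of looping byte by byte. reflect.SliceHeader stands in for the package's private
// sliceHeader type; the element type below is made up.
package main

import (
	"fmt"
	"reflect"
	"unsafe"
)

func main() {
	src := []int32{1, 2, 3}
	dst := make([]int32, 3)
	byteCount := len(src) * int(unsafe.Sizeof(src[0]))
	srcHdr := reflect.SliceHeader{Data: uintptr(unsafe.Pointer(&src[0])), Len: byteCount, Cap: byteCount}
	dstHdr := reflect.SliceHeader{Data: uintptr(unsafe.Pointer(&dst[0])), Len: byteCount, Cap: byteCount}
	copy(*(*[]byte)(unsafe.Pointer(&dstHdr)), *(*[]byte)(unsafe.Pointer(&srcHdr)))
	fmt.Println(dst) // [1 2 3]
}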


@ -8,22 +8,22 @@ import (
"unsafe"
)
func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) (ValDecoder, error) {
func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) ValDecoder {
knownHash := map[int32]struct{}{
0: {},
}
switch len(fields) {
case 0:
return &skipObjectDecoder{typ}, nil
return &skipObjectDecoder{typ}
case 1:
for fieldName, fieldDecoder := range fields {
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
return &oneFieldStructDecoder{typ, fieldHash, fieldDecoder}, nil
return &oneFieldStructDecoder{typ, fieldHash, fieldDecoder}
}
case 2:
var fieldHash1 int32
@ -34,7 +34,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldHash1 == 0 {
@ -45,7 +45,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldDecoder2 = fieldDecoder
}
}
return &twoFieldsStructDecoder{typ, fieldHash1, fieldDecoder1, fieldHash2, fieldDecoder2}, nil
return &twoFieldsStructDecoder{typ, fieldHash1, fieldDecoder1, fieldHash2, fieldDecoder2}
case 3:
var fieldName1 int32
var fieldName2 int32
@ -57,7 +57,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -72,7 +72,9 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &threeFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3}
case 4:
var fieldName1 int32
var fieldName2 int32
@ -86,7 +88,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -104,8 +106,10 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &fourFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4}
case 5:
var fieldName1 int32
var fieldName2 int32
@ -121,7 +125,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -142,8 +146,11 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &fiveFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4,
fieldName5, fieldDecoder5}
case 6:
var fieldName1 int32
var fieldName2 int32
@ -161,7 +168,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -185,8 +192,12 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &sixFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4,
fieldName5, fieldDecoder5,
fieldName6, fieldDecoder6}
case 7:
var fieldName1 int32
var fieldName2 int32
@ -206,7 +217,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -233,9 +244,13 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &sevenFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4,
fieldName5, fieldDecoder5,
fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7}
case 8:
var fieldName1 int32
var fieldName2 int32
@ -257,7 +272,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -287,9 +302,14 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &eightFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4,
fieldName5, fieldDecoder5,
fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7,
fieldName8, fieldDecoder8}
case 9:
var fieldName1 int32
var fieldName2 int32
@ -313,7 +333,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -346,9 +366,15 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &nineFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4,
fieldName5, fieldDecoder5,
fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7,
fieldName8, fieldDecoder8,
fieldName9, fieldDecoder9}
case 10:
var fieldName1 int32
var fieldName2 int32
@ -374,7 +400,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 {
@ -410,12 +436,18 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
}
}
return &tenFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9,
fieldName10, fieldDecoder10}, nil
fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3,
fieldName4, fieldDecoder4,
fieldName5, fieldDecoder5,
fieldName6, fieldDecoder6,
fieldName7, fieldDecoder7,
fieldName8, fieldDecoder8,
fieldName9, fieldDecoder9,
fieldName10, fieldDecoder10}
}
return &generalStructDecoder{typ, fields}, nil
return &generalStructDecoder{typ, fields}
}
type generalStructDecoder struct {
@ -427,8 +459,18 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
if !iter.readObjectStart() {
return
}
fieldBytes := iter.readObjectFieldAsBytes()
field := *(*string)(unsafe.Pointer(&fieldBytes))
var fieldBytes []byte
var field string
if iter.cfg.objectFieldMustBeSimpleString {
fieldBytes = iter.readObjectFieldAsBytes()
field = *(*string)(unsafe.Pointer(&fieldBytes))
} else {
field = iter.ReadString()
c := iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
}
fieldDecoder := decoder.fields[strings.ToLower(field)]
if fieldDecoder == nil {
iter.Skip()
@ -436,8 +478,16 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
fieldDecoder.Decode(ptr, iter)
}
for iter.nextToken() == ',' {
fieldBytes = iter.readObjectFieldAsBytes()
if iter.cfg.objectFieldMustBeSimpleString {
fieldBytes := iter.readObjectFieldAsBytes()
field = *(*string)(unsafe.Pointer(&fieldBytes))
} else {
field = iter.ReadString()
c := iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
}
fieldDecoder = decoder.fields[strings.ToLower(field)]
if fieldDecoder == nil {
iter.Skip()
@ -446,7 +496,7 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -484,7 +534,7 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -514,7 +564,7 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -548,7 +598,7 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -586,7 +636,7 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -628,7 +678,7 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -674,7 +724,7 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -724,7 +774,7 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -778,7 +828,7 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -836,7 +886,7 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
@ -898,7 +948,7 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
}
}
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
}
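// Illustrative sketch (not part of the diff above): every struct decoder now wraps its
// error as "<struct type>.<original error>" ("%v.%s") instead of "<struct type>: <...>".
// The type and field below are made up; only the error prefix format is taken from the diff.
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type order struct {
	Quantity int `json:"quantity"`
}

func main() {
	var o order
	err := jsoniter.Unmarshal([]byte(`{"quantity": "ten"}`), &o)
	fmt.Println(err) // the message starts with the struct type, e.g. main.order. ...
}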


@ -4,7 +4,7 @@ import (
"io"
)
// Stream is an io.Writer-like object, with JSON specific write functions.
// stream is an io.Writer-like object, with JSON specific write functions.
// Error is not returned as return value, but stored as Error member on this stream instance.
type Stream struct {
cfg *frozenConfig
@ -13,6 +13,7 @@ type Stream struct {
n int
Error error
indention int
Attachment interface{} // open for customized encoder
}
// NewStream create new stream instance.
@ -191,6 +192,9 @@ func (stream *Stream) ensure(minimal int) {
func (stream *Stream) growAtLeast(minimal int) {
if stream.out != nil {
stream.Flush()
if stream.Available() >= minimal {
return
}
}
toGrow := len(stream.buf)
if toGrow < minimal {
@ -280,8 +284,7 @@ func (stream *Stream) WriteArrayStart() {
// WriteEmptyArray write []
func (stream *Stream) WriteEmptyArray() {
stream.writeByte('[')
stream.writeByte(']')
stream.writeTwoBytes('[', ']')
}
// WriteArrayEnd write ] with possible indention

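// Illustrative sketch (not part of the diff above): a Stream flushes into its io.Writer
// when the buffer fills, and after the growAtLeast fix it no longer grows the buffer once
// a Flush has freed enough room; WriteEmptyArray now emits both brackets in a single call.
package main

import (
	"os"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	stream := jsoniter.NewStream(jsoniter.ConfigDefault, os.Stdout, 512)
	stream.WriteEmptyArray()
	stream.WriteRaw("\n")
	stream.Flush() // prints []
}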

@ -22,7 +22,7 @@ func Test_new_encoder(t *testing.T) {
encoder2 := NewEncoder(buf2)
encoder2.SetEscapeHTML(false)
encoder2.Encode([]int{1})
should.Equal("[1]", buf2.String())
should.Equal("[1]\n", buf2.String())
}
func Test_string_encode_with_std_without_html_escape(t *testing.T) {


@ -51,6 +51,7 @@ func Test_read_string_as_any(t *testing.T) {
func Test_wrap_string(t *testing.T) {
should := require.New(t)
any := WrapString("123")
should.Equal(123, any.ToInt())
any := Get([]byte("-32000")).MustBeValid()
should.Equal(-32000, any.ToInt())
should.NoError(any.LastError())
}


@ -92,22 +92,22 @@ func Test_bool_can_be_null(t *testing.T) {
obj := TestData{}
data1 := []byte(`{"field": true}`)
err := Unmarshal(data1, &obj)
should.Equal(nil, err)
should.NoError(err)
should.Equal(true, obj.Field)
data2 := []byte(`{"field": null}`)
err = Unmarshal(data2, &obj)
should.Equal(nil, err)
should.NoError(err)
// Same behavior as stdlib, not touching the existing value.
should.Equal(true, obj.Field)
// Checking stdlib behavior as well
obj2 := TestData{}
err = json.Unmarshal(data1, &obj2)
should.Equal(nil, err)
should.NoError(err)
should.Equal(true, obj2.Field)
err = json.Unmarshal(data2, &obj2)
should.Equal(nil, err)
should.NoError(err)
should.Equal(true, obj2.Field)
}


@ -2,11 +2,12 @@ package jsoniter
import (
"encoding/json"
"github.com/stretchr/testify/require"
"strconv"
"testing"
"time"
"unsafe"
"github.com/stretchr/testify/require"
)
func Test_customize_type_decoder(t *testing.T) {
@ -82,7 +83,7 @@ func Test_customize_field_decoder(t *testing.T) {
}
type TestObject1 struct {
field1 string
Field1 string
}
type testExtension struct {
@ -93,7 +94,7 @@ func (extension *testExtension) UpdateStructDescriptor(structDescriptor *StructD
if structDescriptor.Type.String() != "jsoniter.TestObject1" {
return
}
binding := structDescriptor.GetField("field1")
binding := structDescriptor.GetField("Field1")
binding.Encoder = &funcEncoder{fun: func(ptr unsafe.Pointer, stream *Stream) {
str := *((*string)(ptr))
val, _ := strconv.Atoi(str)
@ -108,12 +109,13 @@ func (extension *testExtension) UpdateStructDescriptor(structDescriptor *StructD
func Test_customize_field_by_extension(t *testing.T) {
should := require.New(t)
RegisterExtension(&testExtension{})
cfg := Config{}.Froze()
cfg.RegisterExtension(&testExtension{})
obj := TestObject1{}
err := UnmarshalFromString(`{"field-1": 100}`, &obj)
err := cfg.UnmarshalFromString(`{"field-1": 100}`, &obj)
should.Nil(err)
should.Equal("100", obj.field1)
str, err := MarshalToString(obj)
should.Equal("100", obj.Field1)
str, err := cfg.MarshalToString(obj)
should.Nil(err)
should.Equal(`{"field-1":100}`, str)
}


@ -40,11 +40,11 @@ func Test_custom_marshaler_on_enum(t *testing.T) {
w := Wrapper{Payload: MyEnumB}
jb, err := Marshal(w)
should.Equal(nil, err)
should.NoError(err)
should.Equal(`{"Payload":"foo-1"}`, string(jb))
var w2 Wrapper2
err = Unmarshal(jb, &w2)
should.Equal(nil, err)
should.NoError(err)
should.Equal(MyEnumB, w2.Payload)
}


@ -15,6 +15,15 @@ func Test_encode_fixed_array(t *testing.T) {
should.Equal("[0.1,1]", output)
}
func Test_encode_fixed_array_empty(t *testing.T) {
should := require.New(t)
type FixedArray [0]float64
fixed := FixedArray{}
output, err := MarshalToString(fixed)
should.Nil(err)
should.Equal("[]", output)
}
func Test_encode_fixed_array_of_map(t *testing.T) {
should := require.New(t)
type FixedArray [2]map[string]string


@ -192,6 +192,13 @@ func Test_lossy_float_marshal(t *testing.T) {
should.Equal("0.123457", output)
}
func Test_read_number(t *testing.T) {
should := require.New(t)
iter := ParseString(ConfigDefault, `92233720368547758079223372036854775807`)
val := iter.ReadNumber()
should.Equal(`92233720368547758079223372036854775807`, string(val))
}
func Benchmark_jsoniter_float(b *testing.B) {
b.ReportAllocs()
input := []byte(`1.1123,`)


@ -506,6 +506,42 @@ func Test_jsoniter_number(t *testing.T) {
should.Equal("1", str)
}
func Test_non_numeric_as_number(t *testing.T) {
should := require.New(t)
var v1 json.Number
err := Unmarshal([]byte(`"500"`), &v1)
should.Nil(err)
should.Equal("500", string(v1))
var v2 Number
err = Unmarshal([]byte(`"500"`), &v2)
should.Nil(err)
should.Equal("500", string(v2))
}
func Test_null_as_number(t *testing.T) {
should := require.New(t)
var v1 json.Number
err := json.Unmarshal([]byte(`null`), &v1)
should.Nil(err)
should.Equal("", string(v1))
output, err := json.Marshal(v1)
should.NoError(err)
should.Equal("0", string(output))
var v2 Number
err = Unmarshal([]byte(`null`), &v2)
should.Nil(err)
should.Equal("", string(v2))
output, err = Marshal(v2)
should.NoError(err)
should.Equal("0", string(output))
}
func Test_float_as_int(t *testing.T) {
should := require.New(t)
var i int
should.NotNil(Unmarshal([]byte(`1.1`), &i))
}
func Benchmark_jsoniter_encode_int(b *testing.B) {
stream := NewStream(ConfigDefault, ioutil.Discard, 64)
for n := 0; n < b.N; n++ {


@ -7,8 +7,24 @@ import (
"unsafe"
"github.com/stretchr/testify/require"
"reflect"
)
func Test_write_empty_interface_via_placeholder(t *testing.T) {
fmt.Println(^uint(0) >> 1)
should := require.New(t)
m := map[uint32]interface{}{1: "hello"}
inf := reflect.ValueOf(m).MapIndex(reflect.ValueOf(uint32(1))).Interface()
encoder := &placeholderEncoder{
cfg: ConfigFastest.(*frozenConfig),
cacheKey: reflect.TypeOf(m).Elem(),
}
stream := ConfigFastest.BorrowStream(nil)
encoderOfType(ConfigFastest.(*frozenConfig), "", reflect.TypeOf(m).Elem())
encoder.EncodeInterface(inf, stream)
should.Equal(`"hello"`, string(stream.Buffer()))
}
func Test_write_array_of_interface(t *testing.T) {
should := require.New(t)
array := []interface{}{"hello"}
@ -329,13 +345,13 @@ func Test_nil_out_null_interface(t *testing.T) {
data1 := []byte(`{"field": true}`)
err := Unmarshal(data1, &obj)
should.Equal(nil, err)
should.NoError(err)
should.Equal(true, *(obj.Field.(*bool)))
data2 := []byte(`{"field": null}`)
err = Unmarshal(data2, &obj)
should.Equal(nil, err)
should.NoError(err)
should.Equal(nil, obj.Field)
// Checking stdlib behavior matches.
@ -344,11 +360,11 @@ func Test_nil_out_null_interface(t *testing.T) {
}
err = json.Unmarshal(data1, &obj2)
should.Equal(nil, err)
should.NoError(err)
should.Equal(true, *(obj2.Field.(*bool)))
err = json.Unmarshal(data2, &obj2)
should.Equal(nil, err)
should.NoError(err)
should.Equal(nil, obj2.Field)
}
@ -363,10 +379,198 @@ func Test_omitempty_nil_interface(t *testing.T) {
}
js, err := json.Marshal(obj)
should.Equal(nil, err)
should.NoError(err)
should.Equal("{}", string(js))
str, err := MarshalToString(obj)
should.Equal(nil, err)
should.NoError(err)
should.Equal(string(js), str)
}
func Test_omitempty_nil_nonempty_interface(t *testing.T) {
type TestData struct {
Field MyInterface `json:"field,omitempty"`
}
should := require.New(t)
obj := TestData{
Field: nil,
}
js, err := json.Marshal(obj)
should.NoError(err)
should.Equal("{}", string(js))
str, err := MarshalToString(obj)
should.NoError(err)
should.Equal(string(js), str)
obj.Field = MyString("hello")
err = UnmarshalFromString(`{"field":null}`, &obj)
should.NoError(err)
should.Nil(obj.Field)
}
func Test_marshal_nil_marshaler_interface(t *testing.T) {
type TestData struct {
Field json.Marshaler `json:"field"`
}
should := require.New(t)
obj := TestData{
Field: nil,
}
js, err := json.Marshal(obj)
should.NoError(err)
should.Equal(`{"field":null}`, string(js))
str, err := MarshalToString(obj)
should.NoError(err)
should.Equal(string(js), str)
}
func Test_marshal_nil_nonempty_interface(t *testing.T) {
type TestData struct {
Field MyInterface `json:"field"`
}
should := require.New(t)
obj := TestData{
Field: nil,
}
js, err := json.Marshal(obj)
should.NoError(err)
should.Equal(`{"field":null}`, string(js))
str, err := MarshalToString(obj)
should.NoError(err)
should.Equal(string(js), str)
obj.Field = MyString("hello")
err = Unmarshal(js, &obj)
should.NoError(err)
should.Equal(nil, obj.Field)
}
func Test_overwrite_interface_ptr_value_with_nil(t *testing.T) {
type Wrapper struct {
Payload interface{} `json:"payload,omitempty"`
}
type Payload struct {
Value int `json:"val,omitempty"`
}
should := require.New(t)
payload := &Payload{}
wrapper := &Wrapper{
Payload: &payload,
}
err := json.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
should.Equal(nil, err)
should.Equal(&payload, wrapper.Payload)
should.Equal(42, (*(wrapper.Payload.(**Payload))).Value)
err = json.Unmarshal([]byte(`{"payload": null}`), &wrapper)
should.Equal(nil, err)
should.Equal(&payload, wrapper.Payload)
should.Equal((*Payload)(nil), payload)
payload = &Payload{}
wrapper = &Wrapper{
Payload: &payload,
}
err = Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
should.Equal(nil, err)
should.Equal(&payload, wrapper.Payload)
should.Equal(42, (*(wrapper.Payload.(**Payload))).Value)
err = Unmarshal([]byte(`{"payload": null}`), &wrapper)
should.Equal(nil, err)
should.Equal(&payload, wrapper.Payload)
should.Equal((*Payload)(nil), payload)
}
func Test_overwrite_interface_value_with_nil(t *testing.T) {
type Wrapper struct {
Payload interface{} `json:"payload,omitempty"`
}
type Payload struct {
Value int `json:"val,omitempty"`
}
should := require.New(t)
payload := &Payload{}
wrapper := &Wrapper{
Payload: payload,
}
err := json.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
should.Equal(nil, err)
should.Equal(42, (*(wrapper.Payload.(*Payload))).Value)
err = json.Unmarshal([]byte(`{"payload": null}`), &wrapper)
should.Equal(nil, err)
should.Equal(nil, wrapper.Payload)
should.Equal(42, payload.Value)
payload = &Payload{}
wrapper = &Wrapper{
Payload: payload,
}
err = Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
should.Equal(nil, err)
should.Equal(42, (*(wrapper.Payload.(*Payload))).Value)
err = Unmarshal([]byte(`{"payload": null}`), &wrapper)
should.Equal(nil, err)
should.Equal(nil, wrapper.Payload)
should.Equal(42, payload.Value)
}
func Test_unmarshal_into_nil(t *testing.T) {
type Payload struct {
Value int `json:"val,omitempty"`
}
type Wrapper struct {
Payload interface{} `json:"payload,omitempty"`
}
should := require.New(t)
var payload *Payload
wrapper := &Wrapper{
Payload: payload,
}
err := json.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
should.Nil(err)
should.NotNil(wrapper.Payload)
should.Nil(payload)
err = json.Unmarshal([]byte(`{"payload": null}`), &wrapper)
should.Nil(err)
should.Nil(wrapper.Payload)
should.Nil(payload)
payload = nil
wrapper = &Wrapper{
Payload: payload,
}
err = Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
should.Nil(err)
should.NotNil(wrapper.Payload)
should.Nil(payload)
err = Unmarshal([]byte(`{"payload": null}`), &wrapper)
should.Nil(err)
should.Nil(wrapper.Payload)
should.Nil(payload)
}


@ -3,6 +3,7 @@ package jsoniter
import (
"bytes"
"encoding/json"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"io"
"testing"
@ -130,3 +131,70 @@ func Test_invalid_number(t *testing.T) {
should.Nil(err)
should.Equal(string(result2), string(result))
}
func Test_valid(t *testing.T) {
should := require.New(t)
should.True(Valid([]byte(`{}`)))
should.False(Valid([]byte(`{`)))
}
func Test_nil_pointer(t *testing.T) {
should := require.New(t)
data := []byte(`{"A":0}`)
type T struct {
X int
}
var obj *T
err := Unmarshal(data, obj)
should.NotNil(err)
}
func Test_func_pointer_type(t *testing.T) {
type TestObject2 struct {
F func()
}
type TestObject1 struct {
Obj *TestObject2
}
t.Run("encode null is valid", func(t *testing.T) {
should := require.New(t)
output, err := json.Marshal(TestObject1{})
should.Nil(err)
should.Equal(`{"Obj":null}`, string(output))
output, err = Marshal(TestObject1{})
should.Nil(err)
should.Equal(`{"Obj":null}`, string(output))
})
t.Run("encode not null is invalid", func(t *testing.T) {
should := require.New(t)
_, err := json.Marshal(TestObject1{Obj: &TestObject2{}})
should.NotNil(err)
_, err = Marshal(TestObject1{Obj: &TestObject2{}})
should.NotNil(err)
})
t.Run("decode null is valid", func(t *testing.T) {
should := require.New(t)
var obj TestObject1
should.Nil(json.Unmarshal([]byte(`{"Obj":{"F": null}}`), &obj))
should.Nil(Unmarshal([]byte(`{"Obj":{"F": null}}`), &obj))
})
t.Run("decode not null is invalid", func(t *testing.T) {
should := require.New(t)
var obj TestObject1
should.NotNil(json.Unmarshal([]byte(`{"Obj":{"F": "hello"}}`), &obj))
should.NotNil(Unmarshal([]byte(`{"Obj":{"F": "hello"}}`), &obj))
})
}
func TestEOF(t *testing.T) {
var s string
err := ConfigCompatibleWithStandardLibrary.NewDecoder(&bytes.Buffer{}).Decode(&s)
assert.Equal(t, io.EOF, err)
}
func TestDecodeErrorType(t *testing.T) {
should := require.New(t)
var err error
should.Nil(Unmarshal([]byte("null"), &err))
should.NotNil(Unmarshal([]byte("123"), &err))
}


@ -3,9 +3,10 @@ package jsoniter
import (
"bytes"
"encoding/json"
"github.com/stretchr/testify/require"
"io"
"testing"
"github.com/stretchr/testify/require"
)
func Test_read_null(t *testing.T) {
@ -135,3 +136,33 @@ func Test_encode_nil_array(t *testing.T) {
should.Nil(err)
should.Equal("null", string(output))
}
func Test_decode_nil_num(t *testing.T) {
type TestData struct {
Field int `json:"field"`
}
should := require.New(t)
data1 := []byte(`{"field": 42}`)
data2 := []byte(`{"field": null}`)
// Checking stdlib behavior as well
obj2 := TestData{}
err := json.Unmarshal(data1, &obj2)
should.Equal(nil, err)
should.Equal(42, obj2.Field)
err = json.Unmarshal(data2, &obj2)
should.Equal(nil, err)
should.Equal(42, obj2.Field)
obj := TestData{}
err = Unmarshal(data1, &obj)
should.Equal(nil, err)
should.Equal(42, obj.Field)
err = Unmarshal(data2, &obj)
should.Equal(nil, err)
should.Equal(42, obj.Field)
}


@ -158,6 +158,28 @@ func Test_ignore_field_on_not_valid_type(t *testing.T) {
should.Equal(`{"field-1":"hello world"}`, str)
}
func Test_nested_field_omit_empty(t *testing.T) {
should := require.New(t)
type S1 struct {
F1 string `json:",omitempty"`
}
type S2 struct {
*S1
F2 string `json:",omitempty"`
}
s1 := &S1{
//F1: "abc",
}
s2 := &S2{
S1: s1,
F2: "123",
}
str, err := MarshalToString(s2)
should.Nil(err)
should.Equal(`{"F2":"123"}`, str)
}
func Test_recursive_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
@ -328,3 +350,15 @@ func Test_decode_nested(t *testing.T) {
t.Fatal(slice[2])
}
}
func Test_decode_field_with_escape(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
}
var obj TestObject
should.Nil(ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(`{"Field\"1":"hello"}`), &obj))
should.Equal("", obj.Field1)
should.Nil(ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(`{"\u0046ield1":"hello"}`), &obj))
should.Equal("hello", obj.Field1)
}


@ -3,6 +3,7 @@ package jsoniter
import (
"encoding/json"
"github.com/stretchr/testify/require"
"strings"
"testing"
)
@ -86,3 +87,28 @@ func Test_marshal_invalid_json_raw_message(t *testing.T) {
should.Equal(`{"raw":null}`, string(aout))
should.Nil(aouterr)
}
func Test_raw_message_memory_not_copied_issue(t *testing.T) {
jsonStream := `{"name":"xxxxx","bundle_id":"com.zonst.majiang","app_platform":"ios","app_category":"100103", "budget_day":1000,"bidding_min":1,"bidding_max":2,"bidding_type":"CPM", "freq":{"open":true,"type":"day","num":100},"speed":1, "targeting":{"vendor":{"open":true,"list":["zonst"]}, "geo_code":{"open":true,"list":["156110100"]},"app_category":{"open":true,"list":["100101"]}, "day_parting":{"open":true,"list":["100409","100410"]},"device_type":{"open":true,"list":["ipad"]}, "os_version":{"open":true,"list":[10]},"carrier":{"open":true,"list":["mobile"]}, "network":{"open":true,"list":["4G"]}},"url":{"tracking_imp_url":"http://www.baidu.com", "tracking_clk_url":"http://www.baidu.com","jump_url":"http://www.baidu.com","deep_link_url":"http://www.baidu.com"}}`
type IteratorObject struct {
Name *string `json:"name"`
BundleId *string `json:"bundle_id"`
AppCategory *string `json:"app_category"`
AppPlatform *string `json:"app_platform"`
BudgetDay *float32 `json:"budget_day"`
BiddingMax *float32 `json:"bidding_max"`
BiddingMin *float32 `json:"bidding_min"`
BiddingType *string `json:"bidding_type"`
Freq *RawMessage `json:"freq"`
Targeting *RawMessage `json:"targeting"`
Url *RawMessage `json:"url"`
Speed *int `json:"speed" db:"speed"`
}
obj := &IteratorObject{}
decoder := NewDecoder(strings.NewReader(jsonStream))
err := decoder.Decode(obj)
should := require.New(t)
should.Nil(err)
should.Equal(`{"open":true,"type":"day","num":100}`, string(*obj.Freq))
}


@ -104,6 +104,11 @@ func Test_skip_and_return_bytes_with_reader(t *testing.T) {
should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(skipped))
}
func Test_skip_empty(t *testing.T) {
should := require.New(t)
should.NotNil(Get([]byte("")).LastError())
}
type TestResp struct {
Code uint64
}


@ -51,3 +51,19 @@ func Test_writeString_should_grow_buffer(t *testing.T) {
should.Nil(stream.Error)
should.Equal(`"123"`, string(stream.Buffer()))
}
type NopWriter struct {
bufferSize int
}
func (w *NopWriter) Write(p []byte) (n int, err error) {
w.bufferSize = cap(p)
return len(p), nil
}
func Test_flush_buffer_should_stop_grow_buffer(t *testing.T) {
writer := new(NopWriter)
NewEncoder(writer).Encode(make([]int, 10000000))
should := require.New(t)
should.Equal(512, writer.bufferSize)
}


@ -0,0 +1,52 @@
package jsoniter
import (
"encoding/json"
"testing"
"time"
"github.com/stretchr/testify/require"
)
func Test_encode_unexported_field(t *testing.T) {
type TestData struct {
a int
b <-chan int
C int
d *time.Timer
}
should := require.New(t)
testChan := make(<-chan int, 10)
testTimer := time.NewTimer(10 * time.Second)
obj := &TestData{
a: 42,
b: testChan,
C: 21,
d: testTimer,
}
jb, err := json.Marshal(obj)
should.NoError(err)
should.Equal([]byte(`{"C":21}`), jb)
err = json.Unmarshal([]byte(`{"a": 444, "b":"bad", "C":55, "d":{"not": "a timer"}}`), obj)
should.NoError(err)
should.Equal(42, obj.a)
should.Equal(testChan, obj.b)
should.Equal(55, obj.C)
should.Equal(testTimer, obj.d)
jb, err = Marshal(obj)
should.NoError(err)
should.Equal(jb, []byte(`{"C":55}`))
err = Unmarshal([]byte(`{"a": 444, "b":"bad", "C":256, "d":{"not":"a timer"}}`), obj)
should.NoError(err)
should.Equal(42, obj.a)
should.Equal(testChan, obj.b)
should.Equal(256, obj.C)
should.Equal(testTimer, obj.d)
}


@ -1 +0,0 @@
../number/skip_test.go


@ -0,0 +1,34 @@
package test
import (
"encoding/json"
"errors"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"io"
"testing"
)
func Test_skip(t *testing.T) {
for _, input := range inputs {
t.Run(input, func(t *testing.T) {
should := require.New(t)
var dst typeForTest
stdErr := json.Unmarshal([]byte(input), &dst)
iter := jsoniter.ParseString(jsoniter.ConfigDefault, input)
iter.Skip()
iter.ReadNil() // trigger looking forward
err := iter.Error
if err == io.EOF {
err = nil
} else {
err = errors.New("remaining bytes")
}
if stdErr == nil {
should.Nil(err)
} else {
should.NotNil(err)
}
})
}
}


@ -1 +0,0 @@
../number/skip_test.go


@ -0,0 +1,34 @@
package test
import (
"encoding/json"
"errors"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"io"
"testing"
)
func Test_skip(t *testing.T) {
for _, input := range inputs {
t.Run(input, func(t *testing.T) {
should := require.New(t)
var dst typeForTest
stdErr := json.Unmarshal([]byte(input), &dst)
iter := jsoniter.ParseString(jsoniter.ConfigDefault, input)
iter.Skip()
iter.ReadNil() // trigger looking forward
err := iter.Error
if err == io.EOF {
err = nil
} else {
err = errors.New("remaining bytes")
}
if stdErr == nil {
should.Nil(err)
} else {
should.NotNil(err)
}
})
}
}


@ -1 +0,0 @@
../number/skip_test.go


@ -0,0 +1,34 @@
package test
import (
"encoding/json"
"errors"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"io"
"testing"
)
func Test_skip(t *testing.T) {
for _, input := range inputs {
t.Run(input, func(t *testing.T) {
should := require.New(t)
var dst typeForTest
stdErr := json.Unmarshal([]byte(input), &dst)
iter := jsoniter.ParseString(jsoniter.ConfigDefault, input)
iter.Skip()
iter.ReadNil() // trigger looking forward
err := iter.Error
if err == io.EOF {
err = nil
} else {
err = errors.New("remaining bytes")
}
if stdErr == nil {
should.Nil(err)
} else {
should.NotNil(err)
}
})
}
}