mirror of https://github.com/json-iterator/go.git synced 2025-06-15 22:50:24 +02:00

55 Commits
1.0.2 ... 1.0.5

SHA1 Message Date
28452fcdec cow cache is not safe, as a map read will modify the underlying map; use sync.Map for Go 1.9 and above, and a mutex if sync.Map is not available 2018-01-28 17:00:11 +08:00
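A minimal sketch (not from the changeset) of the concurrent use this cache has to survive: several goroutines marshaling through one frozen config, all hitting the per-type codec cache that the sync.Map / mutex variants protect.

package main

import (
	"fmt"
	"sync"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// One frozen config shared by many goroutines; its encoder/decoder
	// cache is read and populated concurrently, which is what the
	// sync.Map (Go 1.9+) or mutex-guarded map implementation protects.
	var json = jsoniter.ConfigCompatibleWithStandardLibrary
	var wg sync.WaitGroup
	for i := 0; i < 8; i++ {
		wg.Add(1)
		go func(n int) {
			defer wg.Done()
			out, _ := json.Marshal(map[string]int{"n": n})
			fmt.Println(string(out))
		}(i)
	}
	wg.Wait()
}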
ea8c33040f fix #228 2018-01-27 16:25:48 +08:00
358cfc3929 Merge branch 'master' of https://github.com/json-iterator/go 2018-01-25 14:48:02 +08:00
c39a632e65 fix #227, fix empty json.Number 2018-01-25 14:47:50 +08:00
e31252f2e2 Merge pull request #225 from mgood/empty-array-fix
Fix encoding 0-length arrays
2018-01-23 23:31:07 +08:00
807e4a8b20 Optimize 0-length array case
Instead of checking the array length in encode, this can be checked up
front in `encoderOfArray` since the array type has a fixed length
determined at compile time. So return an `emptyArrayEncoder` that simply
writes an empty array to the stream.
2018-01-22 14:03:50 -08:00
e78b7e89b6 Merge branch 'master' of https://github.com/json-iterator/go 2018-01-21 20:59:32 +08:00
945d1aaa19 fix #140 uintptr will not lock the address from gc 2018-01-21 20:59:18 +08:00
ba3857729b Fix encoding 0-length arrays
The array encoder assumed that arrays had at least one value, so it
would serialize them with a zero-value for the array, such as `[0]`.

This adds a test to reproduce the issue, and updates the encoder to
write an empty array if the length is 0.
2018-01-16 11:02:03 -08:00
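A quick check of the behavior described by the two zero-length-array commits above, as a sketch against the public Marshal API: a zero-length array now encodes as [] like encoding/json, instead of the bogus [0].

package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var empty [0]int

	std, _ := json.Marshal(empty)      // "[]"
	fast, _ := jsoniter.Marshal(empty) // "[]" after this fix; previously "[0]"

	fmt.Println(string(std), string(fast))
}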
c3ed5e85e0 Merge pull request #222 from neverlee/mydev
Add an OnlyTaggedField option
2018-01-09 18:30:25 +08:00
c27f6f9350 config: add OnlyTaggedField config, only process tagged fields in struct 2018-01-09 17:29:47 +08:00
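A hedged usage sketch for the OnlyTaggedField option added here: with it enabled, only struct fields carrying a tag are processed.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type User struct {
	Name     string `json:"name"` // tagged, so it is encoded
	Internal string // untagged, skipped when OnlyTaggedField is set
}

func main() {
	json := jsoniter.Config{OnlyTaggedField: true}.Froze()
	out, _ := json.Marshal(User{Name: "alice", Internal: "not serialized"})
	fmt.Println(string(out)) // {"name":"alice"}
}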
0ab880662f fix #219 should check real value for empty instead of just the pointer for nested field 2018-01-07 13:57:46 +08:00
6dad2de6cc fix build 2018-01-04 17:18:16 +08:00
11c1cce0d8 fix #217 when input is null, non-decodable type should not be considered as error, to be compatible with stdlib 2018-01-04 16:19:26 +08:00
96fcb84835 fix #215 lazy load more 2017-12-23 10:52:17 +08:00
e7a8aea845 Merge branch 'master' of https://github.com/json-iterator/go 2017-12-21 22:18:40 +08:00
60a9df5ebc fix #214 report EOF like stdlib 2017-12-21 22:18:28 +08:00
7b060ec866 Merge pull request #210 from coocood/master
add ReadNumber for Iterator.
2017-12-18 08:22:47 +09:00
25f147f530 add ReadNumber for Iterator. 2017-12-17 16:44:04 +08:00
a9b9c73b4d fix #207 delay unsupported type error reporting 2017-12-15 10:13:11 +08:00
e0df39fda2 fix #206, do not allow nil pointer as unmarshal input 2017-12-14 17:18:05 +08:00
13f86432b8 do not use defer() in read int 2017-12-12 18:52:41 +08:00
d2a7335211 fix #202 #203 #204 map encoder not properly initialized 2017-12-08 21:18:59 +08:00
b2a706d14b reverse last commit, need a better fix 2017-12-08 06:15:49 +08:00
23078876c5 fix #203 consider MarshalJSON as non empty 2017-12-07 23:20:43 +08:00
051434fab7 fix #198, use dep for vendoring 2017-11-30 10:42:24 +08:00
be6688fc1a fix #200, do not use symbolic link in the code 2017-11-30 10:34:05 +08:00
ff2b70c1db support config level extension 2017-11-23 00:09:35 +08:00
f7279a603e fix out of range 2017-11-15 23:34:21 +08:00
9f088cbcc4 fix #195 when decode float as int, report it clearly 2017-11-15 23:25:12 +08:00
3c0e5762c4 fix #196 do not hard code 1 << 49 2017-11-15 23:15:31 +08:00
d394a135a1 #197 fix placeholder encoder to use EncodeInterface, WriteToStream is unsafe when the real encoder is unknown 2017-11-15 22:56:23 +08:00
9fddff05f0 try to fix #194 with larger array 2017-11-11 08:31:44 +08:00
b1b003864e expose OptionalEncoder&OptionalDecoder; add attachment to Stream&Iterator for customized decoder/encoder 2017-11-08 11:41:45 +08:00
aed5a81f09 fix #190 handle empty input 2017-10-31 22:47:02 +08:00
f1258b01aa fix #191 do not always assume the object field is simple string 2017-10-31 22:38:41 +08:00
fbd210edfc Merge pull request #189 from ggaaooppeenngg/compatible-with-map
Fix standard compatibility
2017-10-26 18:39:38 -05:00
640251ab91 Fix standard compatibility
Non-nil but empty map with omitempty should be ignored.

Signed-off-by: Peng Gao <peng.gao.dut@gmail.com>
2017-10-27 01:43:41 +08:00
06b2a7cf1d Merge pull request #188 from ggaaooppeenngg/compatible
Fix standard compatibility
2017-10-26 06:41:01 -05:00
5fffb9b8f7 Fix standard compatibility
Encode has trailing newline at the end.

Signed-off-by: Peng Gao <peng.gao.dut@gmail.com>
2017-10-26 15:15:36 +08:00
7e3b776024 change jsoniter-sloppy to jsoniter_sloppy 2017-10-23 15:03:44 +08:00
6240e1e798 #185 add jsoniter.Valid 2017-10-10 08:57:02 +08:00
0149a5cf4a fix #183 error message not only show expectation, but also the actual value 2017-10-09 08:24:51 +08:00
5068c8baaf #183 limit error message size 2017-10-09 08:16:52 +08:00
16f78601b5 fix #184, support null as number 2017-10-07 09:29:32 +08:00
8f50a91be2 fix #181, support string as json.Number and jsoniter.Number 2017-10-06 18:08:14 +08:00
73c7bc881e fix #180, add missing methods to jsoniter.Number 2017-10-06 17:56:36 +08:00
4de15a3a87 Merge pull request #182 from MOZGIII/patch-1
Used writeTwoBytes in Stream.WriteEmptyArray
2017-10-04 02:25:32 -05:00
14b28b2226 Used writeTwoBytes in Stream.WriteEmptyArray 2017-10-03 22:14:36 +03:00
abe3c4016b fix #179 2017-09-26 15:35:55 +08:00
dbb1ef3f63 #177 flush buffer should check available again 2017-09-21 21:04:45 +08:00
46b20bbbec #178 SkipAndReturnBytes should return copy of memory 2017-09-21 20:18:45 +08:00
fdfe0b9a69 Merge branch 'olegshaldybin-skip-unexported-fields' 2017-09-19 10:06:59 +08:00
faa3dcf46a do not report error when field is unexported 2017-09-19 10:06:34 +08:00
1f58120d43 Always skip unexported fields when encoding
Skip creating encoders for unexported fields. They do not participate
in JSON marshaling anyway. This allows using unexported fields of
non-marshalable types in structs.

As a side-effect of this change it's no longer possible to marshal
unexported JSON fields by adding a custom type extension. It seems this
is desired behavior since it matches the standard library, and jsoniter
already disallows `json:"-"` fields from participating in custom
extensions.

Fixes #174.
2017-09-18 11:02:15 -07:00
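A small sketch of what this change permits: an unexported field of a non-marshalable type (a channel here) no longer breaks encoder construction, because unexported fields are skipped up front.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type Job struct {
	ID      int `json:"id"`
	results chan string // unexported and not marshalable; simply ignored
}

func main() {
	out, err := jsoniter.Marshal(Job{ID: 7, results: make(chan string)})
	fmt.Println(string(out), err) // {"id":7} <nil>
}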
49 changed files with 1749 additions and 826 deletions

5
.gitignore vendored

@@ -1,3 +1,4 @@
-.idea
+/vendor
+/bug_test.go
 /coverage.txt
-/profile.out
+/.idea

33
Gopkg.lock generated Normal file

@ -0,0 +1,33 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
name = "github.com/davecgh/go-spew"
packages = ["spew"]
revision = "346938d642f2ec3594ed81d874461961cd0faa76"
version = "v1.1.0"
[[projects]]
branch = "master"
name = "github.com/google/gofuzz"
packages = ["."]
revision = "24818f796faf91cd76ec7bddd72458fbced7a6c1"
[[projects]]
name = "github.com/pmezard/go-difflib"
packages = ["difflib"]
revision = "792786c7400a136282c1664665ae0a8db921c6c2"
version = "v1.0.0"
[[projects]]
name = "github.com/stretchr/testify"
packages = ["assert","require"]
revision = "69483b4bd14f5845b5a1e55bca19e954e827f1d0"
version = "v1.1.4"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "f8b7cf3941d3792cbbd570bb53c093adaf774334d1162c651565c97a58dc9d09"
solver-name = "gps-cdcl"
solver-version = 1

33
Gopkg.toml Normal file

@ -0,0 +1,33 @@
# Gopkg.toml example
#
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
# name = "github.com/user/project"
# version = "1.0.0"
#
# [[constraint]]
# name = "github.com/user/project2"
# branch = "dev"
# source = "github.com/myfork/project2"
#
# [[override]]
# name = "github.com/x/y"
# version = "2.4.0"
[[constraint]]
name = "github.com/davecgh/go-spew"
version = "1.1.0"
[[constraint]]
branch = "master"
name = "github.com/google/gofuzz"
[[constraint]]
name = "github.com/stretchr/testify"
version = "1.1.4"

@@ -44,7 +44,9 @@ with
 ```go
 import "github.com/json-iterator/go"
 
-jsoniter.Marshal(&data)
+var json = jsoniter.ConfigCompatibleWithStandardLibrary
+json.Marshal(&data)
 ```
 
 Replace
@@ -58,7 +60,9 @@ with
 ```go
 import "github.com/json-iterator/go"
 
-jsoniter.Unmarshal(input, &data)
+var json = jsoniter.ConfigCompatibleWithStandardLibrary
+json.Unmarshal(input, &data)
 ```
 
 [More documentation](http://jsoniter.com/migrate-from-go-std.html)

12
build.sh Executable file

@ -0,0 +1,12 @@
#!/bin/bash
set -e
set -x
if [ ! -d /tmp/build-golang/src/github.com/json-iterator ]; then
mkdir -p /tmp/build-golang/src/github.com/json-iterator
ln -s $PWD /tmp/build-golang/src/github.com/json-iterator/go
fi
export GOPATH=/tmp/build-golang
go get -u github.com/golang/dep/cmd/dep
cd /tmp/build-golang/src/github.com/json-iterator/go
exec $GOPATH/bin/dep ensure -update

40
compatible_test.go Normal file

@ -0,0 +1,40 @@
package jsoniter
import (
"bytes"
"encoding/json"
"testing"
"github.com/stretchr/testify/require"
)
// Standard Encoder has trailing newline.
func TestEncoderHasTrailingNewline(t *testing.T) {
should := require.New(t)
var buf, stdbuf bytes.Buffer
enc := ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
enc.Encode(1)
stdenc := json.NewEncoder(&stdbuf)
stdenc.Encode(1)
should.Equal(stdbuf.Bytes(), buf.Bytes())
}
// Non-nil but empty map should be ignored.
func TestOmitempty(t *testing.T) {
o := struct {
A string `json:"a,omitempty"`
B string `json:"b,omitempty"`
Annotations map[string]string `json:"annotations,omitempty"`
}{
A: "a",
B: "b",
Annotations: map[string]string{},
}
should := require.New(t)
var buf, stdbuf bytes.Buffer
enc := ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
enc.Encode(o)
stdenc := json.NewEncoder(&stdbuf)
stdenc.Encode(o)
should.Equal(string(stdbuf.Bytes()), string(buf.Bytes()))
}

@@ -71,6 +71,11 @@ type Decoder struct {
 // Decode decode JSON into interface{}
 func (adapter *Decoder) Decode(obj interface{}) error {
+	if adapter.iter.head == adapter.iter.tail && adapter.iter.reader != nil {
+		if !adapter.iter.loadMore() {
+			return io.EOF
+		}
+	}
 	adapter.iter.ReadVal(obj)
 	err := adapter.iter.Error
 	if err == io.EOF {
@@ -110,6 +115,7 @@ type Encoder struct {
 // Encode encode interface{} as JSON to io.Writer
 func (adapter *Encoder) Encode(val interface{}) error {
 	adapter.stream.WriteVal(val)
+	adapter.stream.WriteRaw("\n")
 	adapter.stream.Flush()
 	return adapter.stream.Error
 }
@@ -125,3 +131,8 @@ func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
 	config.EscapeHTML = escapeHTML
 	adapter.stream.cfg = config.Froze().(*frozenConfig)
 }
+
+// Valid reports whether data is a valid JSON encoding.
+func Valid(data []byte) bool {
+	return ConfigDefault.Valid(data)
+}
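Usage sketch for the package-level Valid helper added above; it borrows an iterator, skips one value, and reports whether that succeeded.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	fmt.Println(jsoniter.Valid([]byte(`{"name":"alice"}`))) // true
	fmt.Println(jsoniter.Valid([]byte(`{"name":`)))         // false: truncated object
}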

@@ -1,6 +1,7 @@
 package jsoniter
 
 import (
+	"errors"
 	"fmt"
 	"io"
 	"reflect"
@@ -157,6 +158,8 @@ func (iter *Iterator) readAny() Any {
 		return iter.readArrayAny()
 	case '-':
 		return iter.readNumberAny(false)
+	case 0:
+		return &invalidAny{baseAny{}, errors.New("input is empty")}
 	default:
 		return iter.readNumberAny(true)
 	}


@ -1,6 +1,9 @@
package jsoniter package jsoniter
import "unsafe" import (
"unsafe"
"io"
)
type numberLazyAny struct { type numberLazyAny struct {
baseAny baseAny
@ -29,7 +32,9 @@ func (any *numberLazyAny) ToInt() int {
iter := any.cfg.BorrowIterator(any.buf) iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter) defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt() val := iter.ReadInt()
any.err = iter.Error if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val return val
} }
@ -37,7 +42,9 @@ func (any *numberLazyAny) ToInt32() int32 {
iter := any.cfg.BorrowIterator(any.buf) iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter) defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt32() val := iter.ReadInt32()
any.err = iter.Error if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val return val
} }
@ -45,7 +52,9 @@ func (any *numberLazyAny) ToInt64() int64 {
iter := any.cfg.BorrowIterator(any.buf) iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter) defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt64() val := iter.ReadInt64()
any.err = iter.Error if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val return val
} }
@ -53,7 +62,9 @@ func (any *numberLazyAny) ToUint() uint {
iter := any.cfg.BorrowIterator(any.buf) iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter) defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint() val := iter.ReadUint()
any.err = iter.Error if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val return val
} }
@ -61,7 +72,9 @@ func (any *numberLazyAny) ToUint32() uint32 {
iter := any.cfg.BorrowIterator(any.buf) iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter) defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint32() val := iter.ReadUint32()
any.err = iter.Error if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val return val
} }
@ -69,7 +82,9 @@ func (any *numberLazyAny) ToUint64() uint64 {
iter := any.cfg.BorrowIterator(any.buf) iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter) defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint64() val := iter.ReadUint64()
any.err = iter.Error if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val return val
} }
@ -77,7 +92,9 @@ func (any *numberLazyAny) ToFloat32() float32 {
iter := any.cfg.BorrowIterator(any.buf) iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter) defer any.cfg.ReturnIterator(iter)
val := iter.ReadFloat32() val := iter.ReadFloat32()
any.err = iter.Error if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val return val
} }
@ -85,7 +102,9 @@ func (any *numberLazyAny) ToFloat64() float64 {
iter := any.cfg.BorrowIterator(any.buf) iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter) defer any.cfg.ReturnIterator(iter)
val := iter.ReadFloat64() val := iter.ReadFloat64()
any.err = iter.Error if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val return val
} }


@ -5,31 +5,21 @@ import (
"errors" "errors"
"io" "io"
"reflect" "reflect"
"sync/atomic"
"unsafe" "unsafe"
) )
// Config customize how the API should behave. // Config customize how the API should behave.
// The API is created from Config by Froze. // The API is created from Config by Froze.
type Config struct { type Config struct {
IndentionStep int IndentionStep int
MarshalFloatWith6Digits bool MarshalFloatWith6Digits bool
EscapeHTML bool EscapeHTML bool
SortMapKeys bool SortMapKeys bool
UseNumber bool UseNumber bool
TagKey string TagKey string
ValidateJsonRawMessage bool OnlyTaggedField bool
} ValidateJsonRawMessage bool
ObjectFieldMustBeSimpleString bool
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
decoderCache unsafe.Pointer
encoderCache unsafe.Pointer
extensions []Extension
streamPool chan *Stream
iteratorPool chan *Iterator
} }
// API the public interface of this package. // API the public interface of this package.
@ -45,6 +35,8 @@ type API interface {
Get(data []byte, path ...interface{}) Any Get(data []byte, path ...interface{}) Any
NewEncoder(writer io.Writer) *Encoder NewEncoder(writer io.Writer) *Encoder
NewDecoder(reader io.Reader) *Decoder NewDecoder(reader io.Reader) *Decoder
Valid(data []byte) bool
RegisterExtension(extension Extension)
} }
// ConfigDefault the default API // ConfigDefault the default API
@ -61,21 +53,23 @@ var ConfigCompatibleWithStandardLibrary = Config{
// ConfigFastest marshals float with only 6 digits precision // ConfigFastest marshals float with only 6 digits precision
var ConfigFastest = Config{ var ConfigFastest = Config{
EscapeHTML: false, EscapeHTML: false,
MarshalFloatWith6Digits: true, MarshalFloatWith6Digits: true, // will lose precession
ObjectFieldMustBeSimpleString: true, // do not unescape object field
}.Froze() }.Froze()
// Froze forge API from config // Froze forge API from config
func (cfg Config) Froze() API { func (cfg Config) Froze() API {
// TODO: cache frozen config // TODO: cache frozen config
frozenConfig := &frozenConfig{ frozenConfig := &frozenConfig{
sortMapKeys: cfg.SortMapKeys, sortMapKeys: cfg.SortMapKeys,
indentionStep: cfg.IndentionStep, indentionStep: cfg.IndentionStep,
streamPool: make(chan *Stream, 16), objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
iteratorPool: make(chan *Iterator, 16), onlyTaggedField: cfg.OnlyTaggedField,
streamPool: make(chan *Stream, 16),
iteratorPool: make(chan *Iterator, 16),
} }
atomic.StorePointer(&frozenConfig.decoderCache, unsafe.Pointer(&map[string]ValDecoder{})) frozenConfig.initCache()
atomic.StorePointer(&frozenConfig.encoderCache, unsafe.Pointer(&map[string]ValEncoder{}))
if cfg.MarshalFloatWith6Digits { if cfg.MarshalFloatWith6Digits {
frozenConfig.marshalFloatWith6Digits() frozenConfig.marshalFloatWith6Digits()
} }
@ -127,7 +121,7 @@ func (cfg *frozenConfig) getTagKey() string {
return tagKey return tagKey
} }
func (cfg *frozenConfig) registerExtension(extension Extension) { func (cfg *frozenConfig) RegisterExtension(extension Extension) {
cfg.extensions = append(cfg.extensions, extension) cfg.extensions = append(cfg.extensions, extension)
} }
@ -189,46 +183,6 @@ func (cfg *frozenConfig) escapeHTML() {
cfg.addEncoderToCache(reflect.TypeOf((*string)(nil)).Elem(), &htmlEscapedStringEncoder{}) cfg.addEncoderToCache(reflect.TypeOf((*string)(nil)).Elem(), &htmlEscapedStringEncoder{})
} }
func (cfg *frozenConfig) addDecoderToCache(cacheKey reflect.Type, decoder ValDecoder) {
done := false
for !done {
ptr := atomic.LoadPointer(&cfg.decoderCache)
cache := *(*map[reflect.Type]ValDecoder)(ptr)
copied := map[reflect.Type]ValDecoder{}
for k, v := range cache {
copied[k] = v
}
copied[cacheKey] = decoder
done = atomic.CompareAndSwapPointer(&cfg.decoderCache, ptr, unsafe.Pointer(&copied))
}
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey reflect.Type, encoder ValEncoder) {
done := false
for !done {
ptr := atomic.LoadPointer(&cfg.encoderCache)
cache := *(*map[reflect.Type]ValEncoder)(ptr)
copied := map[reflect.Type]ValEncoder{}
for k, v := range cache {
copied[k] = v
}
copied[cacheKey] = encoder
done = atomic.CompareAndSwapPointer(&cfg.encoderCache, ptr, unsafe.Pointer(&copied))
}
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey reflect.Type) ValDecoder {
ptr := atomic.LoadPointer(&cfg.decoderCache)
cache := *(*map[reflect.Type]ValDecoder)(ptr)
return cache[cacheKey]
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey reflect.Type) ValEncoder {
ptr := atomic.LoadPointer(&cfg.encoderCache)
cache := *(*map[reflect.Type]ValEncoder)(ptr)
return cache[cacheKey]
}
func (cfg *frozenConfig) cleanDecoders() { func (cfg *frozenConfig) cleanDecoders() {
typeDecoders = map[string]ValDecoder{} typeDecoders = map[string]ValDecoder{}
fieldDecoders = map[string]ValDecoder{} fieldDecoders = map[string]ValDecoder{}
@ -333,3 +287,10 @@ func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder {
iter := Parse(cfg, reader, 512) iter := Parse(cfg, reader, 512)
return &Decoder{iter} return &Decoder{iter}
} }
func (cfg *frozenConfig) Valid(data []byte) bool {
iter := cfg.BorrowIterator(data)
defer cfg.ReturnIterator(iter)
iter.Skip()
return iter.Error == nil
}


@ -0,0 +1,51 @@
//+build go1.9
package jsoniter
import (
"reflect"
"sync"
)
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
objectFieldMustBeSimpleString bool
onlyTaggedField bool
decoderCache sync.Map
encoderCache sync.Map
extensions []Extension
streamPool chan *Stream
iteratorPool chan *Iterator
}
func (cfg *frozenConfig) initCache() {
cfg.decoderCache = sync.Map{}
cfg.encoderCache = sync.Map{}
}
func (cfg *frozenConfig) addDecoderToCache(cacheKey reflect.Type, decoder ValDecoder) {
cfg.decoderCache.Store(cacheKey, decoder)
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey reflect.Type, encoder ValEncoder) {
cfg.encoderCache.Store(cacheKey, encoder)
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey reflect.Type) ValDecoder {
decoder, found := cfg.decoderCache.Load(cacheKey)
if found {
return decoder.(ValDecoder)
}
return nil
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey reflect.Type) ValEncoder {
encoder, found := cfg.encoderCache.Load(cacheKey)
if found {
return encoder.(ValEncoder)
}
return nil
}


@ -0,0 +1,54 @@
//+build !go1.9
package jsoniter
import (
"reflect"
"sync"
)
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
objectFieldMustBeSimpleString bool
onlyTaggedField bool
cacheLock *sync.RWMutex
decoderCache map[reflect.Type]ValDecoder
encoderCache map[reflect.Type]ValEncoder
extensions []Extension
streamPool chan *Stream
iteratorPool chan *Iterator
}
func (cfg *frozenConfig) initCache() {
cfg.cacheLock = &sync.RWMutex{}
cfg.decoderCache = map[reflect.Type]ValDecoder{}
cfg.encoderCache = map[reflect.Type]ValEncoder{}
}
func (cfg *frozenConfig) addDecoderToCache(cacheKey reflect.Type, decoder ValDecoder) {
cfg.cacheLock.Lock()
cfg.decoderCache[cacheKey] = decoder
cfg.cacheLock.Unlock()
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey reflect.Type, encoder ValEncoder) {
cfg.cacheLock.Lock()
cfg.encoderCache[cacheKey] = encoder
cfg.cacheLock.Unlock()
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey reflect.Type) ValDecoder {
cfg.cacheLock.RLock()
decoder, _ := cfg.decoderCache[cacheKey].(ValDecoder)
cfg.cacheLock.RUnlock()
return decoder
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey reflect.Type) ValEncoder {
cfg.cacheLock.RLock()
encoder, _ := cfg.encoderCache[cacheKey].(ValEncoder)
cfg.cacheLock.RUnlock()
return encoder
}

@@ -77,6 +77,7 @@ type Iterator struct {
 	captureStartedAt int
 	captured []byte
 	Error error
+	Attachment interface{} // open for customized decoder
 }
 
 // NewIterator creates an empty Iterator instance
@@ -167,7 +168,7 @@ func (iter *Iterator) isObjectEnd() bool {
 	if c == '}' {
 		return true
 	}
-	iter.ReportError("isObjectEnd", "object ended prematurely")
+	iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c}))
 	return true
 }
 
@@ -200,8 +201,22 @@ func (iter *Iterator) ReportError(operation string, msg string) {
 	if peekStart < 0 {
 		peekStart = 0
 	}
-	iter.Error = fmt.Errorf("%s: %s, parsing %v ...%s... at %s", operation, msg, iter.head,
-		string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
+	peekEnd := iter.head + 10
+	if peekEnd > iter.tail {
+		peekEnd = iter.tail
+	}
+	parsing := string(iter.buf[peekStart:peekEnd])
+	contextStart := iter.head - 50
+	if contextStart < 0 {
+		contextStart = 0
+	}
+	contextEnd := iter.head + 50
+	if contextEnd > iter.tail {
+		contextEnd = iter.tail
+	}
+	context := string(iter.buf[contextStart:contextEnd])
+	iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...",
+		operation, msg, iter.head-peekStart, parsing, context)
 }
 
 // CurrentBuffer gets current buffer as string for debugging purpose
@@ -210,7 +225,7 @@ func (iter *Iterator) CurrentBuffer() string {
 	if peekStart < 0 {
 		peekStart = 0
 	}
-	return fmt.Sprintf("parsing %v ...|%s|... at %s", iter.head,
+	return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head,
 		string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
 }

@@ -19,7 +19,7 @@ func (iter *Iterator) ReadArray() (ret bool) {
 	case ',':
 		return true
 	default:
-		iter.ReportError("ReadArray", "expect [ or , or ] or n, but found: "+string([]byte{c}))
+		iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c}))
 		return
 	}
 }
@@ -42,7 +42,7 @@ func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
 			c = iter.nextToken()
 		}
 		if c != ']' {
-			iter.ReportError("ReadArrayCB", "expect ] in the end")
+			iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
 			return false
 		}
 		return true
@@ -53,6 +53,6 @@ func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
 		iter.skipThreeBytes('u', 'l', 'l')
 		return true // null
 	}
-	iter.ReportError("ReadArrayCB", "expect [ or n, but found: "+string([]byte{c}))
+	iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c}))
 	return false
 }

@@ -1,6 +1,7 @@
 package jsoniter
 
 import (
+	"encoding/json"
 	"io"
 	"math/big"
 	"strconv"
@@ -339,3 +340,8 @@ func validateFloat(str string) string {
 	}
 	return ""
 }
+
+// ReadNumber read json.Number
+func (iter *Iterator) ReadNumber() (ret json.Number) {
+	return json.Number(iter.readNumberAsString())
+}
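A short usage sketch for the new ReadNumber, assuming the existing ParseString helper and ConfigDefault; the digits are returned verbatim as a json.Number.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `1234.5678`)
	n := iter.ReadNumber() // json.Number("1234.5678")
	f, _ := n.Float64()
	fmt.Println(n, f)
}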


@ -115,6 +115,7 @@ func (iter *Iterator) ReadUint32() (ret uint32) {
func (iter *Iterator) readUint32(c byte) (ret uint32) { func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind := intDigits[c] ind := intDigits[c]
if ind == 0 { if ind == 0 {
iter.assertInteger()
return 0 // single zero return 0 // single zero
} }
if ind == invalidCharForNumber { if ind == invalidCharForNumber {
@ -127,12 +128,14 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind2 := intDigits[iter.buf[i]] ind2 := intDigits[iter.buf[i]]
if ind2 == invalidCharForNumber { if ind2 == invalidCharForNumber {
iter.head = i iter.head = i
iter.assertInteger()
return value return value
} }
i++ i++
ind3 := intDigits[iter.buf[i]] ind3 := intDigits[iter.buf[i]]
if ind3 == invalidCharForNumber { if ind3 == invalidCharForNumber {
iter.head = i iter.head = i
iter.assertInteger()
return value*10 + uint32(ind2) return value*10 + uint32(ind2)
} }
//iter.head = i + 1 //iter.head = i + 1
@ -141,30 +144,35 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind4 := intDigits[iter.buf[i]] ind4 := intDigits[iter.buf[i]]
if ind4 == invalidCharForNumber { if ind4 == invalidCharForNumber {
iter.head = i iter.head = i
iter.assertInteger()
return value*100 + uint32(ind2)*10 + uint32(ind3) return value*100 + uint32(ind2)*10 + uint32(ind3)
} }
i++ i++
ind5 := intDigits[iter.buf[i]] ind5 := intDigits[iter.buf[i]]
if ind5 == invalidCharForNumber { if ind5 == invalidCharForNumber {
iter.head = i iter.head = i
iter.assertInteger()
return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4) return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4)
} }
i++ i++
ind6 := intDigits[iter.buf[i]] ind6 := intDigits[iter.buf[i]]
if ind6 == invalidCharForNumber { if ind6 == invalidCharForNumber {
iter.head = i iter.head = i
iter.assertInteger()
return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5) return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5)
} }
i++ i++
ind7 := intDigits[iter.buf[i]] ind7 := intDigits[iter.buf[i]]
if ind7 == invalidCharForNumber { if ind7 == invalidCharForNumber {
iter.head = i iter.head = i
iter.assertInteger()
return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6) return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6)
} }
i++ i++
ind8 := intDigits[iter.buf[i]] ind8 := intDigits[iter.buf[i]]
if ind8 == invalidCharForNumber { if ind8 == invalidCharForNumber {
iter.head = i iter.head = i
iter.assertInteger()
return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7) return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7)
} }
i++ i++
@ -172,6 +180,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8) value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8)
iter.head = i iter.head = i
if ind9 == invalidCharForNumber { if ind9 == invalidCharForNumber {
iter.assertInteger()
return value return value
} }
} }
@ -180,6 +189,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind = intDigits[iter.buf[i]] ind = intDigits[iter.buf[i]]
if ind == invalidCharForNumber { if ind == invalidCharForNumber {
iter.head = i iter.head = i
iter.assertInteger()
return value return value
} }
if value > uint32SafeToMultiply10 { if value > uint32SafeToMultiply10 {
@ -194,6 +204,7 @@ func (iter *Iterator) readUint32(c byte) (ret uint32) {
value = (value << 3) + (value << 1) + uint32(ind) value = (value << 3) + (value << 1) + uint32(ind)
} }
if !iter.loadMore() { if !iter.loadMore() {
iter.assertInteger()
return value return value
} }
} }
@ -226,6 +237,7 @@ func (iter *Iterator) ReadUint64() uint64 {
func (iter *Iterator) readUint64(c byte) (ret uint64) { func (iter *Iterator) readUint64(c byte) (ret uint64) {
ind := intDigits[c] ind := intDigits[c]
if ind == 0 { if ind == 0 {
iter.assertInteger()
return 0 // single zero return 0 // single zero
} }
if ind == invalidCharForNumber { if ind == invalidCharForNumber {
@ -233,11 +245,73 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
return return
} }
value := uint64(ind) value := uint64(ind)
if iter.tail-iter.head > 10 {
i := iter.head
ind2 := intDigits[iter.buf[i]]
if ind2 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
i++
ind3 := intDigits[iter.buf[i]]
if ind3 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10 + uint64(ind2)
}
//iter.head = i + 1
//value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
i++
ind4 := intDigits[iter.buf[i]]
if ind4 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100 + uint64(ind2)*10 + uint64(ind3)
}
i++
ind5 := intDigits[iter.buf[i]]
if ind5 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000 + uint64(ind2)*100 + uint64(ind3)*10 + uint64(ind4)
}
i++
ind6 := intDigits[iter.buf[i]]
if ind6 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10000 + uint64(ind2)*1000 + uint64(ind3)*100 + uint64(ind4)*10 + uint64(ind5)
}
i++
ind7 := intDigits[iter.buf[i]]
if ind7 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100000 + uint64(ind2)*10000 + uint64(ind3)*1000 + uint64(ind4)*100 + uint64(ind5)*10 + uint64(ind6)
}
i++
ind8 := intDigits[iter.buf[i]]
if ind8 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000000 + uint64(ind2)*100000 + uint64(ind3)*10000 + uint64(ind4)*1000 + uint64(ind5)*100 + uint64(ind6)*10 + uint64(ind7)
}
i++
ind9 := intDigits[iter.buf[i]]
value = value*10000000 + uint64(ind2)*1000000 + uint64(ind3)*100000 + uint64(ind4)*10000 + uint64(ind5)*1000 + uint64(ind6)*100 + uint64(ind7)*10 + uint64(ind8)
iter.head = i
if ind9 == invalidCharForNumber {
iter.assertInteger()
return value
}
}
for { for {
for i := iter.head; i < iter.tail; i++ { for i := iter.head; i < iter.tail; i++ {
ind = intDigits[iter.buf[i]] ind = intDigits[iter.buf[i]]
if ind == invalidCharForNumber { if ind == invalidCharForNumber {
iter.head = i iter.head = i
iter.assertInteger()
return value return value
} }
if value > uint64SafeToMultiple10 { if value > uint64SafeToMultiple10 {
@ -252,7 +326,14 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
value = (value << 3) + (value << 1) + uint64(ind) value = (value << 3) + (value << 1) + uint64(ind)
} }
if !iter.loadMore() { if !iter.loadMore() {
iter.assertInteger()
return value return value
} }
} }
} }
func (iter *Iterator) assertInteger() {
if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
iter.ReportError("assertInteger", "can not decode float as int")
}
}
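The assertInteger check added at the end of this file is what makes fix #195 visible to callers; a minimal sketch of the resulting behavior when a float is decoded into an int:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var n int
	err := jsoniter.Unmarshal([]byte(`3.14`), &n)
	fmt.Println(err) // non-nil: reports that a float can not be decoded as int
}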


@ -19,15 +19,33 @@ func (iter *Iterator) ReadObject() (ret string) {
c = iter.nextToken() c = iter.nextToken()
if c == '"' { if c == '"' {
iter.unreadByte() iter.unreadByte()
return string(iter.readObjectFieldAsBytes()) if iter.cfg.objectFieldMustBeSimpleString {
return string(iter.readObjectFieldAsBytes())
} else {
field := iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
return field
}
} }
if c == '}' { if c == '}' {
return "" // end of object return "" // end of object
} }
iter.ReportError("ReadObject", `expect " after {`) iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c}))
return return
case ',': case ',':
return string(iter.readObjectFieldAsBytes()) if iter.cfg.objectFieldMustBeSimpleString {
return string(iter.readObjectFieldAsBytes())
} else {
field := iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
return field
}
case '}': case '}':
return "" // end of object return "" // end of object
default: default:
@ -44,17 +62,34 @@ func (iter *Iterator) readFieldHash() int32 {
for i := iter.head; i < iter.tail; i++ { for i := iter.head; i < iter.tail; i++ {
// require ascii string and no escape // require ascii string and no escape
b := iter.buf[i] b := iter.buf[i]
if 'A' <= b && b <= 'Z' { if !iter.cfg.objectFieldMustBeSimpleString && b == '\\' {
b += 'a' - 'A' iter.head = i
for _, b := range iter.readStringSlowPath() {
if 'A' <= b && b <= 'Z' {
b += 'a' - 'A'
}
hash ^= int64(b)
hash *= 0x1000193
}
c = iter.nextToken()
if c != ':' {
iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
return 0
}
return int32(hash)
} }
if b == '"' { if b == '"' {
iter.head = i + 1 iter.head = i + 1
c = iter.nextToken() c = iter.nextToken()
if c != ':' { if c != ':' {
iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c})) iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
return 0
} }
return int32(hash) return int32(hash)
} }
if 'A' <= b && b <= 'Z' {
b += 'a' - 'A'
}
hash ^= int64(b) hash ^= int64(b)
hash *= 0x1000193 hash *= 0x1000193
} }
@ -80,18 +115,38 @@ func calcHash(str string) int32 {
// ReadObjectCB read object with callback, the key is ascii only and field name not copied // ReadObjectCB read object with callback, the key is ascii only and field name not copied
func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool { func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
c := iter.nextToken() c := iter.nextToken()
var fieldBytes []byte
var field string
if c == '{' { if c == '{' {
c = iter.nextToken() c = iter.nextToken()
if c == '"' { if c == '"' {
iter.unreadByte() iter.unreadByte()
field := iter.readObjectFieldAsBytes() if iter.cfg.objectFieldMustBeSimpleString {
if !callback(iter, *(*string)(unsafe.Pointer(&field))) { fieldBytes = iter.readObjectFieldAsBytes()
field = *(*string)(unsafe.Pointer(&fieldBytes))
} else {
field = iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
}
if !callback(iter, field) {
return false return false
} }
c = iter.nextToken() c = iter.nextToken()
for c == ',' { for c == ',' {
field = iter.readObjectFieldAsBytes() if iter.cfg.objectFieldMustBeSimpleString {
if !callback(iter, *(*string)(unsafe.Pointer(&field))) { fieldBytes = iter.readObjectFieldAsBytes()
field = *(*string)(unsafe.Pointer(&fieldBytes))
} else {
field = iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
}
if !callback(iter, field) {
return false return false
} }
c = iter.nextToken() c = iter.nextToken()
@ -105,14 +160,14 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
if c == '}' { if c == '}' {
return true return true
} }
iter.ReportError("ReadObjectCB", `expect " after }`) iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c}))
return false return false
} }
if c == 'n' { if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l') iter.skipThreeBytes('u', 'l', 'l')
return true // null return true // null
} }
iter.ReportError("ReadObjectCB", `expect { or n`) iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c}))
return false return false
} }
@ -125,7 +180,7 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
iter.unreadByte() iter.unreadByte()
field := iter.ReadString() field := iter.ReadString()
if iter.nextToken() != ':' { if iter.nextToken() != ':' {
iter.ReportError("ReadMapCB", "expect : after object field") iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
return false return false
} }
if !callback(iter, field) { if !callback(iter, field) {
@ -135,7 +190,7 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
for c == ',' { for c == ',' {
field = iter.ReadString() field = iter.ReadString()
if iter.nextToken() != ':' { if iter.nextToken() != ':' {
iter.ReportError("ReadMapCB", "expect : after object field") iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
return false return false
} }
if !callback(iter, field) { if !callback(iter, field) {
@ -152,14 +207,14 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
if c == '}' { if c == '}' {
return true return true
} }
iter.ReportError("ReadMapCB", `expect " after }`) iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
return false return false
} }
if c == 'n' { if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l') iter.skipThreeBytes('u', 'l', 'l')
return true // null return true // null
} }
iter.ReportError("ReadMapCB", `expect { or n`) iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
return false return false
} }
@ -176,7 +231,7 @@ func (iter *Iterator) readObjectStart() bool {
iter.skipThreeBytes('u', 'l', 'l') iter.skipThreeBytes('u', 'l', 'l')
return false return false
} }
iter.ReportError("readObjectStart", "expect { or n") iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c}))
return false return false
} }
@ -192,7 +247,7 @@ func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) {
} }
} }
if iter.buf[iter.head] != ':' { if iter.buf[iter.head] != ':' {
iter.ReportError("readObjectFieldAsBytes", "expect : after object field") iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]}))
return return
} }
iter.head++ iter.head++

@@ -25,7 +25,7 @@ func (iter *Iterator) ReadBool() (ret bool) {
 		iter.skipFourBytes('a', 'l', 's', 'e')
 		return false
 	}
-	iter.ReportError("ReadBool", "expect t or f")
+	iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c}))
 	return
 }
@@ -59,7 +59,9 @@ func (iter *Iterator) stopCapture() []byte {
 	iter.captureStartedAt = -1
 	iter.captured = nil
 	if len(captured) == 0 {
-		return remaining
+		copied := make([]byte, len(remaining))
+		copy(copied, remaining)
+		return copied
 	}
 	captured = append(captured, remaining...)
 	return captured

@@ -1,4 +1,4 @@
-//+build jsoniter-sloppy
+//+build jsoniter_sloppy
 
 package jsoniter

@@ -1,4 +1,4 @@
-//+build !jsoniter-sloppy
+//+build !jsoniter_sloppy
 
 package jsoniter

@@ -28,7 +28,7 @@ func (iter *Iterator) ReadString() (ret string) {
 		iter.skipThreeBytes('u', 'l', 'l')
 		return ""
 	}
-	iter.ReportError("ReadString", `expects " or n`)
+	iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c}))
 	return
 }
@@ -139,7 +139,7 @@ func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
 		}
 		return copied
 	}
-	iter.ReportError("ReadStringAsSlice", `expects " or n`)
+	iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c}))
 	return
 }
@@ -156,7 +156,7 @@ func (iter *Iterator) readU4() (ret rune) {
 		} else if c >= 'A' && c <= 'F' {
 			ret = ret*16 + rune(c-'A'+10)
 		} else {
-			iter.ReportError("readU4", "expects 0~9 or a~f")
+			iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c}))
 			return
 		}
 	}
 }

@@ -1,9 +1,25 @@
 package jsoniter
 
-import "encoding/json"
+import (
+	"encoding/json"
+	"strconv"
+)
 
 type Number string
 
+// String returns the literal text of the number.
+func (n Number) String() string { return string(n) }
+
+// Float64 returns the number as a float64.
+func (n Number) Float64() (float64, error) {
+	return strconv.ParseFloat(string(n), 64)
+}
+
+// Int64 returns the number as an int64.
+func (n Number) Int64() (int64, error) {
+	return strconv.ParseInt(string(n), 10, 64)
+}
+
 func CastJsonNumber(val interface{}) (string, bool) {
 	switch typedVal := val.(type) {
 	case json.Number:
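A usage sketch for the jsoniter.Number helpers introduced above, mirroring encoding/json.Number:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	n := jsoniter.Number("42")
	i, _ := n.Int64()   // 42
	f, _ := n.Float64() // 42
	fmt.Println(n.String(), i, f)
}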

@@ -28,6 +28,7 @@ func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
 func (cfg *frozenConfig) ReturnStream(stream *Stream) {
 	stream.Error = nil
+	stream.Attachment = nil
 	select {
 	case cfg.streamPool <- stream:
 		return
@@ -48,6 +49,7 @@ func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator {
 func (cfg *frozenConfig) ReturnIterator(iter *Iterator) {
 	iter.Error = nil
+	iter.Attachment = nil
 	select {
 	case cfg.iteratorPool <- iter:
 		return

File diff suppressed because it is too large.


@ -7,23 +7,34 @@ import (
"unsafe" "unsafe"
) )
func decoderOfArray(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) { func decoderOfArray(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
decoder, err := decoderOfType(cfg, typ.Elem()) decoder := decoderOfType(cfg, prefix+"[array]->", typ.Elem())
if err != nil { return &arrayDecoder{typ, typ.Elem(), decoder}
return nil, err
}
return &arrayDecoder{typ, typ.Elem(), decoder}, nil
} }
func encoderOfArray(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) { func encoderOfArray(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
encoder, err := encoderOfType(cfg, typ.Elem()) if typ.Len() == 0 {
if err != nil { return emptyArrayEncoder{}
return nil, err
} }
encoder := encoderOfType(cfg, prefix+"[array]->", typ.Elem())
if typ.Elem().Kind() == reflect.Map { if typ.Elem().Kind() == reflect.Map {
encoder = &optionalEncoder{encoder} encoder = &OptionalEncoder{encoder}
} }
return &arrayEncoder{typ, typ.Elem(), encoder}, nil return &arrayEncoder{typ, typ.Elem(), encoder}
}
type emptyArrayEncoder struct{}
func (encoder emptyArrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteEmptyArray()
}
func (encoder emptyArrayEncoder) EncodeInterface(val interface{}, stream *Stream) {
stream.WriteEmptyArray()
}
func (encoder emptyArrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return true
} }
type arrayEncoder struct { type arrayEncoder struct {


@ -161,22 +161,31 @@ func RegisterExtension(extension Extension) {
extensions = append(extensions, extension) extensions = append(extensions, extension)
} }
func getTypeDecoderFromExtension(typ reflect.Type) ValDecoder { func getTypeDecoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValDecoder {
decoder := _getTypeDecoderFromExtension(typ) decoder := _getTypeDecoderFromExtension(cfg, typ)
if decoder != nil { if decoder != nil {
for _, extension := range extensions { for _, extension := range extensions {
decoder = extension.DecorateDecoder(typ, decoder) decoder = extension.DecorateDecoder(typ, decoder)
} }
for _, extension := range cfg.extensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
} }
return decoder return decoder
} }
func _getTypeDecoderFromExtension(typ reflect.Type) ValDecoder { func _getTypeDecoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValDecoder {
for _, extension := range extensions { for _, extension := range extensions {
decoder := extension.CreateDecoder(typ) decoder := extension.CreateDecoder(typ)
if decoder != nil { if decoder != nil {
return decoder return decoder
} }
} }
for _, extension := range cfg.extensions {
decoder := extension.CreateDecoder(typ)
if decoder != nil {
return decoder
}
}
typeName := typ.String() typeName := typ.String()
decoder := typeDecoders[typeName] decoder := typeDecoders[typeName]
if decoder != nil { if decoder != nil {
@ -185,29 +194,38 @@ func _getTypeDecoderFromExtension(typ reflect.Type) ValDecoder {
if typ.Kind() == reflect.Ptr { if typ.Kind() == reflect.Ptr {
decoder := typeDecoders[typ.Elem().String()] decoder := typeDecoders[typ.Elem().String()]
if decoder != nil { if decoder != nil {
return &optionalDecoder{typ.Elem(), decoder} return &OptionalDecoder{typ.Elem(), decoder}
} }
} }
return nil return nil
} }
func getTypeEncoderFromExtension(typ reflect.Type) ValEncoder { func getTypeEncoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValEncoder {
encoder := _getTypeEncoderFromExtension(typ) encoder := _getTypeEncoderFromExtension(cfg, typ)
if encoder != nil { if encoder != nil {
for _, extension := range extensions { for _, extension := range extensions {
encoder = extension.DecorateEncoder(typ, encoder) encoder = extension.DecorateEncoder(typ, encoder)
} }
for _, extension := range cfg.extensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
} }
return encoder return encoder
} }
func _getTypeEncoderFromExtension(typ reflect.Type) ValEncoder { func _getTypeEncoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValEncoder {
for _, extension := range extensions { for _, extension := range extensions {
encoder := extension.CreateEncoder(typ) encoder := extension.CreateEncoder(typ)
if encoder != nil { if encoder != nil {
return encoder return encoder
} }
} }
for _, extension := range cfg.extensions {
encoder := extension.CreateEncoder(typ)
if encoder != nil {
return encoder
}
}
typeName := typ.String() typeName := typ.String()
encoder := typeEncoders[typeName] encoder := typeEncoders[typeName]
if encoder != nil { if encoder != nil {
@ -216,28 +234,28 @@ func _getTypeEncoderFromExtension(typ reflect.Type) ValEncoder {
if typ.Kind() == reflect.Ptr { if typ.Kind() == reflect.Ptr {
encoder := typeEncoders[typ.Elem().String()] encoder := typeEncoders[typ.Elem().String()]
if encoder != nil { if encoder != nil {
return &optionalEncoder{encoder} return &OptionalEncoder{encoder}
} }
} }
return nil return nil
} }
func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, error) { func describeStruct(cfg *frozenConfig, prefix string, typ reflect.Type) *StructDescriptor {
embeddedBindings := []*Binding{} embeddedBindings := []*Binding{}
bindings := []*Binding{} bindings := []*Binding{}
for i := 0; i < typ.NumField(); i++ { for i := 0; i < typ.NumField(); i++ {
field := typ.Field(i) field := typ.Field(i)
tag := field.Tag.Get(cfg.getTagKey()) tag, hastag := field.Tag.Lookup(cfg.getTagKey())
if cfg.onlyTaggedField && !hastag {
continue
}
tagParts := strings.Split(tag, ",") tagParts := strings.Split(tag, ",")
if tag == "-" { if tag == "-" {
continue continue
} }
if field.Anonymous && (tag == "" || tagParts[0] == "") { if field.Anonymous && (tag == "" || tagParts[0] == "") {
if field.Type.Kind() == reflect.Struct { if field.Type.Kind() == reflect.Struct {
structDescriptor, err := describeStruct(cfg, field.Type) structDescriptor := describeStruct(cfg, prefix, field.Type)
if err != nil {
return nil, err
}
for _, binding := range structDescriptor.Fields { for _, binding := range structDescriptor.Fields {
binding.levels = append([]int{i}, binding.levels...) binding.levels = append([]int{i}, binding.levels...)
omitempty := binding.Encoder.(*structFieldEncoder).omitempty omitempty := binding.Encoder.(*structFieldEncoder).omitempty
@ -247,16 +265,13 @@ func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, err
} }
continue continue
} else if field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct { } else if field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct {
structDescriptor, err := describeStruct(cfg, field.Type.Elem()) structDescriptor := describeStruct(cfg, prefix, field.Type.Elem())
if err != nil {
return nil, err
}
for _, binding := range structDescriptor.Fields { for _, binding := range structDescriptor.Fields {
binding.levels = append([]int{i}, binding.levels...) binding.levels = append([]int{i}, binding.levels...)
omitempty := binding.Encoder.(*structFieldEncoder).omitempty omitempty := binding.Encoder.(*structFieldEncoder).omitempty
binding.Encoder = &optionalEncoder{binding.Encoder} binding.Encoder = &dereferenceEncoder{binding.Encoder}
binding.Encoder = &structFieldEncoder{&field, binding.Encoder, omitempty} binding.Encoder = &structFieldEncoder{&field, binding.Encoder, omitempty}
binding.Decoder = &deferenceDecoder{field.Type.Elem(), binding.Decoder} binding.Decoder = &dereferenceDecoder{field.Type.Elem(), binding.Decoder}
binding.Decoder = &structFieldDecoder{&field, binding.Decoder} binding.Decoder = &structFieldDecoder{&field, binding.Decoder}
embeddedBindings = append(embeddedBindings, binding) embeddedBindings = append(embeddedBindings, binding)
} }
@ -267,22 +282,15 @@ func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, err
fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name) fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name)
decoder := fieldDecoders[fieldCacheKey] decoder := fieldDecoders[fieldCacheKey]
if decoder == nil { if decoder == nil {
var err error decoder = decoderOfType(cfg, prefix+typ.String()+"."+field.Name+"->", field.Type)
decoder, err = decoderOfType(cfg, field.Type)
if err != nil {
return nil, err
}
} }
encoder := fieldEncoders[fieldCacheKey] encoder := fieldEncoders[fieldCacheKey]
if encoder == nil { if encoder == nil {
var err error encoder = encoderOfType(cfg, prefix+typ.String()+"."+field.Name+"->", field.Type)
encoder, err = encoderOfType(cfg, field.Type) // map is stored as pointer in the struct,
if err != nil { // and treat nil or empty map as empty field
return nil, err if encoder != nil && field.Type.Kind() == reflect.Map {
} encoder = &optionalMapEncoder{encoder}
// map is stored as pointer in the struct
if field.Type.Kind() == reflect.Map {
encoder = &optionalEncoder{encoder}
} }
} }
binding := &Binding{ binding := &Binding{
@ -295,7 +303,7 @@ func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, err
binding.levels = []int{i} binding.levels = []int{i}
bindings = append(bindings, binding) bindings = append(bindings, binding)
} }
return createStructDescriptor(cfg, typ, bindings, embeddedBindings), nil return createStructDescriptor(cfg, typ, bindings, embeddedBindings)
} }
func createStructDescriptor(cfg *frozenConfig, typ reflect.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor { func createStructDescriptor(cfg *frozenConfig, typ reflect.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor {
onePtrEmbedded := false onePtrEmbedded := false
@ -323,6 +331,9 @@ func createStructDescriptor(cfg *frozenConfig, typ reflect.Type, bindings []*Bin
for _, extension := range extensions { for _, extension := range extensions {
extension.UpdateStructDescriptor(structDescriptor) extension.UpdateStructDescriptor(structDescriptor)
} }
for _, extension := range cfg.extensions {
extension.UpdateStructDescriptor(structDescriptor)
}
processTags(structDescriptor, cfg) processTags(structDescriptor, cfg)
// merge normal & embedded bindings & sort with original order // merge normal & embedded bindings & sort with original order
allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...)) allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...))


@ -9,6 +9,22 @@ import (
"unsafe" "unsafe"
) )
func decoderOfMap(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
decoder := decoderOfType(cfg, prefix+"[map]->", typ.Elem())
mapInterface := reflect.New(typ).Interface()
return &mapDecoder{typ, typ.Key(), typ.Elem(), decoder, extractInterface(mapInterface)}
}
func encoderOfMap(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
elemType := typ.Elem()
encoder := encoderOfType(cfg, prefix+"[map]->", elemType)
mapInterface := reflect.New(typ).Elem().Interface()
if cfg.sortMapKeys {
return &sortKeysMapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}
}
return &mapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}
}
type mapDecoder struct { type mapDecoder struct {
mapType reflect.Type mapType reflect.Type
keyType reflect.Type keyType reflect.Type
@ -32,7 +48,7 @@ func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
} }
iter.ReadMapCB(func(iter *Iterator, keyStr string) bool { iter.ReadMapCB(func(iter *Iterator, keyStr string) bool {
elem := reflect.New(decoder.elemType) elem := reflect.New(decoder.elemType)
decoder.elemDecoder.Decode(unsafe.Pointer(elem.Pointer()), iter) decoder.elemDecoder.Decode(extractInterface(elem.Interface()).word, iter)
// to put into map, we have to use reflection // to put into map, we have to use reflection
keyType := decoder.keyType keyType := decoder.keyType
// TODO: remove this from loop // TODO: remove this from loop


@ -382,6 +382,11 @@ type nonEmptyInterfaceCodec struct {
} }
func (codec *nonEmptyInterfaceCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { func (codec *nonEmptyInterfaceCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.WhatIsNext() == NilValue {
iter.skipFourBytes('n', 'u', 'l', 'l')
*((*interface{})(ptr)) = nil
return
}
nonEmptyInterface := (*nonEmptyInterface)(ptr) nonEmptyInterface := (*nonEmptyInterface)(ptr)
if nonEmptyInterface.itab == nil { if nonEmptyInterface.itab == nil {
iter.ReportError("read non-empty interface", "do not know which concrete type to decode to") iter.ReportError("read non-empty interface", "do not know which concrete type to decode to")
@ -441,15 +446,33 @@ type jsonNumberCodec struct {
}
func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
-	*((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString()))
+	switch iter.WhatIsNext() {
+	case StringValue:
+		*((*json.Number)(ptr)) = json.Number(iter.ReadString())
+	case NilValue:
+		iter.skipFourBytes('n', 'u', 'l', 'l')
+		*((*json.Number)(ptr)) = ""
+	default:
+		*((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString()))
+	}
}
func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
-	stream.WriteRaw(string(*((*json.Number)(ptr))))
+	number := *((*json.Number)(ptr))
+	if len(number) == 0 {
+		stream.WriteRaw("0")
+	} else {
+		stream.WriteRaw(string(number))
+	}
}
func (codec *jsonNumberCodec) EncodeInterface(val interface{}, stream *Stream) {
-	stream.WriteRaw(string(val.(json.Number)))
+	number := val.(json.Number)
+	if len(number) == 0 {
+		stream.WriteRaw("0")
+	} else {
+		stream.WriteRaw(string(number))
+	}
}
func (codec *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
@ -460,15 +483,33 @@ type jsoniterNumberCodec struct {
}
func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
-	*((*Number)(ptr)) = Number([]byte(iter.readNumberAsString()))
+	switch iter.WhatIsNext() {
+	case StringValue:
+		*((*Number)(ptr)) = Number(iter.ReadString())
+	case NilValue:
+		iter.skipFourBytes('n', 'u', 'l', 'l')
+		*((*Number)(ptr)) = ""
+	default:
+		*((*Number)(ptr)) = Number([]byte(iter.readNumberAsString()))
+	}
}
func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
-	stream.WriteRaw(string(*((*Number)(ptr))))
+	number := *((*Number)(ptr))
+	if len(number) == 0 {
+		stream.WriteRaw("0")
+	} else {
+		stream.WriteRaw(string(number))
+	}
}
func (codec *jsoniterNumberCodec) EncodeInterface(val interface{}, stream *Stream) {
-	stream.WriteRaw(string(val.(Number)))
+	number := val.(Number)
+	if len(number) == 0 {
+		stream.WriteRaw("0")
+	} else {
+		stream.WriteRaw(string(number))
+	}
}
func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
@ -592,7 +633,7 @@ type stringModeNumberDecoder struct {
func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	c := iter.nextToken()
	if c != '"' {
-		iter.ReportError("stringModeNumberDecoder", `expect "`)
+		iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
		return
	}
	decoder.elemDecoder.Decode(ptr, iter)
@ -601,7 +642,7 @@ func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
	}
	c = iter.readByte()
	if c != '"' {
-		iter.ReportError("stringModeNumberDecoder", `expect "`)
+		iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
		return
	}
}
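The number-codec changes above make a quoted number and a JSON null decode into json.Number / jsoniter Number, and make an empty Number marshal as 0 instead of writing nothing. A small sketch of the user-facing behaviour these codecs give (the same behaviour the tests added later in this change set check):

package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var n json.Number
	_ = jsoniter.Unmarshal([]byte(`"500"`), &n) // a quoted number is accepted
	fmt.Println(n)                              // 500

	_ = jsoniter.Unmarshal([]byte(`null`), &n) // null leaves the Number empty
	out, _ := jsoniter.Marshal(n)              // an empty Number is written as 0
	fmt.Println(string(out))                   // 0
}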

View File

@ -8,17 +8,14 @@ import (
"unsafe" "unsafe"
) )
func encoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) { func encoderOfStruct(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
type bindingTo struct { type bindingTo struct {
binding *Binding binding *Binding
toName string toName string
ignored bool ignored bool
} }
orderedBindings := []*bindingTo{} orderedBindings := []*bindingTo{}
structDescriptor, err := describeStruct(cfg, typ) structDescriptor := describeStruct(cfg, prefix, typ)
if err != nil {
return nil, err
}
for _, binding := range structDescriptor.Fields { for _, binding := range structDescriptor.Fields {
for _, toName := range binding.ToNames { for _, toName := range binding.ToNames {
new := &bindingTo{ new := &bindingTo{
@ -35,7 +32,7 @@ func encoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
} }
} }
if len(orderedBindings) == 0 { if len(orderedBindings) == 0 {
return &emptyStructEncoder{}, nil return &emptyStructEncoder{}
} }
finalOrderedFields := []structFieldTo{} finalOrderedFields := []structFieldTo{}
for _, bindingTo := range orderedBindings { for _, bindingTo := range orderedBindings {
@ -46,7 +43,8 @@ func encoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
}) })
} }
} }
return &structEncoder{structDescriptor.onePtrEmbedded, structDescriptor.onePtrOptimization, finalOrderedFields}, nil return &structEncoder{typ, structDescriptor.onePtrEmbedded,
structDescriptor.onePtrOptimization, finalOrderedFields}
} }
func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) { func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) {
@ -78,12 +76,9 @@ func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ig
} }
} }
func decoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) { func decoderOfStruct(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
bindings := map[string]*Binding{} bindings := map[string]*Binding{}
structDescriptor, err := describeStruct(cfg, typ) structDescriptor := describeStruct(cfg, prefix, typ)
if err != nil {
return nil, err
}
for _, binding := range structDescriptor.Fields { for _, binding := range structDescriptor.Fields {
for _, fromName := range binding.FromNames { for _, fromName := range binding.FromNames {
old := bindings[fromName] old := bindings[fromName]
@ -131,6 +126,7 @@ func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool {
} }
type structEncoder struct { type structEncoder struct {
typ reflect.Type
onePtrEmbedded bool onePtrEmbedded bool
onePtrOptimization bool onePtrOptimization bool
fields []structFieldTo fields []structFieldTo
@ -156,6 +152,9 @@ func (encoder *structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
isNotFirst = true isNotFirst = true
} }
stream.WriteObjectEnd() stream.WriteObjectEnd()
if stream.Error != nil && stream.Error != io.EOF {
stream.Error = fmt.Errorf("%v.%s", encoder.typ, stream.Error.Error())
}
} }
func (encoder *structEncoder) EncodeInterface(val interface{}, stream *Stream) { func (encoder *structEncoder) EncodeInterface(val interface{}, stream *Stream) {
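With the struct type now stored on the encoder, an encode error bubbling up from a field gets prefixed with the type it occurred in. A hedged sketch of the observable effect (the exact message text is an implementation detail; what the change set guarantees is that encoding an unsupported field surfaces as an error rather than silently succeeding):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type Inner struct {
	F func() // func values cannot be encoded to JSON
}

type Outer struct {
	Obj *Inner
}

func main() {
	_, err := jsoniter.Marshal(Outer{Obj: &Inner{}})
	fmt.Println(err != nil) // true: the unsupported field type is reported as an error
}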

feature_reflect_optional.go (new file, 124 lines)
View File

@ -0,0 +1,124 @@
package jsoniter
import (
"reflect"
"unsafe"
)
func decoderOfOptional(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
elemType := typ.Elem()
decoder := decoderOfType(cfg, prefix, elemType)
return &OptionalDecoder{elemType, decoder}
}
func encoderOfOptional(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
elemType := typ.Elem()
elemEncoder := encoderOfType(cfg, prefix, elemType)
encoder := &OptionalEncoder{elemEncoder}
if elemType.Kind() == reflect.Map {
encoder = &OptionalEncoder{encoder}
}
return encoder
}
type OptionalDecoder struct {
ValueType reflect.Type
ValueDecoder ValDecoder
}
func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*unsafe.Pointer)(ptr)) = nil
} else {
if *((*unsafe.Pointer)(ptr)) == nil {
//pointer to null, we have to allocate memory to hold the value
value := reflect.New(decoder.ValueType)
newPtr := extractInterface(value.Interface()).word
decoder.ValueDecoder.Decode(newPtr, iter)
*((*uintptr)(ptr)) = uintptr(newPtr)
} else {
//reuse existing instance
decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
}
}
}
type dereferenceDecoder struct {
// only to deference a pointer
valueType reflect.Type
valueDecoder ValDecoder
}
func (decoder *dereferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if *((*unsafe.Pointer)(ptr)) == nil {
//pointer to null, we have to allocate memory to hold the value
value := reflect.New(decoder.valueType)
newPtr := extractInterface(value.Interface()).word
decoder.valueDecoder.Decode(newPtr, iter)
*((*uintptr)(ptr)) = uintptr(newPtr)
} else {
//reuse existing instance
decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
}
}
type OptionalEncoder struct {
ValueEncoder ValEncoder
}
func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *((*unsafe.Pointer)(ptr)) == nil {
stream.WriteNil()
} else {
encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
}
}
func (encoder *OptionalEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return *((*unsafe.Pointer)(ptr)) == nil
}
type dereferenceEncoder struct {
ValueEncoder ValEncoder
}
func (encoder *dereferenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *((*unsafe.Pointer)(ptr)) == nil {
stream.WriteNil()
} else {
encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
}
}
func (encoder *dereferenceEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *dereferenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.ValueEncoder.IsEmpty(*((*unsafe.Pointer)(ptr)))
}
type optionalMapEncoder struct {
valueEncoder ValEncoder
}
func (encoder *optionalMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *((*unsafe.Pointer)(ptr)) == nil {
stream.WriteNil()
} else {
encoder.valueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
}
}
func (encoder *optionalMapEncoder) EncodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
func (encoder *optionalMapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
p := *((*unsafe.Pointer)(ptr))
return p == nil || encoder.valueEncoder.IsEmpty(p)
}
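The new optional decoder allocates a value the first time a non-null token is seen and reuses an existing allocation on later decodes, while the optional encoder writes null for a nil pointer. A short usage sketch of that pointer behaviour:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type Payload struct {
	Count *int `json:"count"`
}

func main() {
	var p Payload
	_ = jsoniter.Unmarshal([]byte(`{"count":3}`), &p) // nil pointer: memory is allocated
	fmt.Println(*p.Count)                             // 3

	_ = jsoniter.Unmarshal([]byte(`{"count":7}`), &p) // existing allocation is reused
	fmt.Println(*p.Count)                             // 7

	p.Count = nil
	out, _ := jsoniter.Marshal(p) // nil pointer encodes as null
	fmt.Println(string(out))      // {"count":null}
}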

View File

@ -7,23 +7,17 @@ import (
"unsafe" "unsafe"
) )
func decoderOfSlice(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) { func decoderOfSlice(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder {
decoder, err := decoderOfType(cfg, typ.Elem()) decoder := decoderOfType(cfg, prefix+"[slice]->", typ.Elem())
if err != nil { return &sliceDecoder{typ, typ.Elem(), decoder}
return nil, err
}
return &sliceDecoder{typ, typ.Elem(), decoder}, nil
} }
func encoderOfSlice(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) { func encoderOfSlice(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder {
encoder, err := encoderOfType(cfg, typ.Elem()) encoder := encoderOfType(cfg, prefix+"[slice]->", typ.Elem())
if err != nil {
return nil, err
}
if typ.Elem().Kind() == reflect.Map { if typ.Elem().Kind() == reflect.Map {
encoder = &optionalEncoder{encoder} encoder = &OptionalEncoder{encoder}
} }
return &sliceEncoder{typ, typ.Elem(), encoder}, nil return &sliceEncoder{typ, typ.Elem(), encoder}
} }
type sliceEncoder struct { type sliceEncoder struct {
@ -124,15 +118,14 @@ func growOne(slice *sliceHeader, sliceType reflect.Type, elementType reflect.Typ
} }
} }
} }
newVal := reflect.MakeSlice(sliceType, newLen, newCap) newVal := reflect.MakeSlice(sliceType, newLen, newCap).Interface()
dst := unsafe.Pointer(newVal.Pointer()) newValPtr := extractInterface(newVal).word
dst := (*sliceHeader)(newValPtr).Data
// copy old array into new array // copy old array into new array
originalBytesCount := uintptr(slice.Len) * elementType.Size() originalBytesCount := slice.Len * int(elementType.Size())
srcPtr := (*[1 << 30]byte)(slice.Data) srcSliceHeader := (unsafe.Pointer)(&sliceHeader{slice.Data, originalBytesCount, originalBytesCount})
dstPtr := (*[1 << 30]byte)(dst) dstSliceHeader := (unsafe.Pointer)(&sliceHeader{dst, originalBytesCount, originalBytesCount})
for i := uintptr(0); i < originalBytesCount; i++ { copy(*(*[]byte)(dstSliceHeader), *(*[]byte)(srcSliceHeader))
dstPtr[i] = srcPtr[i]
}
slice.Data = dst slice.Data = dst
slice.Len = newLen slice.Len = newLen
slice.Cap = newCap slice.Cap = newCap
@ -142,8 +135,9 @@ func reuseSlice(slice *sliceHeader, sliceType reflect.Type, expectedCap int) {
if expectedCap <= slice.Cap { if expectedCap <= slice.Cap {
return return
} }
newVal := reflect.MakeSlice(sliceType, 0, expectedCap) newVal := reflect.MakeSlice(sliceType, 0, expectedCap).Interface()
dst := unsafe.Pointer(newVal.Pointer()) newValPtr := extractInterface(newVal).word
dst := (*sliceHeader)(newValPtr).Data
slice.Data = dst slice.Data = dst
slice.Cap = expectedCap slice.Cap = expectedCap
} }
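The copy now goes through real []byte views built from slice headers instead of casting to a fake huge-array pointer, so the new backing array stays reachable through an unsafe.Pointer rather than a bare uintptr. A standalone sketch of that copy trick under the same assumptions (the sliceHeader layout below mirrors the one used in the patch; this is illustrative, not the library's API):

package main

import (
	"fmt"
	"unsafe"
)

// sliceHeader mirrors the runtime layout of a slice, as in the patch above.
type sliceHeader struct {
	Data unsafe.Pointer
	Len  int
	Cap  int
}

func main() {
	src := []byte("hello")
	dst := make([]byte, len(src))

	n := len(src)
	srcHdr := unsafe.Pointer(&sliceHeader{unsafe.Pointer(&src[0]), n, n})
	dstHdr := unsafe.Pointer(&sliceHeader{unsafe.Pointer(&dst[0]), n, n})

	// copy through ordinary []byte views instead of indexing a (*[1<<30]byte) pointer
	copy(*(*[]byte)(dstHdr), *(*[]byte)(srcHdr))
	fmt.Println(string(dst)) // hello
}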

View File

@ -8,22 +8,22 @@ import (
"unsafe" "unsafe"
) )
func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) (ValDecoder, error) { func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) ValDecoder {
knownHash := map[int32]struct{}{ knownHash := map[int32]struct{}{
0: {}, 0: {},
} }
switch len(fields) { switch len(fields) {
case 0: case 0:
return &skipObjectDecoder{typ}, nil return &skipObjectDecoder{typ}
case 1: case 1:
for fieldName, fieldDecoder := range fields { for fieldName, fieldDecoder := range fields {
fieldHash := calcHash(fieldName) fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash] _, known := knownHash[fieldHash]
if known { if known {
return &generalStructDecoder{typ, fields}, nil return &generalStructDecoder{typ, fields}
} }
knownHash[fieldHash] = struct{}{} knownHash[fieldHash] = struct{}{}
return &oneFieldStructDecoder{typ, fieldHash, fieldDecoder}, nil return &oneFieldStructDecoder{typ, fieldHash, fieldDecoder}
} }
case 2: case 2:
var fieldHash1 int32 var fieldHash1 int32
@ -34,7 +34,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName) fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash] _, known := knownHash[fieldHash]
if known { if known {
return &generalStructDecoder{typ, fields}, nil return &generalStructDecoder{typ, fields}
} }
knownHash[fieldHash] = struct{}{} knownHash[fieldHash] = struct{}{}
if fieldHash1 == 0 { if fieldHash1 == 0 {
@ -45,7 +45,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldDecoder2 = fieldDecoder fieldDecoder2 = fieldDecoder
} }
} }
return &twoFieldsStructDecoder{typ, fieldHash1, fieldDecoder1, fieldHash2, fieldDecoder2}, nil return &twoFieldsStructDecoder{typ, fieldHash1, fieldDecoder1, fieldHash2, fieldDecoder2}
case 3: case 3:
var fieldName1 int32 var fieldName1 int32
var fieldName2 int32 var fieldName2 int32
@ -57,7 +57,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
fieldHash := calcHash(fieldName) fieldHash := calcHash(fieldName)
_, known := knownHash[fieldHash] _, known := knownHash[fieldHash]
if known { if known {
return &generalStructDecoder{typ, fields}, nil return &generalStructDecoder{typ, fields}
} }
knownHash[fieldHash] = struct{}{} knownHash[fieldHash] = struct{}{}
if fieldName1 == 0 { if fieldName1 == 0 {
@ -72,7 +72,9 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
} }
} }
return &threeFieldsStructDecoder{typ, return &threeFieldsStructDecoder{typ,
fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3}, nil fieldName1, fieldDecoder1,
fieldName2, fieldDecoder2,
fieldName3, fieldDecoder3}
	case 4:
		var fieldName1 int32
		var fieldName2 int32
@ -86,7 +88,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
			fieldHash := calcHash(fieldName)
			_, known := knownHash[fieldHash]
			if known {
-				return &generalStructDecoder{typ, fields}, nil
+				return &generalStructDecoder{typ, fields}
			}
			knownHash[fieldHash] = struct{}{}
			if fieldName1 == 0 {
@ -104,8 +106,10 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
			}
		}
		return &fourFieldsStructDecoder{typ,
-			fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
-			fieldName4, fieldDecoder4}, nil
+			fieldName1, fieldDecoder1,
+			fieldName2, fieldDecoder2,
+			fieldName3, fieldDecoder3,
+			fieldName4, fieldDecoder4}
	case 5:
		var fieldName1 int32
		var fieldName2 int32
@ -121,7 +125,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
			fieldHash := calcHash(fieldName)
			_, known := knownHash[fieldHash]
			if known {
-				return &generalStructDecoder{typ, fields}, nil
+				return &generalStructDecoder{typ, fields}
			}
			knownHash[fieldHash] = struct{}{}
			if fieldName1 == 0 {
@ -142,8 +146,11 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
			}
		}
		return &fiveFieldsStructDecoder{typ,
-			fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
-			fieldName4, fieldDecoder4, fieldName5, fieldDecoder5}, nil
+			fieldName1, fieldDecoder1,
+			fieldName2, fieldDecoder2,
+			fieldName3, fieldDecoder3,
+			fieldName4, fieldDecoder4,
+			fieldName5, fieldDecoder5}
	case 6:
		var fieldName1 int32
		var fieldName2 int32
@ -161,7 +168,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
			fieldHash := calcHash(fieldName)
			_, known := knownHash[fieldHash]
			if known {
-				return &generalStructDecoder{typ, fields}, nil
+				return &generalStructDecoder{typ, fields}
			}
			knownHash[fieldHash] = struct{}{}
			if fieldName1 == 0 {
@ -185,8 +192,12 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
			}
		}
		return &sixFieldsStructDecoder{typ,
-			fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
-			fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6}, nil
+			fieldName1, fieldDecoder1,
+			fieldName2, fieldDecoder2,
+			fieldName3, fieldDecoder3,
+			fieldName4, fieldDecoder4,
+			fieldName5, fieldDecoder5,
+			fieldName6, fieldDecoder6}
	case 7:
		var fieldName1 int32
		var fieldName2 int32
@ -206,7 +217,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
			fieldHash := calcHash(fieldName)
			_, known := knownHash[fieldHash]
			if known {
-				return &generalStructDecoder{typ, fields}, nil
+				return &generalStructDecoder{typ, fields}
			}
			knownHash[fieldHash] = struct{}{}
			if fieldName1 == 0 {
@ -233,9 +244,13 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
			}
		}
		return &sevenFieldsStructDecoder{typ,
-			fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
-			fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
-			fieldName7, fieldDecoder7}, nil
+			fieldName1, fieldDecoder1,
+			fieldName2, fieldDecoder2,
+			fieldName3, fieldDecoder3,
+			fieldName4, fieldDecoder4,
+			fieldName5, fieldDecoder5,
+			fieldName6, fieldDecoder6,
+			fieldName7, fieldDecoder7}
	case 8:
		var fieldName1 int32
		var fieldName2 int32
@ -257,7 +272,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
			fieldHash := calcHash(fieldName)
			_, known := knownHash[fieldHash]
			if known {
-				return &generalStructDecoder{typ, fields}, nil
+				return &generalStructDecoder{typ, fields}
			}
			knownHash[fieldHash] = struct{}{}
			if fieldName1 == 0 {
@ -287,9 +302,14 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
			}
		}
		return &eightFieldsStructDecoder{typ,
-			fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
-			fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
-			fieldName7, fieldDecoder7, fieldName8, fieldDecoder8}, nil
+			fieldName1, fieldDecoder1,
+			fieldName2, fieldDecoder2,
+			fieldName3, fieldDecoder3,
+			fieldName4, fieldDecoder4,
+			fieldName5, fieldDecoder5,
+			fieldName6, fieldDecoder6,
+			fieldName7, fieldDecoder7,
+			fieldName8, fieldDecoder8}
	case 9:
		var fieldName1 int32
		var fieldName2 int32
@ -313,7 +333,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
			fieldHash := calcHash(fieldName)
			_, known := knownHash[fieldHash]
			if known {
-				return &generalStructDecoder{typ, fields}, nil
+				return &generalStructDecoder{typ, fields}
			}
			knownHash[fieldHash] = struct{}{}
			if fieldName1 == 0 {
@ -346,9 +366,15 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
			}
		}
		return &nineFieldsStructDecoder{typ,
-			fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
-			fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
-			fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9}, nil
+			fieldName1, fieldDecoder1,
+			fieldName2, fieldDecoder2,
+			fieldName3, fieldDecoder3,
+			fieldName4, fieldDecoder4,
+			fieldName5, fieldDecoder5,
+			fieldName6, fieldDecoder6,
+			fieldName7, fieldDecoder7,
+			fieldName8, fieldDecoder8,
+			fieldName9, fieldDecoder9}
	case 10:
		var fieldName1 int32
		var fieldName2 int32
@ -374,7 +400,7 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
			fieldHash := calcHash(fieldName)
			_, known := knownHash[fieldHash]
			if known {
-				return &generalStructDecoder{typ, fields}, nil
+				return &generalStructDecoder{typ, fields}
			}
			knownHash[fieldHash] = struct{}{}
			if fieldName1 == 0 {
@ -410,12 +436,18 @@ func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder
			}
		}
		return &tenFieldsStructDecoder{typ,
-			fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
-			fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
-			fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9,
-			fieldName10, fieldDecoder10}, nil
+			fieldName1, fieldDecoder1,
+			fieldName2, fieldDecoder2,
+			fieldName3, fieldDecoder3,
+			fieldName4, fieldDecoder4,
+			fieldName5, fieldDecoder5,
+			fieldName6, fieldDecoder6,
+			fieldName7, fieldDecoder7,
+			fieldName8, fieldDecoder8,
+			fieldName9, fieldDecoder9,
+			fieldName10, fieldDecoder10}
	}
-	return &generalStructDecoder{typ, fields}, nil
+	return &generalStructDecoder{typ, fields}
}
type generalStructDecoder struct {
@ -427,8 +459,18 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
	if !iter.readObjectStart() {
		return
	}
-	fieldBytes := iter.readObjectFieldAsBytes()
-	field := *(*string)(unsafe.Pointer(&fieldBytes))
+	var fieldBytes []byte
+	var field string
+	if iter.cfg.objectFieldMustBeSimpleString {
+		fieldBytes = iter.readObjectFieldAsBytes()
+		field = *(*string)(unsafe.Pointer(&fieldBytes))
+	} else {
+		field = iter.ReadString()
+		c := iter.nextToken()
+		if c != ':' {
+			iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+		}
+	}
	fieldDecoder := decoder.fields[strings.ToLower(field)]
	if fieldDecoder == nil {
		iter.Skip()
@ -436,8 +478,16 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
		fieldDecoder.Decode(ptr, iter)
	}
	for iter.nextToken() == ',' {
-		fieldBytes = iter.readObjectFieldAsBytes()
-		field = *(*string)(unsafe.Pointer(&fieldBytes))
+		if iter.cfg.objectFieldMustBeSimpleString {
+			fieldBytes := iter.readObjectFieldAsBytes()
+			field = *(*string)(unsafe.Pointer(&fieldBytes))
+		} else {
+			field = iter.ReadString()
+			c := iter.nextToken()
+			if c != ':' {
+				iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+			}
+		}
		fieldDecoder = decoder.fields[strings.ToLower(field)]
		if fieldDecoder == nil {
			iter.Skip()
@ -446,7 +496,7 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
-		iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
}
@ -484,7 +534,7 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
-		iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
}
@ -514,7 +564,7 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
-		iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
}
@ -548,7 +598,7 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
-		iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
}
@ -586,7 +636,7 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
-		iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
}
@ -628,7 +678,7 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
-		iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
}
@ -674,7 +724,7 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
-		iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
}
@ -724,7 +774,7 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
-		iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
}
@ -778,7 +828,7 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
-		iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
}
@ -836,7 +886,7 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
-		iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
}
@ -898,7 +948,7 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
-		iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+		iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
	}
}
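The generalStructDecoder change above makes field-name decoding depend on the config: the fast simple-string path reads the raw bytes, while the default path goes through ReadString and therefore resolves escapes in field names. A brief sketch of that trade-off; the fast-path outcome here is inferred from the patch (ObjectFieldMustBeSimpleString is the public option behind iter.cfg.objectFieldMustBeSimpleString, and ConfigFastest enables it):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type T struct {
	Field1 string
}

func main() {
	input := []byte(`{"\u0046ield1":"hello"}`) // "Field1" with an escaped F

	var a T
	_ = jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal(input, &a)
	fmt.Println(a.Field1) // hello: escaped field names are resolved on the default path

	var b T
	_ = jsoniter.ConfigFastest.Unmarshal(input, &b)
	fmt.Println(b.Field1) // "": the fast path matches the raw bytes, which do not equal Field1
}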

View File

@ -4,15 +4,16 @@ import (
"io" "io"
) )
// Stream is a io.Writer like object, with JSON specific write functions. // stream is a io.Writer like object, with JSON specific write functions.
// Error is not returned as return value, but stored as Error member on this stream instance. // Error is not returned as return value, but stored as Error member on this stream instance.
type Stream struct { type Stream struct {
cfg *frozenConfig cfg *frozenConfig
out io.Writer out io.Writer
buf []byte buf []byte
n int n int
Error error Error error
indention int indention int
Attachment interface{} // open for customized encoder
} }
// NewStream create new stream instance. // NewStream create new stream instance.
@ -191,6 +192,9 @@ func (stream *Stream) ensure(minimal int) {
func (stream *Stream) growAtLeast(minimal int) { func (stream *Stream) growAtLeast(minimal int) {
if stream.out != nil { if stream.out != nil {
stream.Flush() stream.Flush()
if stream.Available() >= minimal {
return
}
} }
toGrow := len(stream.buf) toGrow := len(stream.buf)
if toGrow < minimal { if toGrow < minimal {
@ -280,8 +284,7 @@ func (stream *Stream) WriteArrayStart() {
// WriteEmptyArray write [] // WriteEmptyArray write []
func (stream *Stream) WriteEmptyArray() { func (stream *Stream) WriteEmptyArray() {
stream.writeByte('[') stream.writeTwoBytes('[', ']')
stream.writeByte(']')
} }
// WriteArrayEnd write ] with possible indention // WriteArrayEnd write ] with possible indention
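The growAtLeast change flushes to the underlying writer first and only grows the buffer when flushing did not free enough room, so streaming a large value through an io.Writer keeps the buffer at its initial size. A sketch of that effect, mirroring the flush-buffer test added further down (capWriter is a hypothetical helper that just records the largest buffer capacity it is handed):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type capWriter struct{ maxCap int }

func (w *capWriter) Write(p []byte) (int, error) {
	if cap(p) > w.maxCap {
		w.maxCap = cap(p)
	}
	return len(p), nil
}

func main() {
	w := &capWriter{}
	_ = jsoniter.NewEncoder(w).Encode(make([]int, 1000000))
	fmt.Println(w.maxCap) // stays at the encoder's initial buffer size (512 in the test below)
}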

View File

@ -22,7 +22,7 @@ func Test_new_encoder(t *testing.T) {
	encoder2 := NewEncoder(buf2)
	encoder2.SetEscapeHTML(false)
	encoder2.Encode([]int{1})
-	should.Equal("[1]", buf2.String())
+	should.Equal("[1]\n", buf2.String())
}
func Test_string_encode_with_std_without_html_escape(t *testing.T) {

View File

@ -51,6 +51,7 @@ func Test_read_string_as_any(t *testing.T) {
func Test_wrap_string(t *testing.T) {
	should := require.New(t)
-	any := WrapString("123")
-	should.Equal(123, any.ToInt())
+	any := Get([]byte("-32000")).MustBeValid()
+	should.Equal(-32000, any.ToInt())
+	should.NoError(any.LastError())
}

View File

@ -2,11 +2,12 @@ package jsoniter
import (
	"encoding/json"
-	"github.com/stretchr/testify/require"
	"strconv"
	"testing"
	"time"
	"unsafe"
+	"github.com/stretchr/testify/require"
)
func Test_customize_type_decoder(t *testing.T) {
@ -82,7 +83,7 @@ func Test_customize_field_decoder(t *testing.T) {
}
type TestObject1 struct {
-	field1 string
+	Field1 string
}
type testExtension struct {
@ -93,7 +94,7 @@ func (extension *testExtension) UpdateStructDescriptor(structDescriptor *StructD
	if structDescriptor.Type.String() != "jsoniter.TestObject1" {
		return
	}
-	binding := structDescriptor.GetField("field1")
+	binding := structDescriptor.GetField("Field1")
	binding.Encoder = &funcEncoder{fun: func(ptr unsafe.Pointer, stream *Stream) {
		str := *((*string)(ptr))
		val, _ := strconv.Atoi(str)
@ -108,12 +109,13 @@ func (extension *testExtension) UpdateStructDescriptor(structDescriptor *StructD
func Test_customize_field_by_extension(t *testing.T) {
	should := require.New(t)
-	RegisterExtension(&testExtension{})
+	cfg := Config{}.Froze()
+	cfg.RegisterExtension(&testExtension{})
	obj := TestObject1{}
-	err := UnmarshalFromString(`{"field-1": 100}`, &obj)
+	err := cfg.UnmarshalFromString(`{"field-1": 100}`, &obj)
	should.Nil(err)
-	should.Equal("100", obj.field1)
-	str, err := MarshalToString(obj)
+	should.Equal("100", obj.Field1)
+	str, err := cfg.MarshalToString(obj)
	should.Nil(err)
	should.Equal(`{"field-1":100}`, str)
}

View File

@ -15,6 +15,15 @@ func Test_encode_fixed_array(t *testing.T) {
should.Equal("[0.1,1]", output) should.Equal("[0.1,1]", output)
} }
func Test_encode_fixed_array_empty(t *testing.T) {
should := require.New(t)
type FixedArray [0]float64
fixed := FixedArray{}
output, err := MarshalToString(fixed)
should.Nil(err)
should.Equal("[]", output)
}
func Test_encode_fixed_array_of_map(t *testing.T) {
	should := require.New(t)
	type FixedArray [2]map[string]string

View File

@ -192,6 +192,13 @@ func Test_lossy_float_marshal(t *testing.T) {
should.Equal("0.123457", output) should.Equal("0.123457", output)
} }
func Test_read_number(t *testing.T) {
should := require.New(t)
iter := ParseString(ConfigDefault, `92233720368547758079223372036854775807`)
val := iter.ReadNumber()
should.Equal(`92233720368547758079223372036854775807`, string(val))
}
func Benchmark_jsoniter_float(b *testing.B) {
	b.ReportAllocs()
	input := []byte(`1.1123,`)

View File

@ -506,6 +506,42 @@ func Test_jsoniter_number(t *testing.T) {
should.Equal("1", str) should.Equal("1", str)
} }
func Test_non_numeric_as_number(t *testing.T) {
should := require.New(t)
var v1 json.Number
err := Unmarshal([]byte(`"500"`), &v1)
should.Nil(err)
should.Equal("500", string(v1))
var v2 Number
err = Unmarshal([]byte(`"500"`), &v2)
should.Nil(err)
should.Equal("500", string(v2))
}
func Test_null_as_number(t *testing.T) {
should := require.New(t)
var v1 json.Number
err := json.Unmarshal([]byte(`null`), &v1)
should.Nil(err)
should.Equal("", string(v1))
output, err := json.Marshal(v1)
should.NoError(err)
should.Equal("0", string(output))
var v2 Number
err = Unmarshal([]byte(`null`), &v2)
should.Nil(err)
should.Equal("", string(v2))
output, err = Marshal(v2)
should.NoError(err)
should.Equal("0", string(output))
}
func Test_float_as_int(t *testing.T) {
should := require.New(t)
var i int
should.NotNil(Unmarshal([]byte(`1.1`), &i))
}
func Benchmark_jsoniter_encode_int(b *testing.B) {
	stream := NewStream(ConfigDefault, ioutil.Discard, 64)
	for n := 0; n < b.N; n++ {

View File

@ -7,8 +7,24 @@ import (
"unsafe" "unsafe"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"reflect"
) )
func Test_write_empty_interface_via_placeholder(t *testing.T) {
fmt.Println(^uint(0) >> 1)
should := require.New(t)
m := map[uint32]interface{}{1: "hello"}
inf := reflect.ValueOf(m).MapIndex(reflect.ValueOf(uint32(1))).Interface()
encoder := &placeholderEncoder{
cfg: ConfigFastest.(*frozenConfig),
cacheKey: reflect.TypeOf(m).Elem(),
}
stream := ConfigFastest.BorrowStream(nil)
encoderOfType(ConfigFastest.(*frozenConfig), "", reflect.TypeOf(m).Elem())
encoder.EncodeInterface(inf, stream)
should.Equal(`"hello"`, string(stream.Buffer()))
}
func Test_write_array_of_interface(t *testing.T) {
	should := require.New(t)
	array := []interface{}{"hello"}
@ -392,7 +408,7 @@ func Test_omitempty_nil_nonempty_interface(t *testing.T) {
obj.Field = MyString("hello") obj.Field = MyString("hello")
err = UnmarshalFromString(`{"field":null}`, &obj) err = UnmarshalFromString(`{"field":null}`, &obj)
should.NoError(err) should.NoError(err)
should.Equal(nil, obj.Field) should.Nil(obj.Field)
} }
func Test_marshal_nil_marshaler_interface(t *testing.T) { func Test_marshal_nil_marshaler_interface(t *testing.T) {

View File

@ -3,6 +3,7 @@ package jsoniter
import (
	"bytes"
	"encoding/json"
+	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"io"
	"testing"
@ -130,3 +131,70 @@ func Test_invalid_number(t *testing.T) {
	should.Nil(err)
	should.Equal(string(result2), string(result))
}
func Test_valid(t *testing.T) {
should := require.New(t)
should.True(Valid([]byte(`{}`)))
should.False(Valid([]byte(`{`)))
}
func Test_nil_pointer(t *testing.T) {
should := require.New(t)
data := []byte(`{"A":0}`)
type T struct {
X int
}
var obj *T
err := Unmarshal(data, obj)
should.NotNil(err)
}
func Test_func_pointer_type(t *testing.T) {
type TestObject2 struct {
F func()
}
type TestObject1 struct {
Obj *TestObject2
}
t.Run("encode null is valid", func(t *testing.T) {
should := require.New(t)
output, err := json.Marshal(TestObject1{})
should.Nil(err)
should.Equal(`{"Obj":null}`, string(output))
output, err = Marshal(TestObject1{})
should.Nil(err)
should.Equal(`{"Obj":null}`, string(output))
})
t.Run("encode not null is invalid", func(t *testing.T) {
should := require.New(t)
_, err := json.Marshal(TestObject1{Obj: &TestObject2{}})
should.NotNil(err)
_, err = Marshal(TestObject1{Obj: &TestObject2{}})
should.NotNil(err)
})
t.Run("decode null is valid", func(t *testing.T) {
should := require.New(t)
var obj TestObject1
should.Nil(json.Unmarshal([]byte(`{"Obj":{"F": null}}`), &obj))
should.Nil(Unmarshal([]byte(`{"Obj":{"F": null}}`), &obj))
})
t.Run("decode not null is invalid", func(t *testing.T) {
should := require.New(t)
var obj TestObject1
should.NotNil(json.Unmarshal([]byte(`{"Obj":{"F": "hello"}}`), &obj))
should.NotNil(Unmarshal([]byte(`{"Obj":{"F": "hello"}}`), &obj))
})
}
func TestEOF(t *testing.T) {
var s string
err := ConfigCompatibleWithStandardLibrary.NewDecoder(&bytes.Buffer{}).Decode(&s)
assert.Equal(t, io.EOF, err)
}
func TestDecodeErrorType(t *testing.T) {
should := require.New(t)
var err error
should.Nil(Unmarshal([]byte("null"), &err))
should.NotNil(Unmarshal([]byte("123"), &err))
}

View File

@ -158,6 +158,28 @@ func Test_ignore_field_on_not_valid_type(t *testing.T) {
should.Equal(`{"field-1":"hello world"}`, str) should.Equal(`{"field-1":"hello world"}`, str)
} }
func Test_nested_field_omit_empty(t *testing.T) {
should := require.New(t)
type S1 struct {
F1 string `json:",omitempty"`
}
type S2 struct {
*S1
F2 string `json:",omitempty"`
}
s1 := &S1{
//F1: "abc",
}
s2 := &S2{
S1: s1,
F2: "123",
}
str, err := MarshalToString(s2)
should.Nil(err)
should.Equal(`{"F2":"123"}`, str)
}
func Test_recursive_struct(t *testing.T) {
	should := require.New(t)
	type TestObject struct {
@ -328,3 +350,15 @@ func Test_decode_nested(t *testing.T) {
		t.Fatal(slice[2])
	}
}
func Test_decode_field_with_escape(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
}
var obj TestObject
should.Nil(ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(`{"Field\"1":"hello"}`), &obj))
should.Equal("", obj.Field1)
should.Nil(ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(`{"\u0046ield1":"hello"}`), &obj))
should.Equal("hello", obj.Field1)
}

View File

@ -3,6 +3,7 @@ package jsoniter
import (
	"encoding/json"
	"github.com/stretchr/testify/require"
+	"strings"
	"testing"
)
@ -86,3 +87,28 @@ func Test_marshal_invalid_json_raw_message(t *testing.T) {
	should.Equal(`{"raw":null}`, string(aout))
	should.Nil(aouterr)
}
func Test_raw_message_memory_not_copied_issue(t *testing.T) {
jsonStream := `{"name":"xxxxx","bundle_id":"com.zonst.majiang","app_platform":"ios","app_category":"100103", "budget_day":1000,"bidding_min":1,"bidding_max":2,"bidding_type":"CPM", "freq":{"open":true,"type":"day","num":100},"speed":1, "targeting":{"vendor":{"open":true,"list":["zonst"]}, "geo_code":{"open":true,"list":["156110100"]},"app_category":{"open":true,"list":["100101"]}, "day_parting":{"open":true,"list":["100409","100410"]},"device_type":{"open":true,"list":["ipad"]}, "os_version":{"open":true,"list":[10]},"carrier":{"open":true,"list":["mobile"]}, "network":{"open":true,"list":["4G"]}},"url":{"tracking_imp_url":"http://www.baidu.com", "tracking_clk_url":"http://www.baidu.com","jump_url":"http://www.baidu.com","deep_link_url":"http://www.baidu.com"}}`
type IteratorObject struct {
Name *string `json:"name"`
BundleId *string `json:"bundle_id"`
AppCategory *string `json:"app_category"`
AppPlatform *string `json:"app_platform"`
BudgetDay *float32 `json:"budget_day"`
BiddingMax *float32 `json:"bidding_max"`
BiddingMin *float32 `json:"bidding_min"`
BiddingType *string `json:"bidding_type"`
Freq *RawMessage `json:"freq"`
Targeting *RawMessage `json:"targeting"`
Url *RawMessage `json:"url"`
Speed *int `json:"speed" db:"speed"`
}
obj := &IteratorObject{}
decoder := NewDecoder(strings.NewReader(jsonStream))
err := decoder.Decode(obj)
should := require.New(t)
should.Nil(err)
should.Equal(`{"open":true,"type":"day","num":100}`, string(*obj.Freq))
}

View File

@ -104,6 +104,11 @@ func Test_skip_and_return_bytes_with_reader(t *testing.T) {
should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(skipped)) should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(skipped))
} }
func Test_skip_empty(t *testing.T) {
should := require.New(t)
should.NotNil(Get([]byte("")).LastError())
}
type TestResp struct {
	Code uint64
}

View File

@ -51,3 +51,19 @@ func Test_writeString_should_grow_buffer(t *testing.T) {
	should.Nil(stream.Error)
	should.Equal(`"123"`, string(stream.Buffer()))
}
type NopWriter struct {
bufferSize int
}
func (w *NopWriter) Write(p []byte) (n int, err error) {
w.bufferSize = cap(p)
return len(p), nil
}
func Test_flush_buffer_should_stop_grow_buffer(t *testing.T) {
writer := new(NopWriter)
NewEncoder(writer).Encode(make([]int, 10000000))
should := require.New(t)
should.Equal(512, writer.bufferSize)
}

View File

@ -0,0 +1,52 @@
package jsoniter
import (
"encoding/json"
"testing"
"time"
"github.com/stretchr/testify/require"
)
func Test_encode_unexported_field(t *testing.T) {
type TestData struct {
a int
b <-chan int
C int
d *time.Timer
}
should := require.New(t)
testChan := make(<-chan int, 10)
testTimer := time.NewTimer(10 * time.Second)
obj := &TestData{
a: 42,
b: testChan,
C: 21,
d: testTimer,
}
jb, err := json.Marshal(obj)
should.NoError(err)
should.Equal([]byte(`{"C":21}`), jb)
err = json.Unmarshal([]byte(`{"a": 444, "b":"bad", "C":55, "d":{"not": "a timer"}}`), obj)
should.NoError(err)
should.Equal(42, obj.a)
should.Equal(testChan, obj.b)
should.Equal(55, obj.C)
should.Equal(testTimer, obj.d)
jb, err = Marshal(obj)
should.NoError(err)
should.Equal(jb, []byte(`{"C":55}`))
err = Unmarshal([]byte(`{"a": 444, "b":"bad", "C":256, "d":{"not":"a timer"}}`), obj)
should.NoError(err)
should.Equal(42, obj.a)
should.Equal(testChan, obj.b)
should.Equal(256, obj.C)
should.Equal(testTimer, obj.d)
}

View File

@ -1 +0,0 @@
../number/skip_test.go

View File

@ -0,0 +1,34 @@
package test
import (
"encoding/json"
"errors"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"io"
"testing"
)
func Test_skip(t *testing.T) {
for _, input := range inputs {
t.Run(input, func(t *testing.T) {
should := require.New(t)
var dst typeForTest
stdErr := json.Unmarshal([]byte(input), &dst)
iter := jsoniter.ParseString(jsoniter.ConfigDefault, input)
iter.Skip()
iter.ReadNil() // trigger looking forward
err := iter.Error
if err == io.EOF {
err = nil
} else {
err = errors.New("remaining bytes")
}
if stdErr == nil {
should.Nil(err)
} else {
should.NotNil(err)
}
})
}
}

View File

@ -1 +0,0 @@
../number/skip_test.go

View File

@ -0,0 +1,34 @@
package test
import (
"encoding/json"
"errors"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"io"
"testing"
)
func Test_skip(t *testing.T) {
for _, input := range inputs {
t.Run(input, func(t *testing.T) {
should := require.New(t)
var dst typeForTest
stdErr := json.Unmarshal([]byte(input), &dst)
iter := jsoniter.ParseString(jsoniter.ConfigDefault, input)
iter.Skip()
iter.ReadNil() // trigger looking forward
err := iter.Error
if err == io.EOF {
err = nil
} else {
err = errors.New("remaining bytes")
}
if stdErr == nil {
should.Nil(err)
} else {
should.NotNil(err)
}
})
}
}

View File

@ -1 +0,0 @@
../number/skip_test.go

View File

@ -0,0 +1,34 @@
package test
import (
"encoding/json"
"errors"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"io"
"testing"
)
func Test_skip(t *testing.T) {
for _, input := range inputs {
t.Run(input, func(t *testing.T) {
should := require.New(t)
var dst typeForTest
stdErr := json.Unmarshal([]byte(input), &dst)
iter := jsoniter.ParseString(jsoniter.ConfigDefault, input)
iter.Skip()
iter.ReadNil() // trigger looking forward
err := iter.Error
if err == io.EOF {
err = nil
} else {
err = errors.New("remaining bytes")
}
if stdErr == nil {
should.Nil(err)
} else {
should.NotNil(err)
}
})
}
}