mirror of https://github.com/json-iterator/go.git synced 2025-06-15 22:50:24 +02:00

27 Commits
1.0.2 ... 1.0.4

Author SHA1 Message Date
f7279a603e fix out of range 2017-11-15 23:34:21 +08:00
9f088cbcc4 fix #195 when decode float as int, report it clearly 2017-11-15 23:25:12 +08:00
3c0e5762c4 fix #196 do not hard code 1 << 49 2017-11-15 23:15:31 +08:00
d394a135a1 #197 fix place holder encoder to use EncodeInterface, WriteToStream is unsafe when the real encoder is unknown 2017-11-15 22:56:23 +08:00
9fddff05f0 try to fix #194 with larger array 2017-11-11 08:31:44 +08:00
b1b003864e expose OptionalEncoder&OptionalDecoder; add attachment to Stream&Iterator for customized decoder/encoder 2017-11-08 11:41:45 +08:00
aed5a81f09 fix #190 handle empty input 2017-10-31 22:47:02 +08:00
f1258b01aa fix #191 do not always assume the object field is simple string 2017-10-31 22:38:41 +08:00
fbd210edfc Merge pull request #189 from ggaaooppeenngg/compatible-with-map
Fix standard compatiblility
2017-10-26 18:39:38 -05:00
640251ab91 Fix standard compatiblility
Non-nil but empty map with omitempty should be ignored.

Signed-off-by: Peng Gao <peng.gao.dut@gmail.com>
2017-10-27 01:43:41 +08:00
06b2a7cf1d Merge pull request #188 from ggaaooppeenngg/compatible
Fix standard compatiblility
2017-10-26 06:41:01 -05:00
5fffb9b8f7 Fix standard compatiblility
Encode has trailing newline at the end.

Signed-off-by: Peng Gao <peng.gao.dut@gmail.com>
2017-10-26 15:15:36 +08:00
7e3b776024 change jsoniter-sloppy to jsoniter_sloppy 2017-10-23 15:03:44 +08:00
6240e1e798 #185 add jsoniter.Valid 2017-10-10 08:57:02 +08:00
0149a5cf4a fix #183 error message not only show expectation, but also the actual value 2017-10-09 08:24:51 +08:00
5068c8baaf #183 limit error message size 2017-10-09 08:16:52 +08:00
16f78601b5 fix #184, support null as number 2017-10-07 09:29:32 +08:00
8f50a91be2 fix #181, support string as json.Number and jsoniter.Number 2017-10-06 18:08:14 +08:00
73c7bc881e fix #180, add missing methods to jsoniter.Number 2017-10-06 17:56:36 +08:00
4de15a3a87 Merge pull request #182 from MOZGIII/patch-1
Used writeTwoBytes in Stream.WriteEmptyArray
2017-10-04 02:25:32 -05:00
14b28b2226 Used writeTwoBytes in Stream.WriteEmptyArray 2017-10-03 22:14:36 +03:00
abe3c4016b fix #179 2017-09-26 15:35:55 +08:00
dbb1ef3f63 #177 flush buffer should check available again 2017-09-21 21:04:45 +08:00
46b20bbbec #178 SkipAndReturnBytes should return copy of memory 2017-09-21 20:18:45 +08:00
fdfe0b9a69 Merge branch 'olegshaldybin-skip-unexported-fields' 2017-09-19 10:06:59 +08:00
faa3dcf46a do not report error when field is unexported 2017-09-19 10:06:34 +08:00
1f58120d43 Always skip unexported fields when encoding
Skip creating encoders for unexported fields. They are not participating
in JSON marshaling anyway. This allows using unexported fields of
non-marshalable types in structs.

As a side-effect of this change it's no longer possible to marshal
unexported JSON fields by adding a custom type extenstion. It seems this
is desired behavior since it matches standard library and jsoniter
already disallows `json:"-"` fields from participating in custom
extensions.

Fixes #174.
2017-09-18 11:02:15 -07:00
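
For illustration, a minimal sketch of the behavior this commit describes (the struct here is made up; the full regression test appears at the end of the change set):

```go
package main

import (
	"fmt"
	"time"

	jsoniter "github.com/json-iterator/go"
)

type payload struct {
	Visible int         // exported: marshaled as before
	hidden  *time.Timer // unexported, non-marshalable: skipped, no encoder is built for it
}

func main() {
	out, err := jsoniter.Marshal(payload{Visible: 1})
	fmt.Println(string(out), err) // {"Visible":1} <nil>
}
```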
33 changed files with 501 additions and 117 deletions

1
.gitignore vendored
View File

@ -1,3 +1,4 @@
.idea
/coverage.txt
/profile.out
+/bug_test.go

View File

@ -44,7 +44,9 @@ with
```go
import "github.com/json-iterator/go"

-jsoniter.Marshal(&data)
+var json = jsoniter.ConfigCompatibleWithStandardLibrary
+json.Marshal(&data)
```

Replace

@ -58,7 +60,9 @@ with

```go
import "github.com/json-iterator/go"

-jsoniter.Unmarshal(input, &data)
+var json = jsoniter.ConfigCompatibleWithStandardLibrary
+json.Unmarshal(input, &data)
```

[More documentation](http://jsoniter.com/migrate-from-go-std.html)
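
For context, the migration described above boils down to freezing the compatible API once and using it like encoding/json; a minimal, self-contained sketch (type and field names are illustrative):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// json is a drop-in replacement for the standard encoding/json entry points.
var json = jsoniter.ConfigCompatibleWithStandardLibrary

func main() {
	type User struct {
		Name string `json:"name"`
	}
	out, err := json.Marshal(User{Name: "gopher"})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"name":"gopher"}

	var u User
	if err := json.Unmarshal(out, &u); err != nil {
		panic(err)
	}
	fmt.Println(u.Name) // gopher
}
```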

40
compatible_test.go Normal file
View File

@ -0,0 +1,40 @@
package jsoniter
import (
"bytes"
"encoding/json"
"testing"
"github.com/stretchr/testify/require"
)
// Standard Encoder has trailing newline.
func TestEncoderHasTrailingNewline(t *testing.T) {
should := require.New(t)
var buf, stdbuf bytes.Buffer
enc := ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
enc.Encode(1)
stdenc := json.NewEncoder(&stdbuf)
stdenc.Encode(1)
should.Equal(stdbuf.Bytes(), buf.Bytes())
}
// Non-nil but empty map should be ignored.
func TestOmitempty(t *testing.T) {
o := struct {
A string `json:"a,omitempty"`
B string `json:"b,omitempty"`
Annotations map[string]string `json:"annotations,omitempty"`
}{
A: "a",
B: "b",
Annotations: map[string]string{},
}
should := require.New(t)
var buf, stdbuf bytes.Buffer
enc := ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
enc.Encode(o)
stdenc := json.NewEncoder(&stdbuf)
stdenc.Encode(o)
should.Equal(string(stdbuf.Bytes()), string(buf.Bytes()))
}

View File

@ -110,6 +110,7 @@ type Encoder struct {
// Encode encode interface{} as JSON to io.Writer
func (adapter *Encoder) Encode(val interface{}) error {
	adapter.stream.WriteVal(val)
+	adapter.stream.WriteRaw("\n")
	adapter.stream.Flush()
	return adapter.stream.Error
}

@ -125,3 +126,8 @@ func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
	config.EscapeHTML = escapeHTML
	adapter.stream.cfg = config.Froze().(*frozenConfig)
}

+// Valid reports whether data is a valid JSON encoding.
+func Valid(data []byte) bool {
+	return ConfigDefault.Valid(data)
+}
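
The new package-level helper mirrors the standard library's json.Valid; a quick sketch of its use (inputs taken from the test added later in this change set):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	fmt.Println(jsoniter.Valid([]byte(`{}`))) // true
	fmt.Println(jsoniter.Valid([]byte(`{`)))  // false: truncated object
}
```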

View File

@ -1,6 +1,7 @@
package jsoniter

import (
+	"errors"
	"fmt"
	"io"
	"reflect"

@ -157,6 +158,8 @@ func (iter *Iterator) readAny() Any {
		return iter.readArrayAny()
	case '-':
		return iter.readNumberAny(false)
+	case 0:
+		return &invalidAny{baseAny{}, errors.New("input is empty")}
	default:
		return iter.readNumberAny(true)
	}

View File

@ -19,12 +19,14 @@ type Config struct {
	UseNumber              bool
	TagKey                 string
	ValidateJsonRawMessage bool
+	ObjectFieldMustBeSimpleString bool
}

type frozenConfig struct {
	configBeforeFrozen Config
	sortMapKeys        bool
	indentionStep      int
+	objectFieldMustBeSimpleString bool
	decoderCache       unsafe.Pointer
	encoderCache       unsafe.Pointer
	extensions         []Extension

@ -45,6 +47,7 @@ type API interface {
	Get(data []byte, path ...interface{}) Any
	NewEncoder(writer io.Writer) *Encoder
	NewDecoder(reader io.Reader) *Decoder
+	Valid(data []byte) bool
}

// ConfigDefault the default API

@ -62,7 +65,8 @@ var ConfigCompatibleWithStandardLibrary = Config{

// ConfigFastest marshals float with only 6 digits precision
var ConfigFastest = Config{
	EscapeHTML:              false,
-	MarshalFloatWith6Digits: true,
+	MarshalFloatWith6Digits:       true, // will lose precession
+	ObjectFieldMustBeSimpleString: true, // do not unescape object field
}.Froze()

// Froze forge API from config

@ -71,6 +75,7 @@ func (cfg Config) Froze() API {
	frozenConfig := &frozenConfig{
		sortMapKeys:   cfg.SortMapKeys,
		indentionStep: cfg.IndentionStep,
+		objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
		streamPool:    make(chan *Stream, 16),
		iteratorPool:  make(chan *Iterator, 16),
	}

@ -333,3 +338,10 @@ func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder {
	iter := Parse(cfg, reader, 512)
	return &Decoder{iter}
}

+func (cfg *frozenConfig) Valid(data []byte) bool {
+	iter := cfg.BorrowIterator(data)
+	defer cfg.ReturnIterator(iter)
+	iter.Skip()
+	return iter.Error == nil
+}
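
Since the new flag is threaded through Config.Froze(), opting in only requires freezing a custom API; a brief sketch under that assumption (the exact option mix shown here is illustrative):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Freeze a custom API that assumes plain (non-escaped) object keys,
	// trading strictness for speed, as ConfigFastest now does.
	api := jsoniter.Config{
		EscapeHTML:                    false,
		ObjectFieldMustBeSimpleString: true,
	}.Froze()

	var m map[string]int
	if err := api.Unmarshal([]byte(`{"a":1}`), &m); err != nil {
		panic(err)
	}
	fmt.Println(m["a"]) // 1
}
```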

View File

@ -77,6 +77,7 @@ type Iterator struct {
	captureStartedAt int
	captured         []byte
	Error            error
+	Attachment       interface{} // open for customized decoder
}

// NewIterator creates an empty Iterator instance

@ -167,7 +168,7 @@ func (iter *Iterator) isObjectEnd() bool {
	if c == '}' {
		return true
	}
-	iter.ReportError("isObjectEnd", "object ended prematurely")
+	iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c}))
	return true
}

@ -200,8 +201,22 @@ func (iter *Iterator) ReportError(operation string, msg string) {
	if peekStart < 0 {
		peekStart = 0
	}
-	iter.Error = fmt.Errorf("%s: %s, parsing %v ...%s... at %s", operation, msg, iter.head,
-		string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
+	peekEnd := iter.head + 10
+	if peekEnd > iter.tail {
+		peekEnd = iter.tail
+	}
+	parsing := string(iter.buf[peekStart:peekEnd])
+	contextStart := iter.head - 50
+	if contextStart < 0 {
+		contextStart = 0
+	}
+	contextEnd := iter.head + 50
+	if contextEnd > iter.tail {
+		contextEnd = iter.tail
+	}
+	context := string(iter.buf[contextStart:contextEnd])
+	iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...",
+		operation, msg, iter.head-peekStart, parsing, context)
}

// CurrentBuffer gets current buffer as string for debugging purpose

@ -210,7 +225,7 @@ func (iter *Iterator) CurrentBuffer() string {
	if peekStart < 0 {
		peekStart = 0
	}
-	return fmt.Sprintf("parsing %v ...|%s|... at %s", iter.head,
+	return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head,
		string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
}
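
For illustration (not part of the change set), a minimal sketch of how the richer error surfaces to callers; the exact message text depends on the input buffer:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var v map[string]interface{}
	err := jsoniter.Unmarshal([]byte(`{"a": blah}`), &v)
	// The error now reports the byte offset plus a short excerpt and a wider
	// context window, instead of dumping the whole buffer.
	fmt.Println(err)
}
```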

View File

@ -19,7 +19,7 @@ func (iter *Iterator) ReadArray() (ret bool) {
	case ',':
		return true
	default:
-		iter.ReportError("ReadArray", "expect [ or , or ] or n, but found: "+string([]byte{c}))
+		iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c}))
		return
	}
}

@ -42,7 +42,7 @@ func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
			c = iter.nextToken()
		}
		if c != ']' {
-			iter.ReportError("ReadArrayCB", "expect ] in the end")
+			iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
			return false
		}
		return true

@ -53,6 +53,6 @@ func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
		iter.skipThreeBytes('u', 'l', 'l')
		return true // null
	}
-	iter.ReportError("ReadArrayCB", "expect [ or n, but found: "+string([]byte{c}))
+	iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c}))
	return false
}

View File

@ -113,6 +113,11 @@ func (iter *Iterator) ReadUint32() (ret uint32) {
}

func (iter *Iterator) readUint32(c byte) (ret uint32) {
+	defer func() {
+		if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
+			iter.ReportError("readUint32", "can not decode float as int")
+		}
+	}()
	ind := intDigits[c]
	if ind == 0 {
		return 0 // single zero

@ -224,6 +229,11 @@ func (iter *Iterator) ReadUint64() uint64 {
}

func (iter *Iterator) readUint64(c byte) (ret uint64) {
+	defer func() {
+		if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
+			iter.ReportError("readUint64", "can not decode float as int")
+		}
+	}()
	ind := intDigits[c]
	if ind == 0 {
		return 0 // single zero
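
The effect of the deferred check is easiest to see from the caller's side; a minimal sketch mirroring the Test_float_as_int case added later in this change set:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var i int
	err := jsoniter.Unmarshal([]byte(`1.1`), &i)
	// true: the fractional part is now rejected with a clear
	// "can not decode float as int" error (#195).
	fmt.Println(err != nil)
}
```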

View File

@ -19,15 +19,33 @@ func (iter *Iterator) ReadObject() (ret string) {
		c = iter.nextToken()
		if c == '"' {
			iter.unreadByte()
+			if iter.cfg.objectFieldMustBeSimpleString {
				return string(iter.readObjectFieldAsBytes())
+			} else {
+				field := iter.ReadString()
+				c = iter.nextToken()
+				if c != ':' {
+					iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+				}
+				return field
+			}
		}
		if c == '}' {
			return "" // end of object
		}
-		iter.ReportError("ReadObject", `expect " after {`)
+		iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c}))
		return
	case ',':
+		if iter.cfg.objectFieldMustBeSimpleString {
			return string(iter.readObjectFieldAsBytes())
+		} else {
+			field := iter.ReadString()
+			c = iter.nextToken()
+			if c != ':' {
+				iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+			}
+			return field
+		}
	case '}':
		return "" // end of object
	default:

@ -44,17 +62,34 @@ func (iter *Iterator) readFieldHash() int32 {
	for i := iter.head; i < iter.tail; i++ {
		// require ascii string and no escape
		b := iter.buf[i]
+		if !iter.cfg.objectFieldMustBeSimpleString && b == '\\' {
+			iter.head = i
+			for _, b := range iter.readStringSlowPath() {
				if 'A' <= b && b <= 'Z' {
					b += 'a' - 'A'
				}
+				hash ^= int64(b)
+				hash *= 0x1000193
+			}
+			c = iter.nextToken()
+			if c != ':' {
+				iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
+				return 0
+			}
+			return int32(hash)
+		}
		if b == '"' {
			iter.head = i + 1
			c = iter.nextToken()
			if c != ':' {
				iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
+				return 0
			}
			return int32(hash)
		}
+		if 'A' <= b && b <= 'Z' {
+			b += 'a' - 'A'
+		}
		hash ^= int64(b)
		hash *= 0x1000193
	}

@ -80,18 +115,38 @@ func calcHash(str string) int32 {

// ReadObjectCB read object with callback, the key is ascii only and field name not copied
func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
	c := iter.nextToken()
+	var fieldBytes []byte
+	var field string
	if c == '{' {
		c = iter.nextToken()
		if c == '"' {
			iter.unreadByte()
-			field := iter.readObjectFieldAsBytes()
-			if !callback(iter, *(*string)(unsafe.Pointer(&field))) {
+			if iter.cfg.objectFieldMustBeSimpleString {
+				fieldBytes = iter.readObjectFieldAsBytes()
+				field = *(*string)(unsafe.Pointer(&fieldBytes))
+			} else {
+				field = iter.ReadString()
+				c = iter.nextToken()
+				if c != ':' {
+					iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+				}
+			}
+			if !callback(iter, field) {
				return false
			}
			c = iter.nextToken()
			for c == ',' {
-				field = iter.readObjectFieldAsBytes()
-				if !callback(iter, *(*string)(unsafe.Pointer(&field))) {
+				if iter.cfg.objectFieldMustBeSimpleString {
+					fieldBytes = iter.readObjectFieldAsBytes()
+					field = *(*string)(unsafe.Pointer(&fieldBytes))
+				} else {
+					field = iter.ReadString()
+					c = iter.nextToken()
+					if c != ':' {
+						iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+					}
+				}
+				if !callback(iter, field) {
					return false
				}
				c = iter.nextToken()

@ -105,14 +160,14 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
		if c == '}' {
			return true
		}
-		iter.ReportError("ReadObjectCB", `expect " after }`)
+		iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c}))
		return false
	}
	if c == 'n' {
		iter.skipThreeBytes('u', 'l', 'l')
		return true // null
	}
-	iter.ReportError("ReadObjectCB", `expect { or n`)
+	iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c}))
	return false
}

@ -125,7 +180,7 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
			iter.unreadByte()
			field := iter.ReadString()
			if iter.nextToken() != ':' {
-				iter.ReportError("ReadMapCB", "expect : after object field")
+				iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
				return false
			}
			if !callback(iter, field) {

@ -135,7 +190,7 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
			for c == ',' {
				field = iter.ReadString()
				if iter.nextToken() != ':' {
-					iter.ReportError("ReadMapCB", "expect : after object field")
+					iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
					return false
				}
				if !callback(iter, field) {

@ -152,14 +207,14 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
		if c == '}' {
			return true
		}
-		iter.ReportError("ReadMapCB", `expect " after }`)
+		iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
		return false
	}
	if c == 'n' {
		iter.skipThreeBytes('u', 'l', 'l')
		return true // null
	}
-	iter.ReportError("ReadMapCB", `expect { or n`)
+	iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
	return false
}

@ -176,7 +231,7 @@ func (iter *Iterator) readObjectStart() bool {
		iter.skipThreeBytes('u', 'l', 'l')
		return false
	}
-	iter.ReportError("readObjectStart", "expect { or n")
+	iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c}))
	return false
}

@ -192,7 +247,7 @@ func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) {
		}
	}
	if iter.buf[iter.head] != ':' {
-		iter.ReportError("readObjectFieldAsBytes", "expect : after object field")
+		iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]}))
		return
	}
	iter.head++
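
With ObjectFieldMustBeSimpleString left at its default (false), escaped object keys are now unescaped before field matching; a sketch mirroring the regression test added later in this change set:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type TestObject struct {
	Field1 string
}

func main() {
	var obj TestObject
	// `\u0046` is an escaped 'F', so this key now matches Field1.
	err := jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal(
		[]byte(`{"\u0046ield1":"hello"}`), &obj)
	fmt.Println(obj.Field1, err) // hello <nil>
}
```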

View File

@ -25,7 +25,7 @@ func (iter *Iterator) ReadBool() (ret bool) {
		iter.skipFourBytes('a', 'l', 's', 'e')
		return false
	}
-	iter.ReportError("ReadBool", "expect t or f")
+	iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c}))
	return
}

@ -59,7 +59,9 @@ func (iter *Iterator) stopCapture() []byte {
	iter.captureStartedAt = -1
	iter.captured = nil
	if len(captured) == 0 {
-		return remaining
+		copied := make([]byte, len(remaining))
+		copy(copied, remaining)
+		return copied
	}
	captured = append(captured, remaining...)
	return captured

View File

@ -1,4 +1,4 @@
-//+build jsoniter-sloppy
+//+build jsoniter_sloppy

package jsoniter

View File

@ -1,4 +1,4 @@
-//+build !jsoniter-sloppy
+//+build !jsoniter_sloppy

package jsoniter

View File

@ -28,7 +28,7 @@ func (iter *Iterator) ReadString() (ret string) {
		iter.skipThreeBytes('u', 'l', 'l')
		return ""
	}
-	iter.ReportError("ReadString", `expects " or n`)
+	iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c}))
	return
}

@ -139,7 +139,7 @@ func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
		}
		return copied
	}
-	iter.ReportError("ReadStringAsSlice", `expects " or n`)
+	iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c}))
	return
}

@ -156,7 +156,7 @@ func (iter *Iterator) readU4() (ret rune) {
		} else if c >= 'A' && c <= 'F' {
			ret = ret*16 + rune(c-'A'+10)
		} else {
-			iter.ReportError("readU4", "expects 0~9 or a~f")
+			iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c}))
			return
		}
	}

View File

@ -1,9 +1,25 @@
package jsoniter

-import "encoding/json"
+import (
+	"encoding/json"
+	"strconv"
+)

type Number string

+// String returns the literal text of the number.
+func (n Number) String() string { return string(n) }
+
+// Float64 returns the number as a float64.
+func (n Number) Float64() (float64, error) {
+	return strconv.ParseFloat(string(n), 64)
+}
+
+// Int64 returns the number as an int64.
+func (n Number) Int64() (int64, error) {
+	return strconv.ParseInt(string(n), 10, 64)
+}
+
func CastJsonNumber(val interface{}) (string, bool) {
	switch typedVal := val.(type) {
	case json.Number:
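
With these methods, jsoniter.Number can be used much like json.Number; a minimal sketch:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var n jsoniter.Number
	if err := jsoniter.Unmarshal([]byte(`3.14`), &n); err != nil {
		panic(err)
	}
	f, _ := n.Float64()
	_, intErr := n.Int64()
	fmt.Println(n.String(), f, intErr != nil) // 3.14 3.14 true
}
```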

View File

@ -28,6 +28,7 @@ func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
func (cfg *frozenConfig) ReturnStream(stream *Stream) {
	stream.Error = nil
+	stream.Attachment = nil
	select {
	case cfg.streamPool <- stream:
		return

@ -48,6 +49,7 @@ func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator {
func (cfg *frozenConfig) ReturnIterator(iter *Iterator) {
	iter.Error = nil
+	iter.Attachment = nil
	select {
	case cfg.iteratorPool <- iter:
		return

View File

@ -72,24 +72,24 @@ func init() {
	textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
}

-type optionalDecoder struct {
-	valueType    reflect.Type
-	valueDecoder ValDecoder
+type OptionalDecoder struct {
+	ValueType    reflect.Type
+	ValueDecoder ValDecoder
}

-func (decoder *optionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	if iter.ReadNil() {
		*((*unsafe.Pointer)(ptr)) = nil
	} else {
		if *((*unsafe.Pointer)(ptr)) == nil {
			//pointer to null, we have to allocate memory to hold the value
-			value := reflect.New(decoder.valueType)
+			value := reflect.New(decoder.ValueType)
			newPtr := extractInterface(value.Interface()).word
-			decoder.valueDecoder.Decode(newPtr, iter)
+			decoder.ValueDecoder.Decode(newPtr, iter)
			*((*uintptr)(ptr)) = uintptr(newPtr)
		} else {
			//reuse existing instance
-			decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
+			decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
		}
	}
}

@ -113,11 +113,31 @@ func (decoder *deferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	}
}

-type optionalEncoder struct {
+type OptionalEncoder struct {
+	ValueEncoder ValEncoder
+}
+
+func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+	if *((*unsafe.Pointer)(ptr)) == nil {
+		stream.WriteNil()
+	} else {
+		encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
+	}
+}
+
+func (encoder *OptionalEncoder) EncodeInterface(val interface{}, stream *Stream) {
+	WriteToStream(val, stream, encoder)
+}
+
+func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+	return *((*unsafe.Pointer)(ptr)) == nil
+}
+
+type optionalMapEncoder struct {
	valueEncoder ValEncoder
}

-func (encoder *optionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+func (encoder *optionalMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	if *((*unsafe.Pointer)(ptr)) == nil {
		stream.WriteNil()
	} else {

@ -125,15 +145,13 @@ func (encoder *optionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	}
}

-func (encoder *optionalEncoder) EncodeInterface(val interface{}, stream *Stream) {
+func (encoder *optionalMapEncoder) EncodeInterface(val interface{}, stream *Stream) {
	WriteToStream(val, stream, encoder)
}

-func (encoder *optionalEncoder) IsEmpty(ptr unsafe.Pointer) bool {
-	if *((*unsafe.Pointer)(ptr)) == nil {
-		return true
-	}
-	return false
+func (encoder *optionalMapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+	p := *((*unsafe.Pointer)(ptr))
+	return p == nil || encoder.valueEncoder.IsEmpty(p)
}

type placeholderEncoder struct {

@ -146,7 +164,7 @@ func (encoder *placeholderEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
}

func (encoder *placeholderEncoder) EncodeInterface(val interface{}, stream *Stream) {
-	WriteToStream(val, stream, encoder)
+	encoder.getRealEncoder().EncodeInterface(val, stream)
}

func (encoder *placeholderEncoder) IsEmpty(ptr unsafe.Pointer) bool {

@ -289,7 +307,7 @@ func createDecoderOfType(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error
		templateInterface := reflect.New(typ).Elem().Interface()
		var decoder ValDecoder = &unmarshalerDecoder{extractInterface(templateInterface)}
		if typ.Kind() == reflect.Ptr {
-			decoder = &optionalDecoder{typ.Elem(), decoder}
+			decoder = &OptionalDecoder{typ.Elem(), decoder}
		}
		return decoder, nil
	}

@ -302,7 +320,7 @@ func createDecoderOfType(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error
		templateInterface := reflect.New(typ).Elem().Interface()
		var decoder ValDecoder = &textUnmarshalerDecoder{extractInterface(templateInterface)}
		if typ.Kind() == reflect.Ptr {
-			decoder = &optionalDecoder{typ.Elem(), decoder}
+			decoder = &OptionalDecoder{typ.Elem(), decoder}
		}
		return decoder, nil
	}

@ -462,7 +480,7 @@ func createEncoderOfType(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error
			checkIsEmpty: checkIsEmpty,
		}
		if typ.Kind() == reflect.Ptr {
-			encoder = &optionalEncoder{encoder}
+			encoder = &OptionalEncoder{encoder}
		}
		return encoder, nil
	}

@ -489,7 +507,7 @@ func createEncoderOfType(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error
			checkIsEmpty: checkIsEmpty,
		}
		if typ.Kind() == reflect.Ptr {
-			encoder = &optionalEncoder{encoder}
+			encoder = &OptionalEncoder{encoder}
		}
		return encoder, nil
	}

@ -549,7 +567,7 @@ func createCheckIsEmpty(typ reflect.Type) (checkIsEmpty, error) {
	case reflect.Map:
		return &mapEncoder{}, nil
	case reflect.Ptr:
-		return &optionalEncoder{}, nil
+		return &OptionalEncoder{}, nil
	default:
		return nil, fmt.Errorf("unsupported type: %v", typ)
	}

@ -660,7 +678,7 @@ func decoderOfOptional(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error)
	if err != nil {
		return nil, err
	}
-	return &optionalDecoder{elemType, decoder}, nil
+	return &OptionalDecoder{elemType, decoder}, nil
}

func encoderOfOptional(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {

@ -669,9 +687,9 @@ func encoderOfOptional(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error)
	if err != nil {
		return nil, err
	}
-	encoder := &optionalEncoder{elemEncoder}
+	encoder := &OptionalEncoder{elemEncoder}
	if elemType.Kind() == reflect.Map {
-		encoder = &optionalEncoder{encoder}
+		encoder = &OptionalEncoder{encoder}
	}
	return encoder, nil
}

View File

@ -21,7 +21,7 @@ func encoderOfArray(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
		return nil, err
	}
	if typ.Elem().Kind() == reflect.Map {
-		encoder = &optionalEncoder{encoder}
+		encoder = &OptionalEncoder{encoder}
	}
	return &arrayEncoder{typ, typ.Elem(), encoder}, nil
}

View File

@ -185,7 +185,7 @@ func _getTypeDecoderFromExtension(typ reflect.Type) ValDecoder {
	if typ.Kind() == reflect.Ptr {
		decoder := typeDecoders[typ.Elem().String()]
		if decoder != nil {
-			return &optionalDecoder{typ.Elem(), decoder}
+			return &OptionalDecoder{typ.Elem(), decoder}
		}
	}
	return nil

@ -216,7 +216,7 @@ func _getTypeEncoderFromExtension(typ reflect.Type) ValEncoder {
	if typ.Kind() == reflect.Ptr {
		encoder := typeEncoders[typ.Elem().String()]
		if encoder != nil {
-			return &optionalEncoder{encoder}
+			return &OptionalEncoder{encoder}
		}
	}
	return nil

@ -254,7 +254,7 @@ func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, err
			for _, binding := range structDescriptor.Fields {
				binding.levels = append([]int{i}, binding.levels...)
				omitempty := binding.Encoder.(*structFieldEncoder).omitempty
-				binding.Encoder = &optionalEncoder{binding.Encoder}
+				binding.Encoder = &OptionalEncoder{binding.Encoder}
				binding.Encoder = &structFieldEncoder{&field, binding.Encoder, omitempty}
				binding.Decoder = &deferenceDecoder{field.Type.Elem(), binding.Decoder}
				binding.Decoder = &structFieldDecoder{&field, binding.Decoder}

@ -269,7 +269,7 @@ func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, err
		if decoder == nil {
			var err error
			decoder, err = decoderOfType(cfg, field.Type)
-			if err != nil {
+			if len(fieldNames) > 0 && err != nil {
				return nil, err
			}
		}

@ -277,12 +277,13 @@ func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, err
		if encoder == nil {
			var err error
			encoder, err = encoderOfType(cfg, field.Type)
-			if err != nil {
+			if len(fieldNames) > 0 && err != nil {
				return nil, err
			}
-			// map is stored as pointer in the struct
-			if field.Type.Kind() == reflect.Map {
-				encoder = &optionalEncoder{encoder}
+			// map is stored as pointer in the struct,
+			// and treat nil or empty map as empty field
+			if encoder != nil && field.Type.Kind() == reflect.Map {
+				encoder = &optionalMapEncoder{encoder}
			}
		}
		binding := &Binding{

View File

@ -441,8 +441,16 @@ type jsonNumberCodec struct {
}

func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+	switch iter.WhatIsNext() {
+	case StringValue:
+		*((*json.Number)(ptr)) = json.Number(iter.ReadString())
+	case NilValue:
+		iter.skipFourBytes('n', 'u', 'l', 'l')
+		*((*json.Number)(ptr)) = ""
+	default:
		*((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString()))
	}
+}

func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteRaw(string(*((*json.Number)(ptr))))

@ -460,8 +468,16 @@ type jsoniterNumberCodec struct {
}

func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+	switch iter.WhatIsNext() {
+	case StringValue:
+		*((*Number)(ptr)) = Number(iter.ReadString())
+	case NilValue:
+		iter.skipFourBytes('n', 'u', 'l', 'l')
+		*((*Number)(ptr)) = ""
+	default:
		*((*Number)(ptr)) = Number([]byte(iter.readNumberAsString()))
	}
+}

func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteRaw(string(*((*Number)(ptr))))

@ -592,7 +608,7 @@ type stringModeNumberDecoder struct {
func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
	c := iter.nextToken()
	if c != '"' {
-		iter.ReportError("stringModeNumberDecoder", `expect "`)
+		iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
		return
	}
	decoder.elemDecoder.Decode(ptr, iter)

@ -601,7 +617,7 @@ func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
	}
	c = iter.readByte()
	if c != '"' {
-		iter.ReportError("stringModeNumberDecoder", `expect "`)
+		iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
		return
	}
}

View File

@ -21,7 +21,7 @@ func encoderOfSlice(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
		return nil, err
	}
	if typ.Elem().Kind() == reflect.Map {
-		encoder = &optionalEncoder{encoder}
+		encoder = &OptionalEncoder{encoder}
	}
	return &sliceEncoder{typ, typ.Elem(), encoder}, nil
}

@ -127,12 +127,10 @@ func growOne(slice *sliceHeader, sliceType reflect.Type, elementType reflect.Typ
	newVal := reflect.MakeSlice(sliceType, newLen, newCap)
	dst := unsafe.Pointer(newVal.Pointer())
	// copy old array into new array
-	originalBytesCount := uintptr(slice.Len) * elementType.Size()
-	srcPtr := (*[1 << 30]byte)(slice.Data)
-	dstPtr := (*[1 << 30]byte)(dst)
-	for i := uintptr(0); i < originalBytesCount; i++ {
-		dstPtr[i] = srcPtr[i]
-	}
+	originalBytesCount := slice.Len * int(elementType.Size())
+	srcSliceHeader := (unsafe.Pointer)(&sliceHeader{slice.Data, originalBytesCount, originalBytesCount})
+	dstSliceHeader := (unsafe.Pointer)(&sliceHeader{dst, originalBytesCount, originalBytesCount})
+	copy(*(*[]byte)(dstSliceHeader), *(*[]byte)(srcSliceHeader))
	slice.Data = dst
	slice.Len = newLen
	slice.Cap = newCap

View File

@ -427,8 +427,18 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
	if !iter.readObjectStart() {
		return
	}
-	fieldBytes := iter.readObjectFieldAsBytes()
-	field := *(*string)(unsafe.Pointer(&fieldBytes))
+	var fieldBytes []byte
+	var field string
+	if iter.cfg.objectFieldMustBeSimpleString {
+		fieldBytes = iter.readObjectFieldAsBytes()
+		field = *(*string)(unsafe.Pointer(&fieldBytes))
+	} else {
+		field = iter.ReadString()
+		c := iter.nextToken()
+		if c != ':' {
+			iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+		}
+	}
	fieldDecoder := decoder.fields[strings.ToLower(field)]
	if fieldDecoder == nil {
		iter.Skip()

@ -436,8 +446,16 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
		fieldDecoder.Decode(ptr, iter)
	}
	for iter.nextToken() == ',' {
-		fieldBytes = iter.readObjectFieldAsBytes()
+		if iter.cfg.objectFieldMustBeSimpleString {
+			fieldBytes := iter.readObjectFieldAsBytes()
			field = *(*string)(unsafe.Pointer(&fieldBytes))
+		} else {
+			field = iter.ReadString()
+			c := iter.nextToken()
+			if c != ':' {
+				iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+			}
+		}
		fieldDecoder = decoder.fields[strings.ToLower(field)]
		if fieldDecoder == nil {
			iter.Skip()

View File

@ -4,7 +4,7 @@ import (
"io" "io"
) )
// Stream is a io.Writer like object, with JSON specific write functions. // stream is a io.Writer like object, with JSON specific write functions.
// Error is not returned as return value, but stored as Error member on this stream instance. // Error is not returned as return value, but stored as Error member on this stream instance.
type Stream struct { type Stream struct {
cfg *frozenConfig cfg *frozenConfig
@ -13,6 +13,7 @@ type Stream struct {
n int n int
Error error Error error
indention int indention int
Attachment interface{} // open for customized encoder
} }
// NewStream create new stream instance. // NewStream create new stream instance.
@ -191,6 +192,9 @@ func (stream *Stream) ensure(minimal int) {
func (stream *Stream) growAtLeast(minimal int) { func (stream *Stream) growAtLeast(minimal int) {
if stream.out != nil { if stream.out != nil {
stream.Flush() stream.Flush()
if stream.Available() >= minimal {
return
}
} }
toGrow := len(stream.buf) toGrow := len(stream.buf)
if toGrow < minimal { if toGrow < minimal {
@ -280,8 +284,7 @@ func (stream *Stream) WriteArrayStart() {
// WriteEmptyArray write [] // WriteEmptyArray write []
func (stream *Stream) WriteEmptyArray() { func (stream *Stream) WriteEmptyArray() {
stream.writeByte('[') stream.writeTwoBytes('[', ']')
stream.writeByte(']')
} }
// WriteArrayEnd write ] with possible indention // WriteArrayEnd write ] with possible indention

View File

@ -22,7 +22,7 @@ func Test_new_encoder(t *testing.T) {
	encoder2 := NewEncoder(buf2)
	encoder2.SetEscapeHTML(false)
	encoder2.Encode([]int{1})
-	should.Equal("[1]", buf2.String())
+	should.Equal("[1]\n", buf2.String())
}
func Test_string_encode_with_std_without_html_escape(t *testing.T) { func Test_string_encode_with_std_without_html_escape(t *testing.T) {

View File

@ -2,11 +2,12 @@ package jsoniter
import (
	"encoding/json"
-	"github.com/stretchr/testify/require"
	"strconv"
	"testing"
	"time"
	"unsafe"
+
+	"github.com/stretchr/testify/require"
)

func Test_customize_type_decoder(t *testing.T) {

@ -82,7 +83,7 @@ func Test_customize_field_decoder(t *testing.T) {
}

type TestObject1 struct {
-	field1 string
+	Field1 string
}

type testExtension struct {

@ -93,7 +94,7 @@ func (extension *testExtension) UpdateStructDescriptor(structDescriptor *StructD
	if structDescriptor.Type.String() != "jsoniter.TestObject1" {
		return
	}
-	binding := structDescriptor.GetField("field1")
+	binding := structDescriptor.GetField("Field1")
	binding.Encoder = &funcEncoder{fun: func(ptr unsafe.Pointer, stream *Stream) {
		str := *((*string)(ptr))
		val, _ := strconv.Atoi(str)

@ -112,7 +113,7 @@ func Test_customize_field_by_extension(t *testing.T) {
	obj := TestObject1{}
	err := UnmarshalFromString(`{"field-1": 100}`, &obj)
	should.Nil(err)
-	should.Equal("100", obj.field1)
+	should.Equal("100", obj.Field1)
	str, err := MarshalToString(obj)
	should.Nil(err)
	should.Equal(`{"field-1":100}`, str)

View File

@ -506,6 +506,36 @@ func Test_jsoniter_number(t *testing.T) {
should.Equal("1", str) should.Equal("1", str)
} }
func Test_non_numeric_as_number(t *testing.T) {
should := require.New(t)
var v1 json.Number
err := Unmarshal([]byte(`"500"`), &v1)
should.Nil(err)
should.Equal("500", string(v1))
var v2 Number
err = Unmarshal([]byte(`"500"`), &v2)
should.Nil(err)
should.Equal("500", string(v2))
}
func Test_null_as_number(t *testing.T) {
should := require.New(t)
var v1 json.Number
err := json.Unmarshal([]byte(`null`), &v1)
should.Nil(err)
should.Equal("", string(v1))
var v2 Number
err = Unmarshal([]byte(`null`), &v2)
should.Nil(err)
should.Equal("", string(v2))
}
func Test_float_as_int(t *testing.T) {
should := require.New(t)
var i int
should.NotNil(Unmarshal([]byte(`1.1`), &i))
}
func Benchmark_jsoniter_encode_int(b *testing.B) {
	stream := NewStream(ConfigDefault, ioutil.Discard, 64)
	for n := 0; n < b.N; n++ {

View File

@ -7,8 +7,24 @@ import (
"unsafe" "unsafe"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"reflect"
) )
func Test_write_empty_interface_via_placeholder(t *testing.T) {
fmt.Println(^uint(0) >> 1)
should := require.New(t)
m := map[uint32]interface{}{1: "hello"}
inf := reflect.ValueOf(m).MapIndex(reflect.ValueOf(uint32(1))).Interface()
encoder := &placeholderEncoder{
cfg: ConfigFastest.(*frozenConfig),
cacheKey: reflect.TypeOf(m).Elem(),
}
stream := ConfigFastest.BorrowStream(nil)
encoderOfType(ConfigFastest.(*frozenConfig), reflect.TypeOf(m).Elem())
encoder.EncodeInterface(inf, stream)
should.Equal(`"hello"`, string(stream.Buffer()))
}
func Test_write_array_of_interface(t *testing.T) {
	should := require.New(t)
	array := []interface{}{"hello"}

View File

@ -130,3 +130,9 @@ func Test_invalid_number(t *testing.T) {
	should.Nil(err)
	should.Equal(string(result2), string(result))
}
func Test_valid(t *testing.T) {
should := require.New(t)
should.True(Valid([]byte(`{}`)))
should.False(Valid([]byte(`{`)))
}

View File

@ -328,3 +328,15 @@ func Test_decode_nested(t *testing.T) {
		t.Fatal(slice[2])
	}
}
func Test_decode_field_with_escape(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
}
var obj TestObject
should.Nil(ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(`{"Field\"1":"hello"}`), &obj))
should.Equal("", obj.Field1)
should.Nil(ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(`{"\u0046ield1":"hello"}`), &obj))
should.Equal("hello", obj.Field1)
}

View File

@ -3,6 +3,7 @@ package jsoniter
import (
	"encoding/json"
	"github.com/stretchr/testify/require"
+	"strings"
	"testing"
)

@ -86,3 +87,28 @@ func Test_marshal_invalid_json_raw_message(t *testing.T) {
	should.Equal(`{"raw":null}`, string(aout))
	should.Nil(aouterr)
}
func Test_raw_message_memory_not_copied_issue(t *testing.T) {
jsonStream := `{"name":"xxxxx","bundle_id":"com.zonst.majiang","app_platform":"ios","app_category":"100103", "budget_day":1000,"bidding_min":1,"bidding_max":2,"bidding_type":"CPM", "freq":{"open":true,"type":"day","num":100},"speed":1, "targeting":{"vendor":{"open":true,"list":["zonst"]}, "geo_code":{"open":true,"list":["156110100"]},"app_category":{"open":true,"list":["100101"]}, "day_parting":{"open":true,"list":["100409","100410"]},"device_type":{"open":true,"list":["ipad"]}, "os_version":{"open":true,"list":[10]},"carrier":{"open":true,"list":["mobile"]}, "network":{"open":true,"list":["4G"]}},"url":{"tracking_imp_url":"http://www.baidu.com", "tracking_clk_url":"http://www.baidu.com","jump_url":"http://www.baidu.com","deep_link_url":"http://www.baidu.com"}}`
type IteratorObject struct {
Name *string `json:"name"`
BundleId *string `json:"bundle_id"`
AppCategory *string `json:"app_category"`
AppPlatform *string `json:"app_platform"`
BudgetDay *float32 `json:"budget_day"`
BiddingMax *float32 `json:"bidding_max"`
BiddingMin *float32 `json:"bidding_min"`
BiddingType *string `json:"bidding_type"`
Freq *RawMessage `json:"freq"`
Targeting *RawMessage `json:"targeting"`
Url *RawMessage `json:"url"`
Speed *int `json:"speed" db:"speed"`
}
obj := &IteratorObject{}
decoder := NewDecoder(strings.NewReader(jsonStream))
err := decoder.Decode(obj)
should := require.New(t)
should.Nil(err)
should.Equal(`{"open":true,"type":"day","num":100}`, string(*obj.Freq))
}

View File

@ -104,6 +104,11 @@ func Test_skip_and_return_bytes_with_reader(t *testing.T) {
	should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(skipped))
}

+func Test_skip_empty(t *testing.T) {
+	should := require.New(t)
+	should.NotNil(Get([]byte("")).LastError())
+}
+
type TestResp struct {
	Code uint64
}

View File

@ -51,3 +51,19 @@ func Test_writeString_should_grow_buffer(t *testing.T) {
	should.Nil(stream.Error)
	should.Equal(`"123"`, string(stream.Buffer()))
}
type NopWriter struct {
bufferSize int
}
func (w *NopWriter) Write(p []byte) (n int, err error) {
w.bufferSize = cap(p)
return len(p), nil
}
func Test_flush_buffer_should_stop_grow_buffer(t *testing.T) {
writer := new(NopWriter)
NewEncoder(writer).Encode(make([]int, 10000000))
should := require.New(t)
should.Equal(512, writer.bufferSize)
}

View File

@ -0,0 +1,52 @@
package jsoniter
import (
"encoding/json"
"testing"
"time"
"github.com/stretchr/testify/require"
)
func Test_encode_unexported_field(t *testing.T) {
type TestData struct {
a int
b <-chan int
C int
d *time.Timer
}
should := require.New(t)
testChan := make(<-chan int, 10)
testTimer := time.NewTimer(10 * time.Second)
obj := &TestData{
a: 42,
b: testChan,
C: 21,
d: testTimer,
}
jb, err := json.Marshal(obj)
should.NoError(err)
should.Equal([]byte(`{"C":21}`), jb)
err = json.Unmarshal([]byte(`{"a": 444, "b":"bad", "C":55, "d":{"not": "a timer"}}`), obj)
should.NoError(err)
should.Equal(42, obj.a)
should.Equal(testChan, obj.b)
should.Equal(55, obj.C)
should.Equal(testTimer, obj.d)
jb, err = Marshal(obj)
should.NoError(err)
should.Equal(jb, []byte(`{"C":55}`))
err = Unmarshal([]byte(`{"a": 444, "b":"bad", "C":256, "d":{"not":"a timer"}}`), obj)
should.NoError(err)
should.Equal(42, obj.a)
should.Equal(testChan, obj.b)
should.Equal(256, obj.C)
should.Equal(testTimer, obj.d)
}