mirror of https://github.com/json-iterator/go.git (synced 2025-01-08 13:06:29 +02:00)

support slice

This commit is contained in:
parent 33e3df45dd
commit 7bb029bca5
@@ -19,15 +19,18 @@ func (decoder *stringDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
 	*((*string)(ptr)) = iter.ReadString()
 }
 
-type stringOptionalDecoder struct {
+type optionalDecoder struct {
+	valueType    reflect.Type
+	valueDecoder Decoder
 }
 
-func (decoder *stringOptionalDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
+func (decoder *optionalDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
 	if iter.ReadNull() {
-		*((**string)(ptr)) = nil
+		*((*unsafe.Pointer)(ptr)) = nil
 	} else {
-		result := iter.ReadString()
-		*((**string)(ptr)) = &result
+		value := reflect.New(decoder.valueType)
+		decoder.valueDecoder.decode(unsafe.Pointer(value.Pointer()), iter)
+		*((*uintptr)(ptr)) = value.Pointer()
 	}
 }
 
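
The rewritten optionalDecoder generalizes the old string-only version: it allocates the pointee with reflect.New, lets the wrapped valueDecoder fill it, then stores the new allocation's address through the target pointer slot. A minimal stand-alone sketch of that pointer-write trick (my own illustration, not part of the commit; the variable names and the hard-coded "hello" are assumptions):

	package main

	import (
		"fmt"
		"reflect"
		"unsafe"
	)

	func main() {
		var target *string                       // plays the role of the *string field being decoded
		ptr := unsafe.Pointer(&target)           // corresponds to the decoder's ptr argument
		value := reflect.New(reflect.TypeOf("")) // reflect.New allocates a fresh string, as the decoder does
		*value.Interface().(*string) = "hello"   // stand-in for valueDecoder.decode filling the pointee
		*((*uintptr)(ptr)) = value.Pointer()     // the same unchecked store optionalDecoder performs
		fmt.Println(*target)                     // prints: hello
	}
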
@@ -56,34 +59,88 @@ func (decoder *structFieldDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
 	decoder.fieldDecoder.decode(unsafe.Pointer(fieldPtr), iter)
 }
 
-var DECODER_STRING *stringDecoder
-var DECODER_OPTIONAL_STRING *stringOptionalDecoder
-var DECODERS_STRUCT unsafe.Pointer
+type sliceDecoder struct {
+	sliceType   reflect.Type
+	elemType    reflect.Type
+	elemDecoder Decoder
+}
 
-func addStructDecoderToCache(cacheKey string, decoder *structDecoder) {
+// sliceHeader is a safe version of SliceHeader used within this package.
+type sliceHeader struct {
+	Data unsafe.Pointer
+	Len  int
+	Cap  int
+}
+
+func (decoder *sliceDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
+	slice := (*sliceHeader)(ptr)
+	slice.Len = 0
+	for iter.ReadArray() {
+		offset := uintptr(slice.Len) * decoder.elemType.Size()
+		growOne(slice, decoder.sliceType, decoder.elemType)
+		dataPtr := uintptr(slice.Data) + offset
+		decoder.elemDecoder.decode(unsafe.Pointer(dataPtr), iter)
+	}
+}
+
+// growOne grows the slice so that it can hold one more value,
+// allocating more capacity if needed.
+func growOne(slice *sliceHeader, sliceType reflect.Type, elementType reflect.Type) {
+	newLen := slice.Len + 1
+	if newLen <= slice.Cap {
+		slice.Len = newLen
+		return
+	}
+	newCap := slice.Cap
+	if newCap == 0 {
+		newCap = 1
+	} else {
+		for newCap < newLen {
+			if slice.Len < 1024 {
+				newCap += newCap
+			} else {
+				newCap += newCap / 4
+			}
+		}
+	}
+	dst := unsafe.Pointer(reflect.MakeSlice(sliceType, newLen, newCap).Pointer())
+	originalBytesCount := uintptr(slice.Len) * elementType.Size()
+	srcPtr := (*[1 << 30]byte)(slice.Data)
+	dstPtr := (*[1 << 30]byte)(dst)
+	for i := uintptr(0); i < originalBytesCount; i++ {
+		dstPtr[i] = srcPtr[i]
+	}
+	slice.Len = newLen
+	slice.Cap = newCap
+	slice.Data = dst
+}
+
+var DECODER_STRING *stringDecoder
+var DECODERS unsafe.Pointer
+
+func addDecoderToCache(cacheKey string, decoder Decoder) {
 	retry := true
 	for retry {
-		ptr := atomic.LoadPointer(&DECODERS_STRUCT)
-		cache := *(*map[string]*structDecoder)(ptr)
-		copy := map[string]*structDecoder{}
+		ptr := atomic.LoadPointer(&DECODERS)
+		cache := *(*map[string]Decoder)(ptr)
+		copy := map[string]Decoder{}
 		for k, v := range cache {
 			copy[k] = v
 		}
 		copy[cacheKey] = decoder
-		retry = !atomic.CompareAndSwapPointer(&DECODERS_STRUCT, ptr, unsafe.Pointer(&copy))
+		retry = !atomic.CompareAndSwapPointer(&DECODERS, ptr, unsafe.Pointer(&copy))
 	}
 }
 
-func getStructDecoderFromCache(cacheKey string) *structDecoder {
-	ptr := atomic.LoadPointer(&DECODERS_STRUCT)
-	cache := *(*map[string]*structDecoder)(ptr)
+func getDecoderFromCache(cacheKey string) Decoder {
+	ptr := atomic.LoadPointer(&DECODERS)
+	cache := *(*map[string]Decoder)(ptr)
 	return cache[cacheKey]
 }
 
 func init() {
 	DECODER_STRING = &stringDecoder{}
-	DECODER_OPTIONAL_STRING = &stringOptionalDecoder{}
-	atomic.StorePointer(&DECODERS_STRUCT, unsafe.Pointer(&map[string]*structDecoder{}))
+	atomic.StorePointer(&DECODERS, unsafe.Pointer(&map[string]Decoder{}))
 }
 
 // emptyInterface is the header for an interface{} value.
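
The growth policy in growOne doubles the capacity while the slice is short and switches to roughly 25% growth once it passes 1024 elements, similar to the runtime's append heuristic. A quick stand-alone check of the resulting schedule (my own illustration; the helper name nextCap is not in the commit):

	package main

	import "fmt"

	// nextCap reproduces only the capacity arithmetic of growOne.
	func nextCap(length, capacity int) int {
		newLen := length + 1
		if newLen <= capacity {
			return capacity
		}
		newCap := capacity
		if newCap == 0 {
			newCap = 1
		} else {
			for newCap < newLen {
				if length < 1024 {
					newCap += newCap // double while the slice is short
				} else {
					newCap += newCap / 4 // ~25% growth for long slices
				}
			}
		}
		return newCap
	}

	func main() {
		capacity := 0
		for length := 0; length < 5; length++ {
			capacity = nextCap(length, capacity)
			fmt.Println(length+1, capacity) // prints: 1 1, 2 2, 3 4, 4 4, 5 8
		}
	}
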
@@ -94,13 +151,19 @@ type emptyInterface struct {
 
 func (iter *Iterator) Read(obj interface{}) {
 	type_ := reflect.TypeOf(obj)
-	decoder, err := decoderOfType(type_)
-	if err != nil {
-		iter.Error = err
-		return
+	cacheKey := type_.String()
+	cachedDecoder := getDecoderFromCache(cacheKey)
+	if cachedDecoder == nil {
+		decoder, err := decoderOfType(type_)
+		if err != nil {
+			iter.Error = err
+			return
+		}
+		cachedDecoder = decoder
+		addDecoderToCache(cacheKey, decoder)
 	}
 	e := (*emptyInterface)(unsafe.Pointer(&obj))
-	decoder.decode(e.word, iter)
+	cachedDecoder.decode(e.word, iter)
 }
 
 type prefix string
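
Read now looks a decoder up by the reflect type's string before falling back to decoderOfType. The cache it consults is the copy-on-write map behind DECODERS: writers clone the map and publish the clone with a compare-and-swap, so readers never take a lock. A minimal stand-alone version of that pattern, under my own names rather than the package's API:

	package main

	import (
		"fmt"
		"sync/atomic"
		"unsafe"
	)

	// cache points at an immutable map; writers replace the whole map atomically.
	var cache unsafe.Pointer // holds a *map[string]int

	func init() {
		atomic.StorePointer(&cache, unsafe.Pointer(&map[string]int{}))
	}

	func get(key string) int {
		m := *(*map[string]int)(atomic.LoadPointer(&cache))
		return m[key]
	}

	func put(key string, value int) {
		for {
			old := atomic.LoadPointer(&cache)
			copied := map[string]int{}
			for k, v := range *(*map[string]int)(old) {
				copied[k] = v
			}
			copied[key] = value
			// Publish the new map only if nobody else swapped it in the meantime.
			if atomic.CompareAndSwapPointer(&cache, old, unsafe.Pointer(&copied)) {
				return
			}
		}
	}

	func main() {
		put("[]string", 1)
		fmt.Println(get("[]string")) // prints: 1
	}
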
@@ -139,7 +202,7 @@ func decoderOfPtr(type_ reflect.Type) (Decoder, error) {
 func decoderOfOptional(type_ reflect.Type) (Decoder, error) {
 	switch type_.Kind() {
 	case reflect.String:
-		return DECODER_OPTIONAL_STRING, nil
+		return &optionalDecoder{type_, DECODER_STRING}, nil
 	default:
 		return nil, errors.New("expect string")
 	}
@@ -147,25 +210,22 @@ func decoderOfOptional(type_ reflect.Type) (Decoder, error) {
 
 
 func decoderOfStruct(type_ reflect.Type) (Decoder, error) {
-	cacheKey := type_.String()
-	cachedDecoder := getStructDecoderFromCache(cacheKey)
-	if cachedDecoder == nil {
-		fields := map[string]Decoder{}
-		for i := 0; i < type_.NumField(); i++ {
-			field := type_.Field(i)
-			decoder, err := decoderOfPtr(field.Type)
-			if err != nil {
-				return prefix(fmt.Sprintf("[%s]", field.Name)).addTo(decoder, err)
-			}
-			fields[field.Name] = &structFieldDecoder{field.Offset, decoder}
-		}
-		cachedDecoder = &structDecoder{fields}
-		addStructDecoderToCache(cacheKey, cachedDecoder)
-	}
-	return cachedDecoder, nil
+	fields := map[string]Decoder{}
+	for i := 0; i < type_.NumField(); i++ {
+		field := type_.Field(i)
+		decoder, err := decoderOfPtr(field.Type)
+		if err != nil {
+			return prefix(fmt.Sprintf("{%s}", field.Name)).addTo(decoder, err)
+		}
+		fields[field.Name] = &structFieldDecoder{field.Offset, decoder}
+	}
+	return &structDecoder{fields}, nil
 }
 
 func decoderOfSlice(type_ reflect.Type) (Decoder, error) {
-	fmt.Println(type_.Elem())
-	return nil, errors.New("n/a")
+	decoder, err := decoderOfPtr(type_.Elem())
+	if err != nil {
+		return prefix("[elem]").addTo(decoder, err)
+	}
+	return &sliceDecoder{type_, type_.Elem(), decoder}, nil
 }
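
decoderOfSlice wires a sliceDecoder from three pieces of reflection data: the slice type, its element type, and the element decoder; growOne later relies on elemType.Size() and reflect.MakeSlice to build a larger backing array. A small stand-alone look at those standard-library calls (my own example; the printed element size assumes a 64-bit platform):

	package main

	import (
		"fmt"
		"reflect"
	)

	func main() {
		sliceType := reflect.TypeOf([]string{})
		elemType := sliceType.Elem()
		fmt.Println(elemType, elemType.Size())        // string 16 (data pointer + length on 64-bit)
		backing := reflect.MakeSlice(sliceType, 2, 4) // fresh backing array, like growOne allocates
		fmt.Println(backing.Len(), backing.Cap())     // 2 4
		fmt.Println(backing.Pointer() != 0)           // true: the address growOne copies old bytes into
	}

The remaining hunks update the reflection tests and benchmarks accordingly.
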
@@ -11,6 +11,16 @@ func Test_reflect_str(t *testing.T) {
 	str := ""
 	iter.Read(&str)
 	if str != "hello" {
+		fmt.Println(iter.Error)
+		t.Fatal(str)
+	}
+}
+
+func Test_reflect_ptr_str(t *testing.T) {
+	iter := ParseString(`"hello"`)
+	var str *string
+	iter.Read(&str)
+	if *str != "hello" {
 		t.Fatal(str)
 	}
 }
@@ -53,9 +63,9 @@ func Test_reflect_struct_string_ptr(t *testing.T) {
 	}
 }
 
-func Test_reflect_array(t *testing.T) {
-	iter := ParseString(`{"hello", "world"}`)
-	array := []string{}
+func Test_reflect_slice(t *testing.T) {
+	iter := ParseString(`["hello", "world"]`)
+	array := make([]string, 0, 1)
 	iter.Read(&array)
 	if len(array) != 2 {
 		fmt.Println(iter.Error)
@@ -74,26 +84,34 @@ func Test_reflect_array(t *testing.T) {
 func Benchmark_jsoniter_reflect(b *testing.B) {
 	b.ReportAllocs()
 	for n := 0; n < b.N; n++ {
-		iter := ParseString(`{"field1": "hello", "field2": "world"}`)
-		struct_ := StructOfString{}
-		iter.Read(&struct_)
+		//iter := ParseString(`{"field1": "hello", "field2": "world"}`)
+		//struct_ := StructOfString{}
+		//iter.Read(&struct_)
+		iter := ParseString(`["hello", "world"]`)
+		array := make([]string, 0, 1)
+		iter.Read(&array)
 	}
 }
 
 func Benchmark_jsoniter_direct(b *testing.B) {
 	b.ReportAllocs()
 	for n := 0; n < b.N; n++ {
-		iter := ParseString(`{"field1": "hello", "field2": "world"}`)
-		struct_ := StructOfString{}
-		for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
-			switch field {
-			case "field1":
-				struct_.field1 = iter.ReadString()
-			case "field2":
-				struct_.field2 = iter.ReadString()
-			default:
-				iter.Skip()
-			}
-		}
+		//iter := ParseString(`{"field1": "hello", "field2": "world"}`)
+		//struct_ := StructOfString{}
+		//for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
+		//	switch field {
+		//	case "field1":
+		//		struct_.field1 = iter.ReadString()
+		//	case "field2":
+		//		struct_.field2 = iter.ReadString()
+		//	default:
+		//		iter.Skip()
+		//	}
+		//}
+		iter := ParseString(`["hello", "world"]`)
+		array := make([]string, 0, 2)
+		for iter.ReadArray() {
+			array = append(array, iter.ReadString())
+		}
 	}
 }
@@ -101,7 +119,9 @@ func Benchmark_jsoniter_direct(b *testing.B) {
 func Benchmark_json_reflect(b *testing.B) {
 	b.ReportAllocs()
 	for n := 0; n < b.N; n++ {
-		struct_ := StructOfString{}
-		json.Unmarshal([]byte(`{"field1": "hello", "field2": "world"}`), &struct_)
+		//struct_ := StructOfString{}
+		//json.Unmarshal([]byte(`{"field1": "hello", "field2": "world"}`), &struct_)
+		array := make([]string, 0, 2)
+		json.Unmarshal([]byte(`["hello", "world"]`), &array)
 	}
 }
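
With all three benchmarks now decoding the same `["hello", "world"]` input, the reflect-based path, the hand-written ReadArray loop, and encoding/json can be compared directly with the standard tooling, e.g. `go test -bench . -benchmem` (b.ReportAllocs already enables per-op allocation counts for these functions).
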