
support slice

Tao Wen 2016-12-04 20:50:55 +08:00
parent 33e3df45dd
commit 7bb029bca5
2 changed files with 139 additions and 59 deletions


@ -19,15 +19,18 @@ func (decoder *stringDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*string)(ptr)) = iter.ReadString()
}
type stringOptionalDecoder struct {
type optionalDecoder struct {
valueType reflect.Type
valueDecoder Decoder
}
func (decoder *stringOptionalDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
func (decoder *optionalDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNull() {
*((**string)(ptr)) = nil
*((*unsafe.Pointer)(ptr)) = nil
} else {
result := iter.ReadString()
*((**string)(ptr)) = &result
value := reflect.New(decoder.valueType)
decoder.valueDecoder.decode(unsafe.Pointer(value.Pointer()), iter)
*((*uintptr)(ptr)) = value.Pointer()
}
}
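The optional decoder no longer assumes a *string target: it allocates a fresh value with reflect.New, lets the element decoder fill it, and stores the resulting pointer into the slot. A minimal standalone sketch of that pattern follows; the storeNewValue helper is illustrative, not part of the commit.

package main

import (
	"fmt"
	"reflect"
	"unsafe"
)

// storeNewValue allocates a zeroed T with reflect.New, lets fill write into
// it, then stores the new *T through the pointer slot at ptr.
func storeNewValue(ptr unsafe.Pointer, valueType reflect.Type, fill func(unsafe.Pointer)) {
	value := reflect.New(valueType)
	fill(unsafe.Pointer(value.Pointer()))
	*((*unsafe.Pointer)(ptr)) = unsafe.Pointer(value.Pointer())
}

func main() {
	var target *string
	storeNewValue(unsafe.Pointer(&target), reflect.TypeOf(""), func(p unsafe.Pointer) {
		*(*string)(p) = "hello"
	})
	fmt.Println(*target) // hello
}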
@ -56,34 +59,88 @@ func (decoder *structFieldDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
decoder.fieldDecoder.decode(unsafe.Pointer(fieldPtr), iter)
}
var DECODER_STRING *stringDecoder
var DECODER_OPTIONAL_STRING *stringOptionalDecoder
var DECODERS_STRUCT unsafe.Pointer
type sliceDecoder struct {
sliceType reflect.Type
elemType reflect.Type
elemDecoder Decoder
}
func addStructDecoderToCache(cacheKey string, decoder *structDecoder) {
// sliceHeader is a safe version of SliceHeader used within this package.
type sliceHeader struct {
Data unsafe.Pointer
Len int
Cap int
}
func (decoder *sliceDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
slice := (*sliceHeader)(ptr)
slice.Len = 0
for iter.ReadArray() {
offset := uintptr(slice.Len) * decoder.elemType.Size()
growOne(slice, decoder.sliceType, decoder.elemType)
dataPtr := uintptr(slice.Data) + offset
decoder.elemDecoder.decode(unsafe.Pointer(dataPtr), iter)
}
}
// growOne grows the slice so that it can hold one more value, allocating
// more capacity if needed.
func growOne(slice *sliceHeader, sliceType reflect.Type, elementType reflect.Type) {
newLen := slice.Len + 1
if newLen <= slice.Cap {
slice.Len = newLen
return
}
newCap := slice.Cap
if newCap == 0 {
newCap = 1
} else {
for newCap < newLen {
if slice.Len < 1024 {
newCap += newCap
} else {
newCap += newCap / 4
}
}
}
dst := unsafe.Pointer(reflect.MakeSlice(sliceType, newLen, newCap).Pointer())
originalBytesCount := uintptr(slice.Len) * elementType.Size()
srcPtr := (*[1<<30]byte)(slice.Data)
dstPtr := (*[1<<30]byte)(dst)
for i := uintptr(0); i < originalBytesCount; i++ {
dstPtr[i] = srcPtr[i]
}
slice.Len = newLen
slice.Cap = newCap
slice.Data = dst
}
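growOne mirrors the runtime's append policy: capacity starts at 1, doubles while the slice is short, and grows by roughly 25% once it holds 1024 or more elements; the old backing array is then copied byte by byte into the new one. A standalone sketch of just the capacity schedule; nextCap is an illustrative helper, not in the commit.

package main

import "fmt"

// nextCap returns the capacity after appending one element to a slice with
// the given length and capacity, following growOne's growth policy.
func nextCap(length, capacity int) int {
	newLen := length + 1
	if newLen <= capacity {
		return capacity // room left, no reallocation
	}
	newCap := capacity
	if newCap == 0 {
		newCap = 1
	}
	for newCap < newLen {
		if length < 1024 {
			newCap += newCap // double while small
		} else {
			newCap += newCap / 4 // then grow by ~25%
		}
	}
	return newCap
}

func main() {
	length, capacity := 0, 0
	for i := 0; i < 6; i++ {
		capacity = nextCap(length, capacity)
		length++
		fmt.Println(length, capacity) // 1 1, 2 2, 3 4, 4 4, 5 8, 6 8
	}
}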
var DECODER_STRING *stringDecoder
var DECODERS unsafe.Pointer
func addDecoderToCache(cacheKey string, decoder Decoder) {
retry := true
for retry {
ptr := atomic.LoadPointer(&DECODERS_STRUCT)
cache := *(*map[string]*structDecoder)(ptr)
copy := map[string]*structDecoder{}
ptr := atomic.LoadPointer(&DECODERS)
cache := *(*map[string]Decoder)(ptr)
copy := map[string]Decoder{}
for k, v := range cache {
copy[k] = v
}
copy[cacheKey] = decoder
retry = !atomic.CompareAndSwapPointer(&DECODERS_STRUCT, ptr, unsafe.Pointer(&copy))
retry = !atomic.CompareAndSwapPointer(&DECODERS, ptr, unsafe.Pointer(&copy))
}
}
func getStructDecoderFromCache(cacheKey string) *structDecoder {
ptr := atomic.LoadPointer(&DECODERS_STRUCT)
cache := *(*map[string]*structDecoder)(ptr)
func getDecoderFromCache(cacheKey string) Decoder {
ptr := atomic.LoadPointer(&DECODERS)
cache := *(*map[string]Decoder)(ptr)
return cache[cacheKey]
}
func init() {
DECODER_STRING = &stringDecoder{}
DECODER_OPTIONAL_STRING = &stringOptionalDecoder{}
atomic.StorePointer(&DECODERS_STRUCT, unsafe.Pointer(&map[string]*structDecoder{}))
atomic.StorePointer(&DECODERS, unsafe.Pointer(&map[string]Decoder{}))
}
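The decoder cache is shared by all goroutines without a lock: readers atomically load a pointer to an immutable map, and writers copy the map, add the new entry, and compare-and-swap the pointer, retrying if another writer got there first. A self-contained sketch of the same copy-on-write pattern; the names here are illustrative, not from the commit.

package main

import (
	"fmt"
	"sync/atomic"
	"unsafe"
)

var cachePtr unsafe.Pointer // points at an immutable map[string]int

func init() {
	atomic.StorePointer(&cachePtr, unsafe.Pointer(&map[string]int{}))
}

func load(key string) (int, bool) {
	m := *(*map[string]int)(atomic.LoadPointer(&cachePtr))
	v, ok := m[key]
	return v, ok
}

func store(key string, value int) {
	for {
		old := atomic.LoadPointer(&cachePtr)
		copied := map[string]int{}
		for k, v := range *(*map[string]int)(old) {
			copied[k] = v
		}
		copied[key] = value
		if atomic.CompareAndSwapPointer(&cachePtr, old, unsafe.Pointer(&copied)) {
			return
		}
	}
}

func main() {
	store("a", 1)
	v, ok := load("a")
	fmt.Println(v, ok) // 1 true
}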
// emptyInterface is the header for an interface{} value.
@ -94,13 +151,19 @@ type emptyInterface struct {
func (iter *Iterator) Read(obj interface{}) {
type_ := reflect.TypeOf(obj)
decoder, err := decoderOfType(type_)
if err != nil {
iter.Error = err
return
cacheKey := type_.String()
cachedDecoder := getDecoderFromCache(cacheKey)
if cachedDecoder == nil {
decoder, err := decoderOfType(type_)
if err != nil {
iter.Error = err
return
}
cachedDecoder = decoder
addDecoderToCache(cacheKey, decoder)
}
e := (*emptyInterface)(unsafe.Pointer(&obj))
decoder.decode(e.word, iter)
cachedDecoder.decode(e.word, iter)
}
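Read casts the interface{} argument to an emptyInterface header to reach the data word, which is the pointer the caller passed in; the decoder then writes straight through that pointer. A standalone sketch of the trick; the two-word layout below is an assumption about the runtime's interface representation, matching the emptyInterface type in this diff.

package main

import (
	"fmt"
	"unsafe"
)

// emptyInterface mirrors the assumed (type, word) layout of an interface{}.
type emptyInterface struct {
	typ  unsafe.Pointer
	word unsafe.Pointer
}

func main() {
	s := "hello"
	var obj interface{} = &s // pass a pointer, as iter.Read expects
	e := (*emptyInterface)(unsafe.Pointer(&obj))
	// e.word is the *string stored in the interface, so we can write through it.
	*(*string)(e.word) = "world"
	fmt.Println(s) // world
}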
type prefix string
@ -139,7 +202,7 @@ func decoderOfPtr(type_ reflect.Type) (Decoder, error) {
func decoderOfOptional(type_ reflect.Type) (Decoder, error) {
switch type_.Kind() {
case reflect.String:
return DECODER_OPTIONAL_STRING, nil
return &optionalDecoder{type_, DECODER_STRING}, nil
default:
return nil, errors.New("expect string")
}
@ -147,25 +210,22 @@ func decoderOfOptional(type_ reflect.Type) (Decoder, error) {
func decoderOfStruct(type_ reflect.Type) (Decoder, error) {
cacheKey := type_.String()
cachedDecoder := getStructDecoderFromCache(cacheKey)
if cachedDecoder == nil {
fields := map[string]Decoder{}
for i := 0; i < type_.NumField(); i++ {
field := type_.Field(i)
decoder, err := decoderOfPtr(field.Type)
if err != nil {
return prefix(fmt.Sprintf("[%s]", field.Name)).addTo(decoder, err)
}
fields[field.Name] = &structFieldDecoder{field.Offset, decoder}
fields := map[string]Decoder{}
for i := 0; i < type_.NumField(); i++ {
field := type_.Field(i)
decoder, err := decoderOfPtr(field.Type)
if err != nil {
return prefix(fmt.Sprintf("{%s}", field.Name)).addTo(decoder, err)
}
cachedDecoder = &structDecoder{fields}
addStructDecoderToCache(cacheKey, cachedDecoder)
fields[field.Name] = &structFieldDecoder{field.Offset, decoder}
}
return cachedDecoder, nil
return &structDecoder{fields}, nil
}
func decoderOfSlice(type_ reflect.Type) (Decoder, error) {
fmt.Println(type_.Elem())
return nil, errors.New("n/a")
decoder, err := decoderOfPtr(type_.Elem())
if err != nil {
return prefix("[elem]").addTo(decoder, err)
}
return &sliceDecoder{type_, type_.Elem(), decoder}, nil
}
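With decoderOfSlice in place, Read can fill a slice directly. A short usage sketch matching the new Test_reflect_slice below; this is a hypothetical example function in the same package as the tests, not part of the commit.

func Example_reflect_slice() {
	iter := ParseString(`["hello", "world"]`)
	array := make([]string, 0, 1)
	iter.Read(&array)
	fmt.Println(array, iter.Error)
	// Output: [hello world] <nil>
}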


@ -11,6 +11,16 @@ func Test_reflect_str(t *testing.T) {
str := ""
iter.Read(&str)
if str != "hello" {
fmt.Println(iter.Error)
t.Fatal(str)
}
}
func Test_reflect_ptr_str(t *testing.T) {
iter := ParseString(`"hello"`)
var str *string
iter.Read(&str)
if *str != "hello" {
t.Fatal(str)
}
}
@ -53,9 +63,9 @@ func Test_reflect_struct_string_ptr(t *testing.T) {
}
}
func Test_reflect_array(t *testing.T) {
iter := ParseString(`{"hello", "world"}`)
array := []string{}
func Test_reflect_slice(t *testing.T) {
iter := ParseString(`["hello", "world"]`)
array := make([]string, 0, 1)
iter.Read(&array)
if len(array) != 2 {
fmt.Println(iter.Error)
@ -74,26 +84,34 @@ func Test_reflect_array(t *testing.T) {
func Benchmark_jsoniter_reflect(b *testing.B) {
b.ReportAllocs()
for n := 0; n < b.N; n++ {
iter := ParseString(`{"field1": "hello", "field2": "world"}`)
struct_ := StructOfString{}
iter.Read(&struct_)
//iter := ParseString(`{"field1": "hello", "field2": "world"}`)
//struct_ := StructOfString{}
//iter.Read(&struct_)
iter := ParseString(`["hello", "world"]`)
array := make([]string, 0, 1)
iter.Read(&array)
}
}
func Benchmark_jsoniter_direct(b *testing.B) {
b.ReportAllocs()
for n := 0; n < b.N; n++ {
iter := ParseString(`{"field1": "hello", "field2": "world"}`)
struct_ := StructOfString{}
for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
switch field {
case "field1":
struct_.field1 = iter.ReadString()
case "field2":
struct_.field2 = iter.ReadString()
default:
iter.Skip()
}
//iter := ParseString(`{"field1": "hello", "field2": "world"}`)
//struct_ := StructOfString{}
//for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
// switch field {
// case "field1":
// struct_.field1 = iter.ReadString()
// case "field2":
// struct_.field2 = iter.ReadString()
// default:
// iter.Skip()
// }
//}
iter := ParseString(`["hello", "world"]`)
array := make([]string, 0, 2)
for iter.ReadArray() {
array = append(array, iter.ReadString())
}
}
}
@ -101,7 +119,9 @@ func Benchmark_jsoniter_direct(b *testing.B) {
func Benchmark_json_reflect(b *testing.B) {
b.ReportAllocs()
for n := 0; n < b.N; n++ {
struct_ := StructOfString{}
json.Unmarshal([]byte(`{"field1": "hello", "field2": "world"}`), &struct_)
//struct_ := StructOfString{}
//json.Unmarshal([]byte(`{"field1": "hello", "field2": "world"}`), &struct_)
array := make([]string, 0, 2)
json.Unmarshal([]byte(`["hello", "world"]`), &array)
}
}