mirror of https://github.com/json-iterator/go.git synced 2025-06-15 22:50:24 +02:00

631 Commits

Author SHA1 Message Date
acfec88f7a Merge pull request #422 from JensErat/map-invalid-type
pass nested error in compatible configuration, fixes #388
2019-12-21 11:10:28 +08:00
e88512faf8 Merge pull request #423 from vano144/fix-attachments-on-stream
fix nil attachment on stream in custom encoder on sorted map
2019-12-21 11:09:53 +08:00
b681149eae Merge pull request #424 from aaronbee/sortKeysMapAllocations
Reduce allocations in sortKeysMapEncoder
2019-12-21 11:09:15 +08:00
d1af7639b3 Merge pull request #425 from liggitt/default-max-depth
Revert "Merge pull request #418 from bbrks/configurable_maxDepth"
2019-12-21 11:04:54 +08:00
7c9f8c2d20 Revert "Merge pull request #418 from bbrks/configurable_maxDepth"
This reverts commit 44a7e7340d, reversing
changes made to dc11f49689.
2019-12-19 19:06:29 -05:00
f814d6c0f1 Reduce allocations in sortKeysMapEncoder
Use one buffer for all values.
2019-12-03 11:55:47 -08:00
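The single-buffer idea generalizes to any encoder that serializes many values: write them all into one shared stream buffer and remember offsets, instead of borrowing a fresh sub-stream per value. A minimal sketch of that pattern using jsoniter's public Stream API (the `values`/`offsets` names are illustrative, not the library's internal sortKeysMapEncoder):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	api := jsoniter.ConfigDefault
	stream := api.BorrowStream(nil)
	defer api.ReturnStream(stream)

	values := []interface{}{1, "two", []int{3}}
	offsets := []int{0}

	// Encode every value into the same stream buffer and record where each
	// value ends, rather than allocating a separate buffer per value.
	for _, v := range values {
		stream.WriteVal(v)
		offsets = append(offsets, len(stream.Buffer()))
	}

	buf := stream.Buffer()
	for i := 0; i+1 < len(offsets); i++ {
		fmt.Printf("%s\n", buf[offsets[i]:offsets[i+1]])
	}
}
```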
aba8654400 fix nil attachment on stream in custom encoder on sorted map 2019-11-28 17:39:42 +03:00
a1c9557592 pass nested error in compatible configuration
When invalid types inside a map were marshalled (in general, as soon as
sorted maps had been configured), the error message was not
propagated out of the map's `subStream`.

Also fix and re-enable the channel test, which now resembles the
behavior of `encoding/json` and tests both default and compatible
configurations.

Signed-off-by: Jens Erat <email@jenserat.de>
2019-11-22 16:56:59 +01:00
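The compatible configuration encodes sorted maps through a sub-stream, so an unsupported value type buried in a map must surface as a Marshal error. A small sketch of the behavior this fix guarantees (a channel is used here only as an example of an unmarshalable value):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Sorted maps are encoded via a sub-stream; the error from the invalid
	// value must propagate out instead of being silently dropped.
	m := map[string]interface{}{"bad": make(chan int)}
	_, err := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(m)
	fmt.Println(err) // expect an "unsupported type" style error, as with encoding/json
}
```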
44a7e7340d Merge pull request #418 from bbrks/configurable_maxDepth
Add MaxDepth as a config option
2019-11-12 22:47:28 +08:00
2834c7e43c Remove large test values that fail on 32-bit architectures 2019-11-11 16:35:50 +00:00
d296277d5c Adds MaxDepth config option
Defaults to 10,000 to match the existing maxDepth constant everywhere,
except when using `ConfigCompatibleWithStandardLibrary`, which retains
the limitless depth (and can cause a stack overflow).

Added tests for the new config, and also up to jsoniter's stack overflow limit.
2019-11-11 16:13:59 +00:00
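Note that #425 earlier in this log reverts the configurable option, while the fixed default limit from #410 remains. A sketch of what the depth guard protects against, assuming input nested deeper than the default limit of 10,000 produces an error rather than a stack overflow (the exact error text is an assumption):

```go
package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Arrays nested deeper than the decoder's depth limit should return an
	// error instead of exhausting the stack.
	const depth = 10001
	doc := strings.Repeat("[", depth) + strings.Repeat("]", depth)

	var v interface{}
	err := jsoniter.Unmarshal([]byte(doc), &v)
	fmt.Println(err)
}
```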
dc11f49689 Merge pull request #416 from jarredhawkins/issue-415
Ignore unnamed literals in structs
2019-10-30 08:35:33 +08:00
83f7b825b3 Unnamed struct literals 2019-10-28 23:05:10 -07:00
03217c3e97 Merge pull request #410 from liggitt/stack
Limit nesting depth
2019-10-12 21:07:04 +08:00
908eaed151 Merge pull request #408 from onelrdm/master
skip - tag before splitting parts
2019-10-12 21:05:04 +08:00
eec24895fe Limit nesting depth 2019-10-08 11:17:01 -04:00
1ba732a07d skip - tag before splitting parts 2017-09-28 17:17:44 +08:00
819acad769 Merge pull request #398 from teou/master
use json.Marshaler then trim the last '\n' in reflect_marshaler
2019-09-23 14:02:24 +08:00
695ec2b83b Merge pull request #406 from bbrks/fix_nil_map_encoding
Fixes #405 - Encode nil map into null
2019-09-23 13:59:22 +08:00
028e2ef2bd Fixes #405 - Encode nil map into null 2019-09-19 13:11:30 +01:00
976454858b use json.Marshaler then trim the last '\n' in reflect_marshaler
N/A
2019-08-14 10:10:02 +08:00
27518f6661 Merge pull request #373 from ernado/append-skip
fix #372: add AppendSkip iterator method
2019-06-22 00:12:01 +08:00
94869abf43 Merge pull request #368 from alextomaili/fix-memory-allocation-overhead
allocate string for error description only if it is really required
2019-06-22 00:11:00 +08:00
459f0e30ae fix #37: add SkipAndAppendBytes iterator method 2019-06-10 12:40:05 +03:00
0039f4ac3d Merge pull request #371 from nikhita/byte-base64-encode
Don't marshal empty byte or uint8 slice as null
2019-06-03 19:06:22 +02:00
fb5614a4ca Don't marshal empty byte or uint8 slice as null
[]byte and []uint8 are encoded as base64-encoded strings. Accordingly, a non-nil
empty slice should not be marshalled as null, but rather as "".

This restores compatibility with the standard library.
2019-06-03 16:19:17 +05:30
f71b9090aa allocate string for error description only if it is really required 2019-05-27 03:02:21 +03:00
08047c174c fix #365, return error for +inf -inf and NaN 2019-05-23 13:57:43 +08:00
68347ec4d6 Merge pull request #366 from stephen-obashitech/master
Fix typo in UnmarshalFromString documentation
2019-05-22 13:56:51 +08:00
0fd91468bb Fix typo in UnmarshalFromString documentation 2019-05-21 12:48:31 +01:00
1bc9828b4f Merge pull request #361 from lggomez/master
Add go module definition
2019-05-19 23:23:02 +08:00
24c3d57281 Add go module definition 2019-04-25 17:40:48 -03:00
0ff49de124 update README 2019-03-06 22:29:09 +08:00
5bc9320502 Merge pull request #316 from proemergotech/master
fix #308 do NOT skip embedded structs without tag when OnlyTaggedField is set to true
2019-02-08 15:56:42 +08:00
f64ce68b6e Merge pull request #338 from dvrkps/master
Clean go vet error and ineffassign warnings.
2019-01-14 23:53:30 +08:00
2d42ff74dd Merge pull request #337 from denverdino/fix-encode-with-MarshalJSON
Fix the incompatible encoding #336
2019-01-14 23:52:16 +08:00
3a023a5fbc clean readPositiveFloat64 2019-01-10 18:00:15 +01:00
16aef10b2b clean readPositiveFloat32 2019-01-10 17:55:28 +01:00
ae4c002f78 rename ExampleMyKey 2019-01-10 17:51:25 +01:00
e4aa2ec063 Fix the incompatible encoding 2019-01-03 18:19:22 +08:00
d05f387f50 fix #317, try parse as BigFloat if overflow 2018-11-12 14:45:56 +08:00
a9403d25cd fix #308 do NOT skip embedded structs without tag when OnlyTaggedField is set to true 2018-10-29 12:00:51 +01:00
05d041de10 fix #313 support json marshaller type as map key 2018-10-24 23:28:41 +08:00
5916df66b3 fix #311 handle nil any 2018-10-24 21:05:37 +08:00
2433035e51 Merge pull request #304 from Quasilyte/quasilyte/emptyFallthrough
use multi-value case clause instead of fallthrough
2018-09-14 09:48:43 +08:00
6dfc0bf2dd Merge pull request #305 from Quasilyte/quasilyte/assignOp
simplify `x = x <op> y` to `x <op>= y`
2018-09-14 09:48:25 +08:00
b9be8dd373 Merge pull request #306 from Quasilyte/quasilyte/underef
remove redundant dereferencing expressions
2018-09-14 09:47:54 +08:00
b8d78b6aaf Merge pull request #307 from Quasilyte/quasilyte/commentedOutCode
any_tests: remove commented-out code
2018-09-14 09:47:25 +08:00
7109b5e7dd any_tests: remove commented-out code
Found using https://go-critic.github.io/overview#commentedOutCode-ref
2018-09-13 21:57:53 +03:00
4cc76529e8 remove redundant dereferencing expressions
Found using https://go-critic.github.io/overview#underef-ref
2018-09-13 21:51:59 +03:00
c5ddac9dc3 simplify x = x <op> y to x <op>= y
Found using https://go-critic.github.io/overview#assignOp-ref
2018-09-13 21:48:13 +03:00
f76d712086 use multi-value case clause instead of fallthrough
Found using https://go-critic.github.io/overview#emptyFallthrough-ref
2018-09-13 21:43:37 +03:00
1624edc445 fix #295 decoder more was not compatible with standard library 2018-08-06 14:07:27 +08:00
5d789e5e02 fix #291 omit empty was not handled properly for json raw message 2018-08-06 13:58:33 +08:00
0260c89b54 fix #286 calcHash should use byte not rune to calc hash 2018-08-06 13:23:06 +08:00
10a568c511 fix #293 copy extensions 2018-07-22 11:51:51 +08:00
ab8a2e0c74 fix #276 allow rename when set naming strategy 2018-07-01 15:16:28 +08:00
2fbdfbb595 merge 2018-07-01 13:06:34 +08:00
720ab8dc7f add tests for #283 2018-07-01 13:05:25 +08:00
f2b4162afb Merge pull request #285 from nikhita/fix-case-sensitivity
Fix case sensitivity
2018-06-12 13:28:35 -07:00
3830516ed0 Fix case sensitivity for nested fields 2018-06-12 11:27:24 +05:30
7cceb6c2e3 Merge pull request #282 from caesarxuchao/optional-case-sensitivity
Make case sensitivity optional
2018-06-10 17:13:47 +08:00
b92cf78708 Make case sensitivity optional. Fix
https://github.com/kubernetes/kubernetes/issues/64612
2018-06-07 21:01:05 -07:00
8744d7c5c7 \n should not be ignored in base64 decode 2018-05-26 09:43:29 +08:00
37cc313d18 fix #274, unescape before base64 decode 2018-05-26 09:38:52 +08:00
2ddf6d7582 Merge pull request #266 from ceshihao/fix_base64_with_whitespace
fix base64 contains newline case
2018-04-24 08:46:23 +08:00
6a6742f0a2 fix base64 contains newline characters \r or \n 2018-04-23 23:10:55 +08:00
6c702ce12a fix #264 check io.EOF when test decoder.More 2018-04-20 16:10:56 +08:00
f88871b601 fix #263, support empty string as 0 in fuzz mode 2018-04-18 16:34:54 +08:00
f246f80f14 fix #260, support rename for extra.SupportPrivateFields 2018-04-18 16:28:55 +08:00
51dd70305b add more test for #252 2018-04-18 16:22:47 +08:00
a949c42748 fix #261 should load from reader 2018-04-18 16:11:14 +08:00
f89479f5c0 Merge pull request #257 from ash2k/release-writer
Release writer to enable GC
2018-04-08 08:25:46 +08:00
b858ec296c Release writer to enable GC 2018-04-07 21:40:08 +10:00
885a41a0a6 Merge branch 'master' of https://github.com/json-iterator/go 2018-04-03 13:41:12 +08:00
9e9a97040e always benchmark yourself 2018-04-03 13:41:01 +08:00
fb4d53e4cc Merge pull request #255 from bboreham/error-test
Add a test for input errors, and fix one bug that it finds
2018-04-02 13:50:44 +08:00
b53656d459 Check that a struct ends with closing brace 2018-04-01 22:02:44 +00:00
8f27a81d90 Add a test for input errors
Send various malformed JSON strings into the decoder for each type,
and check we get an error each time.
2018-04-01 22:01:21 +00:00
4930b053b8 explit test case sensitive for #252 2018-03-24 22:38:32 +08:00
06e0f9391e fix #250 case insensitive field match 2018-03-20 21:43:30 +08:00
ca39e5af3e support encode map[interface{}]interface{} 2018-03-15 21:28:16 +08:00
39acec93e0 expose DecoderOf and EncoderOf 2018-03-14 23:18:20 +08:00
25fa392355 fix #245, always reuse existing value even UseNumber 2018-03-01 19:23:20 +08:00
d51e841de0 fix build on 1.8 2018-02-28 20:29:23 +08:00
3353055b2a use concurrent.Map for 1.8 support 2018-02-28 17:11:57 +08:00
455b3f8bb8 move reflect2 from plz to modern-go 2018-02-28 17:09:30 +08:00
2a93f9003e fix #244 use BinaryAsStringExtension to make []byte pretty, while the output is valid json, but it can not be decoded by other json codec, as \x01 is decoded as \x01 by them, which is not original input 2018-02-27 12:40:48 +08:00
9472474ffd test []byte behavior 2018-02-27 12:04:11 +08:00
ad83167dc6 fix #243 fuzzy decoder should take null as valid input 2018-02-26 23:22:22 +08:00
fff342fd04 gofmt 2018-02-24 22:04:41 +08:00
8d6662b81b fix #242 add CreateMapKeyEncoder and CreateMapKeyDecoder to extension spi 2018-02-24 22:04:11 +08:00
a377e2656b add map key example 2018-02-23 18:20:14 +08:00
0ac74bba4a upgrade reflect2 2018-02-23 08:20:31 +08:00
ebe943a4a6 fix #241, support 32bit platform 2018-02-23 08:12:45 +08:00
414d0307c9 fix struct decoder report error 2018-02-22 13:30:59 +08:00
86e9fd72bc update pkg 2018-02-22 11:48:56 +08:00
be70f29b04 detect remaining bytes 2018-02-22 10:37:32 +08:00
a3fdd37b9a use sync.Pool 2018-02-22 10:29:29 +08:00
d346ea6e55 get encoder without get type first 2018-02-22 10:20:19 +08:00
820ec30bd6 get decoder without get type first 2018-02-22 10:18:27 +08:00
df8295a48a fix 1.8 2018-02-22 10:13:38 +08:00
99fc16a363 use reflect2 to replace reflect 2018-02-22 10:12:08 +08:00
a3866383f5 support recursive struct 2018-02-21 17:59:41 +08:00
2fcbb23d96 rewrite how eface and iface are handled 2018-02-21 12:16:50 +08:00
ea6403326b fix #239, empty slice 2018-02-21 07:24:22 +08:00
404d90796f move type declaration into separate files 2018-02-20 23:08:58 +08:00
b79587753b move any codec 2018-02-20 23:04:04 +08:00
63ea5e3131 move encoder/decoder of native 2018-02-20 22:55:31 +08:00
895a19f2dc move json raw message 2018-02-20 22:38:35 +08:00
6327145300 move json number impl 2018-02-19 23:13:33 +08:00
c99d73acd0 rename 2018-02-19 23:08:01 +08:00
3d39af6dd9 remove feature prefix 2018-02-19 23:04:25 +08:00
a016e87b9f move any codec 2018-02-19 23:01:19 +08:00
08218647c3 use reflect2 to implement map decoder 2018-02-19 22:53:42 +08:00
d6f02cbd48 remove sliceHeader 2018-02-19 14:39:57 +08:00
b53aa13eb0 rename 2018-02-19 14:30:44 +08:00
e322da5531 rename 2018-02-19 14:30:23 +08:00
1d41f3c0ed only consider ptr type if not root 2018-02-19 14:30:01 +08:00
29604bf5c3 use reflect2 decode slice 2018-02-19 14:18:42 +08:00
cbc1786a76 change slice encoder to use reflect2 2018-02-18 23:27:34 +08:00
5a696808d6 fix any codec 2018-02-18 22:57:01 +08:00
d8e64aa825 support TextMarshaler as map key 2018-02-18 22:49:06 +08:00
577ddede74 use extension to implement configs 2018-02-18 21:14:37 +08:00
43d9384d67 fix marshaler support for iface case 2018-02-18 21:05:42 +08:00
2074f25bd3 use extension to implement EscapeHtml config option 2018-02-17 22:55:35 +08:00
ef3038593b check nil for interface{} 2018-02-17 22:33:09 +08:00
9dafbc667f when embedded ptr is nil, the fields should be omitted 2018-02-16 17:32:41 +08:00
a7a34507ab use reflect2 for json.Marshaler 2018-02-16 15:42:37 +08:00
0e2b54800a remove EncodeInterface 2018-02-14 15:04:23 +08:00
e7c7f3b337 fix coverage 2018-02-14 14:06:32 +08:00
75810179f6 remove n from stream 2018-02-14 13:58:51 +08:00
6a8f9fa342 Merge branch 'float-allocs' of git://github.com/brian-brazil/go into 1.1 2018-02-14 11:37:27 +08:00
24bb2eee9f fix #236 case sensitive when both upper case and lower case presents 2018-02-14 11:33:17 +08:00
64cc784089 remove special handling for field 2018-02-14 10:31:55 +08:00
477be43d00 consolidate more tests 2018-02-14 10:13:34 +08:00
a8708bca85 consolidate more tests 2018-02-14 08:58:59 +08:00
658ff9ef15 consolidate more tests 2018-02-14 08:48:12 +08:00
64c1c67885 consolidate more tests 2018-02-14 08:39:18 +08:00
e3bc511e5a consolidate more tests 2018-02-14 08:28:17 +08:00
8fa357ab7b consolidate more tests 2018-02-13 23:49:40 +08:00
761ce8cce2 consolidate more tests 2018-02-13 20:58:29 +08:00
c3b6c1e845 consolidate skip tests 2018-02-13 20:41:21 +08:00
0ed9de94f2 support asymmetric tests 2018-02-13 20:25:27 +08:00
6fded6eb5f consolidate struct tags tests 2018-02-13 17:22:47 +08:00
dc3395f770 consolidate struct tests 2018-02-13 17:06:28 +08:00
bd4e013f98 consolidate slice tests 2018-02-13 16:20:08 +08:00
48a4a1e4db consolidate map tests 2018-02-13 16:07:14 +08:00
9bc223734a consolidate marshaler tests 2018-02-13 16:00:08 +08:00
eb9aeccee2 consolidate builtin tests 2018-02-13 15:48:39 +08:00
28adca2a14 consolidate array test 2018-02-13 15:43:10 +08:00
a9b3f36b2f add test framework 2018-02-13 15:32:21 +08:00
1e8e785321 Remove allocs from WriteFloat32/WriteFloat64
The use of strconv.FormatFloat causes a string allocation,
by setting aside a reusable buffer and using strconv.AppendFloat
this can be avoided.

Before:
BenchmarkRespond-4           300           5392189 ns/op          618936 B/op      20010 allocs/op

After:
BenchmarkRespond-4           300           4713746 ns/op          139744 B/op         10 allocs/op

This benchmark is using a custom encoder that calls WriteFloat64 20k
times, which is the bulk of the work.
2018-02-07 17:30:57 +00:00
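The standard-library trick behind this change applies anywhere floats are written in a loop: strconv.FormatFloat allocates a new string per call, while strconv.AppendFloat writes into a caller-owned byte slice that can be reused. A minimal standalone sketch:

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	buf := make([]byte, 0, 32) // reusable scratch buffer

	for _, f := range []float64{1.5, 2.25, 3.125} {
		buf = buf[:0] // reset length, keep capacity: no new allocation
		buf = strconv.AppendFloat(buf, f, 'f', -1, 64)
		fmt.Printf("%s\n", buf)
	}
}
```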
002b5ae342 fix tests 2018-02-05 23:45:42 +08:00
07f99a1124 fix build 2018-02-05 23:05:57 +08:00
71f74dc71e implement #230 DisallowUnknownFields option added 2018-02-05 23:03:53 +08:00
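DisallowUnknownFields mirrors the standard library's Decoder option: with it set, decoding fails when the input contains a field the target struct does not declare. A short usage sketch (struct and field names are illustrative):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type user struct {
	Name string `json:"name"`
}

func main() {
	api := jsoniter.Config{DisallowUnknownFields: true}.Froze()

	var u user
	err := api.Unmarshal([]byte(`{"name":"a","extra":1}`), &u)
	fmt.Println(err) // expect an error about the unknown field "extra"
}
```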
7990317be5 gofmt 2018-02-05 22:45:04 +08:00
9edd73f752 fix build 2018-02-05 22:26:39 +08:00
3d5ee1098a Merge branch 'master' of https://github.com/json-iterator/go 2018-02-05 21:43:52 +08:00
ee8cfb7547 cache frozenConfig 2018-02-05 21:43:37 +08:00
bca911dae0 Update README.md 2018-01-28 22:27:09 +08:00
28452fcdec the copy-on-write cache does not behave the same, as a map read will modify the underlying map. Use sync.Map for Go 1.9 and above, and a mutex if sync.Map is not available 2018-01-28 17:00:11 +08:00
ea8c33040f fix #228 2018-01-27 16:25:48 +08:00
358cfc3929 Merge branch 'master' of https://github.com/json-iterator/go 2018-01-25 14:48:02 +08:00
c39a632e65 fix #227, fix empty json.Number 2018-01-25 14:47:50 +08:00
e31252f2e2 Merge pull request #225 from mgood/empty-array-fix
Fix encoding 0-length arrays
2018-01-23 23:31:07 +08:00
807e4a8b20 Optimize 0-length array case
Instead of checking the array length in encode, this can be checked up
front in `encoderOfArray` since the array type has a fixed length
determined at compile time. So return an `emptyArrayEncoder` that simply
writes an empty array to the stream.
2018-01-22 14:03:50 -08:00
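Because a Go array's length is part of its type, a zero-length array can be handled by a dedicated encoder chosen once at reflection time. A minimal sketch of such an encoder against jsoniter's exported ValEncoder interface as it exists today; the encoder inside the library may differ:

```go
package sketch

import (
	"unsafe"

	jsoniter "github.com/json-iterator/go"
)

// emptyArrayEncoder is selected when the encoder for a [0]T type is built,
// so Encode never needs to inspect the value at all.
type emptyArrayEncoder struct{}

func (emptyArrayEncoder) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
	stream.WriteEmptyArray() // always "[]"
}

func (emptyArrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
	return true
}

// compile-time check that the sketch satisfies the interface
var _ jsoniter.ValEncoder = emptyArrayEncoder{}
```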
e78b7e89b6 Merge branch 'master' of https://github.com/json-iterator/go 2018-01-21 20:59:32 +08:00
945d1aaa19 fix #140 uintptr will not lock the address from GC 2018-01-21 20:59:18 +08:00
ba3857729b Fix encoding 0-length arrays
The array encoder assumed that arrays had at least one value, so it
would serialize them with a zero-value for the array, such as `[0]`.

This adds a test to reproduce the issue, and updates the encoder to
write an empty array if the length is 0.
2018-01-16 11:02:03 -08:00
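With the fix, a zero-length array encodes the same way as in encoding/json. A quick check:

```go
package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var a [0]int
	std, _ := json.Marshal(a)
	iter, _ := jsoniter.Marshal(a)
	fmt.Printf("%s %s\n", std, iter) // both should print: [] []
}
```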
c3ed5e85e0 Merge pull request #222 from neverlee/mydev
Add an OnlyTaggedField option
2018-01-09 18:30:25 +08:00
c27f6f9350 config: add OnlyTaggedField config, only process tagged fields in struct 2018-01-09 17:29:47 +08:00
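OnlyTaggedField restricts struct encoding and decoding to fields that carry a json tag. A usage sketch (type and field names are illustrative):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type record struct {
	Tagged   string `json:"tagged"`
	Untagged string // no tag: skipped when OnlyTaggedField is true
}

func main() {
	api := jsoniter.Config{OnlyTaggedField: true}.Froze()
	out, _ := api.Marshal(record{Tagged: "a", Untagged: "b"})
	fmt.Println(string(out)) // expect: {"tagged":"a"}
}
```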
0ab880662f fix #219 should check real value for empty instead of just the pointer for nested field 2018-01-07 13:57:46 +08:00
6dad2de6cc fix build 2018-01-04 17:18:16 +08:00
11c1cce0d8 fix #217 when input is null, non-decodable type should not be considered as error, to be compatible with stdlib 2018-01-04 16:19:26 +08:00
96fcb84835 fix #215 lazy load more 2017-12-23 10:52:17 +08:00
e7a8aea845 Merge branch 'master' of https://github.com/json-iterator/go 2017-12-21 22:18:40 +08:00
60a9df5ebc fix #214 report EOF like stdlib 2017-12-21 22:18:28 +08:00
7b060ec866 Merge pull request #210 from coocood/master
add ReadNumber for Iterator.
2017-12-18 08:22:47 +09:00
25f147f530 add ReadNumber for Iterator. 2017-12-17 16:44:04 +08:00
a9b9c73b4d fix #207 delay unsupported type error reporting 2017-12-15 10:13:11 +08:00
e0df39fda2 fix #206, do not allow nil pointer as unmarshal input 2017-12-14 17:18:05 +08:00
13f86432b8 do not use defer() in read int 2017-12-12 18:52:41 +08:00
d2a7335211 fix #202 #203 #204 map encoder not properly initialized 2017-12-08 21:18:59 +08:00
b2a706d14b reverse last commit, need a better fix 2017-12-08 06:15:49 +08:00
23078876c5 fix #203 consider MarshalJSON as non empty 2017-12-07 23:20:43 +08:00
051434fab7 fix #198, use dep for vendoring 2017-11-30 10:42:24 +08:00
be6688fc1a fix #200, do not use symbolic link in the code 2017-11-30 10:34:05 +08:00
ff2b70c1db support config level extension 2017-11-23 00:09:35 +08:00
f7279a603e fix out of range 2017-11-15 23:34:21 +08:00
9f088cbcc4 fix #195 when decode float as int, report it clearly 2017-11-15 23:25:12 +08:00
3c0e5762c4 fix #196 do not hard code 1 << 49 2017-11-15 23:15:31 +08:00
d394a135a1 #197 fix placeholder encoder to use EncodeInterface; WriteToStream is unsafe when the real encoder is unknown 2017-11-15 22:56:23 +08:00
9fddff05f0 try to fix #194 with larger array 2017-11-11 08:31:44 +08:00
b1b003864e expose OptionalEncoder&OptionalDecoder; add attachment to Stream&Iterator for customized decoder/encoder 2017-11-08 11:41:45 +08:00
aed5a81f09 fix #190 handle empty input 2017-10-31 22:47:02 +08:00
f1258b01aa fix #191 do not always assume the object field is simple string 2017-10-31 22:38:41 +08:00
fbd210edfc Merge pull request #189 from ggaaooppeenngg/compatible-with-map
Fix standard compatibility
2017-10-26 18:39:38 -05:00
640251ab91 Fix standard compatibility
Non-nil but empty map with omitempty should be ignored.

Signed-off-by: Peng Gao <peng.gao.dut@gmail.com>
2017-10-27 01:43:41 +08:00
06b2a7cf1d Merge pull request #188 from ggaaooppeenngg/compatible
Fix standard compatibility
2017-10-26 06:41:01 -05:00
5fffb9b8f7 Fix standard compatibility
Encode adds a trailing newline at the end.

Signed-off-by: Peng Gao <peng.gao.dut@gmail.com>
2017-10-26 15:15:36 +08:00
7e3b776024 change jsoniter-sloppy to jsoniter_sloppy 2017-10-23 15:03:44 +08:00
6240e1e798 #185 add jsoniter.Valid 2017-10-10 08:57:02 +08:00
0149a5cf4a fix #183 error message not only show expectation, but also the actual value 2017-10-09 08:24:51 +08:00
5068c8baaf #183 limit error message size 2017-10-09 08:16:52 +08:00
16f78601b5 fix #184, support null as number 2017-10-07 09:29:32 +08:00
8f50a91be2 fix #181, support string as json.Number and jsoniter.Number 2017-10-06 18:08:14 +08:00
73c7bc881e fix #180, add missing methods to jsoniter.Number 2017-10-06 17:56:36 +08:00
4de15a3a87 Merge pull request #182 from MOZGIII/patch-1
Used writeTwoBytes in Stream.WriteEmptyArray
2017-10-04 02:25:32 -05:00
14b28b2226 Used writeTwoBytes in Stream.WriteEmptyArray 2017-10-03 22:14:36 +03:00
abe3c4016b fix #179 2017-09-26 15:35:55 +08:00
dbb1ef3f63 #177 flush buffer should check available again 2017-09-21 21:04:45 +08:00
46b20bbbec #178 SkipAndReturnBytes should return copy of memory 2017-09-21 20:18:45 +08:00
fdfe0b9a69 Merge branch 'olegshaldybin-skip-unexported-fields' 2017-09-19 10:06:59 +08:00
faa3dcf46a do not report error when field is unexported 2017-09-19 10:06:34 +08:00
1f58120d43 Always skip unexported fields when encoding
Skip creating encoders for unexported fields. They do not participate
in JSON marshaling anyway. This allows using unexported fields of
non-marshalable types in structs.

As a side-effect of this change it's no longer possible to marshal
unexported JSON fields by adding a custom type extension. It seems this
is desired behavior since it matches the standard library and jsoniter
already disallows `json:"-"` fields from participating in custom
extensions.

Fixes #174.
2017-09-18 11:02:15 -07:00
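The practical effect: a struct can carry unexported fields of types that have no JSON representation (channels, functions) and still marshal cleanly, matching encoding/json. A small illustration:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type job struct {
	Name string `json:"name"`
	done chan struct{} // unexported and unmarshalable: silently skipped
}

func main() {
	out, err := jsoniter.Marshal(job{Name: "build", done: make(chan struct{})})
	fmt.Println(string(out), err) // expect: {"name":"build"} <nil>
}
```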
6ed27152e0 Update README.md 2017-09-17 16:07:42 +08:00
3c298d8a76 Merge pull request #172 from olegshaldybin/more-stdlib-compat
Improve stdlib compatibility
2017-09-17 03:05:36 -05:00
9f6e5962a9 Improve stdlib compatibility
1. Null values for primitive types no longer clear the original value in
the destination object.

2. Dereference multiple levels of pointers in the destination interface{}
type before unmarshaling into it. This is needed to match stdlib
behavior when working with nested interface{} fields. If the destination
object is a pointer to interface{} then the incoming nil value should
nil out the destination object but keep the reference to that nil value
on its parent object. However if the destination object is an
interface{} value it should set the reference to nil but keep the
original object intact.

3. Correctly handle typed nil decode destinations.
2017-09-16 16:57:51 -07:00
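Point 1 matches the encoding/json rule that a JSON null leaves the destination value untouched. A quick illustration of the behavior being preserved:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	target := struct {
		Name string `json:"name"`
	}{Name: "original"}

	// A null for a primitive field must not clear the existing value.
	err := jsoniter.Unmarshal([]byte(`{"name":null}`), &target)
	fmt.Println(target.Name, err) // expect: original <nil>
}
```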
c463aa12c4 Merge pull request #173 from toffaletti/more-nil-interface-fixes
More nil interface fixes
2017-09-16 18:36:43 -05:00
b5d2607a6d replace should.Equal(nil, err) with should.NoError(err) 2017-09-16 16:30:04 -07:00
48cc4d965a improve test 2017-09-16 16:27:32 -07:00
c59c42fda0 fix decoding of nil non-empty interface 2017-09-16 16:24:55 -07:00
8324374402 add tests for decoding nil interfaces 2017-09-16 16:24:27 -07:00
2017f3866b fix encoding of nil marshaler interface 2017-09-16 16:08:32 -07:00
ddc5af4512 fix encoding of nil non-empty interface 2017-09-16 16:04:36 -07:00
2f7e5c8dd7 add failing tests for nil non-empty interfaces 2017-09-16 16:00:48 -07:00
92772579dd Merge pull request #170 from olegshaldybin/marshal-enum-pointer
Fix custom marshaler for enum types
2017-09-15 09:08:45 -05:00
ae57d167e8 Fix custom marshaler for enum types
When MarshalJSON was defined on a pointer receiver, custom enum type
marshaling/unmarshaling panicked because the underlying primitive type
was treated as a pointer.

Since the method set for pointer receivers includes value receiver methods,
we don't really need optionalEncoder and can just use marshalEncoder
directly.
2017-09-14 23:26:12 -07:00
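A sketch of the shape of type this fix covers: an integer enum with MarshalJSON declared on the pointer receiver. Marshaling a pointer to the enum exercises the custom marshaler (the type and its values are illustrative):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type Color int

// MarshalJSON is deliberately declared on the pointer receiver.
func (c *Color) MarshalJSON() ([]byte, error) {
	names := []string{"red", "green", "blue"}
	return []byte(`"` + names[*c] + `"`), nil
}

func main() {
	c := Color(1)
	out, err := jsoniter.Marshal(&c)
	fmt.Println(string(out), err) // expect: "green" <nil>
}
```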
eef35e549b Merge pull request #169 from toffaletti/fix-nil-interface
Fix handling of nil empty interface
2017-09-15 00:45:11 -05:00
005d86dc44 fix handling of nil empty interface 2017-09-14 21:32:42 -07:00
e658f6597a add failing test for handling of nil interface with omitempty 2017-09-14 20:44:42 -07:00
f8eb43eda3 Merge pull request #168 from olegshaldybin/null-booleans
Allow null booleans
2017-09-14 20:58:31 -05:00
18a241d40b Allow null booleans
Make sure we do the same thing as stdlib with null booleans by not
touching the original value and discarding the null.

Another somewhat related change is nulling out null interface values in
the original structure. This also matches stdlib behavior.
2017-09-14 16:47:35 -07:00
0fdf883ac0 Merge pull request #167 from olegshaldybin/shorter-sleep
Shorter sleep while waiting for encoder/decoder
2017-09-14 18:23:29 -05:00
34fbec74ad Shorter sleep while waiting for encoder/decoder
If the client is using the same jsoniter config with multiple goroutines
it's very likely that few initial operations will encounter a placeholder
encoder/decoder while the real one is being created by another
goroutine. Having a full second sleep seems too conservative, since
encoder/decoder will be created in a very short time. This is very easy
to reproduce in any real environment with a few concurrent requests of
the same type. A few initial requests will have 1s+ response time.

Changing to 10ms should smooth out marshal/unmarshal times for these
initial concurrent requests.
2017-09-14 12:37:47 -07:00
90574c5ca3 #166 support ValidateJsonRawMessage in ConfigCompatibleWithStandardLibrary 2017-09-14 23:54:40 +08:00
6a4ba7bfa9 Merge branch 'master' of https://github.com/json-iterator/go 2017-09-09 08:46:07 +08:00
0828e559d0 #164 support interface{} with ptr 2017-09-09 08:45:57 +08:00
2c67d0f68a Merge pull request #163 from dvrkps/patch-2
travis: add 1.x to go versions
2017-09-07 11:25:07 -05:00
f29a0391bc travis: add 1.x to go versions 2017-09-07 17:12:42 +02:00
374e68a144 Merge pull request #162 from cch123/fix-bool-to-number
fix fuzzy decoder from bool value to number
2017-09-06 00:19:18 -05:00
b134d86290 optimize code 2017-09-06 13:18:05 +08:00
bc3221879d fix fuzzy decoder from bool value to number 2017-09-06 12:31:56 +08:00
8c7fc7584a #159 fix fuzzy decoder, the newIter assigned io.EOF error to original iterator, which stopped further processing 2017-09-06 00:31:25 +08:00
db32ee8c2d #157 number can be null 2017-09-05 13:00:03 +08:00
d80309af3b #156 invoke Marshaler defined on pointer types 2017-09-01 15:44:12 +08:00
36b14963da #153 fix invalid utf8 using same implementation as the standard library 2017-08-29 23:58:51 +08:00
f706335302 #153 fix critical bug: infinite loop when write string is invalid utf8 2017-08-29 23:39:43 +08:00
2dc0031b26 #152 gofmt 2017-08-25 12:53:23 +08:00
cdbd2ed810 #145 interface {} customizatoin is recursive 2017-08-22 10:39:01 +08:00
39e9d67807 Merge branch 'master' of https://github.com/json-iterator/go 2017-08-22 00:12:18 +08:00
2066b01acb #146 support config TagKey 2017-08-22 00:12:09 +08:00
ac3b3cd160 test []interface{} 2017-08-21 22:43:51 +08:00
887789156a Merge pull request #147 from thockin/output_tests
Add tests for int64
2017-08-11 12:55:48 +08:00
7df5a67d0d Add tests for int64 2017-08-10 20:58:49 -07:00
9c358632dc #144 make []byte support Unmarshaler&Marshaler 2017-08-09 13:59:40 +08:00
1cfa233923 #143 give jsoniter.Number the same meaning as json.Number; however, UseNumber still returns json.Number. Go 1.9 type alias support should be added later 2017-08-05 07:22:53 +08:00
d249b05a85 rename ValueType, to avoid collision with json.Number 2017-08-05 07:10:15 +08:00
abbd16da6c #140 blind fix 2017-08-02 09:20:43 +08:00
b67201557a avoid gc issue 2017-08-01 08:34:38 +08:00
5124683f24 #140 try fix: maybe memory collected before assigned to existing object graph 2017-07-31 23:24:58 +08:00
4892de725b add ad 2017-07-31 21:49:02 +08:00
34a2174be3 #142 decode struct field should be case insensitive, yet another fix 2017-07-31 21:48:22 +08:00
24ecaff2a1 #142 decode struct field should be case insensitive, the bug only happen for struct with more than 10 fields 2017-07-31 20:50:07 +08:00
c15b4d116c #139 unmarshal non base64 into []byte 2017-07-19 12:04:22 +08:00
12cd299fa8 add benchmark for Skip() 2017-07-19 00:22:41 +08:00
60ba332980 acknowledge @mattn for #138 #137 #136 #135 2017-07-19 00:09:50 +08:00
f705934fbf #138 fix - without following digits; fix 1.e1 2017-07-18 23:48:40 +08:00
17a26a6e20 remove debug print 2017-07-18 23:24:21 +08:00
156284b028 #137 fix unicode surrogate incompatibility 2017-07-18 23:17:52 +08:00
6b6938829d #136 strconv.ParseFloat cannot validate "1.", added extra validation for this special case 2017-07-18 22:19:52 +08:00
e066e54964 #135 verify 1e1 and 1.0e1 is handled same as std 2017-07-18 11:28:19 +08:00
18d6ae2668 #135 fix leading zero 2017-07-18 11:23:29 +08:00
c966eaa031 #135 fix double negative 2017-07-18 11:05:39 +08:00
f6da8e62c3 #133 validate json when Skip() 2017-07-18 09:45:25 +08:00
5eded4f6ae implement skip number and string strictly 2017-07-18 09:01:43 +08:00
9b3ec40fd9 #133 fix empty struct skip; fix ] as empty array 2017-07-17 09:09:00 +08:00
0d604da7d7 Merge pull request #134 from thockin/output_tests
Add a fuzz test for non-JSON input
2017-07-17 08:47:36 +08:00
b6ace7d51b Add a fuzz test for non-JSON input 2017-07-16 17:24:46 -07:00
6a4fbb9892 ensure buffer flushed to io.Writer 2017-07-16 10:47:24 +08:00
4ae426c4b7 Merge branch 'master' of https://github.com/json-iterator/go 2017-07-15 18:09:14 +08:00
b46d0a2324 make test faster 2017-07-15 18:09:06 +08:00
8b03604184 Merge pull request #131 from cch123/feature-increase-coverage
update conversion table, add string escape test
2017-07-13 19:01:45 +08:00
93ce14316d increase coverage 2017-07-13 15:32:26 +08:00
779c3e2164 update conversion table 2017-07-13 00:13:04 +08:00
4b33139ad0 #130 loadMore should use iter.captured 2017-07-12 17:56:51 +08:00
dc388588a3 Merge branch 'master' of https://github.com/json-iterator/go 2017-07-12 16:40:14 +08:00
bd4364ab7c #129 fix read map with reader, should use ReadMapCB instead of ReadObjectCB 2017-07-12 16:40:05 +08:00
b9dc3ebda7 Merge pull request #128 from carlcarl/fix-json-use-number
Fix #123, `UseNumber` not works with iterator
2017-07-12 07:04:30 +08:00
90137b4a60 Use readNumberAsString 2017-07-12 00:23:49 +08:00
be9d4ded4f Use json.Number as the return 2017-07-12 00:11:50 +08:00
7b1fd129cf Add test for iterator UseNumber 2017-07-11 23:39:09 +08:00
b91b7ac682 Fix #123, make iterator read int if using number 2017-07-11 22:07:08 +08:00
845d8438db #126 fix space in case map key is sorted 2017-07-11 01:07:18 +08:00
d37197e176 #126 add space between map key and value when MarshalIndent 2017-07-10 22:14:11 +08:00
45c22b130b Merge branch 'master' of https://github.com/json-iterator/go 2017-07-10 15:24:04 +08:00
4a84b0b30e Merge branch 'liggitt-malformed-string-test' 2017-07-10 15:23:52 +08:00
0187038bad check null/true/false 2017-07-10 15:23:35 +08:00
c38e47d169 control character in string is invalid 2017-07-10 15:13:31 +08:00
b27718d16b Merge pull request #125 from liggitt/exponents
Fix exponent parsing
2017-07-10 14:51:10 +08:00
a447a8f797 Add tests for malformed string input 2017-07-10 02:44:15 -04:00
0d6dae80e1 Fix exponent parsing 2017-07-10 02:06:37 -04:00
d336ee6da6 fix build 2017-07-09 16:28:35 +08:00
3606750b83 document public symbols 2017-07-09 16:26:30 +08:00
db3f5046d7 remove GetObject & GetArray from Any 2017-07-09 16:15:45 +08:00
f0487718f6 document public symbols 2017-07-09 16:09:23 +08:00
46574e7d09 document public symbols 2017-07-09 15:23:18 +08:00
3a6ecf051c make receiver names consistent 2017-07-09 15:11:24 +08:00
5862c51768 extract out feature_reflect_struct_decoder 2017-07-09 15:07:53 +08:00
ce479f3476 fix golint: document exported symbols 2017-07-09 14:57:49 +08:00
bede1d7f40 fix build; add document for exported symbols 2017-07-09 14:48:34 +08:00
d3448d3dbd fix golint: document exported symbols 2017-07-09 14:21:12 +08:00
8fbed91768 fix golint: document exported symbols 2017-07-09 14:17:40 +08:00
3b6853d209 fix golint: do not export test types 2017-07-09 14:12:58 +08:00
4351a2e6e9 fix golint: do not export test types 2017-07-09 11:55:58 +08:00
891d33b415 fix golint: do not export test types 2017-07-09 11:40:45 +08:00
ad20f12c34 fix golint: do not export test types 2017-07-09 11:33:03 +08:00
9ecb1fd36d fix go vet 2017-07-09 11:24:26 +08:00
6d0e6f3733 fix go report card 2017-07-09 11:12:37 +08:00
711f836582 fix go report card 2017-07-09 11:10:44 +08:00
37ba1b32b5 Merge branch 'javierprovecho-master' 2017-07-09 01:01:13 +08:00
2c10d8e6bb test(object): add test for ignored field on not valid type 2017-07-08 16:50:11 +02:00
aaf6160146 fix(reflect): don't process ignored struct fields 2017-07-08 16:50:05 +02:00
b1afefe058 Merge pull request #119 from cch123/feature-increase-coverage
user defined simple type test
2017-07-07 21:43:33 +08:00
3bb49c1e47 add type def marshal/unmarshal test 2017-07-07 19:01:53 +08:00
eb68fff85c Merge pull request #118 from cch123/feature-increase-coverage
add int/int8/int32/int64 overflow test
2017-07-07 18:49:35 +08:00
e07a4ca5ec add int/int8/int32/int64 overflow test 2017-07-07 17:14:52 +08:00
dfa4bdf888 merge 2017-07-07 09:14:24 +08:00
b74ffb2e03 import github.com/stretchr/testify/require 2017-07-07 09:13:25 +08:00
a46060dedc Merge pull request #117 from cch123/feature-increase-extra-coverage
increase extra coverage
2017-07-06 21:09:34 +08:00
5eadecbb66 increase extra coverage 2017-07-06 20:44:38 +08:00
5bc013d6a3 merge 2017-07-06 16:06:30 +08:00
f7df62f1b5 #115 check object end 2017-07-06 16:04:52 +08:00
07f423d248 Merge pull request #114 from cch123/feature-increase-coverage
increase reflect object coverage, perhaps need to optimize in the future
2017-07-06 15:44:56 +08:00
d4c0cb2986 increase reflect object coverage, need optimize in the future 2017-07-06 15:31:35 +08:00
84ed6b3caf Merge pull request #113 from cch123/feature-increase-coverage
increase coverage
2017-07-06 12:27:18 +08:00
ee6536c50a increase coverage 2017-07-06 11:44:39 +08:00
b6eb62e96b Merge pull request #112 from cch123/feature-increase-coverage
increase coverage
2017-07-05 20:42:42 +08:00
8675af13bf increase coverage 2017-07-05 20:30:54 +08:00
21ca11f96a Merge pull request #111 from cch123/feature-increase-coverage
fix codecov yaml ignore
2017-07-05 19:24:44 +08:00
dd88d25090 add require 2017-07-05 18:59:28 +08:00
6a289f32c2 fix codecov.yml curl https://codecov.io/validate --data-binary @.codecov.yml 2017-07-05 16:49:42 +08:00
4907dc00f6 change codecov file 2017-07-05 16:41:24 +08:00
2350982504 Merge pull request #109 from cch123/feature-increase-coverage
increase coverage
2017-07-05 16:18:34 +08:00
27725b7139 update codecov.yml 2017-07-05 15:17:39 +08:00
ca6a524d4f add codecov.yml ignore output tests 2017-07-05 14:36:15 +08:00
1de44419ea increase coverage 2017-07-05 13:55:10 +08:00
550531a046 increase coverage 2017-07-05 11:40:20 +08:00
1745078ab7 Merge pull request #108 from cch123/feature-add-string-tests
add string convert tests
2017-07-05 07:20:29 +08:00
6129e85d53 increase coverage 2017-07-05 01:21:33 +08:00
ee3313111c add string tests 2017-07-05 00:39:20 +08:00
4e65952c09 fix float convert 2017-07-05 00:23:00 +08:00
3829a470ae Merge pull request #106 from cch123/feature-add-int-tests
fix negative number to uint
2017-07-04 22:37:09 +08:00
3f35bed884 simplify float convert 2017-07-04 22:28:24 +08:00
8d7efe886c Merge pull request #107 from cch123/feature-add-convert-table-doc
add convert table doc
2017-07-04 21:07:43 +08:00
f245011c7d add any to float 2017-07-04 19:59:34 +08:00
4ea96ac7c3 change all negative convert to uint 0 2017-07-04 18:48:55 +08:00
50beb4f15d update fuzzy convert table 2017-07-04 17:05:39 +08:00
e5d7a65616 add convert table 2017-07-04 16:38:07 +08:00
d7b6b4e0bb add convert table document 2017-07-04 15:29:47 +08:00
712ddb1942 fix negative number to uint 2017-07-04 14:00:24 +08:00
ca8dd93d0b Merge pull request #105 from cch123/feature-add-int-tests
add any to int/uint test
2017-07-04 08:09:48 +08:00
ac8dd56dfb object cannot be converted to int/float in PHP, so change the conversion result to zero 2017-07-04 00:53:10 +08:00
d8dbf14af4 add array to int 2017-07-04 00:29:19 +08:00
402c6c79e2 Merge branch 'master' of https://github.com/json-iterator/go into feature-add-int-tests 2017-07-04 00:20:16 +08:00
2e10d5fdad add basic int test 2017-07-04 00:19:41 +08:00
f0b07a2313 Merge pull request #104 from cch123/feature-add-bool-convert-test
add bool convert test map
2017-07-03 20:00:29 +08:00
919a2eff5c fix bool test 2017-07-03 19:40:12 +08:00
a743df1b8a add bool convert test map 2017-07-03 19:10:49 +08:00
8700644196 cut 0.9.18 2017-07-03 10:17:02 +08:00
6a7bf91c45 only test against 1.8 2017-07-02 19:26:06 +08:00
e417330822 fuzzy test from 1000 to 100 2017-07-02 18:18:12 +08:00
af876518e1 remove 1.6 and 1.7 2017-07-02 15:25:28 +08:00
21b9254da4 skip t.Run below 1.8 2017-07-02 15:20:18 +08:00
c009421781 fix write float compatibility 2017-07-02 15:11:36 +08:00
c4f54740f7 separate 1.8 tests 2017-07-02 14:27:16 +08:00
368bd0c1d8 fix 1.6 compatibility 2017-07-02 13:22:55 +08:00
11975d2a26 test with more go versions 2017-07-02 13:18:20 +08:00
e7404f3065 add badges 2017-07-02 13:10:00 +08:00
f60a6a17c2 test and cover multiple packages 2017-07-02 12:58:14 +08:00
2dfdcdd9db add travis.yml 2017-07-02 12:09:49 +08:00
496a8ea225 list contributors 2017-07-02 11:59:11 +08:00
ed6c434851 gofmt 2017-07-02 11:56:01 +08:00
bf002a02be #102 create correct type when pointer to Marshaler/Unmarshaler is nil 2017-07-02 11:35:30 +08:00
815aa331a8 #101 checkIsEmpty can not reuse createEncoderOfSimpleType, otherwise it will fail when struct member is not serializable 2017-07-02 11:13:21 +08:00
54ab168362 #99 support uintptr 2017-07-02 11:03:13 +08:00
87d1ea0c60 Merge pull request #100 from thockin/output_tests
Output tests for text marshal and uintptr
2017-07-02 07:41:50 +08:00
6268a1cbc8 add test for uintptr 2017-07-01 10:57:42 -07:00
d56566286c Add tests for omitempty and text methods 2017-07-01 10:28:59 -07:00
f2c50ef73b #97 omit empty behavior should follow the original type 2017-07-02 00:41:23 +08:00
d0c5988985 Merge pull request #98 from thockin/output_tests
Add tests for omitempty and json methods
2017-07-02 00:15:51 +08:00
17eed15fb5 Add tests for omitempty and json methods 2017-07-01 08:26:42 -07:00
e260979001 #95 struct and empty struct is considered as notempty 2017-07-01 11:48:17 +08:00
3cf9b7c253 Merge pull request #96 from thockin/output_tests
Add test for omitempty on struct
2017-07-01 11:33:17 +08:00
800df52ccd Add test for omitempty on struct 2017-06-30 20:32:27 -07:00
e3ba0e7b7e #93 fix omitempty within embedded struct 2017-07-01 09:27:48 +08:00
e366c72b81 Merge pull request #94 from thockin/output_tests
Add test for embedded+omitempty
2017-07-01 09:01:13 +08:00
69551ef38f Add test for embedded+omitempty 2017-06-30 15:16:42 -07:00
abcf9a8d76 fix tests 2017-07-01 00:35:19 +08:00
fa0965a968 #90 trim spaces when UnmarshalJSON 2017-07-01 00:33:42 +08:00
04eae11ba5 #91 fix one ptr embedded struct 2017-07-01 00:09:40 +08:00
ea8fa7cc63 #91 fix embedded and , 2017-07-01 00:00:38 +08:00
6540266aaf Merge pull request #92 from thockin/output_tests
Better tests for field name tags
2017-06-30 23:53:02 +08:00
ddfbb0c62e better tests for field name tags 2017-06-30 08:16:43 -07:00
a3a2d1cd25 #87 fix embedded field sorting order 2017-06-30 14:01:50 +08:00
401a56bc20 #88 #85 support embedded struct with json tags 2017-06-30 13:26:50 +08:00
cfaa11f837 Merge pull request #89 from thockin/output_tests
More output tests
2017-06-30 13:21:54 +08:00
c1411e0ad5 Add test for json tag on embedded field 2017-06-29 22:01:16 -07:00
9ec64591b6 Enhance test for overlap and embedded 2017-06-29 22:01:16 -07:00
b07d1abc4f Add output_test for partial unmarshal 2017-06-29 22:01:14 -07:00
79c4040505 sync up some straggler json_tests 2017-06-29 22:00:39 -07:00
7fa780bd5d Uncommit files accidentally added 2017-06-29 22:00:27 -07:00
7244d730b9 Merge pull request #84 from thockin/output_tests
Output tests for embedded marshalers and arrays
2017-06-29 22:51:46 +08:00
12be6e0d43 Add tests for arrays 2017-06-29 07:25:19 -07:00
3cfe590a13 Add output test for embedded marshalers 2017-06-29 07:14:33 -07:00
678c297af3 #75 support MarshalIndent 2017-06-29 20:48:27 +08:00
1253b8edd3 gofmt 2017-06-29 20:40:25 +08:00
fb382c0ec1 fix -, 2017-06-29 20:40:07 +08:00
09cb1d9236 #81 handle field name conflict properly 2017-06-29 20:34:40 +08:00
e6c24947ee array is just like struct; the one-element case is special for interface{} 2017-06-29 18:58:40 +08:00
a6673c983a slice embedded in struct will not be optimized by interface{} 2017-06-29 18:52:03 +08:00
ec7b40d104 update ignore 2017-06-29 18:45:20 +08:00
84fa033353 #80 fix the case when embedded struct ptr is nil 2017-06-29 18:45:11 +08:00
4e608af2c7 #80 fix embedded builtins 2017-06-29 10:45:29 +08:00
3458ccdb20 Merge pull request #83 from thockin/output_tests
Methods on aliased types are not valid map keys
2017-06-29 09:43:12 +08:00
45ccfb031f Methods on aliased types are not valid map keys 2017-06-28 09:38:15 -07:00
545a32f2a1 #76 support TextUnmarshal 2017-06-29 00:14:55 +08:00
08dbc98040 #76 fix UnmarshalJSON 2017-06-28 23:55:32 +08:00
82dabdcdbf #77 support -, 2017-06-28 23:47:32 +08:00
76e62088df #78 not null pointer is considered not empty 2017-06-28 23:39:46 +08:00
faaa59222a #79 fix string tag on string field 2017-06-28 23:37:10 +08:00
91ef89a6a2 WIP: MarshalIndent 2017-06-28 23:22:05 +08:00
3e3caf9184 Merge pull request #82 from thockin/output_tests
Huge updates to output tests
2017-06-28 23:20:59 +08:00
03a2daaeee Add a test for caseless unmarshal 2017-06-28 08:11:38 -07:00
4652ac6cc2 Add output tests for embedded types 2017-06-28 08:11:38 -07:00
8a9f2b9179 Add output test for manual text marshalers 2017-06-28 08:11:33 -07:00
0db2d74de8 Add output test for manual json marshalers 2017-06-28 08:11:02 -07:00
d6ef711c18 Better errors in output_tests 2017-06-28 07:48:55 -07:00
628fedf63c Add output tests for map keys with TextMarshal 2017-06-28 07:48:55 -07:00
5bb7a1f7af Add output test for 'string' tag 2017-06-28 07:48:55 -07:00
c2c9981062 catch errors in output_tests 2017-06-28 07:48:55 -07:00
e40d614037 Add output test for json tag field names 2017-06-28 07:48:55 -07:00
1589ab2fd7 Add output tests for omitempty 2017-06-28 07:48:49 -07:00
97ee4ad4a2 Reorganize output_tests for structs
As I added more and more cases, I found the dir structure hard to
navigate.  The new structure follows how you read a type:

   e.g. struct { F *string } -> struct/ptr_string

This exposed some redundant cases and some missing cases, too.
2017-06-27 19:51:13 +01:00
f09f778ca9 Reorganize output_tests for slices
As I added more and more cases, I found the dir structure hard to
navigate.  The new structure follows how you read a type:

   e.g. []*string -> slice/ptr_string

This exposed some redundant cases and some missing cases, too.
2017-06-27 19:21:17 +01:00
9fc858b117 Reorganize output_tests for maps
As I added more and more cases, I found the dir structure hard to
navigate.  The new structure follows how you read a type:

e.g. map[string]*string -> map/string/ptr_string

This exposed some redundant cases and some missing cases, too. Now
map[string] is the "main" test case that exercises all the variants of
value types, and the other key types are just to prove that they
basically work.
2017-06-27 19:19:37 +01:00
f93d25f8b1 Add output tests for slices of slices 2017-06-27 18:28:52 +01:00
7cd7a6cc7c Add output tests for maps of maps/slices/structs 2017-06-27 11:58:07 +01:00
7d6c9374e8 Add output test for structs of struct ptrs 2017-06-27 11:46:55 +01:00
e16ee7f8ac Add output tests for structs of slices and maps 2017-06-27 11:41:17 +01:00
cf6367546b Add output tests for structs of structs 2017-06-27 11:28:17 +01:00
dc44e85a86 #73 fix interface{} optimization for one ptr field struct and array 2017-06-26 22:37:24 +08:00
85f7a1b0b3 Merge pull request #74 from thockin/output_tests
Output tests for structs
2017-06-26 21:49:06 +08:00
507a446eda Add output_tests for structs of ptrs 2017-06-26 06:09:00 -07:00
76eefc25ba Add output_test for empty structs 2017-06-26 05:45:54 -07:00
21a16bd252 Add output_tests for structs of builtins 2017-06-26 05:37:47 -07:00
0c0c9f119f update value proposition 2017-06-26 14:28:03 +08:00
cf77980493 #71 fixed []byte alias 2017-06-26 14:25:56 +08:00
7d681fe2c2 #71 fix map key type of string alias 2017-06-26 11:52:22 +08:00
0c07128d3c #71 sort non string map keys 2017-06-26 10:42:47 +08:00
f771d32291 #70 decode null to nil for map/slice 2017-06-26 10:20:49 +08:00
d100b0d41f fix typo 2017-06-26 10:02:45 +08:00
81e64121ba #71 fix html escape for string alias 2017-06-26 09:57:45 +08:00
dcc91365ee Merge pull request #72 from thockin/output_tests
Output tests
2017-06-26 08:51:10 +08:00
8f3de9c412 Adapt tests to use new Config structs
The unit test uses compatible mode. The benchmarks measure compat,
default, and fastest.

This still fails for strings and slices and maps all over the place.
2017-06-25 10:29:48 -07:00
5d3508979f Add output tests for slices of builtins 2017-06-25 10:00:35 -07:00
8f8e16b4c2 #63 keep struct field order 2017-06-23 08:21:02 +08:00
d7ea1acd3f #63 fix embed struct at last 2017-06-23 07:45:18 +08:00
ebed7df895 fix unicode and escape 2017-06-22 16:00:47 +08:00
caaa04195e #69 fix unicode support 2017-06-21 18:25:37 +08:00
ff3c624fa9 fix anonymous fields 2017-06-21 00:26:18 +08:00
3333ec11a0 support private fields 2017-06-20 23:48:41 +08:00
29a928e1d2 support naming strategy 2017-06-20 23:09:53 +08:00
83fa27ff9a #67 time as int64 with decoder 2017-06-20 17:52:41 +08:00
cefb2972fd #67 time as int64 with specified precision 2017-06-20 17:46:29 +08:00
486534c67c #67 time as int64 2017-06-20 17:43:47 +08:00
ed79b1726e fix encoder/decoder cast issue 2017-06-20 17:01:21 +08:00
85be06b145 #68 empty array to object/map 2017-06-20 16:36:22 +08:00
086001225d #68 string to float64 2017-06-20 16:20:56 +08:00
2ea4d48e1f #68 string to float32 2017-06-20 16:17:00 +08:00
417011b497 #68 remove redundant math max constants 2017-06-20 16:10:29 +08:00
ae6ce2fc3f #68 fuzzy all kinds of integer 2017-06-20 16:07:30 +08:00
8ef0c22f25 #68 handle float to int safely 2017-06-20 15:46:22 +08:00
a5ae3a2649 #68 float to int 2017-06-20 15:20:56 +08:00
306b2896cf #68 string to int 2017-06-20 15:18:24 +08:00
818ae1331a #68 number to string 2017-06-20 15:11:01 +08:00
8f6a840c63 fix anonymous struct 2017-06-20 13:33:40 +08:00
be221df432 #66 Make extension api like the java version 2017-06-20 10:41:54 +08:00
499412ec4c #66 extract out feature_reflect_extension 2017-06-20 08:42:36 +08:00
c36a7ed7cd #66 extract out feature_reflect_extension 2017-06-20 08:42:25 +08:00
14588726a1 expose ValEncoder & ValDecoder 2017-06-20 08:08:59 +08:00
aa01f57b7f rename AdaptedDecoder => Decoder and AdaptedEncoder => Encoder 2017-06-20 08:00:43 +08:00
b3170a8cef rename Encoder => ValEncoder and Decoder => ValDecoder 2017-06-20 07:59:45 +08:00
43a832beee add isEmptyFunc 2017-06-20 07:57:23 +08:00
39c9bb226a fix lossy float marshal and omit empty 2017-06-20 07:51:38 +08:00
945fe53724 fix html escape test and omit empty 2017-06-20 07:46:13 +08:00
8367a97ad8 gofmt 2017-06-20 07:39:54 +08:00
365d399192 #65 make placeholder thread safe 2017-06-20 07:39:38 +08:00
839247df05 #63 fix Marshaler and Unmarshaler on struct 2017-06-20 07:23:22 +08:00
f5edf564c8 gofmt 2017-06-19 23:43:53 +08:00
c3f5a2c536 #64 support fixed array 2017-06-19 23:43:28 +08:00
c6a598e292 # add jsoniter.RawMessage 2017-06-19 23:10:20 +08:00
eecb062c32 #63 support decode anonymous struct 2017-06-19 23:02:57 +08:00
50583f6bae #63 support *json.RawMessage 2017-06-19 22:57:43 +08:00
3b883aeffc #63 add more tests for json.RawMessage 2017-06-19 21:24:59 +08:00
baca358b53 add MustBeValid to Any 2017-06-19 21:21:20 +08:00
514db10f97 add Any.ToVal 2017-06-19 15:40:00 +08:00
31afe6450e add Api interface to allow save the frozen config 2017-06-19 13:43:22 +08:00
7e9017caa2 remove unused files 2017-06-18 23:43:01 +08:00
b6dfbbd6bc add document 2017-06-18 23:42:23 +08:00
3ffa5af7ec #61 remove internal buffer from mapAny 2017-06-18 23:18:32 +08:00
02cf6a73cc #61 remove internal buffer from objectAny 2017-06-18 23:09:30 +08:00
15c92d48df #61 remove internal buffer from numberLazyAny 2017-06-18 22:48:28 +08:00
a84cdaa694 #61 remove internal buffer from arrayAny 2017-06-18 22:40:18 +08:00
9f9ca4c9fc #61 remove stringLazyAny 2017-06-18 22:24:11 +08:00
985e263300 #61 removed internal buffer from lazy array and object; jsoniter.Get replaced jsoniter.UnmarshalAny 2017-06-18 22:22:13 +08:00
1ec246d16b #61 read any reuse skip impl 2017-06-18 17:00:28 +08:00
54dbcda64d #62 SkipAndReturnBytes should support reader 2017-06-18 16:28:43 +08:00
7a049ec79c #60 support read interface{} as json.Number 2017-06-18 15:22:37 +08:00
77dcffe77d tweak performance 2017-06-17 22:42:11 +08:00
8ab46965bd extract out any tests 2017-06-17 21:32:48 +08:00
2503ef17eb marshal lazy array/object using the config 2017-06-17 21:13:17 +08:00
0195110b5b gofmt 2017-06-17 21:11:23 +08:00
55fc498d27 use iterator from cache for any 2017-06-17 21:10:08 +08:00
50e4910c63 document how to get best performance 2017-06-17 17:14:34 +08:00
f29fe7407e downgrade to lower golang version 2017-06-17 16:27:19 +08:00
3c8bd9ef54 #57 copy bytes 2017-06-17 14:36:38 +08:00
952a42af6c #57 copy bytes 2017-06-17 14:36:05 +08:00
17bd91fd71 #57 reuse stream and iterator 2017-06-17 14:23:02 +08:00
3d5f6d3a4a #58 string mode support both encoding and decoding 2017-06-17 11:38:09 +08:00
b31b1301e2 #59 add ConfigFastest 2017-06-17 10:21:37 +08:00
69bc64b6d8 #54 support sort map keys 2017-06-16 16:46:30 +08:00
e0e2423e9a #53 test compatibility without html escape 2017-06-16 16:03:02 +08:00
a6ea770365 #53 implement SetEscapeHtml 2017-06-16 00:10:05 +08:00
5f22e50c89 #53 support escapeHtml 2017-06-15 23:55:04 +08:00
d867c8ba5c #53 split config into Config and frozenConfig 2017-06-13 18:49:35 +08:00
d0418857ce #53 move current config EnableXXX 2017-06-13 17:47:40 +08:00
48e9f6ec84 move IndentionStep to config 2017-06-13 17:03:27 +08:00
acddcf5bbf #53 extract out config 2017-06-13 16:58:53 +08:00
788918b85d #56 nil map or array should be null not [] or {} 2017-06-13 09:14:19 +08:00
6e5817b773 Merge pull request #55 from thockin/output_tests
WIP: Tests to compare against stdlib
2017-06-12 01:47:50 -05:00
7480e41836 Add output tests for maps of builtins
This tests for exact stdlib compatibility.
2017-06-11 21:09:56 -07:00
9215b3c508 Add output tests for builtin types
This fuzzes a type, marshals it with stdlib and json-iterator, compares,
then unmarshals with stdlib and json-iterator and compares.  This is
checking for literal, byte-for-byte compatibility.

In every case the test is exactly the same.

It also include benchmark functions to compare stdlib vs json-iterator.

This depends on a couple PRs to be merged in gofuzz.
2017-06-11 21:04:59 -07:00
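A condensed sketch of the comparison these output tests perform, using github.com/google/gofuzz to populate a value and checking byte-for-byte equality between encoding/json and jsoniter (the test name and the fuzzed type are illustrative, not the ones in output_tests):

```go
package outputtests

import (
	"bytes"
	"encoding/json"
	"testing"

	fuzz "github.com/google/gofuzz"
	jsoniter "github.com/json-iterator/go"
)

func TestRoundTripMatchesStdlib(t *testing.T) {
	var value map[string]int64
	fuzz.New().Fuzz(&value)

	std, stdErr := json.Marshal(value)
	iter, iterErr := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(value)

	// Byte-for-byte compatibility: same bytes and same error outcome.
	if (stdErr == nil) != (iterErr == nil) || !bytes.Equal(std, iter) {
		t.Fatalf("stdlib=%s (%v) jsoniter=%s (%v)", std, stdErr, iter, iterErr)
	}
}
```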
64e500f3c8 Merge branch 'master' of https://github.com/json-iterator/go 2017-06-12 10:13:22 +08:00
3307ce3ba2 #50 map key, unlike object field, can contain escaped chars 2017-06-12 10:13:13 +08:00
6f50f15678 decoder/encoder;float precision doc 2017-06-11 16:30:31 +08:00
cee09816e3 decoder/encoder;float precision doc 2017-06-11 16:28:31 +08:00
cdbad22d22 test more package description 2017-06-11 15:35:45 +08:00
b0c9f047e2 test more than one package description 2017-06-11 15:32:58 +08:00
6bd13c2948 Merge branch 'master' of https://github.com/json-iterator/go 2017-06-09 17:06:38 +08:00
84ad508437 #48 should return error if concrete type unknown 2017-06-09 17:06:27 +08:00
4f909776cf Merge pull request #49 from zhaitianduo/master
Use jsoniter instead of json in example
2017-06-09 03:32:31 -05:00
962c470806 fix unused import 2017-06-09 16:28:20 +08:00
46d443fbad use jsoniter for example 2017-06-09 16:25:58 +08:00
2608d40f2a example unmarshal 2017-06-08 12:08:47 +08:00
3cf822853f example unmarshal 2017-06-08 12:07:03 +08:00
26708bccc9 report error when string end not found 2017-06-08 09:46:19 +08:00
d75b539bad add test for scientific float 2017-06-07 21:34:56 +08:00
cfffa29c8a gofmt 2017-06-06 23:27:00 +08:00
925df245d3 good enough indent implementation 2017-06-06 23:18:37 +08:00
962a8cd303 #40 support UseNumber 2017-06-06 23:15:15 +08:00
6509ba05df Merge pull request #41 from 1046102779/master
When unmarshaling, return an error immediately if the output parameter is not a pointer type, to avoid crashing the program
2017-06-06 10:03:06 -05:00
579dbf3c1d Merge pull request #42 from 1046102779/patch-1
Change floatDigits to intDigits
2017-06-06 10:02:26 -05:00
aa5181db67 Change floatDigits to intDigits 2017-06-06 21:08:04 +08:00
67be6df2b1 Update feature_adapter.go 2017-06-06 20:01:43 +08:00
0f5379494a unmarshal failed return non-pointer error 2017-06-06 19:36:33 +08:00
d09e2419ba update benchmark 2017-06-06 16:55:32 +08:00
e1a71f6ba1 update benchmark 2017-06-06 16:54:26 +08:00
dcb78991c4 flush when buffer is large enough 2017-06-06 14:16:54 +08:00
9e8238cdc6 remove unused file 2017-06-06 12:41:13 +08:00
a4e5abf492 support []byte; marshal without copy 2017-06-06 09:44:56 +08:00
3979955e69 support TextMarshaler as map key 2017-06-06 00:09:33 +08:00
5fd09f0e02 remove mapInterfaceEncoder 2017-06-05 23:56:37 +08:00
af4982b22c support decode int key map 2017-06-05 23:53:48 +08:00
29dc1d407d write map with int key 2017-06-05 23:01:00 +08:00
5b27aaa62c update test 2017-06-05 22:10:01 +08:00
106636a191 update test 2017-06-05 22:08:28 +08:00
f50c4cfbbe Merge branch 'master' of https://github.com/json-iterator/go 2017-06-05 22:05:02 +08:00
87149ae489 add simple marshal benchmark 2017-06-05 22:04:52 +08:00
c0a4ad72e1 example test 2017-06-05 20:37:08 +08:00
404c0ee44b Decoder doc 2017-06-05 19:57:20 +08:00
10c1506f87 link test 2017-06-05 19:38:34 +08:00
9a43fe6468 adapter api comment 2017-06-05 19:31:30 +08:00
95e03f2937 Marshal comment 2017-06-05 19:19:46 +08:00
4406ed9e62 Marshal comment 2017-06-05 19:18:12 +08:00
ff027701f5 Marshal comment 2017-06-05 19:15:56 +08:00
c69b61f879 Marshal comment 2017-06-05 19:14:40 +08:00
d97f5db769 Marshal comment 2017-06-05 19:11:16 +08:00
45bbb40a9f #34 implement NewEncoder 2017-06-02 18:46:44 +08:00
e36f926072 fix random go test failure 2017-06-02 17:34:40 +08:00
59e71bacc8 #36 handle anonymous 2017-06-02 16:52:20 +08:00
5cb0d35610 eof is not error 2017-06-02 16:06:33 +08:00
69b742e73a #34 support More() and Buffered() 2017-06-02 16:00:12 +08:00
a7f992f0e1 #35 fix json.Number matches string 2017-06-02 15:43:58 +08:00
4cc44e7380 #34 add decoder adapter 2017-06-02 15:38:20 +08:00
5310d4aa9a syntax highlight 2017-06-02 11:20:54 +08:00
2051e3b8ae simplify readme 2017-06-02 11:20:24 +08:00
fe9fa8900e #31 support json.RawMessage 2017-06-02 10:50:23 +08:00
ad3a7fde32 #30 support json.Number 2017-06-02 10:21:43 +08:00
377b892102 support big float and int 2017-05-31 12:40:50 +08:00
707ed3b091 support non empty interface 2017-05-27 00:36:21 +08:00
a7a7c7879a allocate less buffer for writing to []byte 2017-05-26 07:52:54 +08:00
f20f74519d RegisterTypeEncoder and RegisterTypeDecoder should have higher priority 2017-05-24 23:39:34 +08:00
7d2ae80c37 #27 support json.Unmarshaler 2017-05-24 16:04:11 +08:00
f6f159e108 #27 support json.Marshaler 2017-05-24 14:34:00 +08:00
e5a1e704ad #25 make field binding case insensitive 2017-05-24 13:16:09 +08:00
7d5f90261e #28 extension should support specifying encoder 2017-05-24 10:58:56 +08:00
6126a6d3ca #23 hide unexported fields by default 2017-05-24 09:39:11 +08:00
5fbe4e387d #21 #22 marshal float precisely by default 2017-05-24 09:08:15 +08:00
fc44cb2d91 #26 do not enforce pointer as optional 2017-05-23 18:46:11 +08:00
7e046e6aa7 simplify read string, and support null 2017-05-23 18:32:39 +08:00
5488fde97f fix one field struct interface{} optimization compatibility 2017-05-23 17:44:50 +08:00
53f8d370b5 fix wrap any 2017-05-19 19:44:27 +08:00
3f1fcaff87 demonstrate how to customize float encoding 2017-05-11 08:00:50 +08:00
1df353727b customize []byte encoder 2017-05-06 20:52:36 +08:00
b893a0359d trim end space 2017-05-05 17:44:09 +08:00
a92111261c fix struct with one pointer field 2017-05-05 17:27:41 +08:00
91b9e828b7 support recursive type 2017-05-05 16:51:05 +08:00
6bd835aeb1 test type encoder 2017-05-05 08:22:19 +08:00
90888390bc fix readFloat64SlowPath not advancing the cursor 2017-05-02 10:15:21 +08:00
ccb972f58c merge 2017-04-28 09:10:06 +08:00
8711c74c85 support Any as field type 2017-04-28 09:09:24 +08:00
abcf2759ed Merge pull request #14 from eruca/master
fix the omitempty bug
2017-04-18 10:23:05 +10:00
e5476f70e7 #16 fix slice of map 2017-04-16 14:05:08 +08:00
b986d86f26 add test 2017-03-11 18:19:39 +08:00
9a138f8b6a fix bug where an extra comma was added
the previous code would add a comma if a field was empty and not the first one, like 
```
{"account":"1120","name":"nick",,"department":"fsaf","role":1,"privilege":32,,"created_at":1489226500}
```
2017-03-11 18:17:34 +08:00
d1aa59e34e #12 implement omitempty 2017-03-08 07:38:25 -08:00
ceb8c8a733 create map if nil 2017-03-07 18:36:58 -08:00
62028f1ede Merge pull request #11 from Kisesy/patch-1
fix #10
2017-02-26 09:42:45 -06:00
696f962eda fix https://github.com/json-iterator/go/issues/10
Update feature_iter_string.go

Update feature_iter_string.go
2017-02-25 13:30:55 +08:00
177 changed files with 16690 additions and 10209 deletions

3
.codecov.yml Normal file

@ -0,0 +1,3 @@
ignore:
- "output_tests/.*"

4
.gitignore vendored Normal file

@ -0,0 +1,4 @@
/vendor
/bug_test.go
/coverage.txt
/.idea

14
.travis.yml Normal file

@ -0,0 +1,14 @@
language: go
go:
- 1.8.x
- 1.x
before_install:
- go get -t -v ./...
script:
- ./test.sh
after_success:
- bash <(curl -s https://codecov.io/bash)

21
Gopkg.lock generated Normal file

@ -0,0 +1,21 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
name = "github.com/modern-go/concurrent"
packages = ["."]
revision = "e0a39a4cb4216ea8db28e22a69f4ec25610d513a"
version = "1.0.0"
[[projects]]
name = "github.com/modern-go/reflect2"
packages = ["."]
revision = "4b7aa43c6742a2c18fdef89dd197aaae7dac7ccd"
version = "1.0.1"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "ea54a775e5a354cb015502d2e7aa4b74230fc77e894f34a838b268c25ec8eeb8"
solver-name = "gps-cdcl"
solver-version = 1

26
Gopkg.toml Normal file

@ -0,0 +1,26 @@
# Gopkg.toml example
#
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
# name = "github.com/user/project"
# version = "1.0.0"
#
# [[constraint]]
# name = "github.com/user/project2"
# branch = "dev"
# source = "github.com/myfork/project2"
#
# [[override]]
# name = "github.com/x/y"
# version = "2.4.0"
ignored = ["github.com/davecgh/go-spew*","github.com/google/gofuzz*","github.com/stretchr/testify*"]
[[constraint]]
name = "github.com/modern-go/reflect2"
version = "1.0.1"


@ -1,61 +1,72 @@
[![Sourcegraph](https://sourcegraph.com/github.com/json-iterator/go/-/badge.svg)](https://sourcegraph.com/github.com/json-iterator/go?badge)
[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/json-iterator/go)
[![Build Status](https://travis-ci.org/json-iterator/go.svg?branch=master)](https://travis-ci.org/json-iterator/go)
[![codecov](https://codecov.io/gh/json-iterator/go/branch/master/graph/badge.svg)](https://codecov.io/gh/json-iterator/go)
[![rcard](https://goreportcard.com/badge/github.com/json-iterator/go)](https://goreportcard.com/report/github.com/json-iterator/go)
[![License](http://img.shields.io/badge/license-mit-blue.svg?style=flat-square)](https://raw.githubusercontent.com/json-iterator/go/master/LICENSE)
[![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby)
jsoniter (json-iterator) is a fast and flexible JSON parser available in [Java](https://github.com/json-iterator/java) and [Go](https://github.com/json-iterator/go)
A high-performance 100% compatible drop-in replacement of "encoding/json"
# Why jsoniter?
You can also use thrift like JSON using [thrift-iterator](https://github.com/thrift-iterator/go)
* Jsoniter is the fastest JSON parser. It can be up to 10x faster than the standard parser, data binding included. Shameless self [benchmark](http://jsoniter.com/benchmark.html)
* Extremely flexible api. You can mix and match three different styles: bind-api, any-api or iterator-api. Checkout your [api choices](http://jsoniter.com/api.html)
* Unique iterator api can iterate through JSON directly, zero memory allocation! See how [iterator](http://jsoniter.com/api.html#iterator-api) works
# Benchmark
# Show off
![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png)
Here is a quick show off; for a more complete report you can check out the full [benchmark](http://jsoniter.com/benchmark.html) with [in-depth optimization](http://jsoniter.com/benchmark.html#optimization-used) to back the numbers up
Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go
![go-medium](http://jsoniter.com/benchmarks/go-medium.png)
Raw Result (easyjson requires static code generation)
# Bind-API is the best
| | ns/op | allocation bytes | allocation times |
| --- | --- | --- | --- |
| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
Bind-api should always be the first choice. Given this JSON document `[0,1,2,3]`
Always benchmark with your own workload.
The result depends heavily on the data input.
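A minimal sketch of such a benchmark (the `Payload` struct, its fields, and the sample document below are invented placeholders, not part of this repository):
```go
package mybench

import (
	"encoding/json"
	"testing"

	jsoniter "github.com/json-iterator/go"
)

// Payload stands in for your own data shape.
type Payload struct {
	Name string `json:"name"`
	Tags []int  `json:"tags"`
}

var sample = []byte(`{"name":"nick","tags":[1,2,3]}`)

// BenchmarkStdUnmarshal measures the standard library on this payload.
func BenchmarkStdUnmarshal(b *testing.B) {
	for i := 0; i < b.N; i++ {
		var p Payload
		if err := json.Unmarshal(sample, &p); err != nil {
			b.Fatal(err)
		}
	}
}

// BenchmarkJsoniterUnmarshal measures jsoniter on the same payload.
func BenchmarkJsoniterUnmarshal(b *testing.B) {
	for i := 0; i < b.N; i++ {
		var p Payload
		if err := jsoniter.Unmarshal(sample, &p); err != nil {
			b.Fatal(err)
		}
	}
}
```
Running it with `go test -bench=. -benchmem` reports the same ns/op, B/op, and allocs/op columns as the table above.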
Parse with Go bind-api
# Usage
100% compatibility with standard lib
Replace
```go
import "encoding/json"
json.Marshal(&data)
```
with
```go
import "github.com/json-iterator/go"
iter := jsoniter.ParseString(`[0,1,2,3]`)
val := iter.Read()
fmt.Println(val)
var json = jsoniter.ConfigCompatibleWithStandardLibrary
json.Marshal(&data)
```
# Iterator-API for quick extraction
Replace
When you do not need to get all the data back, just extract some.
```go
import "encoding/json"
json.Unmarshal(input, &data)
```
Parse with Go iterator-api
with
```go
import "github.com/json-iterator/go"
iter := jsoniter.ParseString(`[0, [1, 2], [3, 4], 5]`)
count := 0
for iter.ReadArray() {
iter.Skip()
count++
}
fmt.Println(count) // 4
var json = jsoniter.ConfigCompatibleWithStandardLibrary
json.Unmarshal(input, &data)
```
# Any-API for maximum flexibility
Parse with Go any-api
```go
import "github.com/json-iterator/go"
iter := jsoniter.ParseString(`[{"field1":"11","field2":"12"},{"field1":"21","field2":"22"}]`)
val := iter.ReadAny()
fmt.Println(val.ToInt(1, "field2")) // 22
```
Notice you can extract from a nested data structure, and convert any type to the type you want.
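For example, with the `Get` helper added in this change (the sample document below is made up for illustration), you can drill into nested JSON and convert the leaf value in one call:
```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"colors":[{"name":"red","code":1}]}`)
	// Walk object keys and array indexes, then convert the leaf.
	fmt.Println(jsoniter.Get(data, "colors", 0, "name").ToString()) // red
	fmt.Println(jsoniter.Get(data, "colors", 0, "code").ToInt())    // 1
}
```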
[More documentation](http://jsoniter.com/migrate-from-go-std.html)
# How to get
@ -65,4 +76,12 @@ go get github.com/json-iterator/go
# Contributions Welcome!
Contributors
* [thockin](https://github.com/thockin)
* [mattn](https://github.com/mattn)
* [cch123](https://github.com/cch123)
* [Oleg Shaldybin](https://github.com/olegshaldybin)
* [Jason Toffaletti](https://github.com/toffaletti)
Report issue or pull request, or email taowen@gmail.com, or [![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby)

150
adapter.go Normal file

@ -0,0 +1,150 @@
package jsoniter
import (
"bytes"
"io"
)
// RawMessage mirrors json.RawMessage so that jsoniter can serve as a drop-in replacement for encoding/json
type RawMessage []byte
// Unmarshal adapts to json/encoding Unmarshal API
//
// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v.
// Refer to https://godoc.org/encoding/json#Unmarshal for more information
func Unmarshal(data []byte, v interface{}) error {
return ConfigDefault.Unmarshal(data, v)
}
// UnmarshalFromString is a convenient method to read from string instead of []byte
func UnmarshalFromString(str string, v interface{}) error {
return ConfigDefault.UnmarshalFromString(str, v)
}
// Get is a quick method to fetch a value from a deeply nested JSON structure
func Get(data []byte, path ...interface{}) Any {
return ConfigDefault.Get(data, path...)
}
// Marshal adapts to json/encoding Marshal API
//
// Marshal returns the JSON encoding of v, adapts to json/encoding Marshal API
// Refer to https://godoc.org/encoding/json#Marshal for more information
func Marshal(v interface{}) ([]byte, error) {
return ConfigDefault.Marshal(v)
}
// MarshalIndent same as json.MarshalIndent. Prefix is not supported.
func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
return ConfigDefault.MarshalIndent(v, prefix, indent)
}
// MarshalToString convenient method to write as string instead of []byte
func MarshalToString(v interface{}) (string, error) {
return ConfigDefault.MarshalToString(v)
}
// NewDecoder adapts to json/stream NewDecoder API.
//
// NewDecoder returns a new decoder that reads from r.
//
// Instead of an encoding/json Decoder, a jsoniter Decoder is returned
// Refer to https://godoc.org/encoding/json#NewDecoder for more information
func NewDecoder(reader io.Reader) *Decoder {
return ConfigDefault.NewDecoder(reader)
}
// Decoder reads and decodes JSON values from an input stream.
// Decoder provides identical APIs with json/stream Decoder (Token() and UseNumber() are in progress)
type Decoder struct {
iter *Iterator
}
// Decode decodes the next JSON value from the input and stores it in the value pointed to by obj
func (adapter *Decoder) Decode(obj interface{}) error {
if adapter.iter.head == adapter.iter.tail && adapter.iter.reader != nil {
if !adapter.iter.loadMore() {
return io.EOF
}
}
adapter.iter.ReadVal(obj)
err := adapter.iter.Error
if err == io.EOF {
return nil
}
return adapter.iter.Error
}
// More reports whether another value is available in the input
func (adapter *Decoder) More() bool {
iter := adapter.iter
if iter.Error != nil {
return false
}
c := iter.nextToken()
if c == 0 {
return false
}
iter.unreadByte()
return c != ']' && c != '}'
}
// Buffered returns a reader over the data remaining in the Decoder's internal buffer
func (adapter *Decoder) Buffered() io.Reader {
remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail]
return bytes.NewReader(remaining)
}
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
// Number instead of as a float64.
func (adapter *Decoder) UseNumber() {
cfg := adapter.iter.cfg.configBeforeFrozen
cfg.UseNumber = true
adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
}
// DisallowUnknownFields causes the Decoder to return an error when the destination
// is a struct and the input contains object keys which do not match any
// non-ignored, exported fields in the destination.
func (adapter *Decoder) DisallowUnknownFields() {
cfg := adapter.iter.cfg.configBeforeFrozen
cfg.DisallowUnknownFields = true
adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
}
// NewEncoder same as json.NewEncoder
func NewEncoder(writer io.Writer) *Encoder {
return ConfigDefault.NewEncoder(writer)
}
// Encoder same as json.Encoder
type Encoder struct {
stream *Stream
}
// Encode writes the JSON encoding of val to the underlying io.Writer, followed by a newline
func (adapter *Encoder) Encode(val interface{}) error {
adapter.stream.WriteVal(val)
adapter.stream.WriteRaw("\n")
adapter.stream.Flush()
return adapter.stream.Error
}
// SetIndent sets the indentation step. Prefix is not supported
func (adapter *Encoder) SetIndent(prefix, indent string) {
config := adapter.stream.cfg.configBeforeFrozen
config.IndentionStep = len(indent)
adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
}
// SetEscapeHTML controls HTML escaping, which is enabled by default; pass false to disable it
func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
config := adapter.stream.cfg.configBeforeFrozen
config.EscapeHTML = escapeHTML
adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
}
// Valid reports whether data is a valid JSON encoding.
func Valid(data []byte) bool {
return ConfigDefault.Valid(data)
}
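A short usage sketch of the Decoder/Encoder adapters defined above; the sample input, the two-space indent, and the printed values are arbitrary illustration choices:
```go
package main

import (
	"fmt"
	"os"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Stream-decode a sequence of JSON values, mirroring encoding/json's Decoder.
	dec := jsoniter.NewDecoder(strings.NewReader(`{"n":1} {"n":2}`))
	dec.UseNumber() // numbers decode into interface{} as Number, not float64
	for dec.More() {
		var v map[string]interface{}
		if err := dec.Decode(&v); err != nil {
			panic(err)
		}
		fmt.Println(v)
	}

	// Stream-encode with indentation, mirroring encoding/json's Encoder.
	enc := jsoniter.NewEncoder(os.Stdout)
	enc.SetIndent("", "  ")
	enc.SetEscapeHTML(false)
	_ = enc.Encode(map[string]string{"msg": "<hello>"})
}
```
Note that, as the adapter code above shows, UseNumber, DisallowUnknownFields, SetIndent, and SetEscapeHTML all work by copying and re-freezing the configuration rather than mutating shared state.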

325
any.go Normal file

@ -0,0 +1,325 @@
package jsoniter
import (
"errors"
"fmt"
"github.com/modern-go/reflect2"
"io"
"reflect"
"strconv"
"unsafe"
)
// Any is a generic representation of a JSON value.
// The lazy implementation holds the raw []byte and parses it on demand.
type Any interface {
LastError() error
ValueType() ValueType
MustBeValid() Any
ToBool() bool
ToInt() int
ToInt32() int32
ToInt64() int64
ToUint() uint
ToUint32() uint32
ToUint64() uint64
ToFloat32() float32
ToFloat64() float64
ToString() string
ToVal(val interface{})
Get(path ...interface{}) Any
Size() int
Keys() []string
GetInterface() interface{}
WriteTo(stream *Stream)
}
type baseAny struct{}
func (any *baseAny) Get(path ...interface{}) Any {
return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
}
func (any *baseAny) Size() int {
return 0
}
func (any *baseAny) Keys() []string {
return []string{}
}
func (any *baseAny) ToVal(obj interface{}) {
panic("not implemented")
}
// WrapInt32 turn int32 into Any interface
func WrapInt32(val int32) Any {
return &int32Any{baseAny{}, val}
}
// WrapInt64 turn int64 into Any interface
func WrapInt64(val int64) Any {
return &int64Any{baseAny{}, val}
}
// WrapUint32 turn uint32 into Any interface
func WrapUint32(val uint32) Any {
return &uint32Any{baseAny{}, val}
}
// WrapUint64 turn uint64 into Any interface
func WrapUint64(val uint64) Any {
return &uint64Any{baseAny{}, val}
}
// WrapFloat64 turn float64 into Any interface
func WrapFloat64(val float64) Any {
return &floatAny{baseAny{}, val}
}
// WrapString turn string into Any interface
func WrapString(val string) Any {
return &stringAny{baseAny{}, val}
}
// Wrap turn a go object into Any interface
func Wrap(val interface{}) Any {
if val == nil {
return &nilAny{}
}
asAny, isAny := val.(Any)
if isAny {
return asAny
}
typ := reflect2.TypeOf(val)
switch typ.Kind() {
case reflect.Slice:
return wrapArray(val)
case reflect.Struct:
return wrapStruct(val)
case reflect.Map:
return wrapMap(val)
case reflect.String:
return WrapString(val.(string))
case reflect.Int:
if strconv.IntSize == 32 {
return WrapInt32(int32(val.(int)))
}
return WrapInt64(int64(val.(int)))
case reflect.Int8:
return WrapInt32(int32(val.(int8)))
case reflect.Int16:
return WrapInt32(int32(val.(int16)))
case reflect.Int32:
return WrapInt32(val.(int32))
case reflect.Int64:
return WrapInt64(val.(int64))
case reflect.Uint:
if strconv.IntSize == 32 {
return WrapUint32(uint32(val.(uint)))
}
return WrapUint64(uint64(val.(uint)))
case reflect.Uintptr:
if ptrSize == 32 {
return WrapUint32(uint32(val.(uintptr)))
}
return WrapUint64(uint64(val.(uintptr)))
case reflect.Uint8:
return WrapUint32(uint32(val.(uint8)))
case reflect.Uint16:
return WrapUint32(uint32(val.(uint16)))
case reflect.Uint32:
return WrapUint32(uint32(val.(uint32)))
case reflect.Uint64:
return WrapUint64(val.(uint64))
case reflect.Float32:
return WrapFloat64(float64(val.(float32)))
case reflect.Float64:
return WrapFloat64(val.(float64))
case reflect.Bool:
if val.(bool) {
return &trueAny{}
}
return &falseAny{}
}
return &invalidAny{baseAny{}, fmt.Errorf("unsupported type: %v", typ)}
}
// ReadAny reads the next JSON element as an Any object. It is a better json.RawMessage.
func (iter *Iterator) ReadAny() Any {
return iter.readAny()
}
func (iter *Iterator) readAny() Any {
c := iter.nextToken()
switch c {
case '"':
iter.unreadByte()
return &stringAny{baseAny{}, iter.ReadString()}
case 'n':
iter.skipThreeBytes('u', 'l', 'l') // null
return &nilAny{}
case 't':
iter.skipThreeBytes('r', 'u', 'e') // true
return &trueAny{}
case 'f':
iter.skipFourBytes('a', 'l', 's', 'e') // false
return &falseAny{}
case '{':
return iter.readObjectAny()
case '[':
return iter.readArrayAny()
case '-':
return iter.readNumberAny(false)
case 0:
return &invalidAny{baseAny{}, errors.New("input is empty")}
default:
return iter.readNumberAny(true)
}
}
func (iter *Iterator) readNumberAny(positive bool) Any {
iter.startCapture(iter.head - 1)
iter.skipNumber()
lazyBuf := iter.stopCapture()
return &numberLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
}
func (iter *Iterator) readObjectAny() Any {
iter.startCapture(iter.head - 1)
iter.skipObject()
lazyBuf := iter.stopCapture()
return &objectLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
}
func (iter *Iterator) readArrayAny() Any {
iter.startCapture(iter.head - 1)
iter.skipArray()
lazyBuf := iter.stopCapture()
return &arrayLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
}
func locateObjectField(iter *Iterator, target string) []byte {
var found []byte
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
if field == target {
found = iter.SkipAndReturnBytes()
return false
}
iter.Skip()
return true
})
return found
}
func locateArrayElement(iter *Iterator, target int) []byte {
var found []byte
n := 0
iter.ReadArrayCB(func(iter *Iterator) bool {
if n == target {
found = iter.SkipAndReturnBytes()
return false
}
iter.Skip()
n++
return true
})
return found
}
func locatePath(iter *Iterator, path []interface{}) Any {
for i, pathKeyObj := range path {
switch pathKey := pathKeyObj.(type) {
case string:
valueBytes := locateObjectField(iter, pathKey)
if valueBytes == nil {
return newInvalidAny(path[i:])
}
iter.ResetBytes(valueBytes)
case int:
valueBytes := locateArrayElement(iter, pathKey)
if valueBytes == nil {
return newInvalidAny(path[i:])
}
iter.ResetBytes(valueBytes)
case int32:
if '*' == pathKey {
return iter.readAny().Get(path[i:]...)
}
return newInvalidAny(path[i:])
default:
return newInvalidAny(path[i:])
}
}
if iter.Error != nil && iter.Error != io.EOF {
return &invalidAny{baseAny{}, iter.Error}
}
return iter.readAny()
}
var anyType = reflect2.TypeOfPtr((*Any)(nil)).Elem()
func createDecoderOfAny(ctx *ctx, typ reflect2.Type) ValDecoder {
if typ == anyType {
return &directAnyCodec{}
}
if typ.Implements(anyType) {
return &anyCodec{
valType: typ,
}
}
return nil
}
func createEncoderOfAny(ctx *ctx, typ reflect2.Type) ValEncoder {
if typ == anyType {
return &directAnyCodec{}
}
if typ.Implements(anyType) {
return &anyCodec{
valType: typ,
}
}
return nil
}
type anyCodec struct {
valType reflect2.Type
}
func (codec *anyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
panic("not implemented")
}
func (codec *anyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
obj := codec.valType.UnsafeIndirect(ptr)
any := obj.(Any)
any.WriteTo(stream)
}
func (codec *anyCodec) IsEmpty(ptr unsafe.Pointer) bool {
obj := codec.valType.UnsafeIndirect(ptr)
any := obj.(Any)
return any.Size() == 0
}
type directAnyCodec struct {
}
func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*(*Any)(ptr) = iter.readAny()
}
func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
any := *(*Any)(ptr)
if any == nil {
stream.WriteNil()
return
}
any.WriteTo(stream)
}
func (codec *directAnyCodec) IsEmpty(ptr unsafe.Pointer) bool {
any := *(*Any)(ptr)
return any.Size() == 0
}
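To make the path handling above concrete, here is a small sketch (the sample JSON is invented) of how `Get` walks string keys, integer indexes, and the `'*'` wildcard handled by `locatePath`:
```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"users":[{"name":"a"},{"name":"b"}]}`)

	// A string selects an object field, an int selects an array element.
	fmt.Println(jsoniter.Get(data, "users", 1, "name").ToString()) // b

	// '*' is a rune (int32), so it hits the wildcard case and fans out
	// over every element, collecting the matches into a new array.
	fmt.Println(jsoniter.Get(data, "users", '*', "name").ToString()) // ["a","b"]

	// Wrap turns an existing Go value into the same Any interface.
	wrapped := jsoniter.Wrap([]int{10, 20, 30})
	fmt.Println(wrapped.Get(2).ToInt()) // 30
}
```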

278
any_array.go Normal file

@ -0,0 +1,278 @@
package jsoniter
import (
"reflect"
"unsafe"
)
type arrayLazyAny struct {
baseAny
cfg *frozenConfig
buf []byte
err error
}
func (any *arrayLazyAny) ValueType() ValueType {
return ArrayValue
}
func (any *arrayLazyAny) MustBeValid() Any {
return any
}
func (any *arrayLazyAny) LastError() error {
return any.err
}
func (any *arrayLazyAny) ToBool() bool {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
return iter.ReadArray()
}
func (any *arrayLazyAny) ToInt() int {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToInt32() int32 {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToInt64() int64 {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToUint() uint {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToUint32() uint32 {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToUint64() uint64 {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToFloat32() float32 {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToFloat64() float64 {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToString() string {
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *arrayLazyAny) ToVal(val interface{}) {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadVal(val)
}
func (any *arrayLazyAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
switch firstPath := path[0].(type) {
case int:
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
valueBytes := locateArrayElement(iter, firstPath)
if valueBytes == nil {
return newInvalidAny(path)
}
iter.ResetBytes(valueBytes)
return locatePath(iter, path[1:])
case int32:
if '*' == firstPath {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
arr := make([]Any, 0)
iter.ReadArrayCB(func(iter *Iterator) bool {
found := iter.readAny().Get(path[1:]...)
if found.ValueType() != InvalidValue {
arr = append(arr, found)
}
return true
})
return wrapArray(arr)
}
return newInvalidAny(path)
default:
return newInvalidAny(path)
}
}
func (any *arrayLazyAny) Size() int {
size := 0
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadArrayCB(func(iter *Iterator) bool {
size++
iter.Skip()
return true
})
return size
}
func (any *arrayLazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *arrayLazyAny) GetInterface() interface{} {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
return iter.Read()
}
type arrayAny struct {
baseAny
val reflect.Value
}
func wrapArray(val interface{}) *arrayAny {
return &arrayAny{baseAny{}, reflect.ValueOf(val)}
}
func (any *arrayAny) ValueType() ValueType {
return ArrayValue
}
func (any *arrayAny) MustBeValid() Any {
return any
}
func (any *arrayAny) LastError() error {
return nil
}
func (any *arrayAny) ToBool() bool {
return any.val.Len() != 0
}
func (any *arrayAny) ToInt() int {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToInt32() int32 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToInt64() int64 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToUint() uint {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToUint32() uint32 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToUint64() uint64 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToFloat32() float32 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToFloat64() float64 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToString() string {
str, _ := MarshalToString(any.val.Interface())
return str
}
func (any *arrayAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
switch firstPath := path[0].(type) {
case int:
if firstPath < 0 || firstPath >= any.val.Len() {
return newInvalidAny(path)
}
return Wrap(any.val.Index(firstPath).Interface())
case int32:
if '*' == firstPath {
mappedAll := make([]Any, 0)
for i := 0; i < any.val.Len(); i++ {
mapped := Wrap(any.val.Index(i).Interface()).Get(path[1:]...)
if mapped.ValueType() != InvalidValue {
mappedAll = append(mappedAll, mapped)
}
}
return wrapArray(mappedAll)
}
return newInvalidAny(path)
default:
return newInvalidAny(path)
}
}
func (any *arrayAny) Size() int {
return any.val.Len()
}
func (any *arrayAny) WriteTo(stream *Stream) {
stream.WriteVal(any.val)
}
func (any *arrayAny) GetInterface() interface{} {
return any.val.Interface()
}


@ -61,7 +61,11 @@ func (any *trueAny) GetInterface() interface{} {
}
func (any *trueAny) ValueType() ValueType {
return Bool
return BoolValue
}
func (any *trueAny) MustBeValid() Any {
return any
}
type falseAny struct {
@ -125,5 +129,9 @@ func (any *falseAny) GetInterface() interface{} {
}
func (any *falseAny) ValueType() ValueType {
return Bool
return BoolValue
}
func (any *falseAny) MustBeValid() Any {
return any
}

83
any_float.go Normal file

@ -0,0 +1,83 @@
package jsoniter
import (
"strconv"
)
type floatAny struct {
baseAny
val float64
}
func (any *floatAny) Parse() *Iterator {
return nil
}
func (any *floatAny) ValueType() ValueType {
return NumberValue
}
func (any *floatAny) MustBeValid() Any {
return any
}
func (any *floatAny) LastError() error {
return nil
}
func (any *floatAny) ToBool() bool {
return any.ToFloat64() != 0
}
func (any *floatAny) ToInt() int {
return int(any.val)
}
func (any *floatAny) ToInt32() int32 {
return int32(any.val)
}
func (any *floatAny) ToInt64() int64 {
return int64(any.val)
}
func (any *floatAny) ToUint() uint {
if any.val > 0 {
return uint(any.val)
}
return 0
}
func (any *floatAny) ToUint32() uint32 {
if any.val > 0 {
return uint32(any.val)
}
return 0
}
func (any *floatAny) ToUint64() uint64 {
if any.val > 0 {
return uint64(any.val)
}
return 0
}
func (any *floatAny) ToFloat32() float32 {
return float32(any.val)
}
func (any *floatAny) ToFloat64() float64 {
return any.val
}
func (any *floatAny) ToString() string {
return strconv.FormatFloat(any.val, 'E', -1, 64)
}
func (any *floatAny) WriteTo(stream *Stream) {
stream.WriteFloat64(any.val)
}
func (any *floatAny) GetInterface() interface{} {
return any.val
}


@ -14,7 +14,11 @@ func (any *int32Any) LastError() error {
}
func (any *int32Any) ValueType() ValueType {
return Number
return NumberValue
}
func (any *int32Any) MustBeValid() Any {
return any
}
func (any *int32Any) ToBool() bool {
@ -67,4 +71,4 @@ func (any *int32Any) Parse() *Iterator {
func (any *int32Any) GetInterface() interface{} {
return any.val
}
}

74
any_int64.go Normal file

@ -0,0 +1,74 @@
package jsoniter
import (
"strconv"
)
type int64Any struct {
baseAny
val int64
}
func (any *int64Any) LastError() error {
return nil
}
func (any *int64Any) ValueType() ValueType {
return NumberValue
}
func (any *int64Any) MustBeValid() Any {
return any
}
func (any *int64Any) ToBool() bool {
return any.val != 0
}
func (any *int64Any) ToInt() int {
return int(any.val)
}
func (any *int64Any) ToInt32() int32 {
return int32(any.val)
}
func (any *int64Any) ToInt64() int64 {
return any.val
}
func (any *int64Any) ToUint() uint {
return uint(any.val)
}
func (any *int64Any) ToUint32() uint32 {
return uint32(any.val)
}
func (any *int64Any) ToUint64() uint64 {
return uint64(any.val)
}
func (any *int64Any) ToFloat32() float32 {
return float32(any.val)
}
func (any *int64Any) ToFloat64() float64 {
return float64(any.val)
}
func (any *int64Any) ToString() string {
return strconv.FormatInt(any.val, 10)
}
func (any *int64Any) WriteTo(stream *Stream) {
stream.WriteInt64(any.val)
}
func (any *int64Any) Parse() *Iterator {
return nil
}
func (any *int64Any) GetInterface() interface{} {
return any.val
}


@ -7,12 +7,20 @@ type invalidAny struct {
err error
}
func newInvalidAny(path []interface{}) *invalidAny {
return &invalidAny{baseAny{}, fmt.Errorf("%v not found", path)}
}
func (any *invalidAny) LastError() error {
return any.err
}
func (any *invalidAny) ValueType() ValueType {
return Invalid
return InvalidValue
}
func (any *invalidAny) MustBeValid() Any {
panic(any.err)
}
func (any *invalidAny) ToBool() bool {
@ -61,9 +69,8 @@ func (any *invalidAny) WriteTo(stream *Stream) {
func (any *invalidAny) Get(path ...interface{}) Any {
if any.err == nil {
return &invalidAny{baseAny{}, fmt.Errorf("get %v from invalid", path)}
} else {
return &invalidAny{baseAny{}, fmt.Errorf("%v, get %v from invalid", any.err, path)}
}
return &invalidAny{baseAny{}, fmt.Errorf("%v, get %v from invalid", any.err, path)}
}
func (any *invalidAny) Parse() *Iterator {


@ -9,7 +9,11 @@ func (any *nilAny) LastError() error {
}
func (any *nilAny) ValueType() ValueType {
return Nil
return NilValue
}
func (any *nilAny) MustBeValid() Any {
return any
}
func (any *nilAny) ToBool() bool {
@ -62,4 +66,4 @@ func (any *nilAny) Parse() *Iterator {
func (any *nilAny) GetInterface() interface{} {
return nil
}
}

123
any_number.go Normal file

@ -0,0 +1,123 @@
package jsoniter
import (
"io"
"unsafe"
)
type numberLazyAny struct {
baseAny
cfg *frozenConfig
buf []byte
err error
}
func (any *numberLazyAny) ValueType() ValueType {
return NumberValue
}
func (any *numberLazyAny) MustBeValid() Any {
return any
}
func (any *numberLazyAny) LastError() error {
return any.err
}
func (any *numberLazyAny) ToBool() bool {
return any.ToFloat64() != 0
}
func (any *numberLazyAny) ToInt() int {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToInt32() int32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt32()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToInt64() int64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt64()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToUint() uint {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToUint32() uint32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint32()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToUint64() uint64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint64()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToFloat32() float32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadFloat32()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToFloat64() float64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadFloat64()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToString() string {
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *numberLazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *numberLazyAny) GetInterface() interface{} {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
return iter.Read()
}

374
any_object.go Normal file

@ -0,0 +1,374 @@
package jsoniter
import (
"reflect"
"unsafe"
)
type objectLazyAny struct {
baseAny
cfg *frozenConfig
buf []byte
err error
}
func (any *objectLazyAny) ValueType() ValueType {
return ObjectValue
}
func (any *objectLazyAny) MustBeValid() Any {
return any
}
func (any *objectLazyAny) LastError() error {
return any.err
}
func (any *objectLazyAny) ToBool() bool {
return true
}
func (any *objectLazyAny) ToInt() int {
return 0
}
func (any *objectLazyAny) ToInt32() int32 {
return 0
}
func (any *objectLazyAny) ToInt64() int64 {
return 0
}
func (any *objectLazyAny) ToUint() uint {
return 0
}
func (any *objectLazyAny) ToUint32() uint32 {
return 0
}
func (any *objectLazyAny) ToUint64() uint64 {
return 0
}
func (any *objectLazyAny) ToFloat32() float32 {
return 0
}
func (any *objectLazyAny) ToFloat64() float64 {
return 0
}
func (any *objectLazyAny) ToString() string {
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *objectLazyAny) ToVal(obj interface{}) {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadVal(obj)
}
func (any *objectLazyAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
switch firstPath := path[0].(type) {
case string:
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
valueBytes := locateObjectField(iter, firstPath)
if valueBytes == nil {
return newInvalidAny(path)
}
iter.ResetBytes(valueBytes)
return locatePath(iter, path[1:])
case int32:
if '*' == firstPath {
mappedAll := map[string]Any{}
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadMapCB(func(iter *Iterator, field string) bool {
mapped := locatePath(iter, path[1:])
if mapped.ValueType() != InvalidValue {
mappedAll[field] = mapped
}
return true
})
return wrapMap(mappedAll)
}
return newInvalidAny(path)
default:
return newInvalidAny(path)
}
}
func (any *objectLazyAny) Keys() []string {
keys := []string{}
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadMapCB(func(iter *Iterator, field string) bool {
iter.Skip()
keys = append(keys, field)
return true
})
return keys
}
func (any *objectLazyAny) Size() int {
size := 0
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
iter.Skip()
size++
return true
})
return size
}
func (any *objectLazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *objectLazyAny) GetInterface() interface{} {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
return iter.Read()
}
type objectAny struct {
baseAny
err error
val reflect.Value
}
func wrapStruct(val interface{}) *objectAny {
return &objectAny{baseAny{}, nil, reflect.ValueOf(val)}
}
func (any *objectAny) ValueType() ValueType {
return ObjectValue
}
func (any *objectAny) MustBeValid() Any {
return any
}
func (any *objectAny) Parse() *Iterator {
return nil
}
func (any *objectAny) LastError() error {
return any.err
}
func (any *objectAny) ToBool() bool {
return any.val.NumField() != 0
}
func (any *objectAny) ToInt() int {
return 0
}
func (any *objectAny) ToInt32() int32 {
return 0
}
func (any *objectAny) ToInt64() int64 {
return 0
}
func (any *objectAny) ToUint() uint {
return 0
}
func (any *objectAny) ToUint32() uint32 {
return 0
}
func (any *objectAny) ToUint64() uint64 {
return 0
}
func (any *objectAny) ToFloat32() float32 {
return 0
}
func (any *objectAny) ToFloat64() float64 {
return 0
}
func (any *objectAny) ToString() string {
str, err := MarshalToString(any.val.Interface())
any.err = err
return str
}
func (any *objectAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
switch firstPath := path[0].(type) {
case string:
field := any.val.FieldByName(firstPath)
if !field.IsValid() {
return newInvalidAny(path)
}
return Wrap(field.Interface())
case int32:
if '*' == firstPath {
mappedAll := map[string]Any{}
for i := 0; i < any.val.NumField(); i++ {
field := any.val.Field(i)
if field.CanInterface() {
mapped := Wrap(field.Interface()).Get(path[1:]...)
if mapped.ValueType() != InvalidValue {
mappedAll[any.val.Type().Field(i).Name] = mapped
}
}
}
return wrapMap(mappedAll)
}
return newInvalidAny(path)
default:
return newInvalidAny(path)
}
}
func (any *objectAny) Keys() []string {
keys := make([]string, 0, any.val.NumField())
for i := 0; i < any.val.NumField(); i++ {
keys = append(keys, any.val.Type().Field(i).Name)
}
return keys
}
func (any *objectAny) Size() int {
return any.val.NumField()
}
func (any *objectAny) WriteTo(stream *Stream) {
stream.WriteVal(any.val)
}
func (any *objectAny) GetInterface() interface{} {
return any.val.Interface()
}
type mapAny struct {
baseAny
err error
val reflect.Value
}
func wrapMap(val interface{}) *mapAny {
return &mapAny{baseAny{}, nil, reflect.ValueOf(val)}
}
func (any *mapAny) ValueType() ValueType {
return ObjectValue
}
func (any *mapAny) MustBeValid() Any {
return any
}
func (any *mapAny) Parse() *Iterator {
return nil
}
func (any *mapAny) LastError() error {
return any.err
}
func (any *mapAny) ToBool() bool {
return true
}
func (any *mapAny) ToInt() int {
return 0
}
func (any *mapAny) ToInt32() int32 {
return 0
}
func (any *mapAny) ToInt64() int64 {
return 0
}
func (any *mapAny) ToUint() uint {
return 0
}
func (any *mapAny) ToUint32() uint32 {
return 0
}
func (any *mapAny) ToUint64() uint64 {
return 0
}
func (any *mapAny) ToFloat32() float32 {
return 0
}
func (any *mapAny) ToFloat64() float64 {
return 0
}
func (any *mapAny) ToString() string {
str, err := MarshalToString(any.val.Interface())
any.err = err
return str
}
func (any *mapAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
switch firstPath := path[0].(type) {
case int32:
if '*' == firstPath {
mappedAll := map[string]Any{}
for _, key := range any.val.MapKeys() {
keyAsStr := key.String()
element := Wrap(any.val.MapIndex(key).Interface())
mapped := element.Get(path[1:]...)
if mapped.ValueType() != InvalidValue {
mappedAll[keyAsStr] = mapped
}
}
return wrapMap(mappedAll)
}
return newInvalidAny(path)
default:
value := any.val.MapIndex(reflect.ValueOf(firstPath))
if !value.IsValid() {
return newInvalidAny(path)
}
return Wrap(value.Interface())
}
}
func (any *mapAny) Keys() []string {
keys := make([]string, 0, any.val.Len())
for _, key := range any.val.MapKeys() {
keys = append(keys, key.String())
}
return keys
}
func (any *mapAny) Size() int {
return any.val.Len()
}
func (any *mapAny) WriteTo(stream *Stream) {
stream.WriteVal(any.val)
}
func (any *mapAny) GetInterface() interface{} {
return any.val.Interface()
}

166
any_str.go Normal file

@ -0,0 +1,166 @@
package jsoniter
import (
"fmt"
"strconv"
)
type stringAny struct {
baseAny
val string
}
func (any *stringAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
}
func (any *stringAny) Parse() *Iterator {
return nil
}
func (any *stringAny) ValueType() ValueType {
return StringValue
}
func (any *stringAny) MustBeValid() Any {
return any
}
func (any *stringAny) LastError() error {
return nil
}
func (any *stringAny) ToBool() bool {
str := any.ToString()
if str == "0" {
return false
}
for _, c := range str {
switch c {
case ' ', '\n', '\r', '\t':
default:
return true
}
}
return false
}
func (any *stringAny) ToInt() int {
return int(any.ToInt64())
}
func (any *stringAny) ToInt32() int32 {
return int32(any.ToInt64())
}
func (any *stringAny) ToInt64() int64 {
if any.val == "" {
return 0
}
flag := 1
startPos := 0
endPos := 0
if any.val[0] == '+' || any.val[0] == '-' {
startPos = 1
}
if any.val[0] == '-' {
flag = -1
}
for i := startPos; i < len(any.val); i++ {
if any.val[i] >= '0' && any.val[i] <= '9' {
endPos = i + 1
} else {
break
}
}
parsed, _ := strconv.ParseInt(any.val[startPos:endPos], 10, 64)
return int64(flag) * parsed
}
func (any *stringAny) ToUint() uint {
return uint(any.ToUint64())
}
func (any *stringAny) ToUint32() uint32 {
return uint32(any.ToUint64())
}
func (any *stringAny) ToUint64() uint64 {
if any.val == "" {
return 0
}
startPos := 0
endPos := 0
if any.val[0] == '-' {
return 0
}
if any.val[0] == '+' {
startPos = 1
}
for i := startPos; i < len(any.val); i++ {
if any.val[i] >= '0' && any.val[i] <= '9' {
endPos = i + 1
} else {
break
}
}
parsed, _ := strconv.ParseUint(any.val[startPos:endPos], 10, 64)
return parsed
}
func (any *stringAny) ToFloat32() float32 {
return float32(any.ToFloat64())
}
func (any *stringAny) ToFloat64() float64 {
if len(any.val) == 0 {
return 0
}
// first char invalid
if any.val[0] != '+' && any.val[0] != '-' && (any.val[0] > '9' || any.val[0] < '0') {
return 0
}
// extract valid num expression from string
// eg 123true => 123, -12.12xxa => -12.12
endPos := 1
for i := 1; i < len(any.val); i++ {
if any.val[i] == '.' || any.val[i] == 'e' || any.val[i] == 'E' || any.val[i] == '+' || any.val[i] == '-' {
endPos = i + 1
continue
}
// end position is the first char which is not digit
if any.val[i] >= '0' && any.val[i] <= '9' {
endPos = i + 1
} else {
endPos = i
break
}
}
parsed, _ := strconv.ParseFloat(any.val[:endPos], 64)
return parsed
}
func (any *stringAny) ToString() string {
return any.val
}
func (any *stringAny) WriteTo(stream *Stream) {
stream.WriteString(any.val)
}
func (any *stringAny) GetInterface() interface{} {
return any.val
}
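The lenient conversions above only consume the leading numeric prefix of the string and ignore the rest, which is what the conversion tables in the tests below rely on. A quick sketch (the inputs are arbitrary examples):
```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Leading digits are parsed, trailing junk is ignored.
	fmt.Println(jsoniter.WrapString("123true").ToInt())       // 123
	fmt.Println(jsoniter.WrapString("-99.9true").ToFloat64()) // -99.9

	// Negative input yields zero for the unsigned conversions.
	fmt.Println(jsoniter.WrapString("-12").ToUint64()) // 0

	// ToBool: "0" and pure whitespace are false, any other non-empty string is true.
	fmt.Println(jsoniter.WrapString("0").ToBool())     // false
	fmt.Println(jsoniter.WrapString("false").ToBool()) // true
}
```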


@ -0,0 +1,123 @@
package any_tests
import (
"testing"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_read_empty_array_as_any(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte("[]"))
should.Equal(jsoniter.ArrayValue, any.Get().ValueType())
should.Equal(jsoniter.InvalidValue, any.Get(0.3).ValueType())
should.Equal(0, any.Size())
should.Equal(jsoniter.ArrayValue, any.ValueType())
should.Nil(any.LastError())
should.Equal(0, any.ToInt())
should.Equal(int32(0), any.ToInt32())
should.Equal(int64(0), any.ToInt64())
should.Equal(uint(0), any.ToUint())
should.Equal(uint32(0), any.ToUint32())
should.Equal(uint64(0), any.ToUint64())
should.Equal(float32(0), any.ToFloat32())
should.Equal(float64(0), any.ToFloat64())
}
func Test_read_one_element_array_as_any(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte("[1]"))
should.Equal(1, any.Size())
}
func Test_read_two_element_array_as_any(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte("[1,2]"))
should.Equal(1, any.Get(0).ToInt())
should.Equal(2, any.Size())
should.True(any.ToBool())
should.Equal(1, any.ToInt())
should.Equal([]interface{}{float64(1), float64(2)}, any.GetInterface())
stream := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 32)
any.WriteTo(stream)
should.Equal("[1,2]", string(stream.Buffer()))
arr := []int{}
any.ToVal(&arr)
should.Equal([]int{1, 2}, arr)
}
func Test_wrap_array_and_convert_to_any(t *testing.T) {
should := require.New(t)
any := jsoniter.Wrap([]int{1, 2, 3})
any2 := jsoniter.Wrap([]int{})
should.Equal("[1,2,3]", any.ToString())
should.True(any.ToBool())
should.False(any2.ToBool())
should.Equal(1, any.ToInt())
should.Equal(0, any2.ToInt())
should.Equal(int32(1), any.ToInt32())
should.Equal(int32(0), any2.ToInt32())
should.Equal(int64(1), any.ToInt64())
should.Equal(int64(0), any2.ToInt64())
should.Equal(uint(1), any.ToUint())
should.Equal(uint(0), any2.ToUint())
should.Equal(uint32(1), any.ToUint32())
should.Equal(uint32(0), any2.ToUint32())
should.Equal(uint64(1), any.ToUint64())
should.Equal(uint64(0), any2.ToUint64())
should.Equal(float32(1), any.ToFloat32())
should.Equal(float32(0), any2.ToFloat32())
should.Equal(float64(1), any.ToFloat64())
should.Equal(float64(0), any2.ToFloat64())
should.Equal(3, any.Size())
should.Equal(0, any2.Size())
var i interface{} = []int{1, 2, 3}
should.Equal(i, any.GetInterface())
}
func Test_array_lazy_any_get(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte("[1,[2,3],4]"))
should.Equal(3, any.Get(1, 1).ToInt())
should.Equal("[1,[2,3],4]", any.ToString())
}
func Test_array_lazy_any_get_all(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte("[[1],[2],[3,4]]"))
should.Equal("[1,2,3]", any.Get('*', 0).ToString())
any = jsoniter.Get([]byte("[[[1],[2],[3,4]]]"), 0, '*', 0)
should.Equal("[1,2,3]", any.ToString())
}
func Test_array_wrapper_any_get_all(t *testing.T) {
should := require.New(t)
any := jsoniter.Wrap([][]int{
{1, 2},
{3, 4},
{5, 6},
})
should.Equal("[1,3,5]", any.Get('*', 0).ToString())
should.Equal(jsoniter.ArrayValue, any.ValueType())
should.True(any.ToBool())
should.Equal(1, any.Get(0, 0).ToInt())
}
func Test_array_lazy_any_get_invalid(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte("[]"))
should.Equal(jsoniter.InvalidValue, any.Get(1, 1).ValueType())
should.NotNil(any.Get(1, 1).LastError())
should.Equal(jsoniter.InvalidValue, any.Get("1").ValueType())
should.NotNil(any.Get("1").LastError())
}
func Test_invalid_array(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte("["), 0)
should.Equal(jsoniter.InvalidValue, any.ValueType())
}


@ -0,0 +1,65 @@
package any_tests
import (
"fmt"
"testing"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
var boolConvertMap = map[string]bool{
"null": false,
"true": true,
"false": false,
`"true"`: true,
`"false"`: true,
"123": true,
`"123"`: true,
"0": false,
`"0"`: false,
"-1": true,
`"-1"`: true,
"1.1": true,
"0.0": false,
"-1.1": true,
`""`: false,
"[1,2]": true,
"[]": false,
"{}": true,
`{"abc":1}`: true,
}
func Test_read_bool_as_any(t *testing.T) {
should := require.New(t)
var any jsoniter.Any
for k, v := range boolConvertMap {
any = jsoniter.Get([]byte(k))
if v {
should.True(any.ToBool(), fmt.Sprintf("origin val is %v", k))
} else {
should.False(any.ToBool(), fmt.Sprintf("origin val is %v", k))
}
}
}
func Test_write_bool_to_stream(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte("true"))
stream := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 32)
any.WriteTo(stream)
should.Equal("true", string(stream.Buffer()))
should.Equal(any.ValueType(), jsoniter.BoolValue)
any = jsoniter.Get([]byte("false"))
stream = jsoniter.NewStream(jsoniter.ConfigDefault, nil, 32)
any.WriteTo(stream)
should.Equal("false", string(stream.Buffer()))
should.Equal(any.ValueType(), jsoniter.BoolValue)
}


@ -0,0 +1,101 @@
package any_tests
import (
"testing"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
var floatConvertMap = map[string]float64{
"null": 0,
"true": 1,
"false": 0,
`"true"`: 0,
`"false"`: 0,
"1e1": 10,
"1e+1": 10,
"1e-1": .1,
"1E1": 10,
"1E+1": 10,
"1E-1": .1,
"-1e1": -10,
"-1e+1": -10,
"-1e-1": -.1,
"-1E1": -10,
"-1E+1": -10,
"-1E-1": -.1,
`"1e1"`: 10,
`"1e+1"`: 10,
`"1e-1"`: .1,
`"1E1"`: 10,
`"1E+1"`: 10,
`"1E-1"`: .1,
`"-1e1"`: -10,
`"-1e+1"`: -10,
`"-1e-1"`: -.1,
`"-1E1"`: -10,
`"-1E+1"`: -10,
`"-1E-1"`: -.1,
"123": 123,
`"123true"`: 123,
`"+"`: 0,
`"-"`: 0,
`"-123true"`: -123,
`"-99.9true"`: -99.9,
"0": 0,
`"0"`: 0,
"-1": -1,
"1.1": 1.1,
"0.0": 0,
"-1.1": -1.1,
`"+1.1"`: 1.1,
`""`: 0,
"[1,2]": 1,
"[]": 0,
"{}": 0,
`{"abc":1}`: 0,
}
func Test_read_any_to_float(t *testing.T) {
should := require.New(t)
for k, v := range floatConvertMap {
any := jsoniter.Get([]byte(k))
should.Equal(float64(v), any.ToFloat64(), "the original val is "+k)
}
for k, v := range floatConvertMap {
any := jsoniter.Get([]byte(k))
should.Equal(float32(v), any.ToFloat32(), "the original val is "+k)
}
}
func Test_read_float_to_any(t *testing.T) {
should := require.New(t)
any := jsoniter.WrapFloat64(12.3)
anyFloat64 := float64(12.3)
any2 := jsoniter.WrapFloat64(-1.1)
should.Equal(float64(12.3), any.ToFloat64())
should.True(any.ToBool())
should.Equal(float32(anyFloat64), any.ToFloat32())
should.Equal(int(anyFloat64), any.ToInt())
should.Equal(int32(anyFloat64), any.ToInt32())
should.Equal(int64(anyFloat64), any.ToInt64())
should.Equal(uint(anyFloat64), any.ToUint())
should.Equal(uint32(anyFloat64), any.ToUint32())
should.Equal(uint64(anyFloat64), any.ToUint64())
should.Equal(uint(0), any2.ToUint())
should.Equal(uint32(0), any2.ToUint32())
should.Equal(uint64(0), any2.ToUint64())
should.Equal(any.ValueType(), jsoniter.NumberValue)
should.Equal("1.23E+01", any.ToString())
}


@ -0,0 +1,198 @@
package any_tests
import (
"fmt"
"testing"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
var intConvertMap = map[string]int{
"null": 0,
"321.1": 321,
"-321.1": -321,
`"1.1"`: 1,
`"-321.1"`: -321,
"0.0": 0,
"0": 0,
`"0"`: 0,
`"0.0"`: 0,
"-1.1": -1,
"true": 1,
"false": 0,
`"true"`: 0,
`"false"`: 0,
`"true123"`: 0,
`"123true"`: 123,
`"-123true"`: -123,
`"1.2332e6"`: 1,
`""`: 0,
"+": 0,
"-": 0,
"[]": 0,
"[1,2]": 1,
`["1","2"]`: 1,
// object in php cannot convert to int
"{}": 0,
}
func Test_read_any_to_int(t *testing.T) {
should := require.New(t)
// int
for k, v := range intConvertMap {
any := jsoniter.Get([]byte(k))
should.Equal(v, any.ToInt(), fmt.Sprintf("origin val %v", k))
}
// int32
for k, v := range intConvertMap {
any := jsoniter.Get([]byte(k))
should.Equal(int32(v), any.ToInt32(), fmt.Sprintf("original val is %v", k))
}
// int64
for k, v := range intConvertMap {
any := jsoniter.Get([]byte(k))
should.Equal(int64(v), any.ToInt64(), fmt.Sprintf("original val is %v", k))
}
}
var uintConvertMap = map[string]int{
"null": 0,
"321.1": 321,
`"1.1"`: 1,
`"-123.1"`: 0,
"0.0": 0,
"0": 0,
`"0"`: 0,
`"0.0"`: 0,
`"00.0"`: 0,
"true": 1,
"false": 0,
`"true"`: 0,
`"false"`: 0,
`"true123"`: 0,
`"+1"`: 1,
`"123true"`: 123,
`"-123true"`: 0,
`"1.2332e6"`: 1,
`""`: 0,
"+": 0,
"-": 0,
".": 0,
"[]": 0,
"[1,2]": 1,
"{}": 0,
"{1,2}": 0,
"-1.1": 0,
"-321.1": 0,
}
func Test_read_any_to_uint(t *testing.T) {
should := require.New(t)
for k, v := range uintConvertMap {
any := jsoniter.Get([]byte(k))
should.Equal(uint64(v), any.ToUint64(), fmt.Sprintf("origin val %v", k))
}
for k, v := range uintConvertMap {
any := jsoniter.Get([]byte(k))
should.Equal(uint32(v), any.ToUint32(), fmt.Sprintf("origin val %v", k))
}
for k, v := range uintConvertMap {
any := jsoniter.Get([]byte(k))
should.Equal(uint(v), any.ToUint(), fmt.Sprintf("origin val %v", k))
}
}
func Test_read_int64_to_any(t *testing.T) {
should := require.New(t)
any := jsoniter.WrapInt64(12345)
should.Equal(12345, any.ToInt())
should.Equal(int32(12345), any.ToInt32())
should.Equal(int64(12345), any.ToInt64())
should.Equal(uint(12345), any.ToUint())
should.Equal(uint32(12345), any.ToUint32())
should.Equal(uint64(12345), any.ToUint64())
should.Equal(float32(12345), any.ToFloat32())
should.Equal(float64(12345), any.ToFloat64())
should.Equal("12345", any.ToString())
should.Equal(true, any.ToBool())
should.Equal(any.ValueType(), jsoniter.NumberValue)
stream := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 32)
any.WriteTo(stream)
should.Equal("12345", string(stream.Buffer()))
}
func Test_read_int32_to_any(t *testing.T) {
should := require.New(t)
any := jsoniter.WrapInt32(12345)
should.Equal(12345, any.ToInt())
should.Equal(int32(12345), any.ToInt32())
should.Equal(int64(12345), any.ToInt64())
should.Equal(uint(12345), any.ToUint())
should.Equal(uint32(12345), any.ToUint32())
should.Equal(uint64(12345), any.ToUint64())
should.Equal(float32(12345), any.ToFloat32())
should.Equal(float64(12345), any.ToFloat64())
should.Equal("12345", any.ToString())
should.Equal(true, any.ToBool())
should.Equal(any.ValueType(), jsoniter.NumberValue)
stream := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 32)
any.WriteTo(stream)
should.Equal("12345", string(stream.Buffer()))
}
func Test_read_uint32_to_any(t *testing.T) {
should := require.New(t)
any := jsoniter.WrapUint32(12345)
should.Equal(12345, any.ToInt())
should.Equal(int32(12345), any.ToInt32())
should.Equal(int64(12345), any.ToInt64())
should.Equal(uint(12345), any.ToUint())
should.Equal(uint32(12345), any.ToUint32())
should.Equal(uint64(12345), any.ToUint64())
should.Equal(float32(12345), any.ToFloat32())
should.Equal(float64(12345), any.ToFloat64())
should.Equal("12345", any.ToString())
should.Equal(true, any.ToBool())
should.Equal(any.ValueType(), jsoniter.NumberValue)
stream := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 32)
any.WriteTo(stream)
should.Equal("12345", string(stream.Buffer()))
}
func Test_read_uint64_to_any(t *testing.T) {
should := require.New(t)
any := jsoniter.WrapUint64(12345)
should.Equal(12345, any.ToInt())
should.Equal(int32(12345), any.ToInt32())
should.Equal(int64(12345), any.ToInt64())
should.Equal(uint(12345), any.ToUint())
should.Equal(uint32(12345), any.ToUint32())
should.Equal(uint64(12345), any.ToUint64())
should.Equal(float32(12345), any.ToFloat32())
should.Equal(float64(12345), any.ToFloat64())
should.Equal("12345", any.ToString())
should.Equal(true, any.ToBool())
should.Equal(any.ValueType(), jsoniter.NumberValue)
stream := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 32)
any.WriteTo(stream)
should.Equal("12345", string(stream.Buffer()))
stream = jsoniter.NewStream(jsoniter.ConfigDefault, nil, 32)
stream.WriteUint(uint(123))
should.Equal("123", string(stream.Buffer()))
}
func Test_int_lazy_any_get(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte("1234"))
// panic!!
//should.Equal(any.LastError(), io.EOF)
should.Equal(jsoniter.InvalidValue, any.Get(1, "2").ValueType())
}


@ -0,0 +1,28 @@
package any_tests
import (
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"testing"
)
func Test_wrap_map(t *testing.T) {
should := require.New(t)
any := jsoniter.Wrap(map[string]string{"Field1": "hello"})
should.Equal("hello", any.Get("Field1").ToString())
any = jsoniter.Wrap(map[string]string{"Field1": "hello"})
should.Equal(1, any.Size())
}
func Test_map_wrapper_any_get_all(t *testing.T) {
should := require.New(t)
any := jsoniter.Wrap(map[string][]int{"Field1": {1, 2}})
should.Equal(`{"Field1":1}`, any.Get('*', 0).ToString())
should.Contains(any.Keys(), "Field1")
// map write to
stream := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 0)
any.WriteTo(stream)
// TODO cannot pass
//should.Equal(string(stream.buf), "")
}


@ -0,0 +1,16 @@
package any_tests
import (
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"testing"
)
func Test_read_null_as_any(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte(`null`))
should.Equal(0, any.ToInt())
should.Equal(float64(0), any.ToFloat64())
should.Equal("", any.ToString())
should.False(any.ToBool())
}


@ -0,0 +1,121 @@
package any_tests
import (
"testing"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_read_object_as_any(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte(`{"a":"stream","c":"d"}`))
should.Equal(`{"a":"stream","c":"d"}`, any.ToString())
// partial parse
should.Equal("stream", any.Get("a").ToString())
should.Equal("d", any.Get("c").ToString())
should.Equal(2, len(any.Keys()))
any = jsoniter.Get([]byte(`{"a":"stream","c":"d"}`))
// full parse
should.Equal(2, len(any.Keys()))
should.Equal(2, any.Size())
should.True(any.ToBool())
should.Equal(0, any.ToInt())
should.Equal(jsoniter.ObjectValue, any.ValueType())
should.Nil(any.LastError())
obj := struct {
A string
}{}
any.ToVal(&obj)
should.Equal("stream", obj.A)
}
func Test_object_lazy_any_get(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte(`{"a":{"stream":{"c":"d"}}}`))
should.Equal("d", any.Get("a", "stream", "c").ToString())
}
func Test_object_lazy_any_get_all(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte(`{"a":[0],"stream":[1]}`))
should.Contains(any.Get('*', 0).ToString(), `"a":0`)
}
func Test_object_lazy_any_get_invalid(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte(`{}`))
should.Equal(jsoniter.InvalidValue, any.Get("a", "stream", "c").ValueType())
should.Equal(jsoniter.InvalidValue, any.Get(1).ValueType())
}
func Test_wrap_map_and_convert_to_any(t *testing.T) {
should := require.New(t)
any := jsoniter.Wrap(map[string]interface{}{"a": 1})
should.True(any.ToBool())
should.Equal(0, any.ToInt())
should.Equal(int32(0), any.ToInt32())
should.Equal(int64(0), any.ToInt64())
should.Equal(float32(0), any.ToFloat32())
should.Equal(float64(0), any.ToFloat64())
should.Equal(uint(0), any.ToUint())
should.Equal(uint32(0), any.ToUint32())
should.Equal(uint64(0), any.ToUint64())
}
func Test_wrap_object_and_convert_to_any(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 string
field2 string
}
any := jsoniter.Wrap(TestObject{"hello", "world"})
should.Equal("hello", any.Get("Field1").ToString())
any = jsoniter.Wrap(TestObject{"hello", "world"})
should.Equal(2, any.Size())
should.Equal(`{"Field1":"hello"}`, any.Get('*').ToString())
should.Equal(0, any.ToInt())
should.Equal(int32(0), any.ToInt32())
should.Equal(int64(0), any.ToInt64())
should.Equal(float32(0), any.ToFloat32())
should.Equal(float64(0), any.ToFloat64())
should.Equal(uint(0), any.ToUint())
should.Equal(uint32(0), any.ToUint32())
should.Equal(uint64(0), any.ToUint64())
should.True(any.ToBool())
should.Equal(`{"Field1":"hello"}`, any.ToString())
// cannot pass!
//stream := NewStream(ConfigDefault, nil, 32)
//any.WriteTo(stream)
//should.Equal(`{"Field1":"hello"}`, string(stream.Buffer()))
// cannot pass!
}
func Test_any_within_struct(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 jsoniter.Any
Field2 jsoniter.Any
}
obj := TestObject{}
err := jsoniter.UnmarshalFromString(`{"Field1": "hello", "Field2": [1,2,3]}`, &obj)
should.Nil(err)
should.Equal("hello", obj.Field1.ToString())
should.Equal("[1,2,3]", obj.Field2.ToString())
}
func Test_object_wrapper_any_get_all(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 []int
Field2 []int
}
any := jsoniter.Wrap(TestObject{[]int{1, 2}, []int{3, 4}})
should.Contains(any.Get('*', 0).ToString(), `"Field2":3`)
should.Contains(any.Keys(), "Field1")
should.Contains(any.Keys(), "Field2")
should.NotContains(any.Keys(), "Field3")
}


@ -0,0 +1,58 @@
package any_tests
import (
"testing"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
var stringConvertMap = map[string]string{
"null": "",
"321.1": "321.1",
`"1.1"`: "1.1",
`"-123.1"`: "-123.1",
"0.0": "0.0",
"0": "0",
`"0"`: "0",
`"0.0"`: "0.0",
`"00.0"`: "00.0",
"true": "true",
"false": "false",
`"true"`: "true",
`"false"`: "false",
`"true123"`: "true123",
`"+1"`: "+1",
"[]": "[]",
"[1,2]": "[1,2]",
"{}": "{}",
`{"a":1, "stream":true}`: `{"a":1, "stream":true}`,
}
func Test_read_any_to_string(t *testing.T) {
should := require.New(t)
for k, v := range stringConvertMap {
any := jsoniter.Get([]byte(k))
should.Equal(v, any.ToString(), "original val "+k)
}
}
func Test_read_string_as_any(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte(`"hello"`))
should.Equal("hello", any.ToString())
should.True(any.ToBool())
any = jsoniter.Get([]byte(`" "`))
should.False(any.ToBool())
any = jsoniter.Get([]byte(`"false"`))
should.True(any.ToBool())
any = jsoniter.Get([]byte(`"123"`))
should.Equal(123, any.ToInt())
}
func Test_wrap_string(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte("-32000")).MustBeValid()
should.Equal(-32000, any.ToInt())
should.NoError(any.LastError())
}


@ -0,0 +1,72 @@
package any_tests
import (
"testing"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
// if MustBeValid is useless, just drop this test
func Test_must_be_valid(t *testing.T) {
should := require.New(t)
any := jsoniter.Get([]byte("123"))
should.Equal(any.MustBeValid().ToInt(), 123)
any = jsoniter.Wrap(int8(10))
should.Equal(any.MustBeValid().ToInt(), 10)
any = jsoniter.Wrap(int16(10))
should.Equal(any.MustBeValid().ToInt(), 10)
any = jsoniter.Wrap(int32(10))
should.Equal(any.MustBeValid().ToInt(), 10)
any = jsoniter.Wrap(int64(10))
should.Equal(any.MustBeValid().ToInt(), 10)
any = jsoniter.Wrap(uint(10))
should.Equal(any.MustBeValid().ToInt(), 10)
any = jsoniter.Wrap(uint8(10))
should.Equal(any.MustBeValid().ToInt(), 10)
any = jsoniter.Wrap(uint16(10))
should.Equal(any.MustBeValid().ToInt(), 10)
any = jsoniter.Wrap(uint32(10))
should.Equal(any.MustBeValid().ToInt(), 10)
any = jsoniter.Wrap(uint64(10))
should.Equal(any.MustBeValid().ToInt(), 10)
any = jsoniter.Wrap(float32(10))
should.Equal(any.MustBeValid().ToFloat64(), float64(10))
any = jsoniter.Wrap(float64(10))
should.Equal(any.MustBeValid().ToFloat64(), float64(10))
any = jsoniter.Wrap(true)
should.Equal(any.MustBeValid().ToFloat64(), float64(1))
any = jsoniter.Wrap(false)
should.Equal(any.MustBeValid().ToFloat64(), float64(0))
any = jsoniter.Wrap(nil)
should.Equal(any.MustBeValid().ToFloat64(), float64(0))
any = jsoniter.Wrap(struct{ age int }{age: 1})
should.Equal(any.MustBeValid().ToFloat64(), float64(0))
any = jsoniter.Wrap(map[string]interface{}{"abc": 1})
should.Equal(any.MustBeValid().ToFloat64(), float64(0))
any = jsoniter.Wrap("abc")
should.Equal(any.MustBeValid().ToFloat64(), float64(0))
any = jsoniter.Wrap([]int{})
should.Equal(any.MustBeValid().ToFloat64(), float64(0))
any = jsoniter.Wrap([]int{1, 2})
should.Equal(any.MustBeValid().ToFloat64(), float64(1))
}


@ -0,0 +1,119 @@
package any_tests
import (
"testing"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_wrap_and_valuetype_everything(t *testing.T) {
should := require.New(t)
var i interface{}
any := jsoniter.Get([]byte("123"))
// default of number type is float64
i = float64(123)
should.Equal(i, any.GetInterface())
any = jsoniter.Wrap(int8(10))
should.Equal(any.ValueType(), jsoniter.NumberValue)
should.Equal(any.LastError(), nil)
// GetInterface does not return an int8-typed interface here
// i = int8(10)
// should.Equal(i, any.GetInterface())
any = jsoniter.Wrap(int16(10))
should.Equal(any.ValueType(), jsoniter.NumberValue)
should.Equal(any.LastError(), nil)
//i = int16(10)
//should.Equal(i, any.GetInterface())
any = jsoniter.Wrap(int32(10))
should.Equal(any.ValueType(), jsoniter.NumberValue)
should.Equal(any.LastError(), nil)
i = int32(10)
should.Equal(i, any.GetInterface())
any = jsoniter.Wrap(int64(10))
should.Equal(any.ValueType(), jsoniter.NumberValue)
should.Equal(any.LastError(), nil)
i = int64(10)
should.Equal(i, any.GetInterface())
any = jsoniter.Wrap(uint(10))
should.Equal(any.ValueType(), jsoniter.NumberValue)
should.Equal(any.LastError(), nil)
// not equal
//i = uint(10)
//should.Equal(i, any.GetInterface())
any = jsoniter.Wrap(uint8(10))
should.Equal(any.ValueType(), jsoniter.NumberValue)
should.Equal(any.LastError(), nil)
// not equal
// i = uint8(10)
// should.Equal(i, any.GetInterface())
any = jsoniter.Wrap(uint16(10))
should.Equal(any.ValueType(), jsoniter.NumberValue)
should.Equal(any.LastError(), nil)
any = jsoniter.Wrap(uint32(10))
should.Equal(any.ValueType(), jsoniter.NumberValue)
should.Equal(any.LastError(), nil)
i = uint32(10)
should.Equal(i, any.GetInterface())
any = jsoniter.Wrap(uint64(10))
should.Equal(any.ValueType(), jsoniter.NumberValue)
should.Equal(any.LastError(), nil)
i = uint64(10)
should.Equal(i, any.GetInterface())
any = jsoniter.Wrap(float32(10))
should.Equal(any.ValueType(), jsoniter.NumberValue)
should.Equal(any.LastError(), nil)
// not equal
//i = float32(10)
//should.Equal(i, any.GetInterface())
any = jsoniter.Wrap(float64(10))
should.Equal(any.ValueType(), jsoniter.NumberValue)
should.Equal(any.LastError(), nil)
i = float64(10)
should.Equal(i, any.GetInterface())
any = jsoniter.Wrap(true)
should.Equal(any.ValueType(), jsoniter.BoolValue)
should.Equal(any.LastError(), nil)
i = true
should.Equal(i, any.GetInterface())
any = jsoniter.Wrap(false)
should.Equal(any.ValueType(), jsoniter.BoolValue)
should.Equal(any.LastError(), nil)
i = false
should.Equal(i, any.GetInterface())
any = jsoniter.Wrap(nil)
should.Equal(any.ValueType(), jsoniter.NilValue)
should.Equal(any.LastError(), nil)
i = nil
should.Equal(i, any.GetInterface())
stream := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 32)
any.WriteTo(stream)
should.Equal("null", string(stream.Buffer()))
should.Equal(any.LastError(), nil)
any = jsoniter.Wrap(struct{ age int }{age: 1})
should.Equal(any.ValueType(), jsoniter.ObjectValue)
should.Equal(any.LastError(), nil)
i = struct{ age int }{age: 1}
should.Equal(i, any.GetInterface())
any = jsoniter.Wrap(map[string]interface{}{"abc": 1})
should.Equal(any.ValueType(), jsoniter.ObjectValue)
should.Equal(any.LastError(), nil)
i = map[string]interface{}{"abc": 1}
should.Equal(i, any.GetInterface())
any = jsoniter.Wrap("abc")
i = "abc"
should.Equal(i, any.GetInterface())
should.Equal(nil, any.LastError())
}


@ -14,7 +14,11 @@ func (any *uint32Any) LastError() error {
}
func (any *uint32Any) ValueType() ValueType {
- return Number
+ return NumberValue
}
+ func (any *uint32Any) MustBeValid() Any {
+ return any
+ }
func (any *uint32Any) ToBool() bool {

any_uint64.go Normal file

@ -0,0 +1,74 @@
package jsoniter
import (
"strconv"
)
type uint64Any struct {
baseAny
val uint64
}
func (any *uint64Any) LastError() error {
return nil
}
func (any *uint64Any) ValueType() ValueType {
return NumberValue
}
func (any *uint64Any) MustBeValid() Any {
return any
}
func (any *uint64Any) ToBool() bool {
return any.val != 0
}
func (any *uint64Any) ToInt() int {
return int(any.val)
}
func (any *uint64Any) ToInt32() int32 {
return int32(any.val)
}
func (any *uint64Any) ToInt64() int64 {
return int64(any.val)
}
func (any *uint64Any) ToUint() uint {
return uint(any.val)
}
func (any *uint64Any) ToUint32() uint32 {
return uint32(any.val)
}
func (any *uint64Any) ToUint64() uint64 {
return any.val
}
func (any *uint64Any) ToFloat32() float32 {
return float32(any.val)
}
func (any *uint64Any) ToFloat64() float64 {
return float64(any.val)
}
func (any *uint64Any) ToString() string {
return strconv.FormatUint(any.val, 10)
}
func (any *uint64Any) WriteTo(stream *Stream) {
stream.WriteUint64(any.val)
}
func (any *uint64Any) Parse() *Iterator {
return nil
}
func (any *uint64Any) GetInterface() interface{} {
return any.val
}
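A short usage sketch for the uint64-backed Any above (this assumes the exported jsoniter.WrapUint64 constructor; the value is arbitrary):
package main
import (
	"fmt"
	jsoniter "github.com/json-iterator/go"
)
func main() {
	any := jsoniter.WrapUint64(10)
	fmt.Println(any.ToString())  // "10", via strconv.FormatUint
	fmt.Println(any.ToBool())    // true, since the value is non-zero
	fmt.Println(any.ToFloat64()) // 10, a plain float64 conversion
}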

api_tests/config_test.go Normal file

@ -0,0 +1,229 @@
package test
import (
"encoding/json"
"testing"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_use_number_for_unmarshal(t *testing.T) {
should := require.New(t)
api := jsoniter.Config{UseNumber: true}.Froze()
var obj interface{}
should.Nil(api.UnmarshalFromString("123", &obj))
should.Equal(json.Number("123"), obj)
}
func Test_customize_float_marshal(t *testing.T) {
should := require.New(t)
json := jsoniter.Config{MarshalFloatWith6Digits: true}.Froze()
str, err := json.MarshalToString(float32(1.23456789))
should.Nil(err)
should.Equal("1.234568", str)
}
func Test_customize_tag_key(t *testing.T) {
type TestObject struct {
Field string `orm:"field"`
}
should := require.New(t)
json := jsoniter.Config{TagKey: "orm"}.Froze()
str, err := json.MarshalToString(TestObject{"hello"})
should.Nil(err)
should.Equal(`{"field":"hello"}`, str)
}
func Test_read_large_number_as_interface(t *testing.T) {
should := require.New(t)
var val interface{}
err := jsoniter.Config{UseNumber: true}.Froze().UnmarshalFromString(`123456789123456789123456789`, &val)
should.Nil(err)
output, err := jsoniter.MarshalToString(val)
should.Nil(err)
should.Equal(`123456789123456789123456789`, output)
}
type caseSensitiveStruct struct {
A string `json:"a"`
B string `json:"b,omitempty"`
C *C `json:"C,omitempty"`
}
type C struct {
D int64 `json:"D,omitempty"`
E *E `json:"e,omitempty"`
}
type E struct {
F string `json:"F,omitempty"`
}
func Test_CaseSensitive(t *testing.T) {
should := require.New(t)
testCases := []struct {
input string
expectedOutput string
caseSensitive bool
}{
{
input: `{"A":"foo","B":"bar"}`,
expectedOutput: `{"a":"foo","b":"bar"}`,
caseSensitive: false,
},
{
input: `{"a":"foo","b":"bar"}`,
expectedOutput: `{"a":"foo","b":"bar"}`,
caseSensitive: true,
},
{
input: `{"a":"foo","b":"bar","C":{"D":10}}`,
expectedOutput: `{"a":"foo","b":"bar","C":{"D":10}}`,
caseSensitive: true,
},
{
input: `{"a":"foo","B":"bar","c":{"d":10}}`,
expectedOutput: `{"a":"foo"}`,
caseSensitive: true,
},
{
input: `{"a":"foo","C":{"d":10}}`,
expectedOutput: `{"a":"foo","C":{}}`,
caseSensitive: true,
},
{
input: `{"a":"foo","C":{"D":10,"e":{"f":"baz"}}}`,
expectedOutput: `{"a":"foo","C":{"D":10,"e":{}}}`,
caseSensitive: true,
},
{
input: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
expectedOutput: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
caseSensitive: true,
},
{
input: `{"A":"foo","c":{"d":10,"E":{"f":"baz"}}}`,
expectedOutput: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
caseSensitive: false,
},
}
for _, tc := range testCases {
val := caseSensitiveStruct{}
err := jsoniter.Config{CaseSensitive: tc.caseSensitive}.Froze().UnmarshalFromString(tc.input, &val)
should.Nil(err)
output, err := jsoniter.MarshalToString(val)
should.Nil(err)
should.Equal(tc.expectedOutput, output)
}
}
type structWithElevenFields struct {
A string `json:"A,omitempty"`
B string `json:"B,omitempty"`
C string `json:"C,omitempty"`
D string `json:"d,omitempty"`
E string `json:"e,omitempty"`
F string `json:"f,omitempty"`
G string `json:"g,omitempty"`
H string `json:"h,omitempty"`
I string `json:"i,omitempty"`
J string `json:"j,omitempty"`
K string `json:"k,omitempty"`
}
func Test_CaseSensitive_MoreThanTenFields(t *testing.T) {
should := require.New(t)
testCases := []struct {
input string
expectedOutput string
caseSensitive bool
}{
{
input: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6","g":"7","h":"8","i":"9","j":"10","k":"11"}`,
expectedOutput: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6","g":"7","h":"8","i":"9","j":"10","k":"11"}`,
caseSensitive: true,
},
{
input: `{"a":"1","b":"2","c":"3","D":"4","E":"5","F":"6"}`,
expectedOutput: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6"}`,
caseSensitive: false,
},
{
input: `{"A":"1","b":"2","d":"4","E":"5"}`,
expectedOutput: `{"A":"1","d":"4"}`,
caseSensitive: true,
},
}
for _, tc := range testCases {
val := structWithElevenFields{}
err := jsoniter.Config{CaseSensitive: tc.caseSensitive}.Froze().UnmarshalFromString(tc.input, &val)
should.Nil(err)
output, err := jsoniter.MarshalToString(val)
should.Nil(err)
should.Equal(tc.expectedOutput, output)
}
}
type onlyTaggedFieldStruct struct {
A string `json:"a"`
B string
FSimpl F `json:"f_simpl"`
ISimpl I
FPtr *F `json:"f_ptr"`
IPtr *I
F
*I
}
type F struct {
G string `json:"g"`
H string
}
type I struct {
J string `json:"j"`
K string
}
func Test_OnlyTaggedField(t *testing.T) {
should := require.New(t)
obj := onlyTaggedFieldStruct{
A: "a",
B: "b",
FSimpl: F{G: "g", H: "h"},
ISimpl: I{J: "j", K: "k"},
FPtr: &F{G: "g", H: "h"},
IPtr: &I{J: "j", K: "k"},
F: F{G: "g", H: "h"},
I: &I{J: "j", K: "k"},
}
output, err := jsoniter.Config{OnlyTaggedField: true}.Froze().Marshal(obj)
should.Nil(err)
m := make(map[string]interface{})
err = jsoniter.Unmarshal(output, &m)
should.Nil(err)
should.Equal(map[string]interface{}{
"a": "a",
"f_simpl": map[string]interface{}{
"g": "g",
},
"f_ptr": map[string]interface{}{
"g": "g",
},
"g": "g",
"j": "j",
}, m)
}
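The decoder tests in the next file exercise DisallowUnknownFields through the Decoder; the same switch exists directly on Config. A minimal sketch (struct name illustrative):
package test
import (
	"fmt"
	jsoniter "github.com/json-iterator/go"
)
func strictUnmarshalSketch() {
	type empty struct{}
	api := jsoniter.Config{DisallowUnknownFields: true}.Froze()
	var e empty
	err := api.UnmarshalFromString(`{"field1":100}`, &e)
	fmt.Println(err) // non-nil: field1 is not a field of empty
}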

api_tests/decoder_test.go Normal file

@ -0,0 +1,64 @@
package test
import (
"bytes"
"encoding/json"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"io/ioutil"
"testing"
)
func Test_disallowUnknownFields(t *testing.T) {
should := require.New(t)
type TestObject struct{}
var obj TestObject
decoder := jsoniter.NewDecoder(bytes.NewBufferString(`{"field1":100}`))
decoder.DisallowUnknownFields()
should.Error(decoder.Decode(&obj))
}
func Test_new_decoder(t *testing.T) {
should := require.New(t)
decoder1 := json.NewDecoder(bytes.NewBufferString(`[1][2]`))
decoder2 := jsoniter.NewDecoder(bytes.NewBufferString(`[1][2]`))
arr1 := []int{}
should.Nil(decoder1.Decode(&arr1))
should.Equal([]int{1}, arr1)
arr2 := []int{}
should.True(decoder1.More())
buffered, _ := ioutil.ReadAll(decoder1.Buffered())
should.Equal("[2]", string(buffered))
should.Nil(decoder2.Decode(&arr2))
should.Equal([]int{1}, arr2)
should.True(decoder2.More())
buffered, _ = ioutil.ReadAll(decoder2.Buffered())
should.Equal("[2]", string(buffered))
should.Nil(decoder1.Decode(&arr1))
should.Equal([]int{2}, arr1)
should.False(decoder1.More())
should.Nil(decoder2.Decode(&arr2))
should.Equal([]int{2}, arr2)
should.False(decoder2.More())
}
func Test_use_number(t *testing.T) {
should := require.New(t)
decoder1 := json.NewDecoder(bytes.NewBufferString(`123`))
decoder1.UseNumber()
decoder2 := jsoniter.NewDecoder(bytes.NewBufferString(`123`))
decoder2.UseNumber()
var obj1 interface{}
should.Nil(decoder1.Decode(&obj1))
should.Equal(json.Number("123"), obj1)
var obj2 interface{}
should.Nil(decoder2.Decode(&obj2))
should.Equal(json.Number("123"), obj2)
}
func Test_decoder_more(t *testing.T) {
should := require.New(t)
decoder := jsoniter.NewDecoder(bytes.NewBufferString("abcde"))
should.True(decoder.More())
}


@ -0,0 +1,46 @@
//+build go1.8
package test
import (
"bytes"
"encoding/json"
"testing"
"unicode/utf8"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_new_encoder(t *testing.T) {
should := require.New(t)
buf1 := &bytes.Buffer{}
encoder1 := json.NewEncoder(buf1)
encoder1.SetEscapeHTML(false)
encoder1.Encode([]int{1})
should.Equal("[1]\n", buf1.String())
buf2 := &bytes.Buffer{}
encoder2 := jsoniter.NewEncoder(buf2)
encoder2.SetEscapeHTML(false)
encoder2.Encode([]int{1})
should.Equal("[1]\n", buf2.String())
}
func Test_string_encode_with_std_without_html_escape(t *testing.T) {
api := jsoniter.Config{EscapeHTML: false}.Froze()
should := require.New(t)
for i := 0; i < utf8.RuneSelf; i++ {
input := string([]byte{byte(i)})
buf := &bytes.Buffer{}
encoder := json.NewEncoder(buf)
encoder.SetEscapeHTML(false)
err := encoder.Encode(input)
should.Nil(err)
stdOutput := buf.String()
stdOutput = stdOutput[:len(stdOutput)-1]
jsoniterOutputBytes, err := api.Marshal(input)
should.Nil(err)
jsoniterOutput := string(jsoniterOutputBytes)
should.Equal(stdOutput, jsoniterOutput)
}
}

api_tests/encoder_test.go Normal file

@ -0,0 +1,20 @@
package test
import (
"bytes"
"encoding/json"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"testing"
)
// Standard Encoder has trailing newline.
func TestEncoderHasTrailingNewline(t *testing.T) {
should := require.New(t)
var buf, stdbuf bytes.Buffer
enc := jsoniter.ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
enc.Encode(1)
stdenc := json.NewEncoder(&stdbuf)
stdenc.Encode(1)
should.Equal(stdbuf.Bytes(), buf.Bytes())
}


@ -0,0 +1,36 @@
package test
import (
"encoding/json"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"testing"
)
func Test_marshal_indent(t *testing.T) {
should := require.New(t)
obj := struct {
F1 int
F2 []int
}{1, []int{2, 3, 4}}
output, err := json.MarshalIndent(obj, "", " ")
should.Nil(err)
should.Equal("{\n \"F1\": 1,\n \"F2\": [\n 2,\n 3,\n 4\n ]\n}", string(output))
output, err = jsoniter.MarshalIndent(obj, "", " ")
should.Nil(err)
should.Equal("{\n \"F1\": 1,\n \"F2\": [\n 2,\n 3,\n 4\n ]\n}", string(output))
}
func Test_marshal_indent_map(t *testing.T) {
should := require.New(t)
obj := map[int]int{1: 2}
output, err := json.MarshalIndent(obj, "", " ")
should.Nil(err)
should.Equal("{\n \"1\": 2\n}", string(output))
output, err = jsoniter.MarshalIndent(obj, "", " ")
should.Nil(err)
should.Equal("{\n \"1\": 2\n}", string(output))
output, err = jsoniter.ConfigCompatibleWithStandardLibrary.MarshalIndent(obj, "", " ")
should.Nil(err)
should.Equal("{\n \"1\": 2\n}", string(output))
}
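One caveat, visible in the MarshalIndent implementation inside config.go further down: jsoniter only accepts an empty prefix and a space-only indent, and panics otherwise. A hedged sketch of guarding against that (the helper name is hypothetical):
package test
import (
	"fmt"
	jsoniter "github.com/json-iterator/go"
)
// tryTabIndent recovers the panic raised for unsupported indent characters.
func tryTabIndent(v interface{}) (out []byte, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("MarshalIndent: %v", r) // "indent can only be space"
		}
	}()
	return jsoniter.MarshalIndent(v, "", "\t")
}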


@ -0,0 +1,47 @@
package test
import (
"bytes"
"encoding/json"
"testing"
jsoniter "github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
var marshalConfig = jsoniter.Config{
EscapeHTML: false,
SortMapKeys: true,
ValidateJsonRawMessage: true,
}.Froze()
type Container struct {
Bar interface{}
}
func (c *Container) MarshalJSON() ([]byte, error) {
return marshalConfig.Marshal(&c.Bar)
}
func TestEncodeEscape(t *testing.T) {
should := require.New(t)
container := &Container{
Bar: []string{"123<ab>", "ooo"},
}
out, err := marshalConfig.Marshal(container)
should.Nil(err)
bufout := string(out)
var stdbuf bytes.Buffer
stdenc := json.NewEncoder(&stdbuf)
stdenc.SetEscapeHTML(false)
err = stdenc.Encode(container)
should.Nil(err)
stdout := string(stdbuf.Bytes())
if stdout[len(stdout)-1:] == "\n" {
stdout = stdout[:len(stdout)-1]
}
should.Equal(stdout, bufout)
}


@ -0,0 +1,36 @@
package test
import (
"bytes"
"encoding/json"
"github.com/json-iterator/go"
"testing"
"github.com/stretchr/testify/require"
)
type Foo struct {
Bar interface{}
}
func (f Foo) MarshalJSON() ([]byte, error) {
var buf bytes.Buffer
err := json.NewEncoder(&buf).Encode(f.Bar)
return buf.Bytes(), err
}
// Standard Encoder has trailing newline.
func TestEncodeMarshalJSON(t *testing.T) {
foo := Foo {
Bar: 123,
}
should := require.New(t)
var buf, stdbuf bytes.Buffer
enc := jsoniter.ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
enc.Encode(foo)
stdenc := json.NewEncoder(&stdbuf)
stdenc.Encode(foo)
should.Equal(stdbuf.Bytes(), buf.Bytes())
}

File diff suppressed because it is too large


@ -0,0 +1,25 @@
package test
import (
"bytes"
"github.com/json-iterator/go"
"testing"
)
func Benchmark_encode_string_with_SetEscapeHTML(b *testing.B) {
type V struct {
S string
B bool
I int
}
var json = jsoniter.ConfigCompatibleWithStandardLibrary
b.ReportAllocs()
for i := 0; i < b.N; i++ {
buf := &bytes.Buffer{}
enc := json.NewEncoder(buf)
enc.SetEscapeHTML(true)
if err := enc.Encode(V{S: "s", B: true, I: 233}); err != nil {
b.Fatal(err)
}
}
}


@ -0,0 +1,158 @@
package test
import (
"encoding/json"
"github.com/json-iterator/go"
"io/ioutil"
"os"
"testing"
)
//func Test_large_file(t *testing.T) {
// file, err := os.Open("/tmp/large-file.json")
// if err != nil {
// t.Fatal(err)
// }
// iter := Parse(file, 4096)
// count := 0
// for iter.ReadArray() {
// iter.Skip()
// count++
// }
// if count != 11351 {
// t.Fatal(count)
// }
//}
func init() {
ioutil.WriteFile("/tmp/large-file.json", []byte(`[{
"person": {
"id": "d50887ca-a6ce-4e59-b89f-14f0b5d03b03",
"name": {
"fullName": "Leonid Bugaev",
"givenName": "Leonid",
"familyName": "Bugaev"
},
"email": "leonsbox@gmail.com",
"gender": "male",
"location": "Saint Petersburg, Saint Petersburg, RU",
"geo": {
"city": "Saint Petersburg",
"state": "Saint Petersburg",
"country": "Russia",
"lat": 59.9342802,
"lng": 30.3350986
},
"bio": "Senior engineer at Granify.com",
"site": "http://flickfaver.com",
"avatar": "https://d1ts43dypk8bqh.cloudfront.net/v1/avatars/d50887ca-a6ce-4e59-b89f-14f0b5d03b03",
"employment": {
"name": "www.latera.ru",
"title": "Software Engineer",
"domain": "gmail.com"
},
"facebook": {
"handle": "leonid.bugaev"
},
"github": {
"handle": "buger",
"id": 14009,
"avatar": "https://avatars.githubusercontent.com/u/14009?v=3",
"company": "Granify",
"blog": "http://leonsbox.com",
"followers": 95,
"following": 10
},
"twitter": {
"handle": "flickfaver",
"id": 77004410,
"bio": null,
"followers": 2,
"following": 1,
"statuses": 5,
"favorites": 0,
"location": "",
"site": "http://flickfaver.com",
"avatar": null
},
"linkedin": {
"handle": "in/leonidbugaev"
},
"googleplus": {
"handle": null
},
"angellist": {
"handle": "leonid-bugaev",
"id": 61541,
"bio": "Senior engineer at Granify.com",
"blog": "http://buger.github.com",
"site": "http://buger.github.com",
"followers": 41,
"avatar": "https://d1qb2nb5cznatu.cloudfront.net/users/61541-medium_jpg?1405474390"
},
"klout": {
"handle": null,
"score": null
},
"foursquare": {
"handle": null
},
"aboutme": {
"handle": "leonid.bugaev",
"bio": null,
"avatar": null
},
"gravatar": {
"handle": "buger",
"urls": [
],
"avatar": "http://1.gravatar.com/avatar/f7c8edd577d13b8930d5522f28123510",
"avatars": [
{
"url": "http://1.gravatar.com/avatar/f7c8edd577d13b8930d5522f28123510",
"type": "thumbnail"
}
]
},
"fuzzy": false
},
"company": "hello"
}]`), 0666)
}
/*
200000 8886 ns/op 4336 B/op 6 allocs/op
50000 34244 ns/op 6744 B/op 14 allocs/op
*/
func Benchmark_jsoniter_large_file(b *testing.B) {
b.ReportAllocs()
for n := 0; n < b.N; n++ {
file, _ := os.Open("/tmp/large-file.json")
iter := jsoniter.Parse(jsoniter.ConfigDefault, file, 4096)
count := 0
iter.ReadArrayCB(func(iter *jsoniter.Iterator) bool {
// Skip() is strict by default, use --tags jsoniter-sloppy to skip without validation
iter.Skip()
count++
return true
})
file.Close()
if iter.Error != nil {
b.Error(iter.Error)
}
}
}
func Benchmark_json_large_file(b *testing.B) {
b.ReportAllocs()
for n := 0; n < b.N; n++ {
file, _ := os.Open("/tmp/large-file.json")
bytes, _ := ioutil.ReadAll(file)
file.Close()
result := []struct{}{}
err := json.Unmarshal(bytes, &result)
if err != nil {
b.Error(err)
}
}
}

build.sh Executable file

@ -0,0 +1,12 @@
#!/bin/bash
set -e
set -x
if [ ! -d /tmp/build-golang/src/github.com/json-iterator ]; then
mkdir -p /tmp/build-golang/src/github.com/json-iterator
ln -s $PWD /tmp/build-golang/src/github.com/json-iterator/go
fi
export GOPATH=/tmp/build-golang
go get -u github.com/golang/dep/cmd/dep
cd /tmp/build-golang/src/github.com/json-iterator/go
exec $GOPATH/bin/dep ensure -update

config.go Normal file

@ -0,0 +1,375 @@
package jsoniter
import (
"encoding/json"
"io"
"reflect"
"sync"
"unsafe"
"github.com/modern-go/concurrent"
"github.com/modern-go/reflect2"
)
// Config customizes how the API behaves.
// The API is created from a Config by calling Froze.
type Config struct {
IndentionStep int
MarshalFloatWith6Digits bool
EscapeHTML bool
SortMapKeys bool
UseNumber bool
DisallowUnknownFields bool
TagKey string
OnlyTaggedField bool
ValidateJsonRawMessage bool
ObjectFieldMustBeSimpleString bool
CaseSensitive bool
}
// API is the public interface of this package.
// Primarily Marshal and Unmarshal.
type API interface {
IteratorPool
StreamPool
MarshalToString(v interface{}) (string, error)
Marshal(v interface{}) ([]byte, error)
MarshalIndent(v interface{}, prefix, indent string) ([]byte, error)
UnmarshalFromString(str string, v interface{}) error
Unmarshal(data []byte, v interface{}) error
Get(data []byte, path ...interface{}) Any
NewEncoder(writer io.Writer) *Encoder
NewDecoder(reader io.Reader) *Decoder
Valid(data []byte) bool
RegisterExtension(extension Extension)
DecoderOf(typ reflect2.Type) ValDecoder
EncoderOf(typ reflect2.Type) ValEncoder
}
// ConfigDefault the default API
var ConfigDefault = Config{
EscapeHTML: true,
}.Froze()
// ConfigCompatibleWithStandardLibrary tries to be 100% compatible with standard library behavior
var ConfigCompatibleWithStandardLibrary = Config{
EscapeHTML: true,
SortMapKeys: true,
ValidateJsonRawMessage: true,
}.Froze()
// ConfigFastest marshals floats with only 6 digits of precision
var ConfigFastest = Config{
EscapeHTML: false,
MarshalFloatWith6Digits: true, // will lose precision
ObjectFieldMustBeSimpleString: true, // do not unescape object field
}.Froze()
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
objectFieldMustBeSimpleString bool
onlyTaggedField bool
disallowUnknownFields bool
decoderCache *concurrent.Map
encoderCache *concurrent.Map
encoderExtension Extension
decoderExtension Extension
extraExtensions []Extension
streamPool *sync.Pool
iteratorPool *sync.Pool
caseSensitive bool
}
func (cfg *frozenConfig) initCache() {
cfg.decoderCache = concurrent.NewMap()
cfg.encoderCache = concurrent.NewMap()
}
func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
cfg.decoderCache.Store(cacheKey, decoder)
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
cfg.encoderCache.Store(cacheKey, encoder)
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
decoder, found := cfg.decoderCache.Load(cacheKey)
if found {
return decoder.(ValDecoder)
}
return nil
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
encoder, found := cfg.encoderCache.Load(cacheKey)
if found {
return encoder.(ValEncoder)
}
return nil
}
var cfgCache = concurrent.NewMap()
func getFrozenConfigFromCache(cfg Config) *frozenConfig {
obj, found := cfgCache.Load(cfg)
if found {
return obj.(*frozenConfig)
}
return nil
}
func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
cfgCache.Store(cfg, frozenConfig)
}
// Froze forges an API from the Config
func (cfg Config) Froze() API {
api := &frozenConfig{
sortMapKeys: cfg.SortMapKeys,
indentionStep: cfg.IndentionStep,
objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
onlyTaggedField: cfg.OnlyTaggedField,
disallowUnknownFields: cfg.DisallowUnknownFields,
caseSensitive: cfg.CaseSensitive,
}
api.streamPool = &sync.Pool{
New: func() interface{} {
return NewStream(api, nil, 512)
},
}
api.iteratorPool = &sync.Pool{
New: func() interface{} {
return NewIterator(api)
},
}
api.initCache()
encoderExtension := EncoderExtension{}
decoderExtension := DecoderExtension{}
if cfg.MarshalFloatWith6Digits {
api.marshalFloatWith6Digits(encoderExtension)
}
if cfg.EscapeHTML {
api.escapeHTML(encoderExtension)
}
if cfg.UseNumber {
api.useNumber(decoderExtension)
}
if cfg.ValidateJsonRawMessage {
api.validateJsonRawMessage(encoderExtension)
}
api.encoderExtension = encoderExtension
api.decoderExtension = decoderExtension
api.configBeforeFrozen = cfg
return api
}
func (cfg Config) frozeWithCacheReuse(extraExtensions []Extension) *frozenConfig {
api := getFrozenConfigFromCache(cfg)
if api != nil {
return api
}
api = cfg.Froze().(*frozenConfig)
for _, extension := range extraExtensions {
api.RegisterExtension(extension)
}
addFrozenConfigToCache(cfg, api)
return api
}
func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
rawMessage := *(*json.RawMessage)(ptr)
iter := cfg.BorrowIterator([]byte(rawMessage))
iter.Read()
if iter.Error != nil {
stream.WriteRaw("null")
} else {
cfg.ReturnIterator(iter)
stream.WriteRaw(string(rawMessage))
}
}, func(ptr unsafe.Pointer) bool {
return len(*((*json.RawMessage)(ptr))) == 0
}}
extension[reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()] = encoder
extension[reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()] = encoder
}
func (cfg *frozenConfig) useNumber(extension DecoderExtension) {
extension[reflect2.TypeOfPtr((*interface{})(nil)).Elem()] = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
existingValue := *((*interface{})(ptr))
if existingValue != nil && reflect.TypeOf(existingValue).Kind() == reflect.Ptr {
iter.ReadVal(existingValue)
return
}
if iter.WhatIsNext() == NumberValue {
*((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
} else {
*((*interface{})(ptr)) = iter.Read()
}
}}
}
func (cfg *frozenConfig) getTagKey() string {
tagKey := cfg.configBeforeFrozen.TagKey
if tagKey == "" {
return "json"
}
return tagKey
}
func (cfg *frozenConfig) RegisterExtension(extension Extension) {
cfg.extraExtensions = append(cfg.extraExtensions, extension)
copied := cfg.configBeforeFrozen
cfg.configBeforeFrozen = copied
}
type lossyFloat32Encoder struct {
}
func (encoder *lossyFloat32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteFloat32Lossy(*((*float32)(ptr)))
}
func (encoder *lossyFloat32Encoder) IsEmpty(ptr unsafe.Pointer) bool {
return *((*float32)(ptr)) == 0
}
type lossyFloat64Encoder struct {
}
func (encoder *lossyFloat64Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteFloat64Lossy(*((*float64)(ptr)))
}
func (encoder *lossyFloat64Encoder) IsEmpty(ptr unsafe.Pointer) bool {
return *((*float64)(ptr)) == 0
}
// marshalFloatWith6Digits keeps 10**(-6) precision
// for float variables, trading accuracy for better performance.
func (cfg *frozenConfig) marshalFloatWith6Digits(extension EncoderExtension) {
// for better performance
extension[reflect2.TypeOfPtr((*float32)(nil)).Elem()] = &lossyFloat32Encoder{}
extension[reflect2.TypeOfPtr((*float64)(nil)).Elem()] = &lossyFloat64Encoder{}
}
type htmlEscapedStringEncoder struct {
}
func (encoder *htmlEscapedStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
str := *((*string)(ptr))
stream.WriteStringWithHTMLEscaped(str)
}
func (encoder *htmlEscapedStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return *((*string)(ptr)) == ""
}
func (cfg *frozenConfig) escapeHTML(encoderExtension EncoderExtension) {
encoderExtension[reflect2.TypeOfPtr((*string)(nil)).Elem()] = &htmlEscapedStringEncoder{}
}
func (cfg *frozenConfig) cleanDecoders() {
typeDecoders = map[string]ValDecoder{}
fieldDecoders = map[string]ValDecoder{}
*cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
}
func (cfg *frozenConfig) cleanEncoders() {
typeEncoders = map[string]ValEncoder{}
fieldEncoders = map[string]ValEncoder{}
*cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
}
func (cfg *frozenConfig) MarshalToString(v interface{}) (string, error) {
stream := cfg.BorrowStream(nil)
defer cfg.ReturnStream(stream)
stream.WriteVal(v)
if stream.Error != nil {
return "", stream.Error
}
return string(stream.Buffer()), nil
}
func (cfg *frozenConfig) Marshal(v interface{}) ([]byte, error) {
stream := cfg.BorrowStream(nil)
defer cfg.ReturnStream(stream)
stream.WriteVal(v)
if stream.Error != nil {
return nil, stream.Error
}
result := stream.Buffer()
copied := make([]byte, len(result))
copy(copied, result)
return copied, nil
}
func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
if prefix != "" {
panic("prefix is not supported")
}
for _, r := range indent {
if r != ' ' {
panic("indent can only be space")
}
}
newCfg := cfg.configBeforeFrozen
newCfg.IndentionStep = len(indent)
return newCfg.frozeWithCacheReuse(cfg.extraExtensions).Marshal(v)
}
func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error {
data := []byte(str)
iter := cfg.BorrowIterator(data)
defer cfg.ReturnIterator(iter)
iter.ReadVal(v)
c := iter.nextToken()
if c == 0 {
if iter.Error == io.EOF {
return nil
}
return iter.Error
}
iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
return iter.Error
}
func (cfg *frozenConfig) Get(data []byte, path ...interface{}) Any {
iter := cfg.BorrowIterator(data)
defer cfg.ReturnIterator(iter)
return locatePath(iter, path)
}
func (cfg *frozenConfig) Unmarshal(data []byte, v interface{}) error {
iter := cfg.BorrowIterator(data)
defer cfg.ReturnIterator(iter)
iter.ReadVal(v)
c := iter.nextToken()
if c == 0 {
if iter.Error == io.EOF {
return nil
}
return iter.Error
}
iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
return iter.Error
}
func (cfg *frozenConfig) NewEncoder(writer io.Writer) *Encoder {
stream := NewStream(cfg, writer, 512)
return &Encoder{stream}
}
func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder {
iter := Parse(cfg, reader, 512)
return &Decoder{iter}
}
func (cfg *frozenConfig) Valid(data []byte) bool {
iter := cfg.BorrowIterator(data)
defer cfg.ReturnIterator(iter)
iter.Skip()
return iter.Error == nil
}
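Putting config.go together, a minimal end-to-end sketch: build a Config once, Froze it, and reuse the returned API (the struct and literal values are illustrative):
package main
import (
	"fmt"
	jsoniter "github.com/json-iterator/go"
)
type user struct {
	Name  string  `json:"name"`
	Score float64 `json:"score"`
}
func main() {
	// Froze compiles the Config into a frozenConfig with its own encoder/decoder caches.
	api := jsoniter.Config{
		EscapeHTML:              false,
		SortMapKeys:             true,
		MarshalFloatWith6Digits: true, // lossy 6-digit floats, as in ConfigFastest
	}.Froze()
	out, err := api.MarshalToString(user{Name: "a<b", Score: 1.23456789})
	fmt.Println(out, err) // e.g. {"name":"a<b","score":1.234568} <nil>
	var u user
	err = api.UnmarshalFromString(out, &u)
	fmt.Println(u, err)
}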

example_test.go Normal file

@ -0,0 +1,121 @@
package jsoniter
import (
"fmt"
"os"
"strings"
)
func ExampleMarshal() {
type ColorGroup struct {
ID int
Name string
Colors []string
}
group := ColorGroup{
ID: 1,
Name: "Reds",
Colors: []string{"Crimson", "Red", "Ruby", "Maroon"},
}
b, err := Marshal(group)
if err != nil {
fmt.Println("error:", err)
}
os.Stdout.Write(b)
// Output:
// {"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]}
}
func ExampleUnmarshal() {
var jsonBlob = []byte(`[
{"Name": "Platypus", "Order": "Monotremata"},
{"Name": "Quoll", "Order": "Dasyuromorphia"}
]`)
type Animal struct {
Name string
Order string
}
var animals []Animal
err := Unmarshal(jsonBlob, &animals)
if err != nil {
fmt.Println("error:", err)
}
fmt.Printf("%+v", animals)
// Output:
// [{Name:Platypus Order:Monotremata} {Name:Quoll Order:Dasyuromorphia}]
}
func ExampleConfigFastest_Marshal() {
type ColorGroup struct {
ID int
Name string
Colors []string
}
group := ColorGroup{
ID: 1,
Name: "Reds",
Colors: []string{"Crimson", "Red", "Ruby", "Maroon"},
}
stream := ConfigFastest.BorrowStream(nil)
defer ConfigFastest.ReturnStream(stream)
stream.WriteVal(group)
if stream.Error != nil {
fmt.Println("error:", stream.Error)
}
os.Stdout.Write(stream.Buffer())
// Output:
// {"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]}
}
func ExampleConfigFastest_Unmarshal() {
var jsonBlob = []byte(`[
{"Name": "Platypus", "Order": "Monotremata"},
{"Name": "Quoll", "Order": "Dasyuromorphia"}
]`)
type Animal struct {
Name string
Order string
}
var animals []Animal
iter := ConfigFastest.BorrowIterator(jsonBlob)
defer ConfigFastest.ReturnIterator(iter)
iter.ReadVal(&animals)
if iter.Error != nil {
fmt.Println("error:", iter.Error)
}
fmt.Printf("%+v", animals)
// Output:
// [{Name:Platypus Order:Monotremata} {Name:Quoll Order:Dasyuromorphia}]
}
func ExampleGet() {
val := []byte(`{"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]}`)
fmt.Printf(Get(val, "Colors", 0).ToString())
// Output:
// Crimson
}
func ExampleMyKey() {
hello := MyKey("hello")
output, _ := Marshal(map[*MyKey]string{&hello: "world"})
fmt.Println(string(output))
obj := map[*MyKey]string{}
Unmarshal(output, &obj)
for k, v := range obj {
fmt.Println(*k, v)
}
// Output:
// {"Hello":"world"}
// Hel world
}
type MyKey string
func (m *MyKey) MarshalText() ([]byte, error) {
return []byte(strings.Replace(string(*m), "h", "H", -1)), nil
}
func (m *MyKey) UnmarshalText(text []byte) error {
*m = MyKey(text[:3])
return nil
}


@ -0,0 +1,223 @@
package test
import (
"bytes"
"fmt"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"strconv"
"testing"
"time"
"unsafe"
)
func Test_customize_type_decoder(t *testing.T) {
t.Skip()
jsoniter.RegisterTypeDecoderFunc("time.Time", func(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
t, err := time.ParseInLocation("2006-01-02 15:04:05", iter.ReadString(), time.UTC)
if err != nil {
iter.Error = err
return
}
*((*time.Time)(ptr)) = t
})
//defer jsoniter.ConfigDefault.(*frozenConfig).cleanDecoders()
val := time.Time{}
err := jsoniter.Unmarshal([]byte(`"2016-12-05 08:43:28"`), &val)
if err != nil {
t.Fatal(err)
}
year, month, day := val.Date()
if year != 2016 || month != 12 || day != 5 {
t.Fatal(val)
}
}
func Test_customize_byte_array_encoder(t *testing.T) {
t.Skip()
//jsoniter.ConfigDefault.(*frozenConfig).cleanEncoders()
should := require.New(t)
jsoniter.RegisterTypeEncoderFunc("[]uint8", func(ptr unsafe.Pointer, stream *jsoniter.Stream) {
t := *((*[]byte)(ptr))
stream.WriteString(string(t))
}, nil)
//defer jsoniter.ConfigDefault.(*frozenConfig).cleanEncoders()
val := []byte("abc")
str, err := jsoniter.MarshalToString(val)
should.Nil(err)
should.Equal(`"abc"`, str)
}
type CustomEncoderAttachmentTestStruct struct {
Value int32 `json:"value"`
}
type CustomEncoderAttachmentTestStructEncoder struct {}
func (c *CustomEncoderAttachmentTestStructEncoder) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
attachVal, ok := stream.Attachment.(int)
stream.WriteRaw(`"`)
stream.WriteRaw(fmt.Sprintf("%t %d", ok, attachVal))
stream.WriteRaw(`"`)
}
func (c *CustomEncoderAttachmentTestStructEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return false
}
func Test_custom_encoder_attachment(t *testing.T) {
jsoniter.RegisterTypeEncoder("test.CustomEncoderAttachmentTestStruct", &CustomEncoderAttachmentTestStructEncoder{})
expectedValue := 17
should := require.New(t)
buf := &bytes.Buffer{}
stream := jsoniter.NewStream(jsoniter.Config{SortMapKeys: true}.Froze(), buf, 4096)
stream.Attachment = expectedValue
val := map[string]CustomEncoderAttachmentTestStruct{"a": {}}
stream.WriteVal(val)
stream.Flush()
should.Nil(stream.Error)
should.Equal("{\"a\":\"true 17\"}", buf.String())
}
func Test_customize_field_decoder(t *testing.T) {
type Tom struct {
field1 string
}
jsoniter.RegisterFieldDecoderFunc("jsoniter.Tom", "field1", func(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
*((*string)(ptr)) = strconv.Itoa(iter.ReadInt())
})
//defer jsoniter.ConfigDefault.(*frozenConfig).cleanDecoders()
tom := Tom{}
err := jsoniter.Unmarshal([]byte(`{"field1": 100}`), &tom)
if err != nil {
t.Fatal(err)
}
}
func Test_recursive_empty_interface_customization(t *testing.T) {
t.Skip()
var obj interface{}
jsoniter.RegisterTypeDecoderFunc("interface {}", func(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
switch iter.WhatIsNext() {
case jsoniter.NumberValue:
*(*interface{})(ptr) = iter.ReadInt64()
default:
*(*interface{})(ptr) = iter.Read()
}
})
should := require.New(t)
jsoniter.Unmarshal([]byte("[100]"), &obj)
should.Equal([]interface{}{int64(100)}, obj)
}
type MyInterface interface {
Hello() string
}
type MyString string
func (ms MyString) Hello() string {
return string(ms)
}
func Test_read_custom_interface(t *testing.T) {
t.Skip()
should := require.New(t)
var val MyInterface
jsoniter.RegisterTypeDecoderFunc("jsoniter.MyInterface", func(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
*((*MyInterface)(ptr)) = MyString(iter.ReadString())
})
err := jsoniter.UnmarshalFromString(`"hello"`, &val)
should.Nil(err)
should.Equal("hello", val.Hello())
}
const flow1 = `
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}`
const flow2 = `
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
`
type (
Type1 struct {
A string
}
Type2 struct {
A string
}
)
func (t *Type2) UnmarshalJSON(data []byte) error {
return nil
}
func (t *Type2) MarshalJSON() ([]byte, error) {
return nil, nil
}
func TestType1NoFinalLF(t *testing.T) {
reader := bytes.NewReader([]byte(flow1))
dec := jsoniter.NewDecoder(reader)
i := 0
for dec.More() {
data := &Type1{}
if err := dec.Decode(data); err != nil {
t.Errorf("at %v got %v", i, err)
}
i++
}
}
func TestType1FinalLF(t *testing.T) {
reader := bytes.NewReader([]byte(flow2))
dec := jsoniter.NewDecoder(reader)
i := 0
for dec.More() {
data := &Type1{}
if err := dec.Decode(data); err != nil {
t.Errorf("at %v got %v", i, err)
}
i++
}
}
func TestType2NoFinalLF(t *testing.T) {
reader := bytes.NewReader([]byte(flow1))
dec := jsoniter.NewDecoder(reader)
i := 0
for dec.More() {
data := &Type2{}
if err := dec.Decode(data); err != nil {
t.Errorf("at %v got %v", i, err)
}
i++
}
}
func TestType2FinalLF(t *testing.T) {
reader := bytes.NewReader([]byte(flow2))
dec := jsoniter.NewDecoder(reader)
i := 0
for dec.More() {
data := &Type2{}
if err := dec.Decode(data); err != nil {
t.Errorf("at %v got %v", i, err)
}
i++
}
}


@ -0,0 +1,120 @@
package test
import (
"github.com/json-iterator/go"
"github.com/modern-go/reflect2"
"github.com/stretchr/testify/require"
"reflect"
"strconv"
"testing"
"unsafe"
)
type TestObject1 struct {
Field1 string
}
type testExtension struct {
jsoniter.DummyExtension
}
func (extension *testExtension) UpdateStructDescriptor(structDescriptor *jsoniter.StructDescriptor) {
if structDescriptor.Type.String() != "test.TestObject1" {
return
}
binding := structDescriptor.GetField("Field1")
binding.Encoder = &funcEncoder{fun: func(ptr unsafe.Pointer, stream *jsoniter.Stream) {
str := *((*string)(ptr))
val, _ := strconv.Atoi(str)
stream.WriteInt(val)
}}
binding.Decoder = &funcDecoder{func(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
*((*string)(ptr)) = strconv.Itoa(iter.ReadInt())
}}
binding.ToNames = []string{"field-1"}
binding.FromNames = []string{"field-1"}
}
func Test_customize_field_by_extension(t *testing.T) {
should := require.New(t)
cfg := jsoniter.Config{}.Froze()
cfg.RegisterExtension(&testExtension{})
obj := TestObject1{}
err := cfg.UnmarshalFromString(`{"field-1": 100}`, &obj)
should.Nil(err)
should.Equal("100", obj.Field1)
str, err := cfg.MarshalToString(obj)
should.Nil(err)
should.Equal(`{"field-1":100}`, str)
}
func Test_customize_map_key_encoder(t *testing.T) {
should := require.New(t)
cfg := jsoniter.Config{}.Froze()
cfg.RegisterExtension(&testMapKeyExtension{})
m := map[int]int{1: 2}
output, err := cfg.MarshalToString(m)
should.NoError(err)
should.Equal(`{"2":2}`, output)
m = map[int]int{}
should.NoError(cfg.UnmarshalFromString(output, &m))
should.Equal(map[int]int{1: 2}, m)
}
type testMapKeyExtension struct {
jsoniter.DummyExtension
}
func (extension *testMapKeyExtension) CreateMapKeyEncoder(typ reflect2.Type) jsoniter.ValEncoder {
if typ.Kind() == reflect.Int {
return &funcEncoder{
fun: func(ptr unsafe.Pointer, stream *jsoniter.Stream) {
stream.WriteRaw(`"`)
stream.WriteInt(*(*int)(ptr) + 1)
stream.WriteRaw(`"`)
},
}
}
return nil
}
func (extension *testMapKeyExtension) CreateMapKeyDecoder(typ reflect2.Type) jsoniter.ValDecoder {
if typ.Kind() == reflect.Int {
return &funcDecoder{
fun: func(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
i, err := strconv.Atoi(iter.ReadString())
if err != nil {
iter.ReportError("read map key", err.Error())
return
}
i--
*(*int)(ptr) = i
},
}
}
return nil
}
type funcDecoder struct {
fun jsoniter.DecoderFunc
}
func (decoder *funcDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
decoder.fun(ptr, iter)
}
type funcEncoder struct {
fun jsoniter.EncoderFunc
isEmptyFunc func(ptr unsafe.Pointer) bool
}
func (encoder *funcEncoder) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
encoder.fun(ptr, stream)
}
func (encoder *funcEncoder) IsEmpty(ptr unsafe.Pointer) bool {
if encoder.isEmptyFunc == nil {
return false
}
return encoder.isEmptyFunc(ptr)
}


@ -0,0 +1,238 @@
package extra
import (
"github.com/json-iterator/go"
"github.com/modern-go/reflect2"
"unicode/utf8"
"unsafe"
)
// safeSet holds the value true if the ASCII character with the given array
// position can be represented inside a JSON string without any further
// escaping.
//
// All values are true except for the ASCII control characters (0-31), the
// double quote ("), and the backslash character ("\").
var safeSet = [utf8.RuneSelf]bool{
' ': true,
'!': true,
'"': false,
'#': true,
'$': true,
'%': true,
'&': true,
'\'': true,
'(': true,
')': true,
'*': true,
'+': true,
',': true,
'-': true,
'.': true,
'/': true,
'0': true,
'1': true,
'2': true,
'3': true,
'4': true,
'5': true,
'6': true,
'7': true,
'8': true,
'9': true,
':': true,
';': true,
'<': true,
'=': true,
'>': true,
'?': true,
'@': true,
'A': true,
'B': true,
'C': true,
'D': true,
'E': true,
'F': true,
'G': true,
'H': true,
'I': true,
'J': true,
'K': true,
'L': true,
'M': true,
'N': true,
'O': true,
'P': true,
'Q': true,
'R': true,
'S': true,
'T': true,
'U': true,
'V': true,
'W': true,
'X': true,
'Y': true,
'Z': true,
'[': true,
'\\': false,
']': true,
'^': true,
'_': true,
'`': true,
'a': true,
'b': true,
'c': true,
'd': true,
'e': true,
'f': true,
'g': true,
'h': true,
'i': true,
'j': true,
'k': true,
'l': true,
'm': true,
'n': true,
'o': true,
'p': true,
'q': true,
'r': true,
's': true,
't': true,
'u': true,
'v': true,
'w': true,
'x': true,
'y': true,
'z': true,
'{': true,
'|': true,
'}': true,
'~': true,
'\u007f': true,
}
var binaryType = reflect2.TypeOfPtr((*[]byte)(nil)).Elem()
type BinaryAsStringExtension struct {
jsoniter.DummyExtension
}
func (extension *BinaryAsStringExtension) CreateEncoder(typ reflect2.Type) jsoniter.ValEncoder {
if typ == binaryType {
return &binaryAsStringCodec{}
}
return nil
}
func (extension *BinaryAsStringExtension) CreateDecoder(typ reflect2.Type) jsoniter.ValDecoder {
if typ == binaryType {
return &binaryAsStringCodec{}
}
return nil
}
type binaryAsStringCodec struct {
}
func (codec *binaryAsStringCodec) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
rawBytes := iter.ReadStringAsSlice()
bytes := make([]byte, 0, len(rawBytes))
for i := 0; i < len(rawBytes); i++ {
b := rawBytes[i]
if b == '\\' {
b2 := rawBytes[i+1]
if b2 != '\\' {
iter.ReportError("decode binary as string", `\\x is only supported escape`)
return
}
b3 := rawBytes[i+2]
if b3 != 'x' {
iter.ReportError("decode binary as string", `\\x is only supported escape`)
return
}
b4 := rawBytes[i+3]
b5 := rawBytes[i+4]
i += 4
b = readHex(iter, b4, b5)
}
bytes = append(bytes, b)
}
*(*[]byte)(ptr) = bytes
}
func (codec *binaryAsStringCodec) IsEmpty(ptr unsafe.Pointer) bool {
return len(*((*[]byte)(ptr))) == 0
}
func (codec *binaryAsStringCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
newBuffer := writeBytes(stream.Buffer(), *(*[]byte)(ptr))
stream.SetBuffer(newBuffer)
}
func readHex(iter *jsoniter.Iterator, b1, b2 byte) byte {
var ret byte
if b1 >= '0' && b1 <= '9' {
ret = b1 - '0'
} else if b1 >= 'a' && b1 <= 'f' {
ret = b1 - 'a' + 10
} else {
iter.ReportError("read hex", "expects 0~9 or a~f, but found "+string([]byte{b1}))
return 0
}
ret *= 16
if b2 >= '0' && b2 <= '9' {
ret += b2 - '0'
} else if b2 >= 'a' && b2 <= 'f' {
ret += b2 - 'a' + 10
} else {
iter.ReportError("read hex", "expects 0~9 or a~f, but found "+string([]byte{b2}))
return 0
}
return ret
}
var hex = "0123456789abcdef"
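// The writers below produce the encoding that Decode and readHex above reverse:
// bytes allowed by safeSet are copied into the JSON string as-is, and every other
// byte becomes the five-character sequence \\xHH -- two literal backslashes, an 'x',
// and two lowercase hex digits taken from the hex table above.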
func writeBytes(space []byte, s []byte) []byte {
space = append(space, '"')
// fast path: copy leading safe ASCII bytes as-is, without utf8 or escape support
i := 0
for ; i < len(s); i++ {
c := s[i]
if c >= utf8.RuneSelf || !safeSet[c] {
break
}
space = append(space, c)
}
if i == len(s) {
space = append(space, '"')
return space
}
return writeBytesSlowPath(space, s[i:])
}
func writeBytesSlowPath(space []byte, s []byte) []byte {
start := 0
// for the remaining parts, we process them char by char
var i int
var b byte
for i, b = range s {
if b >= utf8.RuneSelf {
space = append(space, '\\', '\\', 'x', hex[b>>4], hex[b&0xF])
start = i + 1
continue
}
if safeSet[b] {
continue
}
if start < i {
space = append(space, s[start:i]...)
}
space = append(space, '\\', '\\', 'x', hex[b>>4], hex[b&0xF])
start = i + 1
}
if start < len(s) {
space = append(space, s[start:]...)
}
return append(space, '"')
}


@ -0,0 +1,32 @@
package extra
import (
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"testing"
)
func init() {
jsoniter.RegisterExtension(&BinaryAsStringExtension{})
}
func TestBinaryAsStringCodec(t *testing.T) {
t.Run("safe set", func(t *testing.T) {
should := require.New(t)
output, err := jsoniter.Marshal([]byte("hello"))
should.NoError(err)
should.Equal(`"hello"`, string(output))
var val []byte
should.NoError(jsoniter.Unmarshal(output, &val))
should.Equal(`hello`, string(val))
})
t.Run("non safe set", func(t *testing.T) {
should := require.New(t)
output, err := jsoniter.Marshal([]byte{1, 2, 3, 15})
should.NoError(err)
should.Equal(`"\\x01\\x02\\x03\\x0f"`, string(output))
var val []byte
should.NoError(jsoniter.Unmarshal(output, &val))
should.Equal([]byte{1, 2, 3, 15}, val)
})
}

extra/fuzzy_decoder.go Normal file

@ -0,0 +1,294 @@
package extra
import (
"encoding/json"
"io"
"math"
"reflect"
"strings"
"unsafe"
"github.com/json-iterator/go"
"github.com/modern-go/reflect2"
)
const maxUint = ^uint(0)
const maxInt = int(maxUint >> 1)
const minInt = -maxInt - 1
// RegisterFuzzyDecoders decodes input from PHP with tolerance.
// It will handle string/number auto conversion, and treat an empty [] as an empty struct.
func RegisterFuzzyDecoders() {
jsoniter.RegisterExtension(&tolerateEmptyArrayExtension{})
jsoniter.RegisterTypeDecoder("string", &fuzzyStringDecoder{})
jsoniter.RegisterTypeDecoder("float32", &fuzzyFloat32Decoder{})
jsoniter.RegisterTypeDecoder("float64", &fuzzyFloat64Decoder{})
jsoniter.RegisterTypeDecoder("int", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(maxInt) || val < float64(minInt) {
iter.ReportError("fuzzy decode int", "exceed range")
return
}
*((*int)(ptr)) = int(val)
} else {
*((*int)(ptr)) = iter.ReadInt()
}
}})
jsoniter.RegisterTypeDecoder("uint", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(maxUint) || val < 0 {
iter.ReportError("fuzzy decode uint", "exceed range")
return
}
*((*uint)(ptr)) = uint(val)
} else {
*((*uint)(ptr)) = iter.ReadUint()
}
}})
jsoniter.RegisterTypeDecoder("int8", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxInt8) || val < float64(math.MinInt8) {
iter.ReportError("fuzzy decode int8", "exceed range")
return
}
*((*int8)(ptr)) = int8(val)
} else {
*((*int8)(ptr)) = iter.ReadInt8()
}
}})
jsoniter.RegisterTypeDecoder("uint8", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxUint8) || val < 0 {
iter.ReportError("fuzzy decode uint8", "exceed range")
return
}
*((*uint8)(ptr)) = uint8(val)
} else {
*((*uint8)(ptr)) = iter.ReadUint8()
}
}})
jsoniter.RegisterTypeDecoder("int16", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxInt16) || val < float64(math.MinInt16) {
iter.ReportError("fuzzy decode int16", "exceed range")
return
}
*((*int16)(ptr)) = int16(val)
} else {
*((*int16)(ptr)) = iter.ReadInt16()
}
}})
jsoniter.RegisterTypeDecoder("uint16", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxUint16) || val < 0 {
iter.ReportError("fuzzy decode uint16", "exceed range")
return
}
*((*uint16)(ptr)) = uint16(val)
} else {
*((*uint16)(ptr)) = iter.ReadUint16()
}
}})
jsoniter.RegisterTypeDecoder("int32", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxInt32) || val < float64(math.MinInt32) {
iter.ReportError("fuzzy decode int32", "exceed range")
return
}
*((*int32)(ptr)) = int32(val)
} else {
*((*int32)(ptr)) = iter.ReadInt32()
}
}})
jsoniter.RegisterTypeDecoder("uint32", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxUint32) || val < 0 {
iter.ReportError("fuzzy decode uint32", "exceed range")
return
}
*((*uint32)(ptr)) = uint32(val)
} else {
*((*uint32)(ptr)) = iter.ReadUint32()
}
}})
jsoniter.RegisterTypeDecoder("int64", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxInt64) || val < float64(math.MinInt64) {
iter.ReportError("fuzzy decode int64", "exceed range")
return
}
*((*int64)(ptr)) = int64(val)
} else {
*((*int64)(ptr)) = iter.ReadInt64()
}
}})
jsoniter.RegisterTypeDecoder("uint64", &fuzzyIntegerDecoder{func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if isFloat {
val := iter.ReadFloat64()
if val > float64(math.MaxUint64) || val < 0 {
iter.ReportError("fuzzy decode uint64", "exceed range")
return
}
*((*uint64)(ptr)) = uint64(val)
} else {
*((*uint64)(ptr)) = iter.ReadUint64()
}
}})
}
type tolerateEmptyArrayExtension struct {
jsoniter.DummyExtension
}
func (extension *tolerateEmptyArrayExtension) DecorateDecoder(typ reflect2.Type, decoder jsoniter.ValDecoder) jsoniter.ValDecoder {
if typ.Kind() == reflect.Struct || typ.Kind() == reflect.Map {
return &tolerateEmptyArrayDecoder{decoder}
}
return decoder
}
type tolerateEmptyArrayDecoder struct {
valDecoder jsoniter.ValDecoder
}
func (decoder *tolerateEmptyArrayDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
if iter.WhatIsNext() == jsoniter.ArrayValue {
iter.Skip()
newIter := iter.Pool().BorrowIterator([]byte("{}"))
defer iter.Pool().ReturnIterator(newIter)
decoder.valDecoder.Decode(ptr, newIter)
} else {
decoder.valDecoder.Decode(ptr, iter)
}
}
type fuzzyStringDecoder struct {
}
func (decoder *fuzzyStringDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
valueType := iter.WhatIsNext()
switch valueType {
case jsoniter.NumberValue:
var number json.Number
iter.ReadVal(&number)
*((*string)(ptr)) = string(number)
case jsoniter.StringValue:
*((*string)(ptr)) = iter.ReadString()
case jsoniter.NilValue:
iter.Skip()
*((*string)(ptr)) = ""
default:
iter.ReportError("fuzzyStringDecoder", "not number or string")
}
}
type fuzzyIntegerDecoder struct {
fun func(isFloat bool, ptr unsafe.Pointer, iter *jsoniter.Iterator)
}
func (decoder *fuzzyIntegerDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
valueType := iter.WhatIsNext()
var str string
switch valueType {
case jsoniter.NumberValue:
var number json.Number
iter.ReadVal(&number)
str = string(number)
case jsoniter.StringValue:
str = iter.ReadString()
case jsoniter.BoolValue:
if iter.ReadBool() {
str = "1"
} else {
str = "0"
}
case jsoniter.NilValue:
iter.Skip()
str = "0"
default:
iter.ReportError("fuzzyIntegerDecoder", "not number or string")
}
if len(str) == 0 {
str = "0"
}
newIter := iter.Pool().BorrowIterator([]byte(str))
defer iter.Pool().ReturnIterator(newIter)
isFloat := strings.IndexByte(str, '.') != -1
decoder.fun(isFloat, ptr, newIter)
if newIter.Error != nil && newIter.Error != io.EOF {
iter.Error = newIter.Error
}
}
type fuzzyFloat32Decoder struct {
}
func (decoder *fuzzyFloat32Decoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
valueType := iter.WhatIsNext()
var str string
switch valueType {
case jsoniter.NumberValue:
*((*float32)(ptr)) = iter.ReadFloat32()
case jsoniter.StringValue:
str = iter.ReadString()
newIter := iter.Pool().BorrowIterator([]byte(str))
defer iter.Pool().ReturnIterator(newIter)
*((*float32)(ptr)) = newIter.ReadFloat32()
if newIter.Error != nil && newIter.Error != io.EOF {
iter.Error = newIter.Error
}
case jsoniter.BoolValue:
// support bool to float32
if iter.ReadBool() {
*((*float32)(ptr)) = 1
} else {
*((*float32)(ptr)) = 0
}
case jsoniter.NilValue:
iter.Skip()
*((*float32)(ptr)) = 0
default:
iter.ReportError("fuzzyFloat32Decoder", "not number or string")
}
}
type fuzzyFloat64Decoder struct {
}
func (decoder *fuzzyFloat64Decoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
valueType := iter.WhatIsNext()
var str string
switch valueType {
case jsoniter.NumberValue:
*((*float64)(ptr)) = iter.ReadFloat64()
case jsoniter.StringValue:
str = iter.ReadString()
newIter := iter.Pool().BorrowIterator([]byte(str))
defer iter.Pool().ReturnIterator(newIter)
*((*float64)(ptr)) = newIter.ReadFloat64()
if newIter.Error != nil && newIter.Error != io.EOF {
iter.Error = newIter.Error
}
case jsoniter.BoolValue:
// support bool to float64
if iter.ReadBool() {
*((*float64)(ptr)) = 1
} else {
*((*float64)(ptr)) = 0
}
case jsoniter.NilValue:
iter.Skip()
*((*float64)(ptr)) = 0
default:
iter.ReportError("fuzzyFloat64Decoder", "not number or string")
}
}
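A minimal caller-side sketch of the tolerance RegisterFuzzyDecoders provides (struct and field names are made up; the package is assumed to be importable as github.com/json-iterator/go/extra):
package main
import (
	"fmt"
	jsoniter "github.com/json-iterator/go"
	"github.com/json-iterator/go/extra"
)
type item struct {
	ID    int     `json:"id"`
	Price float64 `json:"price"`
}
func main() {
	extra.RegisterFuzzyDecoders() // registers the global type decoders defined above
	var it item
	err := jsoniter.UnmarshalFromString(`{"id":"42","price":"9.5"}`, &it)
	fmt.Println(it, err) // {42 9.5} <nil>: string-wrapped numbers are tolerated
}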

extra/fuzzy_decoder_test.go Normal file

@ -0,0 +1,393 @@
package extra
import (
"testing"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func init() {
RegisterFuzzyDecoders()
}
func Test_any_to_string(t *testing.T) {
should := require.New(t)
var val string
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal("100", val)
should.Nil(jsoniter.UnmarshalFromString("10", &val))
should.Equal("10", val)
should.Nil(jsoniter.UnmarshalFromString("10.1", &val))
should.Equal("10.1", val)
should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val))
should.Equal("10.1", val)
should.NotNil(jsoniter.UnmarshalFromString("{}", &val))
should.NotNil(jsoniter.UnmarshalFromString("[]", &val))
}
func Test_any_to_int64(t *testing.T) {
should := require.New(t)
var val int64
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(int64(100), val)
should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val))
should.Equal(int64(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val))
should.Equal(int64(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
should.Equal(int64(10), val)
should.Nil(jsoniter.UnmarshalFromString(`""`, &val))
should.Equal(int64(0), val)
// bool part
should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
should.Equal(int64(0), val)
should.Nil(jsoniter.UnmarshalFromString(`true`, &val))
should.Equal(int64(1), val)
should.Nil(jsoniter.UnmarshalFromString(`-10`, &val))
should.Equal(int64(-10), val)
should.NotNil(jsoniter.UnmarshalFromString("{}", &val))
should.NotNil(jsoniter.UnmarshalFromString("[]", &val))
// large float to int
should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val))
}
func Test_any_to_int(t *testing.T) {
should := require.New(t)
var val int
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(100, val)
should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val))
should.Equal(10, val)
should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val))
should.Equal(10, val)
should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
should.Equal(10, val)
// bool part
should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
should.Equal(0, val)
should.Nil(jsoniter.UnmarshalFromString(`true`, &val))
should.Equal(1, val)
should.NotNil(jsoniter.UnmarshalFromString("{}", &val))
should.NotNil(jsoniter.UnmarshalFromString("[]", &val))
// large float to int
should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val))
}
func Test_any_to_int16(t *testing.T) {
should := require.New(t)
var val int16
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(int16(100), val)
should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val))
should.Equal(int16(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val))
should.Equal(int16(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
should.Equal(int16(10), val)
// bool part
should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
should.Equal(int16(0), val)
should.Nil(jsoniter.UnmarshalFromString(`true`, &val))
should.Equal(int16(1), val)
should.NotNil(jsoniter.UnmarshalFromString("{}", &val))
should.NotNil(jsoniter.UnmarshalFromString("[]", &val))
// large float to int
should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val))
}
func Test_any_to_int32(t *testing.T) {
should := require.New(t)
var val int32
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(int32(100), val)
should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val))
should.Equal(int32(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val))
should.Equal(int32(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
should.Equal(int32(10), val)
// bool part
should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
should.Equal(int32(0), val)
should.Nil(jsoniter.UnmarshalFromString(`true`, &val))
should.Equal(int32(1), val)
should.NotNil(jsoniter.UnmarshalFromString("{}", &val))
should.NotNil(jsoniter.UnmarshalFromString("[]", &val))
// large float to int
should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val))
}
func Test_any_to_int8(t *testing.T) {
should := require.New(t)
var val int8
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(int8(100), val)
should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val))
should.Equal(int8(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val))
should.Equal(int8(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
should.Equal(int8(10), val)
// bool part
should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
should.Equal(int8(0), val)
should.Nil(jsoniter.UnmarshalFromString(`true`, &val))
should.Equal(int8(1), val)
should.NotNil(jsoniter.UnmarshalFromString("{}", &val))
should.NotNil(jsoniter.UnmarshalFromString("[]", &val))
// large float to int
should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val))
}
func Test_any_to_uint8(t *testing.T) {
should := require.New(t)
var val uint8
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(uint8(100), val)
should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val))
should.Equal(uint8(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val))
should.Equal(uint8(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
should.Equal(uint8(10), val)
// bool part
should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
should.Equal(uint8(0), val)
should.Nil(jsoniter.UnmarshalFromString(`true`, &val))
should.Equal(uint8(1), val)
should.NotNil(jsoniter.UnmarshalFromString("{}", &val))
should.NotNil(jsoniter.UnmarshalFromString("[]", &val))
// large float to int
should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val))
}
func Test_any_to_uint64(t *testing.T) {
should := require.New(t)
var val uint64
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(uint64(100), val)
should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val))
should.Equal(uint64(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val))
should.Equal(uint64(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
should.Equal(uint64(10), val)
// bool part
should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
should.Equal(uint64(0), val)
should.Nil(jsoniter.UnmarshalFromString(`true`, &val))
should.Equal(uint64(1), val)
// TODO fix?
should.NotNil(jsoniter.UnmarshalFromString(`-10`, &val))
should.Equal(uint64(0), val)
should.NotNil(jsoniter.UnmarshalFromString("{}", &val))
should.NotNil(jsoniter.UnmarshalFromString("[]", &val))
// large float to int
should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val))
}
func Test_any_to_uint32(t *testing.T) {
should := require.New(t)
var val uint32
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(uint32(100), val)
should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val))
should.Equal(uint32(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val))
should.Equal(uint32(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
should.Equal(uint32(10), val)
// bool part
should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
should.Equal(uint32(0), val)
should.Nil(jsoniter.UnmarshalFromString(`true`, &val))
should.Equal(uint32(1), val)
// TODO fix?
should.NotNil(jsoniter.UnmarshalFromString(`-10`, &val))
should.Equal(uint32(0), val)
should.NotNil(jsoniter.UnmarshalFromString("{}", &val))
should.NotNil(jsoniter.UnmarshalFromString("[]", &val))
// large float to int
should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val))
}
func Test_any_to_uint16(t *testing.T) {
should := require.New(t)
var val uint16
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(uint16(100), val)
should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val))
should.Equal(uint16(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val))
should.Equal(uint16(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
should.Equal(uint16(10), val)
// bool part
should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
should.Equal(uint16(0), val)
should.Nil(jsoniter.UnmarshalFromString(`true`, &val))
should.Equal(uint16(1), val)
// TODO fix?
should.NotNil(jsoniter.UnmarshalFromString(`-10`, &val))
should.Equal(uint16(0), val)
should.NotNil(jsoniter.UnmarshalFromString("{}", &val))
should.NotNil(jsoniter.UnmarshalFromString("[]", &val))
// large float to int
should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val))
}
func Test_any_to_uint(t *testing.T) {
should := require.New(t)
var val uint
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(uint(100), val)
should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val))
should.Equal(uint(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val))
should.Equal(uint(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
should.Equal(uint(10), val)
should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
should.Equal(uint(0), val)
should.Nil(jsoniter.UnmarshalFromString(`true`, &val))
should.Equal(uint(1), val)
should.NotNil(jsoniter.UnmarshalFromString("{}", &val))
should.NotNil(jsoniter.UnmarshalFromString("[]", &val))
// large float to int
should.NotNil(jsoniter.UnmarshalFromString(`1234512345123451234512345.0`, &val))
}
func Test_any_to_float32(t *testing.T) {
should := require.New(t)
var val float32
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(float32(100), val)
should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val))
should.Equal(float32(10.1), val)
should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val))
should.Equal(float32(10.1), val)
should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
should.Equal(float32(10), val)
// bool part
should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
should.Equal(float32(0), val)
should.Nil(jsoniter.UnmarshalFromString(`true`, &val))
should.Equal(float32(1), val)
should.NotNil(jsoniter.UnmarshalFromString("{}", &val))
should.NotNil(jsoniter.UnmarshalFromString("[]", &val))
}
func Test_any_to_float64(t *testing.T) {
should := require.New(t)
var val float64
should.Nil(jsoniter.UnmarshalFromString(`"100"`, &val))
should.Equal(float64(100), val)
should.Nil(jsoniter.UnmarshalFromString(`"10.1"`, &val))
should.Equal(float64(10.1), val)
should.Nil(jsoniter.UnmarshalFromString(`10.1`, &val))
should.Equal(float64(10.1), val)
should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
should.Equal(float64(10), val)
// bool part
should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
should.Equal(float64(0), val)
should.Nil(jsoniter.UnmarshalFromString(`true`, &val))
should.Equal(float64(1), val)
should.NotNil(jsoniter.UnmarshalFromString("{}", &val))
should.NotNil(jsoniter.UnmarshalFromString("[]", &val))
}
func Test_empty_array_as_map(t *testing.T) {
should := require.New(t)
var val map[string]interface{}
should.Nil(jsoniter.UnmarshalFromString(`[]`, &val))
should.Equal(map[string]interface{}{}, val)
}
func Test_empty_array_as_object(t *testing.T) {
should := require.New(t)
var val struct{}
should.Nil(jsoniter.UnmarshalFromString(`[]`, &val))
should.Equal(struct{}{}, val)
}
func Test_bad_case(t *testing.T) {
var jsonstr = `
{
"extra_type": 181760,
"combo_type": 0,
"trigger_time_ms": 1498800398000,
"_create_time": "2017-06-16 11:21:39",
"_msg_type": 41000
}
`
type OrderEventRequestParams struct {
ExtraType uint64 `json:"extra_type"`
}
var a OrderEventRequestParams
err := jsoniter.UnmarshalFromString(jsonstr, &a)
should := require.New(t)
should.Nil(err)
}
func Test_null_to_string(t *testing.T) {
should := require.New(t)
body := []byte(`null`)
var message string
err := jsoniter.Unmarshal(body, &message)
should.NoError(err)
}
func Test_null_to_int(t *testing.T) {
should := require.New(t)
body := []byte(`null`)
var message int
err := jsoniter.Unmarshal(body, &message)
should.NoError(err)
}
func Test_null_to_float32(t *testing.T) {
should := require.New(t)
body := []byte(`null`)
var message float32
err := jsoniter.Unmarshal(body, &message)
should.NoError(err)
}
func Test_null_to_float64(t *testing.T) {
should := require.New(t)
body := []byte(`null`)
var message float64
err := jsoniter.Unmarshal(body, &message)
should.NoError(err)
}

52
extra/naming_strategy.go Normal file

@ -0,0 +1,52 @@
package extra
import (
"github.com/json-iterator/go"
"strings"
"unicode"
)
// SetNamingStrategy renames struct fields uniformly using the given translate function.
func SetNamingStrategy(translate func(string) string) {
jsoniter.RegisterExtension(&namingStrategyExtension{jsoniter.DummyExtension{}, translate})
}
type namingStrategyExtension struct {
jsoniter.DummyExtension
translate func(string) string
}
func (extension *namingStrategyExtension) UpdateStructDescriptor(structDescriptor *jsoniter.StructDescriptor) {
for _, binding := range structDescriptor.Fields {
tag, hastag := binding.Field.Tag().Lookup("json")
if hastag {
tagParts := strings.Split(tag, ",")
if tagParts[0] == "-" {
continue // hidden field
}
if tagParts[0] != "" {
continue // field explicitly named
}
}
binding.ToNames = []string{extension.translate(binding.Field.Name())}
binding.FromNames = []string{extension.translate(binding.Field.Name())}
}
}
// LowerCaseWithUnderscores is one strategy that can be passed to SetNamingStrategy. It changes HelloWorld to hello_world.
func LowerCaseWithUnderscores(name string) string {
newName := []rune{}
for i, c := range name {
if i == 0 {
newName = append(newName, unicode.ToLower(c))
} else {
if unicode.IsUpper(c) {
newName = append(newName, '_')
newName = append(newName, unicode.ToLower(c))
} else {
newName = append(newName, c)
}
}
}
return string(newName)
}
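A rough sketch of plugging a custom strategy into SetNamingStrategy (the screamingCase helper is hypothetical and only builds on the exported LowerCaseWithUnderscores):
package main

import (
    "fmt"
    "strings"

    "github.com/json-iterator/go"
    "github.com/json-iterator/go/extra"
)

// screamingCase is a hypothetical strategy: HelloWorld -> HELLO_WORLD.
func screamingCase(name string) string {
    return strings.ToUpper(extra.LowerCaseWithUnderscores(name))
}

func main() {
    extra.SetNamingStrategy(screamingCase)
    output, err := jsoniter.Marshal(struct {
        UserName string
    }{UserName: "taowen"})
    if err != nil {
        panic(err)
    }
    fmt.Println(string(output)) // expected: {"USER_NAME":"taowen"}
}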


@ -0,0 +1,50 @@
package extra
import (
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"testing"
)
func Test_lower_case_with_underscores(t *testing.T) {
should := require.New(t)
should.Equal("hello_world", LowerCaseWithUnderscores("helloWorld"))
should.Equal("hello_world", LowerCaseWithUnderscores("HelloWorld"))
SetNamingStrategy(LowerCaseWithUnderscores)
output, err := jsoniter.Marshal(struct {
UserName string
FirstLanguage string
}{
UserName: "taowen",
FirstLanguage: "Chinese",
})
should.Nil(err)
should.Equal(`{"user_name":"taowen","first_language":"Chinese"}`, string(output))
}
func Test_set_naming_strategy_with_overrides(t *testing.T) {
should := require.New(t)
SetNamingStrategy(LowerCaseWithUnderscores)
output, err := jsoniter.Marshal(struct {
UserName string `json:"UserName"`
FirstLanguage string
}{
UserName: "taowen",
FirstLanguage: "Chinese",
})
should.Nil(err)
should.Equal(`{"UserName":"taowen","first_language":"Chinese"}`, string(output))
}
func Test_set_naming_strategy_with_omitempty(t *testing.T) {
should := require.New(t)
SetNamingStrategy(LowerCaseWithUnderscores)
output, err := jsoniter.Marshal(struct {
UserName string
FirstLanguage string `json:",omitempty"`
}{
UserName: "taowen",
})
should.Nil(err)
should.Equal(`{"user_name":"taowen"}`, string(output))
}

54
extra/privat_fields.go Normal file

@ -0,0 +1,54 @@
package extra
import (
"github.com/json-iterator/go"
"strings"
"unicode"
)
// SupportPrivateFields includes private (unexported) struct fields when encoding and decoding.
func SupportPrivateFields() {
jsoniter.RegisterExtension(&privateFieldsExtension{})
}
type privateFieldsExtension struct {
jsoniter.DummyExtension
}
func (extension *privateFieldsExtension) UpdateStructDescriptor(structDescriptor *jsoniter.StructDescriptor) {
for _, binding := range structDescriptor.Fields {
isPrivate := unicode.IsLower(rune(binding.Field.Name()[0]))
if isPrivate {
tag, hastag := binding.Field.Tag().Lookup("json")
if !hastag {
binding.FromNames = []string{binding.Field.Name()}
binding.ToNames = []string{binding.Field.Name()}
continue
}
tagParts := strings.Split(tag, ",")
names := calcFieldNames(binding.Field.Name(), tagParts[0], tag)
binding.FromNames = names
binding.ToNames = names
}
}
}
func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string {
// ignore?
if wholeTag == "-" {
return []string{}
}
// rename?
var fieldNames []string
if tagProvidedFieldName == "" {
fieldNames = []string{originalFieldName}
} else {
fieldNames = []string{tagProvidedFieldName}
}
// private?
isNotExported := unicode.IsLower(rune(originalFieldName[0]))
if isNotExported {
fieldNames = []string{}
}
return fieldNames
}
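A hedged sketch of the effect of SupportPrivateFields (the user type and payload are illustrative): with the extension registered, an unexported field takes part in both decoding and encoding.
package main

import (
    "fmt"

    "github.com/json-iterator/go"
    "github.com/json-iterator/go/extra"
)

func main() {
    extra.SupportPrivateFields()
    // user and its unexported field are illustrative only.
    type user struct {
        name string
    }
    var u user
    if err := jsoniter.UnmarshalFromString(`{"name":"alice"}`, &u); err != nil {
        panic(err)
    }
    output, _ := jsoniter.Marshal(u)
    fmt.Println(u.name, string(output)) // expected: alice {"name":"alice"}
}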


@ -0,0 +1,18 @@
package extra
import (
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"testing"
)
func Test_private_fields(t *testing.T) {
type TestObject struct {
field1 string
}
SupportPrivateFields()
should := require.New(t)
obj := TestObject{}
should.Nil(jsoniter.UnmarshalFromString(`{"field1":"Hello"}`, &obj))
should.Equal("Hello", obj.field1)
}


@ -0,0 +1,31 @@
package extra
import (
"github.com/json-iterator/go"
"time"
"unsafe"
)
// RegisterTimeAsInt64Codec encodes/decodes time.Time as the number of units elapsed since the epoch; precision determines the unit.
func RegisterTimeAsInt64Codec(precision time.Duration) {
jsoniter.RegisterTypeEncoder("time.Time", &timeAsInt64Codec{precision})
jsoniter.RegisterTypeDecoder("time.Time", &timeAsInt64Codec{precision})
}
type timeAsInt64Codec struct {
precision time.Duration
}
func (codec *timeAsInt64Codec) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
nanoseconds := iter.ReadInt64() * codec.precision.Nanoseconds()
*((*time.Time)(ptr)) = time.Unix(0, nanoseconds)
}
func (codec *timeAsInt64Codec) IsEmpty(ptr unsafe.Pointer) bool {
ts := *((*time.Time)(ptr))
return ts.UnixNano() == 0
}
func (codec *timeAsInt64Codec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
ts := *((*time.Time)(ptr))
stream.WriteInt64(ts.UnixNano() / codec.precision.Nanoseconds())
}
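A minimal sketch of the codec with one-second precision (the payload is illustrative): timestamps round-trip as plain Unix seconds.
package main

import (
    "fmt"
    "time"

    "github.com/json-iterator/go"
    "github.com/json-iterator/go/extra"
)

func main() {
    extra.RegisterTimeAsInt64Codec(time.Second) // one unit = one second
    var ts time.Time
    if err := jsoniter.UnmarshalFromString(`1498800398`, &ts); err != nil {
        panic(err)
    }
    fmt.Println(ts.Unix()) // 1498800398
    output, _ := jsoniter.Marshal(ts)
    fmt.Println(string(output)) // 1498800398
}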


@ -0,0 +1,31 @@
package extra
import (
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"testing"
"time"
)
func Test_time_as_int64(t *testing.T) {
should := require.New(t)
RegisterTimeAsInt64Codec(time.Nanosecond)
output, err := jsoniter.Marshal(time.Unix(1497952257, 1002))
should.Nil(err)
should.Equal("1497952257000001002", string(output))
var val time.Time
should.Nil(jsoniter.Unmarshal(output, &val))
should.Equal(int64(1497952257000001002), val.UnixNano())
}
func Test_time_as_int64_keep_microsecond(t *testing.T) {
t.Skip("conflict")
should := require.New(t)
RegisterTimeAsInt64Codec(time.Microsecond)
output, err := jsoniter.Marshal(time.Unix(1, 1002))
should.Nil(err)
should.Equal("1000001", string(output))
var val time.Time
should.Nil(jsoniter.Unmarshal(output, &val))
should.Equal(int64(1000001000), val.UnixNano())
}


@ -1,88 +0,0 @@
package jsoniter
import (
"io"
"bytes"
)
// Unmarshal adapts to the encoding/json API
func Unmarshal(data []byte, v interface{}) error {
iter := ParseBytes(data)
iter.ReadVal(v)
if iter.head == iter.tail {
iter.loadMore()
}
if iter.Error == io.EOF {
return nil
}
if iter.Error == nil {
iter.reportError("Unmarshal", "there are bytes left after unmarshal")
}
return iter.Error
}
func UnmarshalAny(data []byte) (Any, error) {
iter := ParseBytes(data)
any := iter.ReadAny()
if iter.head == iter.tail {
iter.loadMore()
}
if iter.Error == io.EOF {
return any, nil
}
if iter.Error == nil {
iter.reportError("UnmarshalAny", "there are bytes left after unmarshal")
}
return any, iter.Error
}
func UnmarshalFromString(str string, v interface{}) error {
data := []byte(str)
iter := ParseBytes(data)
iter.ReadVal(v)
if iter.head == iter.tail {
iter.loadMore()
}
if iter.Error == io.EOF {
return nil
}
if iter.Error == nil {
iter.reportError("UnmarshalFromString", "there are bytes left after unmarshal")
}
return iter.Error
}
func UnmarshalAnyFromString(str string) (Any, error) {
data := []byte(str)
iter := ParseBytes(data)
any := iter.ReadAny()
if iter.head == iter.tail {
iter.loadMore()
}
if iter.Error == io.EOF {
return any, nil
}
if iter.Error == nil {
iter.reportError("UnmarshalAnyFromString", "there are bytes left after unmarshal")
}
return nil, iter.Error
}
func Marshal(v interface{}) ([]byte, error) {
buf := &bytes.Buffer{}
stream := NewStream(buf, 4096)
stream.WriteVal(v)
stream.Flush()
if stream.Error != nil {
return nil, stream.Error
}
return buf.Bytes(), nil
}
func MarshalToString(v interface{}) (string, error) {
buf, err := Marshal(v)
if err != nil {
return "", err
}
return string(buf), nil
}


@ -1,300 +0,0 @@
package jsoniter
import (
"fmt"
"reflect"
)
type Any interface {
LastError() error
ValueType() ValueType
ToBool() bool
ToInt() int
ToInt32() int32
ToInt64() int64
ToUint() uint
ToUint32() uint32
ToUint64() uint64
ToFloat32() float32
ToFloat64() float64
ToString() string
Get(path ...interface{}) Any
Size() int
Keys() []string
IterateObject() (func() (string, Any, bool), bool)
IterateArray() (func() (Any, bool), bool)
GetArray() []Any
SetArray(newList []Any) bool
GetObject() map[string]Any
SetObject(map[string]Any) bool
GetInterface() interface{}
WriteTo(stream *Stream)
Parse() *Iterator
}
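A rough usage sketch for this Any interface, written against the adapter helpers removed in this same diff (the payload is illustrative, and this only compiles against the pre-refactor API):
package main

import (
    "fmt"

    "github.com/json-iterator/go"
)

func main() {
    root, err := jsoniter.UnmarshalAnyFromString(`{"users":[{"name":"alice"},{"name":"bob"}]}`)
    if err != nil {
        panic(err)
    }
    fmt.Println(root.Get("users", 0, "name").ToString()) // alice
    // A '*' path element (an int32 rune literal) fans out over every array element.
    fmt.Println(root.Get("users", '*', "name").Size()) // 2
}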
type baseAny struct{}
func (any *baseAny) Get(path ...interface{}) Any {
return &invalidAny{baseAny{}, fmt.Errorf("Get %v from simple value", path)}
}
func (any *baseAny) Size() int {
return 0
}
func (any *baseAny) Keys() []string {
return []string{}
}
func (any *baseAny) IterateObject() (func() (string, Any, bool), bool) {
return nil, false
}
func (any *baseAny) IterateArray() (func() (Any, bool), bool) {
return nil, false
}
func (any *baseAny) GetArray() []Any {
return []Any{}
}
func (any *baseAny) SetArray(newList []Any) bool {
return false
}
func (any *baseAny) GetObject() map[string]Any {
return map[string]Any{}
}
func (any *baseAny) SetObject(map[string]Any) bool {
return false
}
func WrapInt32(val int32) Any {
return &int32Any{baseAny{}, val}
}
func WrapInt64(val int64) Any {
return &int64Any{baseAny{}, val}
}
func WrapUint32(val uint32) Any {
return &uint32Any{baseAny{}, val}
}
func WrapUint64(val uint64) Any {
return &uint64Any{baseAny{}, val}
}
func WrapFloat64(val float64) Any {
return &floatAny{baseAny{}, val}
}
func WrapString(val string) Any {
return &stringAny{baseAny{}, nil, val}
}
func Wrap(val interface{}) Any {
if val == nil {
return &nilAny{}
}
type_ := reflect.TypeOf(val)
switch type_.Kind() {
case reflect.Slice:
return wrapArray(val)
case reflect.Struct:
return wrapStruct(val)
case reflect.Map:
return wrapMap(val)
case reflect.String:
return WrapString(val.(string))
case reflect.Int:
return WrapInt64(int64(val.(int)))
case reflect.Int8:
return WrapInt32(int32(val.(int8)))
case reflect.Int16:
return WrapInt32(int32(val.(int16)))
case reflect.Int32:
return WrapInt32(val.(int32))
case reflect.Int64:
return WrapInt64(val.(int64))
case reflect.Uint:
return WrapUint64(uint64(val.(uint)))
case reflect.Uint8:
return WrapUint32(uint32(val.(uint8)))
case reflect.Uint16:
return WrapUint32(uint32(val.(uint16)))
case reflect.Uint32:
return WrapUint32(uint32(val.(uint32)))
case reflect.Uint64:
return WrapUint64(val.(uint64))
case reflect.Float32:
return WrapFloat64(float64(val.(float32)))
case reflect.Float64:
return WrapFloat64(val.(float64))
case reflect.Bool:
if val.(bool) {
return &trueAny{}
} else {
return &falseAny{}
}
}
return &invalidAny{baseAny{}, fmt.Errorf("unsupported type: %v", type_)}
}
func (iter *Iterator) ReadAny() Any {
return iter.readAny(nil)
}
func (iter *Iterator) readAny(reusableIter *Iterator) Any {
c := iter.nextToken()
switch c {
case '"':
return iter.readStringAny(reusableIter)
case 'n':
iter.skipFixedBytes(3) // null
return &nilAny{}
case 't':
iter.skipFixedBytes(3) // true
return &trueAny{}
case 'f':
iter.skipFixedBytes(4) // false
return &falseAny{}
case '{':
return iter.readObjectAny(reusableIter)
case '[':
return iter.readArrayAny(reusableIter)
default:
return iter.readNumberAny(reusableIter, c)
}
}
func (iter *Iterator) readNumberAny(reusableIter *Iterator, firstByte byte) Any {
dotFound := false
lazyBuf := make([]byte, 1, 8)
lazyBuf[0] = firstByte
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
if c == '.' {
dotFound = true
continue
}
switch c {
case ' ', '\n', '\r', '\t', ',', '}', ']':
lazyBuf = append(lazyBuf, iter.buf[iter.head:i]...)
iter.head = i
if dotFound {
return &float64LazyAny{baseAny{}, lazyBuf, reusableIter, nil, 0}
} else {
if firstByte == '-' {
return &int64LazyAny{baseAny{}, lazyBuf, reusableIter, nil, 0}
} else {
return &uint64LazyAny{baseAny{}, lazyBuf, reusableIter, nil, 0}
}
}
}
}
lazyBuf = append(lazyBuf, iter.buf[iter.head:iter.tail]...)
if !iter.loadMore() {
iter.head = iter.tail
if dotFound {
return &float64LazyAny{baseAny{}, lazyBuf, reusableIter, nil, 0}
} else {
if firstByte == '-' {
return &int64LazyAny{baseAny{}, lazyBuf, reusableIter, nil, 0}
} else {
return &uint64LazyAny{baseAny{}, lazyBuf, reusableIter, nil, 0}
}
}
}
}
}
func (iter *Iterator) readStringAny(reusableIter *Iterator) Any {
lazyBuf := make([]byte, 1, 8)
lazyBuf[0] = '"'
for {
end, escaped := iter.findStringEnd()
if end == -1 {
lazyBuf = append(lazyBuf, iter.buf[iter.head:iter.tail]...)
if !iter.loadMore() {
iter.reportError("readStringAny", "incomplete string")
return &invalidAny{}
}
if escaped {
iter.head = 1 // skip the first char as last char read is \
}
} else {
lazyBuf = append(lazyBuf, iter.buf[iter.head:end]...)
iter.head = end
return &stringLazyAny{baseAny{}, lazyBuf, reusableIter, nil, ""}
}
}
}
func (iter *Iterator) readObjectAny(reusableIter *Iterator) Any {
level := 1
lazyBuf := make([]byte, 1, 32)
lazyBuf[0] = '{'
for {
start := iter.head
for i := iter.head; i < iter.tail; i++ {
switch iter.buf[i] {
case '"': // If inside string, skip it
iter.head = i + 1
iter.skipString()
i = iter.head - 1 // it will be i++ soon
case '{': // If open symbol, increase level
level++
case '}': // If close symbol, decrease level
level--
// If we have returned to the original level, we're done
if level == 0 {
iter.head = i + 1
lazyBuf = append(lazyBuf, iter.buf[start:iter.head]...)
return &objectLazyAny{baseAny{}, lazyBuf, reusableIter, nil, nil, lazyBuf}
}
}
}
lazyBuf = append(lazyBuf, iter.buf[iter.head:iter.tail]...)
if !iter.loadMore() {
iter.reportError("skipObject", "incomplete object")
return &invalidAny{}
}
}
}
func (iter *Iterator) readArrayAny(reusableIter *Iterator) Any {
level := 1
lazyBuf := make([]byte, 1, 32)
lazyBuf[0] = '['
for {
start := iter.head
for i := iter.head; i < iter.tail; i++ {
switch iter.buf[i] {
case '"': // If inside string, skip it
iter.head = i + 1
iter.skipString()
i = iter.head - 1 // it will be i++ soon
case '[': // If open symbol, increase level
level++
case ']': // If close symbol, decrease level
level--
// If we have returned to the original level, we're done
if level == 0 {
iter.head = i + 1
lazyBuf = append(lazyBuf, iter.buf[start:iter.head]...)
return &arrayLazyAny{baseAny{}, lazyBuf, reusableIter, nil, nil, lazyBuf}
}
}
}
lazyBuf = append(lazyBuf, iter.buf[iter.head:iter.tail]...)
if !iter.loadMore() {
iter.reportError("skipArray", "incomplete array")
return &invalidAny{}
}
}
}


@ -1,539 +0,0 @@
package jsoniter
import (
"unsafe"
"fmt"
"reflect"
)
type arrayLazyAny struct {
baseAny
buf []byte
iter *Iterator
err error
cache []Any
remaining []byte
}
func (any *arrayLazyAny) ValueType() ValueType {
return Array
}
func (any *arrayLazyAny) Parse() *Iterator {
iter := any.iter
if iter == nil {
iter = NewIterator()
any.iter = iter
}
iter.ResetBytes(any.remaining)
return iter
}
func (any *arrayLazyAny) fillCacheUntil(target int) Any {
if any.remaining == nil {
if target >= len(any.cache) {
return nil
}
return any.cache[target]
}
if any.cache == nil {
any.cache = make([]Any, 0, 8)
}
i := len(any.cache)
if target < i {
return any.cache[target]
}
iter := any.Parse()
if len(any.remaining) == len(any.buf) {
iter.head++
c := iter.nextToken()
if c != ']' {
iter.unreadByte()
element := iter.readAny(iter)
any.cache = append(any.cache, element)
if target == 0 {
any.remaining = iter.buf[iter.head:]
any.err = iter.Error
return element
}
i = 1
} else {
any.remaining = nil
any.err = iter.Error
return nil
}
}
for iter.nextToken() == ',' {
element := iter.readAny(iter)
any.cache = append(any.cache, element)
if i == target {
any.remaining = iter.buf[iter.head:]
any.err = iter.Error
return element
}
i++
}
any.remaining = nil
any.err = iter.Error
return nil
}
func (any *arrayLazyAny) fillCache() {
if any.remaining == nil {
return
}
if any.cache == nil {
any.cache = make([]Any, 0, 8)
}
iter := any.Parse()
if len(any.remaining) == len(any.buf) {
iter.head++
c := iter.nextToken()
if c != ']' {
iter.unreadByte()
any.cache = append(any.cache, iter.readAny(iter))
} else {
any.remaining = nil
any.err = iter.Error
return
}
}
for iter.nextToken() == ',' {
any.cache = append(any.cache, iter.readAny(iter))
}
any.remaining = nil
any.err = iter.Error
}
func (any *arrayLazyAny) LastError() error {
return any.err
}
func (any *arrayLazyAny) ToBool() bool {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
return len(any.cache) != 0
}
func (any *arrayLazyAny) ToInt() int {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *arrayLazyAny) ToInt32() int32 {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *arrayLazyAny) ToInt64() int64 {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *arrayLazyAny) ToUint() uint {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *arrayLazyAny) ToUint32() uint32 {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *arrayLazyAny) ToUint64() uint64 {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *arrayLazyAny) ToFloat32() float32 {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *arrayLazyAny) ToFloat64() float64 {
if any.cache == nil {
any.IterateArray() // trigger first element read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *arrayLazyAny) ToString() string {
if len(any.remaining) == len(any.buf) {
// nothing has been parsed yet
return *(*string)(unsafe.Pointer(&any.buf))
} else {
any.fillCache()
str, err := MarshalToString(any.cache)
any.err = err
return str
}
}
func (any *arrayLazyAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
var element Any
switch firstPath := path[0].(type) {
case int:
element = any.fillCacheUntil(firstPath)
if element == nil {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
case int32:
if '*' == firstPath {
any.fillCache()
arr := make([]Any, 0, len(any.cache))
for _, element := range any.cache {
found := element.Get(path[1:]...)
if found.ValueType() != Invalid {
arr = append(arr, found)
}
}
return wrapArray(arr)
} else {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", path[0], any.cache)}
}
default:
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", path[0], any.cache)}
}
if len(path) == 1 {
return element
} else {
return element.Get(path[1:]...)
}
}
func (any *arrayLazyAny) Size() int {
any.fillCache()
return len(any.cache)
}
func (any *arrayLazyAny) IterateArray() (func() (Any, bool), bool) {
if any.cache == nil {
any.cache = make([]Any, 0, 8)
}
remaining := any.remaining
if len(remaining) == len(any.buf) {
iter := any.Parse()
iter.head++
c := iter.nextToken()
if c != ']' {
iter.unreadByte()
v := iter.readAny(iter)
any.cache = append(any.cache, v)
remaining = iter.buf[iter.head:]
any.remaining = remaining
} else {
remaining = nil
any.remaining = nil
any.err = iter.Error
return nil, false
}
}
if len(any.cache) == 0 {
return nil, false
}
arr := any.cache
nextValue := arr[0]
i := 1
return func() (Any, bool) {
value := nextValue
if i < len(arr) {
// read from cache
nextValue = arr[i]
i++
return value, true
} else {
// read from buffer
iter := any.iter
if iter == nil {
iter = NewIterator()
any.iter = iter
}
iter.ResetBytes(remaining)
c := iter.nextToken()
if c == ',' {
nextValue = iter.readAny(iter)
any.cache = append(any.cache, nextValue)
remaining = iter.buf[iter.head:]
any.remaining = remaining
any.err = iter.Error
return value, true
} else {
remaining = nil
any.remaining = nil
any.err = iter.Error
return value, false
}
}
}, true
}
func (any *arrayLazyAny) GetArray() []Any {
any.fillCache()
return any.cache
}
func (any *arrayLazyAny) SetArray(newList []Any) bool {
any.fillCache()
any.cache = newList
return true
}
func (any *arrayLazyAny) WriteTo(stream *Stream) {
if len(any.remaining) == len(any.buf) {
// nothing has been parsed yet
stream.Write(any.buf)
} else {
any.fillCache()
stream.WriteVal(any.cache)
}
}
func (any *arrayLazyAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}
type arrayAny struct {
baseAny
err error
cache []Any
val reflect.Value
}
func wrapArray(val interface{}) *arrayAny {
return &arrayAny{baseAny{}, nil, nil, reflect.ValueOf(val)}
}
func (any *arrayAny) ValueType() ValueType {
return Array
}
func (any *arrayAny) Parse() *Iterator {
return nil
}
func (any *arrayAny) LastError() error {
return any.err
}
func (any *arrayAny) ToBool() bool {
return any.val.Len() != 0
}
func (any *arrayAny) ToInt() int {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToInt32() int32 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToInt64() int64 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToUint() uint {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToUint32() uint32 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToUint64() uint64 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToFloat32() float32 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToFloat64() float64 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToString() string {
if len(any.cache) == 0 {
// nothing has been parsed yet
str, err := MarshalToString(any.val.Interface())
any.err = err
return str
} else {
any.fillCache()
str, err := MarshalToString(any.cache)
any.err = err
return str
}
}
func (any *arrayAny) fillCacheUntil(idx int) Any {
if idx < len(any.cache) {
return any.cache[idx]
} else {
for i := len(any.cache); i < any.val.Len(); i++ {
element := Wrap(any.val.Index(i).Interface())
any.cache = append(any.cache, element)
if idx == i {
return element
}
}
return nil
}
}
func (any *arrayAny) fillCache() {
any.cache = make([]Any, any.val.Len())
for i := 0; i < any.val.Len(); i++ {
any.cache[i] = Wrap(any.val.Index(i).Interface())
}
}
func (any *arrayAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
var element Any
switch firstPath := path[0].(type) {
case int:
element = any.fillCacheUntil(firstPath)
if element == nil {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
case int32:
if '*' == firstPath {
any.fillCache()
mappedAll := make([]Any, 0, len(any.cache))
for _, element := range any.cache {
mapped := element.Get(path[1:]...)
if mapped.ValueType() != Invalid {
mappedAll = append(mappedAll, mapped)
}
}
return wrapArray(mappedAll)
} else {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", path[0], any.cache)}
}
default:
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", path[0], any.cache)}
}
if len(path) == 1 {
return element
} else {
return element.Get(path[1:]...)
}
}
func (any *arrayAny) Size() int {
any.fillCache()
return len(any.cache)
}
func (any *arrayAny) IterateArray() (func() (Any, bool), bool) {
if any.val.Len() == 0 {
return nil, false
}
i := 0
return func() (Any, bool) {
if i == any.val.Len() {
return nil, false
}
if i == len(any.cache) {
any.cache = append(any.cache, Wrap(any.val.Index(i).Interface()))
}
val := any.cache[i]
i++
return val, i != any.val.Len()
}, true
}
func (any *arrayAny) GetArray() []Any {
any.fillCache()
return any.cache
}
func (any *arrayAny) SetArray(newList []Any) bool {
any.fillCache()
any.cache = newList
return true
}
func (any *arrayAny) WriteTo(stream *Stream) {
if len(any.cache) == 0 {
// nothing has been parsed yet
stream.WriteVal(any.val)
} else {
any.fillCache()
stream.WriteVal(any.cache)
}
}
func (any *arrayAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}


@ -1,166 +0,0 @@
package jsoniter
import (
"io"
"unsafe"
"strconv"
)
type float64LazyAny struct {
baseAny
buf []byte
iter *Iterator
err error
cache float64
}
func (any *float64LazyAny) Parse() *Iterator {
iter := any.iter
if iter == nil {
iter = NewIterator()
}
iter.ResetBytes(any.buf)
return iter
}
func (any *float64LazyAny) ValueType() ValueType {
return Number
}
func (any *float64LazyAny) fillCache() {
if any.err != nil {
return
}
iter := any.Parse()
any.cache = iter.ReadFloat64()
if iter.Error != io.EOF {
iter.reportError("floatLazyAny", "there are bytes left")
}
any.err = iter.Error
}
func (any *float64LazyAny) LastError() error {
return any.err
}
func (any *float64LazyAny) ToBool() bool {
return any.ToFloat64() != 0
}
func (any *float64LazyAny) ToInt() int {
any.fillCache()
return int(any.cache)
}
func (any *float64LazyAny) ToInt32() int32 {
any.fillCache()
return int32(any.cache)
}
func (any *float64LazyAny) ToInt64() int64 {
any.fillCache()
return int64(any.cache)
}
func (any *float64LazyAny) ToUint() uint {
any.fillCache()
return uint(any.cache)
}
func (any *float64LazyAny) ToUint32() uint32 {
any.fillCache()
return uint32(any.cache)
}
func (any *float64LazyAny) ToUint64() uint64 {
any.fillCache()
return uint64(any.cache)
}
func (any *float64LazyAny) ToFloat32() float32 {
any.fillCache()
return float32(any.cache)
}
func (any *float64LazyAny) ToFloat64() float64 {
any.fillCache()
return any.cache
}
func (any *float64LazyAny) ToString() string {
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *float64LazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *float64LazyAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}
type floatAny struct {
baseAny
val float64
}
func (any *floatAny) Parse() *Iterator {
return nil
}
func (any *floatAny) ValueType() ValueType {
return Number
}
func (any *floatAny) LastError() error {
return nil
}
func (any *floatAny) ToBool() bool {
return any.ToFloat64() != 0
}
func (any *floatAny) ToInt() int {
return int(any.val)
}
func (any *floatAny) ToInt32() int32 {
return int32(any.val)
}
func (any *floatAny) ToInt64() int64 {
return int64(any.val)
}
func (any *floatAny) ToUint() uint {
return uint(any.val)
}
func (any *floatAny) ToUint32() uint32 {
return uint32(any.val)
}
func (any *floatAny) ToUint64() uint64 {
return uint64(any.val)
}
func (any *floatAny) ToFloat32() float32 {
return float32(any.val)
}
func (any *floatAny) ToFloat64() float64 {
return any.val
}
func (any *floatAny) ToString() string {
return strconv.FormatFloat(any.val, 'E', -1, 64)
}
func (any *floatAny) WriteTo(stream *Stream) {
stream.WriteFloat64(any.val)
}
func (any *floatAny) GetInterface() interface{} {
return any.val
}


@ -1,166 +0,0 @@
package jsoniter
import (
"io"
"unsafe"
"strconv"
)
type int64LazyAny struct {
baseAny
buf []byte
iter *Iterator
err error
cache int64
}
func (any *int64LazyAny) ValueType() ValueType {
return Number
}
func (any *int64LazyAny) Parse() *Iterator {
iter := any.iter
if iter == nil {
iter = NewIterator()
}
iter.ResetBytes(any.buf)
return iter
}
func (any *int64LazyAny) fillCache() {
if any.err != nil {
return
}
iter := any.Parse()
any.cache = iter.ReadInt64()
if iter.Error != io.EOF {
iter.reportError("intLazyAny", "there are bytes left")
}
any.err = iter.Error
}
func (any *int64LazyAny) LastError() error {
return any.err
}
func (any *int64LazyAny) ToBool() bool {
return any.ToInt64() != 0
}
func (any *int64LazyAny) ToInt() int {
any.fillCache()
return int(any.cache)
}
func (any *int64LazyAny) ToInt32() int32 {
any.fillCache()
return int32(any.cache)
}
func (any *int64LazyAny) ToInt64() int64 {
any.fillCache()
return any.cache
}
func (any *int64LazyAny) ToUint() uint {
any.fillCache()
return uint(any.cache)
}
func (any *int64LazyAny) ToUint32() uint32 {
any.fillCache()
return uint32(any.cache)
}
func (any *int64LazyAny) ToUint64() uint64 {
any.fillCache()
return uint64(any.cache)
}
func (any *int64LazyAny) ToFloat32() float32 {
any.fillCache()
return float32(any.cache)
}
func (any *int64LazyAny) ToFloat64() float64 {
any.fillCache()
return float64(any.cache)
}
func (any *int64LazyAny) ToString() string {
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *int64LazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *int64LazyAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}
type int64Any struct {
baseAny
val int64
}
func (any *int64Any) LastError() error {
return nil
}
func (any *int64Any) ValueType() ValueType {
return Number
}
func (any *int64Any) ToBool() bool {
return any.val != 0
}
func (any *int64Any) ToInt() int {
return int(any.val)
}
func (any *int64Any) ToInt32() int32 {
return int32(any.val)
}
func (any *int64Any) ToInt64() int64 {
return any.val
}
func (any *int64Any) ToUint() uint {
return uint(any.val)
}
func (any *int64Any) ToUint32() uint32 {
return uint32(any.val)
}
func (any *int64Any) ToUint64() uint64 {
return uint64(any.val)
}
func (any *int64Any) ToFloat32() float32 {
return float32(any.val)
}
func (any *int64Any) ToFloat64() float64 {
return float64(any.val)
}
func (any *int64Any) ToString() string {
return strconv.FormatInt(any.val, 10)
}
func (any *int64Any) WriteTo(stream *Stream) {
stream.WriteInt64(any.val)
}
func (any *int64Any) Parse() *Iterator {
return nil
}
func (any *int64Any) GetInterface() interface{} {
return any.val
}


@ -1,847 +0,0 @@
package jsoniter
import (
"unsafe"
"fmt"
"reflect"
)
type objectLazyAny struct {
baseAny
buf []byte
iter *Iterator
err error
cache map[string]Any
remaining []byte
}
func (any *objectLazyAny) ValueType() ValueType {
return Object
}
func (any *objectLazyAny) Parse() *Iterator {
iter := any.iter
if iter == nil {
iter = NewIterator()
any.iter = iter
}
iter.ResetBytes(any.remaining)
return iter
}
func (any *objectLazyAny) fillCacheUntil(target string) Any {
if any.remaining == nil {
return any.cache[target]
}
if any.cache == nil {
any.cache = map[string]Any{}
}
val := any.cache[target]
if val != nil {
return val
}
iter := any.Parse()
if len(any.remaining) == len(any.buf) {
iter.head++
c := iter.nextToken()
if c != '}' {
iter.unreadByte()
k := string(iter.readObjectFieldAsBytes())
v := iter.readAny(iter)
any.cache[k] = v
if target == k {
any.remaining = iter.buf[iter.head:]
any.err = iter.Error
return v
}
} else {
any.remaining = nil
any.err = iter.Error
return nil
}
}
for iter.nextToken() == ',' {
k := string(iter.readObjectFieldAsBytes())
v := iter.readAny(iter)
any.cache[k] = v
if target == k {
any.remaining = iter.buf[iter.head:]
any.err = iter.Error
return v
}
}
any.remaining = nil
any.err = iter.Error
return nil
}
func (any *objectLazyAny) fillCache() {
if any.remaining == nil {
return
}
if any.cache == nil {
any.cache = map[string]Any{}
}
iter := any.Parse()
if len(any.remaining) == len(any.buf) {
iter.head++
c := iter.nextToken()
if c != '}' {
iter.unreadByte()
k := string(iter.readObjectFieldAsBytes())
v := iter.readAny(iter)
any.cache[k] = v
} else {
any.remaining = nil
any.err = iter.Error
return
}
}
for iter.nextToken() == ',' {
k := string(iter.readObjectFieldAsBytes())
v := iter.readAny(iter)
any.cache[k] = v
}
any.remaining = nil
any.err = iter.Error
return
}
func (any *objectLazyAny) LastError() error {
return any.err
}
func (any *objectLazyAny) ToBool() bool {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
return len(any.cache) != 0
}
func (any *objectLazyAny) ToInt() int {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *objectLazyAny) ToInt32() int32 {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *objectLazyAny) ToInt64() int64 {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *objectLazyAny) ToUint() uint {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *objectLazyAny) ToUint32() uint32 {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *objectLazyAny) ToUint64() uint64 {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *objectLazyAny) ToFloat32() float32 {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *objectLazyAny) ToFloat64() float64 {
if any.cache == nil {
any.IterateObject() // trigger first value read
}
if len(any.cache) == 0 {
return 0
}
return 1
}
func (any *objectLazyAny) ToString() string {
if len(any.remaining) == len(any.buf) {
// nothing has been parsed yet
return *(*string)(unsafe.Pointer(&any.buf))
} else {
any.fillCache()
str, err := MarshalToString(any.cache)
any.err = err
return str
}
}
func (any *objectLazyAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
var element Any
switch firstPath := path[0].(type) {
case string:
element = any.fillCacheUntil(firstPath)
if element == nil {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
case int32:
if '*' == firstPath {
any.fillCache()
mappedAll := map[string]Any{}
for key, value := range any.cache {
mapped := value.Get(path[1:]...)
if mapped.ValueType() != Invalid {
mappedAll[key] = mapped
}
}
return wrapMap(mappedAll)
} else {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
default:
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
if len(path) == 1 {
return element
} else {
return element.Get(path[1:]...)
}
}
func (any *objectLazyAny) Keys() []string {
any.fillCache()
keys := make([]string, 0, len(any.cache))
for key := range any.cache {
keys = append(keys, key)
}
return keys
}
func (any *objectLazyAny) Size() int {
any.fillCache()
return len(any.cache)
}
func (any *objectLazyAny) IterateObject() (func() (string, Any, bool), bool) {
if any.cache == nil {
any.cache = map[string]Any{}
}
remaining := any.remaining
if len(remaining) == len(any.buf) {
iter := any.Parse()
iter.head++
c := iter.nextToken()
if c != '}' {
iter.unreadByte()
k := string(iter.readObjectFieldAsBytes())
v := iter.readAny(iter)
any.cache[k] = v
remaining = iter.buf[iter.head:]
any.remaining = remaining
} else {
remaining = nil
any.remaining = nil
any.err = iter.Error
return nil, false
}
}
if len(any.cache) == 0 {
return nil, false
}
keys := make([]string, 0, len(any.cache))
values := make([]Any, 0, len(any.cache))
for key, value := range any.cache {
keys = append(keys, key)
values = append(values, value)
}
nextKey := keys[0]
nextValue := values[0]
i := 1
return func() (string, Any, bool) {
key := nextKey
value := nextValue
if i < len(keys) {
// read from cache
nextKey = keys[i]
nextValue = values[i]
i++
return key, value, true
} else {
// read from buffer
iter := any.iter
if iter == nil {
iter = NewIterator()
any.iter = iter
}
iter.ResetBytes(remaining)
c := iter.nextToken()
if c == ',' {
nextKey = string(iter.readObjectFieldAsBytes())
nextValue = iter.readAny(iter)
any.cache[nextKey] = nextValue
remaining = iter.buf[iter.head:]
any.remaining = remaining
any.err = iter.Error
return key, value, true
} else {
remaining = nil
any.remaining = nil
any.err = iter.Error
return key, value, false
}
}
}, true
}
func (any *objectLazyAny) GetObject() map[string]Any {
any.fillCache()
return any.cache
}
func (any *objectLazyAny) SetObject(val map[string]Any) bool {
any.fillCache()
any.cache = val
return true
}
func (any *objectLazyAny) WriteTo(stream *Stream) {
if len(any.remaining) == len(any.buf) {
// nothing has been parsed yet
stream.Write(any.buf)
} else {
any.fillCache()
stream.WriteVal(any.cache)
}
}
func (any *objectLazyAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}
type objectAny struct {
baseAny
err error
cache map[string]Any
val reflect.Value
}
func wrapStruct(val interface{}) *objectAny {
return &objectAny{baseAny{}, nil, nil, reflect.ValueOf(val)}
}
func (any *objectAny) ValueType() ValueType {
return Object
}
func (any *objectAny) Parse() *Iterator {
return nil
}
func (any *objectAny) fillCacheUntil(target string) Any {
if any.cache == nil {
any.cache = map[string]Any{}
}
element, found := any.cache[target]
if found {
return element
}
for i := len(any.cache); i < any.val.NumField(); i++ {
field := any.val.Field(i)
fieldName := any.val.Type().Field(i).Name
var element Any
if field.CanInterface() {
element = Wrap(field.Interface())
} else {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", fieldName, any.cache)}
}
any.cache[fieldName] = element
if fieldName == target {
return element
}
}
return nil
}
func (any *objectAny) fillCache() {
if any.cache == nil {
any.cache = map[string]Any{}
}
if len(any.cache) == any.val.NumField() {
return
}
for i := 0; i < any.val.NumField(); i++ {
field := any.val.Field(i)
fieldName := any.val.Type().Field(i).Name
var element Any
if field.CanInterface() {
element = Wrap(field.Interface())
} else {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", fieldName, any.cache)}
}
any.cache[fieldName] = element
}
}
func (any *objectAny) LastError() error {
return any.err
}
func (any *objectAny) ToBool() bool {
return any.val.NumField() != 0
}
func (any *objectAny) ToInt() int {
if any.val.NumField() == 0 {
return 0
}
return 1
}
func (any *objectAny) ToInt32() int32 {
if any.val.NumField() == 0 {
return 0
}
return 1
}
func (any *objectAny) ToInt64() int64 {
if any.val.NumField() == 0 {
return 0
}
return 1
}
func (any *objectAny) ToUint() uint {
if any.val.NumField() == 0 {
return 0
}
return 1
}
func (any *objectAny) ToUint32() uint32 {
if any.val.NumField() == 0 {
return 0
}
return 1
}
func (any *objectAny) ToUint64() uint64 {
if any.val.NumField() == 0 {
return 0
}
return 1
}
func (any *objectAny) ToFloat32() float32 {
if any.val.NumField() == 0 {
return 0
}
return 1
}
func (any *objectAny) ToFloat64() float64 {
if any.val.NumField() == 0 {
return 0
}
return 1
}
func (any *objectAny) ToString() string {
if len(any.cache) == 0 {
str, err := MarshalToString(any.val.Interface())
any.err = err
return str
} else {
any.fillCache()
str, err := MarshalToString(any.cache)
any.err = err
return str
}
}
func (any *objectAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
var element Any
switch firstPath := path[0].(type) {
case string:
element = any.fillCacheUntil(firstPath)
if element == nil {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
case int32:
if '*' == firstPath {
any.fillCache()
mappedAll := map[string]Any{}
for key, value := range any.cache {
mapped := value.Get(path[1:]...)
if mapped.ValueType() != Invalid {
mappedAll[key] = mapped
}
}
return wrapMap(mappedAll)
} else {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
default:
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
if len(path) == 1 {
return element
} else {
return element.Get(path[1:]...)
}
}
func (any *objectAny) Keys() []string {
any.fillCache()
keys := make([]string, 0, len(any.cache))
for key := range any.cache {
keys = append(keys, key)
}
return keys
}
func (any *objectAny) Size() int {
any.fillCache()
return len(any.cache)
}
func (any *objectAny) IterateObject() (func() (string, Any, bool), bool) {
if any.cache == nil {
any.cache = map[string]Any{}
}
if any.val.NumField() == 0 {
return nil, false
}
cacheKeys := make([]string, len(any.cache))
i := 0
for key := range any.cache {
cacheKeys[i] = key
i++
}
i = 0
return func() (string, Any, bool) {
if i == any.val.NumField() {
return "", nil, false
}
var fieldName string
var fieldValueAsAny Any
if i == len(cacheKeys) {
fieldName = any.val.Type().Field(i).Name
cacheKeys = append(cacheKeys, fieldName)
fieldValue := any.val.Field(i)
if fieldValue.CanInterface() {
fieldValueAsAny = Wrap(fieldValue.Interface())
any.cache[fieldName] = fieldValueAsAny
} else {
fieldValueAsAny = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", fieldName, any.cache)}
any.cache[fieldName] = fieldValueAsAny
}
} else {
fieldName = cacheKeys[i]
fieldValueAsAny = any.cache[fieldName]
}
i++
return fieldName, fieldValueAsAny, i != any.val.NumField()
}, true
}
func (any *objectAny) GetObject() map[string]Any {
any.fillCache()
return any.cache
}
func (any *objectAny) SetObject(val map[string]Any) bool {
any.fillCache()
any.cache = val
return true
}
func (any *objectAny) WriteTo(stream *Stream) {
if len(any.cache) == 0 {
// nothing has been parsed yet
stream.WriteVal(any.val)
} else {
any.fillCache()
stream.WriteVal(any.cache)
}
}
func (any *objectAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}
type mapAny struct {
baseAny
err error
cache map[string]Any
val reflect.Value
}
func wrapMap(val interface{}) *mapAny {
return &mapAny{baseAny{}, nil, nil, reflect.ValueOf(val)}
}
func (any *mapAny) ValueType() ValueType {
return Object
}
func (any *mapAny) Parse() *Iterator {
return nil
}
func (any *mapAny) fillCacheUntil(target string) Any {
if any.cache == nil {
any.cache = map[string]Any{}
}
element, found := any.cache[target]
if found {
return element
}
for _, key := range any.val.MapKeys() {
keyAsStr := key.String()
_, found := any.cache[keyAsStr]
if found {
continue
}
element := Wrap(any.val.MapIndex(key).Interface())
any.cache[keyAsStr] = element
if keyAsStr == target {
return element
}
}
return nil
}
func (any *mapAny) fillCache() {
if any.cache == nil {
any.cache = map[string]Any{}
}
if len(any.cache) == any.val.Len() {
return
}
for _, key := range any.val.MapKeys() {
keyAsStr := key.String()
element := Wrap(any.val.MapIndex(key).Interface())
any.cache[keyAsStr] = element
}
}
func (any *mapAny) LastError() error {
return any.err
}
func (any *mapAny) ToBool() bool {
return any.val.Len() != 0
}
func (any *mapAny) ToInt() int {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *mapAny) ToInt32() int32 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *mapAny) ToInt64() int64 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *mapAny) ToUint() uint {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *mapAny) ToUint32() uint32 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *mapAny) ToUint64() uint64 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *mapAny) ToFloat32() float32 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *mapAny) ToFloat64() float64 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *mapAny) ToString() string {
if len(any.cache) == 0 {
str, err := MarshalToString(any.val.Interface())
any.err = err
return str
} else {
any.fillCache()
str, err := MarshalToString(any.cache)
any.err = err
return str
}
}
func (any *mapAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
var element Any
switch firstPath := path[0].(type) {
case string:
element = any.fillCacheUntil(firstPath)
if element == nil {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
case int32:
if '*' == firstPath {
any.fillCache()
mappedAll := map[string]Any{}
for key, value := range any.cache {
mapped := value.Get(path[1:]...)
if mapped.ValueType() != Invalid {
mappedAll[key] = mapped
}
}
return wrapMap(mappedAll)
} else {
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
default:
element = &invalidAny{baseAny{}, fmt.Errorf("%v not found in %v", firstPath, any.cache)}
}
if len(path) == 1 {
return element
} else {
return element.Get(path[1:]...)
}
}
func (any *mapAny) Keys() []string {
any.fillCache()
keys := make([]string, 0, len(any.cache))
for key := range any.cache {
keys = append(keys, key)
}
return keys
}
func (any *mapAny) Size() int {
any.fillCache()
return len(any.cache)
}
func (any *mapAny) IterateObject() (func() (string, Any, bool), bool) {
any.fillCache()
if len(any.cache) == 0 {
return nil, false
}
keys := make([]string, len(any.cache))
values := make([]Any, len(any.cache))
i := 0
for k, v := range any.cache {
keys[i] = k
values[i] = v
i++
}
i = 0
return func() (string, Any, bool) {
if i == len(keys) {
return "", nil, false
}
k := keys[i]
v := values[i]
i++
return k, v, i != len(keys)
}, true
}
func (any *mapAny) GetObject() map[string]Any {
any.fillCache()
return any.cache
}
func (any *mapAny) SetObject(val map[string]Any) bool {
any.fillCache()
any.cache = val
return true
}
func (any *mapAny) WriteTo(stream *Stream) {
if len(any.cache) == 0 {
// nothing has been parsed yet
stream.WriteVal(any.val)
} else {
any.fillCache()
stream.WriteVal(any.cache)
}
}
func (any *mapAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}
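A brief sketch of the wrapped (non-lazy) side shown above: Wrap turns an in-memory Go value into an Any, and mapAny/objectAny then serve Get and Size from a reflect-backed cache (the map literal is illustrative).
package main

import (
    "fmt"

    "github.com/json-iterator/go"
)

func main() {
    root := jsoniter.Wrap(map[string]interface{}{"a": 1, "b": 2}) // wrapped as a mapAny
    fmt.Println(root.Get("a").ToInt()) // 1
    fmt.Println(root.Size())           // 2
}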


@ -1,231 +0,0 @@
package jsoniter
import (
"io"
"strconv"
)
type stringLazyAny struct {
baseAny
buf []byte
iter *Iterator
err error
cache string
}
func (any *stringLazyAny) ValueType() ValueType {
return String
}
func (any *stringLazyAny) Parse() *Iterator {
iter := any.iter
if iter == nil {
iter = NewIterator()
any.iter = iter
}
iter.ResetBytes(any.buf)
return iter
}
func (any *stringLazyAny) fillCache() {
if any.err != nil {
return
}
iter := any.Parse()
any.cache = iter.ReadString()
if iter.Error != io.EOF {
iter.reportError("stringLazyAny", "there are bytes left")
}
any.err = iter.Error
}
func (any *stringLazyAny) LastError() error {
return any.err
}
func (any *stringLazyAny) ToBool() bool {
str := any.ToString()
if str == "false" {
return false
}
for _, c := range str {
switch c {
case ' ', '\n', '\r', '\t':
default:
return true
}
}
return false
}
func (any *stringLazyAny) ToInt() int {
iter := any.Parse()
iter.head++
val := iter.ReadInt()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToInt32() int32 {
iter := any.Parse()
iter.head++
val := iter.ReadInt32()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToInt64() int64 {
iter := any.Parse()
iter.head++
val := iter.ReadInt64()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToUint() uint {
iter := any.Parse()
iter.head++
val := iter.ReadUint()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToUint32() uint32 {
iter := any.Parse()
iter.head++
val := iter.ReadUint32()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToUint64() uint64 {
iter := any.Parse()
iter.head++
val := iter.ReadUint64()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToFloat32() float32 {
iter := any.Parse()
iter.head++
val := iter.ReadFloat32()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToFloat64() float64 {
iter := any.Parse()
iter.head++
val := iter.ReadFloat64()
any.err = iter.Error
return val
}
func (any *stringLazyAny) ToString() string {
any.fillCache()
return any.cache
}
func (any *stringLazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *stringLazyAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}
type stringAny struct{
baseAny
err error
val string
}
func (any *stringAny) Parse() *Iterator {
return nil
}
func (any *stringAny) ValueType() ValueType {
return String
}
func (any *stringAny) LastError() error {
return any.err
}
func (any *stringAny) ToBool() bool {
str := any.ToString()
if str == "false" {
return false
}
for _, c := range str {
switch c {
case ' ', '\n', '\r', '\t':
default:
return true
}
}
return false
}
func (any *stringAny) ToInt() int {
parsed, err := strconv.ParseInt(any.val, 10, 64)
any.err = err
return int(parsed)
}
func (any *stringAny) ToInt32() int32 {
parsed, err := strconv.ParseInt(any.val, 10, 32)
any.err = err
return int32(parsed)
}
func (any *stringAny) ToInt64() int64 {
parsed, err := strconv.ParseInt(any.val, 10, 64)
any.err = err
return parsed
}
func (any *stringAny) ToUint() uint {
parsed, err := strconv.ParseUint(any.val, 10, 64)
any.err = err
return uint(parsed)
}
func (any *stringAny) ToUint32() uint32 {
parsed, err := strconv.ParseUint(any.val, 10, 32)
any.err = err
return uint32(parsed)
}
func (any *stringAny) ToUint64() uint64 {
parsed, err := strconv.ParseUint(any.val, 10, 64)
any.err = err
return parsed
}
func (any *stringAny) ToFloat32() float32 {
parsed, err := strconv.ParseFloat(any.val, 32)
any.err = err
return float32(parsed)
}
func (any *stringAny) ToFloat64() float64 {
parsed, err := strconv.ParseFloat(any.val, 64)
any.err = err
return parsed
}
func (any *stringAny) ToString() string {
return any.val
}
func (any *stringAny) WriteTo(stream *Stream) {
stream.WriteString(any.val)
}
func (any *stringAny) GetInterface() interface{} {
return any.val
}
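Both lazy and eager string Any types above follow the same idea: keep the raw input, do the real parsing only when a value is first requested, and remember the result together with any error. A standalone sketch of that fill-on-first-use caching pattern (hypothetical type, not library code):

package main

import (
    "fmt"
    "strconv"
)

// lazyInt keeps the raw text and only parses it the first time the value is
// asked for, caching both the result and any error, like fillCache above.
type lazyInt struct {
    raw    string
    parsed bool
    cache  int64
    err    error
}

func (l *lazyInt) fill() {
    if l.parsed {
        return
    }
    l.parsed = true
    l.cache, l.err = strconv.ParseInt(l.raw, 10, 64)
}

func (l *lazyInt) Value() (int64, error) {
    l.fill()
    return l.cache, l.err
}

func main() {
    v := &lazyInt{raw: "42"}
    n, err := v.Value() // parsed here, on first use
    fmt.Println(n, err)
    n, err = v.Value() // served from the cache
    fmt.Println(n, err)
}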

@@ -1,167 +0,0 @@
package jsoniter
import (
"strconv"
"unsafe"
"io"
)
type uint64LazyAny struct {
baseAny
buf []byte
iter *Iterator
err error
cache uint64
}
func (any *uint64LazyAny) ValueType() ValueType {
return Number
}
func (any *uint64LazyAny) Parse() *Iterator {
iter := any.iter
if iter == nil {
iter = NewIterator()
any.iter = iter
}
iter.ResetBytes(any.buf)
return iter
}
func (any *uint64LazyAny) fillCache() {
if any.err != nil {
return
}
iter := any.Parse()
any.cache = iter.ReadUint64()
if iter.Error != io.EOF {
iter.reportError("intLazyAny", "there are bytes left")
}
any.err = iter.Error
}
func (any *uint64LazyAny) LastError() error {
return any.err
}
func (any *uint64LazyAny) ToBool() bool {
return any.ToInt64() != 0
}
func (any *uint64LazyAny) ToInt() int {
any.fillCache()
return int(any.cache)
}
func (any *uint64LazyAny) ToInt32() int32 {
any.fillCache()
return int32(any.cache)
}
func (any *uint64LazyAny) ToInt64() int64 {
any.fillCache()
return int64(any.cache)
}
func (any *uint64LazyAny) ToUint() uint {
any.fillCache()
return uint(any.cache)
}
func (any *uint64LazyAny) ToUint32() uint32 {
any.fillCache()
return uint32(any.cache)
}
func (any *uint64LazyAny) ToUint64() uint64 {
any.fillCache()
return any.cache
}
func (any *uint64LazyAny) ToFloat32() float32 {
any.fillCache()
return float32(any.cache)
}
func (any *uint64LazyAny) ToFloat64() float64 {
any.fillCache()
return float64(any.cache)
}
func (any *uint64LazyAny) ToString() string {
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *uint64LazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *uint64LazyAny) GetInterface() interface{} {
any.fillCache()
return any.cache
}
type uint64Any struct {
baseAny
val uint64
}
func (any *uint64Any) LastError() error {
return nil
}
func (any *uint64Any) ValueType() ValueType {
return Number
}
func (any *uint64Any) ToBool() bool {
return any.val != 0
}
func (any *uint64Any) ToInt() int {
return int(any.val)
}
func (any *uint64Any) ToInt32() int32 {
return int32(any.val)
}
func (any *uint64Any) ToInt64() int64 {
return int64(any.val)
}
func (any *uint64Any) ToUint() uint {
return uint(any.val)
}
func (any *uint64Any) ToUint32() uint32 {
return uint32(any.val)
}
func (any *uint64Any) ToUint64() uint64 {
return any.val
}
func (any *uint64Any) ToFloat32() float32 {
return float32(any.val)
}
func (any *uint64Any) ToFloat64() float64 {
return float64(any.val)
}
func (any *uint64Any) ToString() string {
return strconv.FormatUint(any.val, 10)
}
func (any *uint64Any) WriteTo(stream *Stream) {
stream.WriteUint64(any.val)
}
func (any *uint64Any) Parse() *Iterator {
return nil
}
func (any *uint64Any) GetInterface() interface{} {
return any.val
}

@@ -1,279 +0,0 @@
package jsoniter
import (
"encoding/base64"
"fmt"
"io"
)
type ValueType int
const (
Invalid ValueType = iota
String
Number
Nil
Bool
Array
Object
)
var hexDigits []byte
var valueTypes []ValueType
func init() {
hexDigits = make([]byte, 256)
for i := 0; i < len(hexDigits); i++ {
hexDigits[i] = 255
}
for i := '0'; i <= '9'; i++ {
hexDigits[i] = byte(i - '0')
}
for i := 'a'; i <= 'f'; i++ {
hexDigits[i] = byte((i - 'a') + 10)
}
for i := 'A'; i <= 'F'; i++ {
hexDigits[i] = byte((i - 'A') + 10)
}
valueTypes = make([]ValueType, 256)
for i := 0; i < len(valueTypes); i++ {
valueTypes[i] = Invalid
}
valueTypes['"'] = String
valueTypes['-'] = Number
valueTypes['0'] = Number
valueTypes['1'] = Number
valueTypes['2'] = Number
valueTypes['3'] = Number
valueTypes['4'] = Number
valueTypes['5'] = Number
valueTypes['6'] = Number
valueTypes['7'] = Number
valueTypes['8'] = Number
valueTypes['9'] = Number
valueTypes['t'] = Bool
valueTypes['f'] = Bool
valueTypes['n'] = Nil
valueTypes['['] = Array
valueTypes['{'] = Object
}
// Iterator is a fast and flexible JSON parser
type Iterator struct {
reader io.Reader
buf []byte
head int
tail int
Error error
}
// NewIterator creates an empty Iterator instance
func NewIterator() *Iterator {
return &Iterator{
reader: nil,
buf: nil,
head: 0,
tail: 0,
}
}
// Parse parses a json buffer in io.Reader into an Iterator instance
func Parse(reader io.Reader, bufSize int) *Iterator {
return &Iterator{
reader: reader,
buf: make([]byte, bufSize),
head: 0,
tail: 0,
}
}
// ParseBytes parses a json byte slice into an Iterator instance
func ParseBytes(input []byte) *Iterator {
return &Iterator{
reader: nil,
buf: input,
head: 0,
tail: len(input),
}
}
// ParseString parses a json string into an Iterator instance
func ParseString(input string) *Iterator {
return ParseBytes([]byte(input))
}
// Reset can reset an Iterator instance for another json buffer in io.Reader
func (iter *Iterator) Reset(reader io.Reader) *Iterator {
iter.reader = reader
iter.head = 0
iter.tail = 0
return iter
}
// ResetBytes can reset an Iterator instance for another json byte slice
func (iter *Iterator) ResetBytes(input []byte) *Iterator {
iter.reader = nil
iter.Error = nil
iter.buf = input
iter.head = 0
iter.tail = len(input)
return iter
}
// WhatIsNext gets the ValueType of the next json value without consuming it
func (iter *Iterator) WhatIsNext() ValueType {
valueType := valueTypes[iter.nextToken()]
iter.unreadByte()
return valueType
}
func (iter *Iterator) skipWhitespacesWithoutLoadMore() bool {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case ' ', '\n', '\t', '\r':
continue
}
iter.head = i
return false
}
return true
}
func (iter *Iterator) nextToken() byte {
// a variation of skip whitespaces, returning the next non-whitespace token
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case ' ', '\n', '\t', '\r':
continue
}
iter.head = i + 1
return c
}
if !iter.loadMore() {
return 0
}
}
}
func (iter *Iterator) reportError(operation string, msg string) {
if iter.Error != nil {
if iter.Error != io.EOF {
return
}
}
peekStart := iter.head - 10
if peekStart < 0 {
peekStart = 0
}
iter.Error = fmt.Errorf("%s: %s, parsing %v ...%s... at %s", operation, msg, iter.head,
string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
}
// CurrentBuffer gets current buffer as string
func (iter *Iterator) CurrentBuffer() string {
peekStart := iter.head - 10
if peekStart < 0 {
peekStart = 0
}
return fmt.Sprintf("parsing %v ...|%s|... at %s", iter.head,
string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
}
func (iter *Iterator) readByte() (ret byte) {
if iter.head == iter.tail {
if iter.loadMore() {
ret = iter.buf[iter.head]
iter.head++
return ret
}
return 0
}
ret = iter.buf[iter.head]
iter.head++
return ret
}
func (iter *Iterator) loadMore() bool {
if iter.reader == nil {
if iter.Error == nil {
iter.Error = io.EOF
}
return false
}
for {
n, err := iter.reader.Read(iter.buf)
if n == 0 {
if err != nil {
if iter.Error == nil {
iter.Error = err
}
return false
}
} else {
iter.head = 0
iter.tail = n
return true
}
}
}
func (iter *Iterator) unreadByte() {
if iter.head == 0 {
iter.reportError("unreadByte", "unread too many bytes")
return
}
iter.head--
return
}
func (iter *Iterator) Read() interface{} {
valueType := iter.WhatIsNext()
switch valueType {
case String:
return iter.ReadString()
case Number:
return iter.ReadFloat64()
case Nil:
iter.skipFixedBytes(4) // null
return nil
case Bool:
return iter.ReadBool()
case Array:
arr := []interface{}{}
iter.ReadArrayCB(func(iter *Iterator) bool {
arr = append(arr, iter.Read())
return true
})
return arr
case Object:
obj := map[string]interface{}{}
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
obj[field] = iter.Read()
return true
})
return obj
default:
iter.reportError("Read", fmt.Sprintf("unexpected value type: %v", valueType))
return nil
}
}
// ReadBase64 reads a json object as Base64 in byte slice
func (iter *Iterator) ReadBase64() (ret []byte) {
src := iter.ReadStringAsSlice()
if iter.Error != nil {
return
}
b64 := base64.StdEncoding
ret = make([]byte, b64.DecodedLen(len(src)))
n, err := b64.Decode(ret, src)
if err != nil {
iter.Error = err
return
}
return ret[:n]
}
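The init function above lets WhatIsNext classify a value with a single table lookup on its first non-whitespace byte; only bytes that can legally start a JSON value map to something other than Invalid. A self-contained sketch of that byte-indexed dispatch table with locally defined names (illustrative only):

package main

import "fmt"

type kind int

const (
    invalid kind = iota
    strKind
    number
    null
    boolean
    array
    object
)

var kindNames = []string{"invalid", "string", "number", "null", "bool", "array", "object"}

// firstByteKind maps the first non-whitespace byte of a JSON value to its
// kind, the same trick the valueTypes table above uses.
var firstByteKind [256]kind

func init() {
    firstByteKind['"'] = strKind
    for _, b := range []byte("-0123456789") {
        firstByteKind[b] = number
    }
    firstByteKind['t'], firstByteKind['f'] = boolean, boolean
    firstByteKind['n'] = null
    firstByteKind['['] = array
    firstByteKind['{'] = object
}

func main() {
    for _, in := range []string{`"hi"`, `-12`, `true`, `null`, `[1]`, `{"a":1}`} {
        fmt.Println(in, "->", kindNames[firstByteKind[in[0]]])
    }
}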

@@ -1,51 +0,0 @@
package jsoniter
func (iter *Iterator) ReadArray() (ret bool) {
c := iter.nextToken()
switch c {
case 'n':
iter.skipFixedBytes(3)
return false // null
case '[':
c = iter.nextToken()
if c != ']' {
iter.unreadByte()
return true
}
return false
case ']':
return false
case ',':
return true
default:
iter.reportError("ReadArray", "expect [ or , or ] or n, but found: " + string([]byte{c}))
return
}
}
func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
c := iter.nextToken()
if c == '[' {
c = iter.nextToken()
if c != ']' {
iter.unreadByte()
if !callback(iter) {
return false
}
for iter.nextToken() == ',' {
if !callback(iter) {
return false
}
}
return true
}
return true
}
if c == 'n' {
iter.skipFixedBytes(3)
return true // null
}
iter.reportError("ReadArrayCB", "expect [ or n, but found: " + string([]byte{c}))
return false
}

@@ -1,209 +0,0 @@
package jsoniter
import (
"io"
"strconv"
"unsafe"
)
var floatDigits []int8
const invalidCharForNumber = int8(-1)
const endOfNumber = int8(-2)
const dotInNumber = int8(-3)
func init() {
floatDigits = make([]int8, 256)
for i := 0; i < len(floatDigits); i++ {
floatDigits[i] = invalidCharForNumber
}
for i := int8('0'); i <= int8('9'); i++ {
floatDigits[i] = i - int8('0')
}
floatDigits[','] = endOfNumber;
floatDigits[']'] = endOfNumber;
floatDigits['}'] = endOfNumber;
floatDigits[' '] = endOfNumber;
floatDigits['.'] = dotInNumber;
}
func (iter *Iterator) ReadFloat32() (ret float32) {
c := iter.nextToken()
if c == '-' {
return -iter.readPositiveFloat32()
} else {
iter.unreadByte()
return iter.readPositiveFloat32()
}
}
func (iter *Iterator) readPositiveFloat32() (ret float32) {
value := uint64(0)
c := byte(' ')
i := iter.head
non_decimal_loop:
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c]
switch ind {
case invalidCharForNumber:
return iter.readFloat32SlowPath()
case endOfNumber:
iter.head = i
return float32(value)
case dotInNumber:
break non_decimal_loop
}
if value > uint64SafeToMultiple10 {
return iter.readFloat32SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind); // value = value * 10 + ind;
}
if c == '.' {
i++
decimalPlaces := 0;
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c];
switch ind {
case endOfNumber:
if decimalPlaces > 0 && decimalPlaces < len(POW10) {
iter.head = i
return float32(float64(value) / float64(POW10[decimalPlaces]))
}
// too many decimal places
return iter.readFloat32SlowPath()
case invalidCharForNumber:
fallthrough
case dotInNumber:
return iter.readFloat32SlowPath()
}
decimalPlaces++
if value > uint64SafeToMultiple10 {
return iter.readFloat32SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind)
}
}
return iter.readFloat32SlowPath()
}
func (iter *Iterator) readFloat32SlowPath() (ret float32) {
strBuf := [16]byte{}
str := strBuf[0:0]
load_loop:
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
str = append(str, c)
continue
default:
break load_loop
}
}
if !iter.loadMore() {
break
}
}
if iter.Error != nil && iter.Error != io.EOF {
return
}
val, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&str)), 32)
if err != nil {
iter.Error = err
return
}
return float32(val)
}
func (iter *Iterator) ReadFloat64() (ret float64) {
c := iter.nextToken()
if c == '-' {
return -iter.readPositiveFloat64()
} else {
iter.unreadByte()
return iter.readPositiveFloat64()
}
}
func (iter *Iterator) readPositiveFloat64() (ret float64) {
value := uint64(0)
c := byte(' ')
i := iter.head
non_decimal_loop:
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c]
switch ind {
case invalidCharForNumber:
return iter.readFloat64SlowPath()
case endOfNumber:
iter.head = i
return float64(value)
case dotInNumber:
break non_decimal_loop
}
if value > uint64SafeToMultiple10 {
return iter.readFloat64SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind); // value = value * 10 + ind;
}
if c == '.' {
i++
decimalPlaces := 0;
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c];
switch ind {
case endOfNumber:
if decimalPlaces > 0 && decimalPlaces < len(POW10) {
iter.head = i
return float64(value) / float64(POW10[decimalPlaces])
}
// too many decimal places
return iter.readFloat64SlowPath()
case invalidCharForNumber:
fallthrough
case dotInNumber:
return iter.readFloat64SlowPath()
}
decimalPlaces++
if value > uint64SafeToMultiple10 {
return iter.readFloat64SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind)
}
}
return iter.readFloat64SlowPath()
}
func (iter *Iterator) readFloat64SlowPath() (ret float64) {
strBuf := [16]byte{}
str := strBuf[0:0]
load_loop:
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
str = append(str, c)
continue
default:
break load_loop
}
}
if !iter.loadMore() {
break
}
}
if iter.Error != nil && iter.Error != io.EOF {
return
}
val, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&str)), 64)
if err != nil {
iter.Error = err
return
}
return val
}
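readPositiveFloat64 above accumulates the digits of the integer and fractional parts into one uint64 and divides once by a power of ten from POW10, handing anything longer or stranger to the strconv slow path. A standalone sketch of that accumulate-then-divide idea (simplified: positive plain decimals only, no exponents, no overflow fallback):

package main

import (
    "errors"
    "fmt"
)

var pow10 = [...]float64{1, 10, 100, 1000, 10000, 100000, 1000000}

// parseSimpleFloat handles plain "123.456"-style input the way the fast
// path does: build an integer from all digits, then divide once.
func parseSimpleFloat(s string) (float64, error) {
    var value uint64
    decimals := -1 // digits seen after the dot, -1 means no dot yet
    for i := 0; i < len(s); i++ {
        c := s[i]
        switch {
        case c == '.' && decimals < 0:
            decimals = 0
        case c >= '0' && c <= '9':
            value = value*10 + uint64(c-'0')
            if decimals >= 0 {
                decimals++
            }
        default:
            return 0, errors.New("unsupported character: " + string(c))
        }
    }
    if decimals <= 0 {
        return float64(value), nil
    }
    if decimals >= len(pow10) {
        return 0, errors.New("too many decimal places for the fast path")
    }
    return float64(value) / pow10[decimals], nil
}

func main() {
    fmt.Println(parseSimpleFloat("3.25"))   // 3.25 <nil>
    fmt.Println(parseSimpleFloat("123456")) // 123456 <nil>
}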

@@ -1,259 +0,0 @@
package jsoniter
import (
"strconv"
)
var intDigits []int8
const uint32SafeToMultiply10 = uint32(0xffffffff) / 10 - 1
const uint64SafeToMultiple10 = uint64(0xffffffffffffffff) / 10 - 1
const int64Max = uint64(0x7fffffffffffffff)
const int32Max = uint32(0x7fffffff)
const int16Max = uint32(0x7fff)
const uint16Max = uint32(0xffff)
const int8Max = uint32(0x7f)
const uint8Max = uint32(0xff)
func init() {
intDigits = make([]int8, 256)
for i := 0; i < len(intDigits); i++ {
intDigits[i] = invalidCharForNumber
}
for i := int8('0'); i <= int8('9'); i++ {
intDigits[i] = i - int8('0')
}
}
func (iter *Iterator) ReadUint() uint {
return uint(iter.ReadUint64())
}
func (iter *Iterator) ReadInt() int {
return int(iter.ReadInt64())
}
func (iter *Iterator) ReadInt8() (ret int8) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > int8Max + 1 {
iter.reportError("ReadInt8", "overflow: " + strconv.FormatInt(int64(val), 10))
return
}
return -int8(val)
} else {
val := iter.readUint32(c)
if val > int8Max {
iter.reportError("ReadInt8", "overflow: " + strconv.FormatInt(int64(val), 10))
return
}
return int8(val)
}
}
func (iter *Iterator) ReadUint8() (ret uint8) {
val := iter.readUint32(iter.nextToken())
if val > uint8Max {
iter.reportError("ReadUint8", "overflow: " + strconv.FormatInt(int64(val), 10))
return
}
return uint8(val)
}
func (iter *Iterator) ReadInt16() (ret int16) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > int16Max + 1 {
iter.reportError("ReadInt16", "overflow: " + strconv.FormatInt(int64(val), 10))
return
}
return -int16(val)
} else {
val := iter.readUint32(c)
if val > int16Max {
iter.reportError("ReadInt16", "overflow: " + strconv.FormatInt(int64(val), 10))
return
}
return int16(val)
}
}
func (iter *Iterator) ReadUint16() (ret uint16) {
val := iter.readUint32(iter.nextToken())
if val > uint16Max {
iter.reportError("ReadUint16", "overflow: " + strconv.FormatInt(int64(val), 10))
return
}
return uint16(val)
}
func (iter *Iterator) ReadInt32() (ret int32) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > int32Max + 1 {
iter.reportError("ReadInt32", "overflow: " + strconv.FormatInt(int64(val), 10))
return
}
return -int32(val)
} else {
val := iter.readUint32(c)
if val > int32Max {
iter.reportError("ReadInt32", "overflow: " + strconv.FormatInt(int64(val), 10))
return
}
return int32(val)
}
}
func (iter *Iterator) ReadUint32() (ret uint32) {
return iter.readUint32(iter.nextToken())
}
func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind := intDigits[c]
if ind == 0 {
return 0 // single zero
}
if ind == invalidCharForNumber {
iter.reportError("readUint32", "unexpected character: " + string([]byte{byte(ind)}))
return
}
value := uint32(ind)
if iter.tail - iter.head > 10 {
i := iter.head
ind2 := intDigits[iter.buf[i]]
if ind2 == invalidCharForNumber {
iter.head = i
return value
}
i++
ind3 := intDigits[iter.buf[i]]
if ind3 == invalidCharForNumber {
iter.head = i
return value * 10 + uint32(ind2)
}
//iter.head = i + 1
//value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
i++
ind4 := intDigits[iter.buf[i]]
if ind4 == invalidCharForNumber {
iter.head = i
return value * 100 + uint32(ind2) * 10 + uint32(ind3)
}
i++
ind5 := intDigits[iter.buf[i]]
if ind5 == invalidCharForNumber {
iter.head = i
return value * 1000 + uint32(ind2) * 100 + uint32(ind3) * 10 + uint32(ind4)
}
i++
ind6 := intDigits[iter.buf[i]]
if ind6 == invalidCharForNumber {
iter.head = i
return value * 10000 + uint32(ind2) * 1000 + uint32(ind3) * 100 + uint32(ind4) * 10 + uint32(ind5)
}
i++
ind7 := intDigits[iter.buf[i]]
if ind7 == invalidCharForNumber {
iter.head = i
return value * 100000 + uint32(ind2) * 10000 + uint32(ind3) * 1000 + uint32(ind4) * 100 + uint32(ind5) * 10 + uint32(ind6)
}
i++
ind8 := intDigits[iter.buf[i]]
if ind8 == invalidCharForNumber {
iter.head = i
return value * 1000000 + uint32(ind2) * 100000 + uint32(ind3) * 10000 + uint32(ind4) * 1000 + uint32(ind5) * 100 + uint32(ind6) * 10 + uint32(ind7)
}
i++
ind9 := intDigits[iter.buf[i]]
value = value * 10000000 + uint32(ind2) * 1000000 + uint32(ind3) * 100000 + uint32(ind4) * 10000 + uint32(ind5) * 1000 + uint32(ind6) * 100 + uint32(ind7) * 10 + uint32(ind8)
iter.head = i
if ind9 == invalidCharForNumber {
return value
}
}
for {
for i := iter.head; i < iter.tail; i++ {
ind = intDigits[iter.buf[i]]
if ind == invalidCharForNumber {
iter.head = i
return value
}
if value > uint32SafeToMultiply10 {
value2 := (value << 3) + (value << 1) + uint32(ind)
if value2 < value {
iter.reportError("readUint32", "overflow")
return
} else {
value = value2
continue
}
}
value = (value << 3) + (value << 1) + uint32(ind)
}
if (!iter.loadMore()) {
return value
}
}
}
func (iter *Iterator) ReadInt64() (ret int64) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint64(iter.readByte())
if val > int64Max + 1 {
iter.reportError("ReadInt64", "overflow: " + strconv.FormatUint(uint64(val), 10))
return
}
return -int64(val)
} else {
val := iter.readUint64(c)
if val > int64Max {
iter.reportError("ReadInt64", "overflow: " + strconv.FormatUint(uint64(val), 10))
return
}
return int64(val)
}
}
func (iter *Iterator) ReadUint64() uint64 {
return iter.readUint64(iter.nextToken())
}
func (iter *Iterator) readUint64(c byte) (ret uint64) {
ind := intDigits[c]
if ind == 0 {
return 0 // single zero
}
if ind == invalidCharForNumber {
iter.reportError("readUint64", "unexpected character: " + string([]byte{byte(ind)}))
return
}
value := uint64(ind)
for {
for i := iter.head; i < iter.tail; i++ {
ind = intDigits[iter.buf[i]]
if ind == invalidCharForNumber {
iter.head = i
return value
}
if value > uint64SafeToMultiple10 {
value2 := (value << 3) + (value << 1) + uint64(ind)
if value2 < value {
iter.reportError("readUint64", "overflow")
return
} else {
value = value2
continue
}
}
value = (value << 3) + (value << 1) + uint64(ind)
}
if (!iter.loadMore()) {
return value
}
}
}
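readUint64 above multiplies by ten with (value << 3) + (value << 1) and only starts watching for overflow once the value passes uint64SafeToMultiple10. The sketch below keeps the shift trick but uses a conventional pre-multiplication overflow check instead of the library's wraparound test (standalone, illustrative only):

package main

import (
    "errors"
    "fmt"
)

const maxUint64 = ^uint64(0)

// parseUint64 accumulates decimal digits with the same value*10+digit step
// the reader above uses (written there as shifts), guarding overflow before
// each multiplication.
func parseUint64(s string) (uint64, error) {
    if len(s) == 0 {
        return 0, errors.New("empty input")
    }
    var value uint64
    for i := 0; i < len(s); i++ {
        c := s[i]
        if c < '0' || c > '9' {
            return 0, errors.New("unexpected character")
        }
        d := uint64(c - '0')
        if value > maxUint64/10 || (value == maxUint64/10 && d > maxUint64%10) {
            return 0, errors.New("overflow")
        }
        value = (value << 3) + (value << 1) + d // value*10 + d
    }
    return value, nil
}

func main() {
    fmt.Println(parseUint64("18446744073709551615")) // max uint64
    fmt.Println(parseUint64("18446744073709551616")) // overflow
}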

@@ -1,143 +0,0 @@
package jsoniter
func (iter *Iterator) ReadObject() (ret string) {
c := iter.nextToken()
switch c {
case 'n':
iter.skipFixedBytes(3)
return "" // null
case '{':
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
return string(iter.readObjectFieldAsBytes())
}
if c == '}' {
return "" // end of object
}
iter.reportError("ReadObject", `expect " after {`)
return
case ',':
return string(iter.readObjectFieldAsBytes())
case '}':
return "" // end of object
default:
iter.reportError("ReadObject", `expect { or , or } or n`)
return
}
}
func (iter *Iterator) readFieldHash() int32 {
hash := int64(0x811c9dc5)
c := iter.nextToken()
if c == '"' {
for {
for i := iter.head; i < iter.tail; i++ {
// require ascii string and no escape
b := iter.buf[i]
if b == '"' {
iter.head = i+1
c = iter.nextToken()
if c != ':' {
iter.reportError("readFieldHash", `expect :, but found ` + string([]byte{c}))
}
return int32(hash)
}
hash ^= int64(b)
hash *= 0x1000193
}
if !iter.loadMore() {
iter.reportError("readFieldHash", `incomplete field name`)
return 0
}
}
}
iter.reportError("readFieldHash", `expect ", but found ` + string([]byte{c}))
return 0
}
func calcHash(str string) int32 {
hash := int64(0x811c9dc5)
for _, b := range str {
hash ^= int64(b)
hash *= 0x1000193
}
return int32(hash)
}
func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
c := iter.nextToken()
if c == '{' {
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field := string(iter.readObjectFieldAsBytes())
if !callback(iter, field) {
return false
}
for iter.nextToken() == ',' {
field := string(iter.readObjectFieldAsBytes())
if !callback(iter, field) {
return false
}
}
return true
}
if c == '}' {
return true
}
iter.reportError("ReadObjectCB", `expect " after }`)
return false
}
if c == 'n' {
iter.skipFixedBytes(3)
return true // null
}
iter.reportError("ReadObjectCB", `expect { or n`)
return false
}
func (iter *Iterator) readObjectStart() bool {
c := iter.nextToken()
if c == '{' {
c = iter.nextToken()
if c == '}' {
return false
}
iter.unreadByte()
return true
}
iter.reportError("readObjectStart", "expect { ")
return false
}
func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) {
str := iter.ReadStringAsSlice()
if iter.skipWhitespacesWithoutLoadMore() {
if ret == nil {
ret = make([]byte, len(str))
copy(ret, str)
}
if !iter.loadMore() {
return
}
}
if iter.buf[iter.head] != ':' {
iter.reportError("readObjectFieldAsBytes", "expect : after object field")
return
}
iter.head++
if iter.skipWhitespacesWithoutLoadMore() {
if ret == nil {
ret = make([]byte, len(str))
copy(ret, str)
}
if !iter.loadMore() {
return
}
}
if ret == nil {
return str
}
return ret
}
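readFieldHash and calcHash above hash field names with 32-bit FNV-1a (offset basis 0x811c9dc5, prime 0x1000193) so struct field lookup can compare hashes instead of strings. A standalone sketch of the same hash, cross-checked against hash/fnv from the standard library (illustrative only):

package main

import (
    "fmt"
    "hash/fnv"
)

// fnv1a32 mirrors calcHash above for ASCII input: XOR each byte in, then
// multiply by the 32-bit FNV prime.
func fnv1a32(s string) uint32 {
    hash := uint32(0x811c9dc5)
    for i := 0; i < len(s); i++ {
        hash ^= uint32(s[i])
        hash *= 0x1000193
    }
    return hash
}

func main() {
    h := fnv.New32a()
    h.Write([]byte("field_name"))
    fmt.Printf("manual: %#x stdlib: %#x\n", fnv1a32("field_name"), h.Sum32())
}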

@@ -1,217 +0,0 @@
package jsoniter
import (
"unicode/utf16"
"unsafe"
)
// TODO: avoid append
func (iter *Iterator) ReadString() (ret string) {
c := iter.nextToken()
if c == '"' {
copied := make([]byte, 32)
j := 0
fast_loop:
for {
i := iter.head
for ; i < iter.tail && j < len(copied); i++ {
c := iter.buf[i]
if c == '"' {
iter.head = i + 1
copied = copied[:j]
return *(*string)(unsafe.Pointer(&copied))
} else if c == '\\' {
iter.head = i
break fast_loop
}
copied[j] = c
j++
}
if i == iter.tail {
if iter.loadMore() {
i = iter.head
continue
} else {
iter.reportError("ReadString", "incomplete string")
return
}
}
iter.head = i
if j == len(copied) {
newBuf := make([]byte, len(copied) * 2)
copy(newBuf, copied)
copied = newBuf
}
}
return iter.readStringSlowPath(copied[:j])
}
iter.reportError("ReadString", `expects " or n`)
return
}
func (iter *Iterator) readStringSlowPath(str []byte) (ret string) {
var c byte
for iter.Error == nil {
c = iter.readByte()
if c == '"' {
return *(*string)(unsafe.Pointer(&str))
}
if c == '\\' {
c = iter.readByte()
switch c {
case 'u':
r := iter.readU4()
if utf16.IsSurrogate(r) {
c = iter.readByte()
if iter.Error != nil {
return
}
if c != '\\' {
iter.reportError("ReadString",
`expects \u after utf16 surrogate, but \ not found`)
return
}
c = iter.readByte()
if iter.Error != nil {
return
}
if c != 'u' {
iter.reportError("ReadString",
`expects \u after utf16 surrogate, but \u not found`)
return
}
r2 := iter.readU4()
if iter.Error != nil {
return
}
combined := utf16.DecodeRune(r, r2)
str = appendRune(str, combined)
} else {
str = appendRune(str, r)
}
case '"':
str = append(str, '"')
case '\\':
str = append(str, '\\')
case '/':
str = append(str, '/')
case 'b':
str = append(str, '\b')
case 'f':
str = append(str, '\f')
case 'n':
str = append(str, '\n')
case 'r':
str = append(str, '\r')
case 't':
str = append(str, '\t')
default:
iter.reportError("ReadString",
`invalid escape char after \`)
return
}
} else {
str = append(str, c)
}
}
return
}
func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
c := iter.nextToken()
if c == '"' {
for i := iter.head; i < iter.tail; i++ {
// require ascii string and no escape
// for: field name, base64, number
if iter.buf[i] == '"' {
// fast path: reuse the underlying buffer
ret = iter.buf[iter.head : i]
iter.head = i + 1
return ret
}
}
readLen := iter.tail - iter.head
copied := make([]byte, readLen, readLen * 2)
copy(copied, iter.buf[iter.head:iter.tail])
iter.head = iter.tail
for iter.Error == nil {
c := iter.readByte()
if c == '"' {
return copied
}
copied = append(copied, c)
}
return copied
}
iter.reportError("ReadString", `expects " or n`)
return
}
func (iter *Iterator) readU4() (ret rune) {
for i := 0; i < 4; i++ {
c := iter.readByte()
if iter.Error != nil {
return
}
if c >= '0' && c <= '9' {
ret = ret * 16 + rune(c - '0')
} else if c >= 'a' && c <= 'f' {
ret = ret * 16 + rune(c - 'a' + 10)
} else if c >= 'A' && c <= 'F' {
ret = ret * 16 + rune(c - 'A' + 10)
} else {
iter.reportError("readU4", "expects 0~9, a~f or A~F")
return
}
}
return ret
}
const (
t1 = 0x00 // 0000 0000
tx = 0x80 // 1000 0000
t2 = 0xC0 // 1100 0000
t3 = 0xE0 // 1110 0000
t4 = 0xF0 // 1111 0000
t5 = 0xF8 // 1111 1000
maskx = 0x3F // 0011 1111
mask2 = 0x1F // 0001 1111
mask3 = 0x0F // 0000 1111
mask4 = 0x07 // 0000 0111
rune1Max = 1 << 7 - 1
rune2Max = 1 << 11 - 1
rune3Max = 1 << 16 - 1
surrogateMin = 0xD800
surrogateMax = 0xDFFF
maxRune = '\U0010FFFF' // Maximum valid Unicode code point.
runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character"
)
func appendRune(p []byte, r rune) []byte {
// Negative values are erroneous. Making it unsigned addresses the problem.
switch i := uint32(r); {
case i <= rune1Max:
p = append(p, byte(r))
return p
case i <= rune2Max:
p = append(p, t2 | byte(r >> 6))
p = append(p, tx | byte(r) & maskx)
return p
case i > maxRune, surrogateMin <= i && i <= surrogateMax:
r = runeError
fallthrough
case i <= rune3Max:
p = append(p, t3 | byte(r >> 12))
p = append(p, tx | byte(r >> 6) & maskx)
p = append(p, tx | byte(r) & maskx)
return p
default:
p = append(p, t4 | byte(r >> 18))
p = append(p, tx | byte(r >> 12) & maskx)
p = append(p, tx | byte(r >> 6) & maskx)
p = append(p, tx | byte(r) & maskx)
return p
}
}
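appendRune above hand-encodes a rune into UTF-8, picking the 1- to 4-byte form from the rune's magnitude and substituting the replacement character for surrogates and out-of-range values, which is the same contract unicode/utf8 provides. A short standalone check of that behaviour (illustrative only):

package main

import (
    "fmt"
    "unicode/utf8"
)

func main() {
    // The same boundaries appendRune switches on: 1-, 2-, 3- and 4-byte
    // forms, plus a surrogate that must collapse to U+FFFD.
    for _, r := range []rune{'A', 'é', '世', '🙂', 0xD800} {
        buf := make([]byte, utf8.UTFMax)
        n := utf8.EncodeRune(buf, r) // stdlib also writes U+FFFD for invalid runes
        fmt.Printf("U+%04X -> % x\n", r, buf[:n])
    }
}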

@@ -1,477 +0,0 @@
package jsoniter
import (
"fmt"
"reflect"
"sync/atomic"
"unsafe"
"errors"
)
/*
Reflection on the type is used to create decoders, which are then cached.
Reflection on the value is avoided as much as possible, because reflect.Value itself allocates, with the following exceptions:
1. creating an instance of a new value, for example *int needs an int to be allocated
2. appending to a slice when the existing cap is not enough; allocation is done through reflect.MakeSlice
3. assigning into a map, where both key and value must be reflect.Value
For a simple struct binding, decoding is reflect.Value free and allocation free.
*/
type Decoder interface {
decode(ptr unsafe.Pointer, iter *Iterator)
}
type Encoder interface {
encode(ptr unsafe.Pointer, stream *Stream)
encodeInterface(val interface{}, stream *Stream)
}
func WriteToStream(val interface{}, stream *Stream, encoder Encoder) {
e := (*emptyInterface)(unsafe.Pointer(&val))
if reflect.TypeOf(val).Kind() == reflect.Ptr {
encoder.encode(unsafe.Pointer(&e.word), stream)
} else {
encoder.encode(e.word, stream)
}
}
type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator)
type EncoderFunc func(ptr unsafe.Pointer, stream *Stream)
type ExtensionFunc func(typ reflect.Type, field *reflect.StructField) ([]string, DecoderFunc)
type funcDecoder struct {
fun DecoderFunc
}
func (decoder *funcDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
decoder.fun(ptr, iter)
}
type funcEncoder struct {
fun EncoderFunc
}
func (encoder *funcEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
encoder.fun(ptr, stream)
}
func (encoder *funcEncoder) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
var DECODERS unsafe.Pointer
var ENCODERS unsafe.Pointer
var typeDecoders map[string]Decoder
var fieldDecoders map[string]Decoder
var typeEncoders map[string]Encoder
var fieldEncoders map[string]Encoder
var extensions []ExtensionFunc
var anyType reflect.Type
func init() {
typeDecoders = map[string]Decoder{}
fieldDecoders = map[string]Decoder{}
typeEncoders = map[string]Encoder{}
fieldEncoders = map[string]Encoder{}
extensions = []ExtensionFunc{}
atomic.StorePointer(&DECODERS, unsafe.Pointer(&map[reflect.Type]Decoder{}))
atomic.StorePointer(&ENCODERS, unsafe.Pointer(&map[reflect.Type]Encoder{}))
anyType = reflect.TypeOf((*Any)(nil)).Elem()
}
func addDecoderToCache(cacheKey reflect.Type, decoder Decoder) {
done := false
for !done {
ptr := atomic.LoadPointer(&DECODERS)
cache := *(*map[reflect.Type]Decoder)(ptr)
copied := map[reflect.Type]Decoder{}
for k, v := range cache {
copied[k] = v
}
copied[cacheKey] = decoder
done = atomic.CompareAndSwapPointer(&DECODERS, ptr, unsafe.Pointer(&copied))
}
}
func addEncoderToCache(cacheKey reflect.Type, encoder Encoder) {
done := false
for !done {
ptr := atomic.LoadPointer(&ENCODERS)
cache := *(*map[reflect.Type]Encoder)(ptr)
copied := map[reflect.Type]Encoder{}
for k, v := range cache {
copied[k] = v
}
copied[cacheKey] = encoder
done = atomic.CompareAndSwapPointer(&ENCODERS, ptr, unsafe.Pointer(&copied))
}
}
func getDecoderFromCache(cacheKey reflect.Type) Decoder {
ptr := atomic.LoadPointer(&DECODERS)
cache := *(*map[reflect.Type]Decoder)(ptr)
return cache[cacheKey]
}
func getEncoderFromCache(cacheKey reflect.Type) Encoder {
ptr := atomic.LoadPointer(&ENCODERS)
cache := *(*map[reflect.Type]Encoder)(ptr)
return cache[cacheKey]
}
// RegisterTypeDecoder can register a type for json object
func RegisterTypeDecoder(typ string, fun DecoderFunc) {
typeDecoders[typ] = &funcDecoder{fun}
}
// RegisterFieldDecoder can register a type for json field
func RegisterFieldDecoder(typ string, field string, fun DecoderFunc) {
fieldDecoders[fmt.Sprintf("%s/%s", typ, field)] = &funcDecoder{fun}
}
func RegisterTypeEncoder(typ string, fun EncoderFunc) {
typeEncoders[typ] = &funcEncoder{fun}
}
func RegisterFieldEncoder(typ string, field string, fun EncoderFunc) {
fieldEncoders[fmt.Sprintf("%s/%s", typ, field)] = &funcEncoder{fun}
}
// RegisterExtension can register a custom extension
func RegisterExtension(extension ExtensionFunc) {
extensions = append(extensions, extension)
}
// CleanDecoders cleans decoders registered
func CleanDecoders() {
typeDecoders = map[string]Decoder{}
fieldDecoders = map[string]Decoder{}
}
type optionalDecoder struct {
valueType reflect.Type
valueDecoder Decoder
}
func (decoder *optionalDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*unsafe.Pointer)(ptr)) = nil
} else {
if *((*unsafe.Pointer)(ptr)) == nil {
// pointer to null, we have to allocate memory to hold the value
value := reflect.New(decoder.valueType)
decoder.valueDecoder.decode(unsafe.Pointer(value.Pointer()), iter)
*((*uintptr)(ptr)) = value.Pointer()
} else {
// reuse existing instance
decoder.valueDecoder.decode(*((*unsafe.Pointer)(ptr)), iter)
}
}
}
type optionalEncoder struct {
valueType reflect.Type
valueEncoder Encoder
}
func (encoder *optionalEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
if *((*unsafe.Pointer)(ptr)) == nil {
stream.WriteNil()
} else {
encoder.valueEncoder.encode(*((*unsafe.Pointer)(ptr)), stream)
}
}
func (encoder *optionalEncoder) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type mapDecoder struct {
mapType reflect.Type
elemType reflect.Type
elemDecoder Decoder
mapInterface emptyInterface
}
func (decoder *mapDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
// dark magic to cast unsafe.Pointer back to interface{} using reflect.Type
mapInterface := decoder.mapInterface
mapInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
realVal := reflect.ValueOf(*realInterface).Elem()
for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
elem := reflect.New(decoder.elemType)
decoder.elemDecoder.decode(unsafe.Pointer(elem.Pointer()), iter)
// to put into map, we have to use reflection
realVal.SetMapIndex(reflect.ValueOf(string([]byte(field))), elem.Elem())
}
}
type mapEncoder struct {
mapType reflect.Type
elemType reflect.Type
elemEncoder Encoder
mapInterface emptyInterface
}
func (encoder *mapEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
mapInterface := encoder.mapInterface
mapInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
realVal := reflect.ValueOf(*realInterface)
stream.WriteObjectStart()
for i, key := range realVal.MapKeys() {
if i != 0 {
stream.WriteMore()
}
stream.WriteObjectField(key.String())
val := realVal.MapIndex(key).Interface()
encoder.elemEncoder.encodeInterface(val, stream)
}
stream.WriteObjectEnd()
}
func (encoder *mapEncoder) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type mapInterfaceEncoder struct {
mapType reflect.Type
elemType reflect.Type
elemEncoder Encoder
mapInterface emptyInterface
}
func (encoder *mapInterfaceEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
mapInterface := encoder.mapInterface
mapInterface.word = ptr
realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
realVal := reflect.ValueOf(*realInterface)
stream.WriteObjectStart()
for i, key := range realVal.MapKeys() {
if i != 0 {
stream.WriteMore()
}
stream.WriteObjectField(key.String())
val := realVal.MapIndex(key).Interface()
encoder.elemEncoder.encode(unsafe.Pointer(&val), stream)
}
stream.WriteObjectEnd()
}
func (encoder *mapInterfaceEncoder) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
// emptyInterface is the header for an interface{} value.
type emptyInterface struct {
typ *struct{}
word unsafe.Pointer
}
// ReadVal decodes the next JSON value from the Iterator into obj, similar to json.Unmarshal
func (iter *Iterator) ReadVal(obj interface{}) {
typ := reflect.TypeOf(obj)
cacheKey := typ.Elem()
cachedDecoder := getDecoderFromCache(cacheKey)
if cachedDecoder == nil {
decoder, err := decoderOfType(cacheKey)
if err != nil {
iter.Error = err
return
}
cachedDecoder = decoder
addDecoderToCache(cacheKey, decoder)
}
e := (*emptyInterface)(unsafe.Pointer(&obj))
cachedDecoder.decode(e.word, iter)
}
func (stream *Stream) WriteVal(val interface{}) {
if nil == val {
stream.WriteNil()
return
}
typ := reflect.TypeOf(val)
cacheKey := typ
cachedEncoder := getEncoderFromCache(cacheKey)
if cachedEncoder == nil {
encoder, err := encoderOfType(cacheKey)
if err != nil {
stream.Error = err
return
}
cachedEncoder = encoder
addEncoderToCache(cacheKey, encoder)
}
cachedEncoder.encodeInterface(val, stream)
}
type prefix string
func (p prefix) addToDecoder(decoder Decoder, err error) (Decoder, error) {
if err != nil {
return nil, fmt.Errorf("%s: %s", p, err.Error())
}
return decoder, err
}
func (p prefix) addToEncoder(encoder Encoder, err error) (Encoder, error) {
if err != nil {
return nil, fmt.Errorf("%s: %s", p, err.Error())
}
return encoder, err
}
func decoderOfType(typ reflect.Type) (Decoder, error) {
typeName := typ.String()
typeDecoder := typeDecoders[typeName]
if typeDecoder != nil {
return typeDecoder, nil
}
switch typ.Kind() {
case reflect.String:
return &stringCodec{}, nil
case reflect.Int:
return &intCodec{}, nil
case reflect.Int8:
return &int8Codec{}, nil
case reflect.Int16:
return &int16Codec{}, nil
case reflect.Int32:
return &int32Codec{}, nil
case reflect.Int64:
return &int64Codec{}, nil
case reflect.Uint:
return &uintCodec{}, nil
case reflect.Uint8:
return &uint8Codec{}, nil
case reflect.Uint16:
return &uint16Codec{}, nil
case reflect.Uint32:
return &uint32Codec{}, nil
case reflect.Uint64:
return &uint64Codec{}, nil
case reflect.Float32:
return &float32Codec{}, nil
case reflect.Float64:
return &float64Codec{}, nil
case reflect.Bool:
return &boolCodec{}, nil
case reflect.Interface:
if typ.NumMethod() == 0 {
return &interfaceCodec{}, nil
} else {
return nil, errors.New("unsupportd type: " + typ.String())
}
case reflect.Struct:
return prefix(fmt.Sprintf("[%s]", typeName)).addToDecoder(decoderOfStruct(typ))
case reflect.Slice:
return prefix("[slice]").addToDecoder(decoderOfSlice(typ))
case reflect.Map:
return prefix("[map]").addToDecoder(decoderOfMap(typ))
case reflect.Ptr:
return prefix("[optional]").addToDecoder(decoderOfOptional(typ))
default:
return nil, fmt.Errorf("unsupported type: %v", typ)
}
}
func encoderOfType(typ reflect.Type) (Encoder, error) {
if typ.ConvertibleTo(anyType) {
return &anyCodec{}, nil
}
typeName := typ.String()
typeEncoder := typeEncoders[typeName]
if typeEncoder != nil {
return typeEncoder, nil
}
switch typ.Kind() {
case reflect.String:
return &stringCodec{}, nil
case reflect.Int:
return &intCodec{}, nil
case reflect.Int8:
return &int8Codec{}, nil
case reflect.Int16:
return &int16Codec{}, nil
case reflect.Int32:
return &int32Codec{}, nil
case reflect.Int64:
return &int64Codec{}, nil
case reflect.Uint:
return &uintCodec{}, nil
case reflect.Uint8:
return &uint8Codec{}, nil
case reflect.Uint16:
return &uint16Codec{}, nil
case reflect.Uint32:
return &uint32Codec{}, nil
case reflect.Uint64:
return &uint64Codec{}, nil
case reflect.Float32:
return &float32Codec{}, nil
case reflect.Float64:
return &float64Codec{}, nil
case reflect.Bool:
return &boolCodec{}, nil
case reflect.Interface:
return &interfaceCodec{}, nil
case reflect.Struct:
return prefix(fmt.Sprintf("[%s]", typeName)).addToEncoder(encoderOfStruct(typ))
case reflect.Slice:
return prefix("[slice]").addToEncoder(encoderOfSlice(typ))
case reflect.Map:
return prefix("[map]").addToEncoder(encoderOfMap(typ))
case reflect.Ptr:
return prefix("[optional]").addToEncoder(encoderOfOptional(typ))
default:
return nil, fmt.Errorf("unsupported type: %v", typ)
}
}
func decoderOfOptional(typ reflect.Type) (Decoder, error) {
elemType := typ.Elem()
decoder, err := decoderOfType(elemType)
if err != nil {
return nil, err
}
return &optionalDecoder{elemType, decoder}, nil
}
func encoderOfOptional(typ reflect.Type) (Encoder, error) {
elemType := typ.Elem()
encoder, err := encoderOfType(elemType)
if err != nil {
return nil, err
}
return &optionalEncoder{elemType, encoder}, nil
}
func decoderOfMap(typ reflect.Type) (Decoder, error) {
decoder, err := decoderOfType(typ.Elem())
if err != nil {
return nil, err
}
mapInterface := reflect.New(typ).Interface()
return &mapDecoder{typ, typ.Elem(), decoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
}
func encoderOfMap(typ reflect.Type) (Encoder, error) {
elemType := typ.Elem()
encoder, err := encoderOfType(elemType)
if err != nil {
return nil, err
}
mapInterface := reflect.New(typ).Elem().Interface()
if elemType.Kind() == reflect.Interface && elemType.NumMethod() == 0 {
return &mapInterfaceEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
} else {
return &mapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
}
}
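addDecoderToCache and addEncoderToCache above never mutate the shared cache map: they copy it, add the new entry, and publish the copy with CompareAndSwapPointer, retrying on contention, so readers can load it without locks. A standalone sketch of the same copy-on-write pattern using the typed atomic.Pointer from newer Go versions (a convenience the file above predates; illustrative only):

package main

import (
    "fmt"
    "sync/atomic"
)

var cache atomic.Pointer[map[string]int]

func init() {
    empty := map[string]int{}
    cache.Store(&empty)
}

// put copies the current map, inserts the entry and publishes the copy,
// retrying if another goroutine published a newer map in the meantime.
func put(key string, val int) {
    for {
        old := cache.Load()
        copied := make(map[string]int, len(*old)+1)
        for k, v := range *old {
            copied[k] = v
        }
        copied[key] = val
        if cache.CompareAndSwap(old, &copied) {
            return
        }
    }
}

func get(key string) (int, bool) {
    v, ok := (*cache.Load())[key]
    return v, ok
}

func main() {
    put("int", 1)
    put("string", 2)
    fmt.Println(get("string"))
}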

@@ -1,150 +0,0 @@
package jsoniter
import (
"unsafe"
"reflect"
"io"
"fmt"
)
func decoderOfSlice(typ reflect.Type) (Decoder, error) {
decoder, err := decoderOfType(typ.Elem())
if err != nil {
return nil, err
}
return &sliceDecoder{typ, typ.Elem(), decoder}, nil
}
func encoderOfSlice(typ reflect.Type) (Encoder, error) {
encoder, err := encoderOfType(typ.Elem())
if err != nil {
return nil, err
}
return &sliceEncoder{typ, typ.Elem(), encoder}, nil
}
type sliceEncoder struct {
sliceType reflect.Type
elemType reflect.Type
elemEncoder Encoder
}
func (encoder *sliceEncoder) encode(ptr unsafe.Pointer, stream *Stream) {
slice := (*sliceHeader)(ptr)
if slice.Len == 0 {
stream.WriteEmptyArray()
return
}
stream.WriteArrayStart()
elemPtr := uintptr(slice.Data)
encoder.elemEncoder.encode(unsafe.Pointer(elemPtr), stream)
for i := 1; i < slice.Len; i++ {
stream.WriteMore()
elemPtr += encoder.elemType.Size()
encoder.elemEncoder.encode(unsafe.Pointer(elemPtr), stream)
}
stream.WriteArrayEnd()
if stream.Error != nil && stream.Error != io.EOF {
stream.Error = fmt.Errorf("%v: %s", encoder.sliceType, stream.Error.Error())
}
}
func (encoder *sliceEncoder) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type sliceDecoder struct {
sliceType reflect.Type
elemType reflect.Type
elemDecoder Decoder
}
// sliceHeader is a safe version of SliceHeader used within this package.
type sliceHeader struct {
Data unsafe.Pointer
Len int
Cap int
}
func (decoder *sliceDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
decoder.doDecode(ptr, iter)
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.sliceType, iter.Error.Error())
}
}
func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
slice := (*sliceHeader)(ptr)
reuseSlice(slice, decoder.sliceType, 4)
if !iter.ReadArray() {
return
}
offset := uintptr(0)
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
if !iter.ReadArray() {
slice.Len = 1
return
}
offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
if !iter.ReadArray() {
slice.Len = 2
return
}
offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
if !iter.ReadArray() {
slice.Len = 3
return
}
offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
slice.Len = 4
for iter.ReadArray() {
growOne(slice, decoder.sliceType, decoder.elemType)
offset += decoder.elemType.Size()
decoder.elemDecoder.decode(unsafe.Pointer(uintptr(slice.Data) + offset), iter)
}
}
// growOne grows the slice so that it can hold one more value, allocating
// more capacity if needed.
func growOne(slice *sliceHeader, sliceType reflect.Type, elementType reflect.Type) {
newLen := slice.Len + 1
if newLen <= slice.Cap {
slice.Len = newLen
return
}
newCap := slice.Cap
if newCap == 0 {
newCap = 1
} else {
for newCap < newLen {
if slice.Len < 1024 {
newCap += newCap
} else {
newCap += newCap / 4
}
}
}
dst := unsafe.Pointer(reflect.MakeSlice(sliceType, newLen, newCap).Pointer())
// copy old array into new array
originalBytesCount := uintptr(slice.Len) * elementType.Size()
srcPtr := (*[1 << 30]byte)(slice.Data)
dstPtr := (*[1 << 30]byte)(dst)
for i := uintptr(0); i < originalBytesCount; i++ {
dstPtr[i] = srcPtr[i]
}
slice.Len = newLen
slice.Cap = newCap
slice.Data = dst
}
func reuseSlice(slice *sliceHeader, sliceType reflect.Type, expectedCap int) {
if expectedCap <= slice.Cap {
return
}
dst := unsafe.Pointer(reflect.MakeSlice(sliceType, 0, expectedCap).Pointer())
slice.Cap = expectedCap
slice.Data = dst
}
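growOne above doubles the capacity while the slice is short and switches to 25% growth once the length passes 1024, then copies the old elements into the freshly allocated backing array. A standalone sketch of just that capacity policy, leaving out the unsafe byte copying (illustrative only):

package main

import "fmt"

// nextCap mirrors the growth rule in growOne: double below 1024 elements,
// grow by a quarter above it, starting from 1 for an empty slice.
func nextCap(length, capacity, needed int) int {
    if needed <= capacity {
        return capacity
    }
    newCap := capacity
    if newCap == 0 {
        newCap = 1
    }
    for newCap < needed {
        if length < 1024 {
            newCap += newCap
        } else {
            newCap += newCap / 4
        }
    }
    return newCap
}

func main() {
    fmt.Println(nextCap(4, 4, 5))          // 8
    fmt.Println(nextCap(2048, 2048, 2049)) // 2560
}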

@@ -1,266 +0,0 @@
package jsoniter
import (
"unsafe"
)
type stringCodec struct {
}
func (codec *stringCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*string)(ptr)) = iter.ReadString()
}
func (codec *stringCodec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteString(*((*string)(ptr)))
}
func (encoder *stringCodec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type intCodec struct {
}
func (codec *intCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*int)(ptr)) = iter.ReadInt()
}
func (codec *intCodec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt(*((*int)(ptr)))
}
func (encoder *intCodec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type int8Codec struct {
}
func (codec *int8Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*int8)(ptr)) = iter.ReadInt8()
}
func (codec *int8Codec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt8(*((*int8)(ptr)))
}
func (encoder *int8Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type int16Codec struct {
}
func (codec *int16Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*int16)(ptr)) = iter.ReadInt16()
}
func (codec *int16Codec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt16(*((*int16)(ptr)))
}
func (encoder *int16Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type int32Codec struct {
}
func (codec *int32Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*int32)(ptr)) = iter.ReadInt32()
}
func (codec *int32Codec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt32(*((*int32)(ptr)))
}
func (encoder *int32Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type int64Codec struct {
}
func (codec *int64Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*int64)(ptr)) = iter.ReadInt64()
}
func (codec *int64Codec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt64(*((*int64)(ptr)))
}
func (encoder *int64Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type uintCodec struct {
}
func (codec *uintCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*uint)(ptr)) = iter.ReadUint()
}
func (codec *uintCodec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint(*((*uint)(ptr)))
}
func (encoder *uintCodec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type uint8Codec struct {
}
func (codec *uint8Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*uint8)(ptr)) = iter.ReadUint8()
}
func (codec *uint8Codec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint8(*((*uint8)(ptr)))
}
func (encoder *uint8Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type uint16Codec struct {
}
func (decoder *uint16Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*uint16)(ptr)) = iter.ReadUint16()
}
func (codec *uint16Codec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint16(*((*uint16)(ptr)))
}
func (encoder *uint16Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type uint32Codec struct {
}
func (codec *uint32Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*uint32)(ptr)) = iter.ReadUint32()
}
func (codec *uint32Codec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint32(*((*uint32)(ptr)))
}
func (encoder *uint32Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type uint64Codec struct {
}
func (codec *uint64Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*uint64)(ptr)) = iter.ReadUint64()
}
func (codec *uint64Codec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint64(*((*uint64)(ptr)))
}
func (encoder *uint64Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type float32Codec struct {
}
func (codec *float32Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*float32)(ptr)) = iter.ReadFloat32()
}
func (codec *float32Codec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteFloat32(*((*float32)(ptr)))
}
func (encoder *float32Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type float64Codec struct {
}
func (codec *float64Codec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*float64)(ptr)) = iter.ReadFloat64()
}
func (codec *float64Codec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteFloat64(*((*float64)(ptr)))
}
func (encoder *float64Codec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type boolCodec struct {
}
func (codec *boolCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*bool)(ptr)) = iter.ReadBool()
}
func (codec *boolCodec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteBool(*((*bool)(ptr)))
}
func (encoder *boolCodec) encodeInterface(val interface{}, stream *Stream) {
WriteToStream(val, stream, encoder)
}
type interfaceCodec struct {
}
func (codec *interfaceCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*interface{})(ptr)) = iter.Read()
}
func (codec *interfaceCodec) encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteVal(*((*interface{})(ptr)))
}
func (encoder *interfaceCodec) encodeInterface(val interface{}, stream *Stream) {
stream.WriteVal(val)
}
type anyCodec struct {
}
func (codec *anyCodec) decode(ptr unsafe.Pointer, iter *Iterator) {
*((*Any)(ptr)) = iter.ReadAny()
}
func (codec *anyCodec) encode(ptr unsafe.Pointer, stream *Stream) {
(*((*Any)(ptr))).WriteTo(stream)
}
func (encoder *anyCodec) encodeInterface(val interface{}, stream *Stream) {
(val.(Any)).WriteTo(stream)
}
type stringNumberDecoder struct {
elemDecoder Decoder
}
func (decoder *stringNumberDecoder) decode(ptr unsafe.Pointer, iter *Iterator) {
c := iter.nextToken()
if c != '"' {
iter.reportError("stringNumberDecoder", `expect "`)
return
}
decoder.elemDecoder.decode(ptr, iter)
if iter.Error != nil {
return
}
c = iter.readByte()
if c != '"' {
iter.reportError("stringNumberDecoder", `expect "`)
return
}
}
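Every codec above has the same shape: decode casts the incoming unsafe.Pointer to the concrete pointer type and assigns through it, and encode reads back through the same cast. A minimal standalone sketch of that pointer-cast pattern for a single int (illustrative only; real use carries the usual unsafe caveats):

package main

import (
    "fmt"
    "strconv"
    "unsafe"
)

// decodeInt writes a parsed value through an untyped pointer, the way
// intCodec.decode above assigns through *((*int)(ptr)).
func decodeInt(ptr unsafe.Pointer, text string) error {
    n, err := strconv.Atoi(text)
    if err != nil {
        return err
    }
    *(*int)(ptr) = n
    return nil
}

func main() {
    var target int
    if err := decodeInt(unsafe.Pointer(&target), "123"); err != nil {
        fmt.Println("decode failed:", err)
        return
    }
    fmt.Println(target) // 123
}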

File diff suppressed because it is too large

@@ -1,313 +0,0 @@
package jsoniter
import (
"io"
)
type Stream struct {
out io.Writer
buf []byte
n int
Error error
indention int
IndentionStep int
}
func NewStream(out io.Writer, bufSize int) *Stream {
return &Stream{out, make([]byte, bufSize), 0, nil, 0, 0}
}
func (b *Stream) Reset(out io.Writer) {
b.out = out
b.n = 0
}
// Available returns how many bytes are unused in the buffer.
func (b *Stream) Available() int {
return len(b.buf) - b.n
}
// Buffered returns the number of bytes that have been written into the current buffer.
func (b *Stream) Buffered() int {
return b.n
}
// Write writes the contents of p into the buffer.
// It returns the number of bytes written.
// If nn < len(p), it also returns an error explaining
// why the write is short.
func (b *Stream) Write(p []byte) (nn int, err error) {
for len(p) > b.Available() && b.Error == nil {
var n int
if b.Buffered() == 0 {
// Large write, empty buffer.
// Write directly from p to avoid copy.
n, b.Error = b.out.Write(p)
} else {
n = copy(b.buf[b.n:], p)
b.n += n
b.Flush()
}
nn += n
p = p[n:]
}
if b.Error != nil {
return nn, b.Error
}
n := copy(b.buf[b.n:], p)
b.n += n
nn += n
return nn, nil
}
// writeByte writes a single byte.
func (b *Stream) writeByte(c byte) {
if b.Error != nil {
return
}
if b.Available() <= 0 && b.Flush() != nil {
return
}
b.buf[b.n] = c
b.n++
}
func (b *Stream) writeTwoBytes(c1 byte, c2 byte) {
if b.Error != nil {
return
}
if b.Available() <= 1 && b.Flush() != nil {
return
}
b.buf[b.n] = c1
b.buf[b.n + 1] = c2
b.n += 2
}
func (b *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) {
if b.Error != nil {
return
}
if b.Available() <= 2 && b.Flush() != nil {
return
}
b.buf[b.n] = c1
b.buf[b.n + 1] = c2
b.buf[b.n + 2] = c3
b.n += 3
}
func (b *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) {
if b.Error != nil {
return
}
if b.Available() <= 3 && b.Flush() != nil {
return
}
b.buf[b.n] = c1
b.buf[b.n + 1] = c2
b.buf[b.n + 2] = c3
b.buf[b.n + 3] = c4
b.n += 4
}
func (b *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) {
if b.Error != nil {
return
}
if b.Available() <= 4 && b.Flush() != nil {
return
}
b.buf[b.n] = c1
b.buf[b.n + 1] = c2
b.buf[b.n + 2] = c3
b.buf[b.n + 3] = c4
b.buf[b.n + 4] = c5
b.n += 5
}
// Flush writes any buffered data to the underlying io.Writer.
func (b *Stream) Flush() error {
if b.Error != nil {
return b.Error
}
if b.n == 0 {
return nil
}
n, err := b.out.Write(b.buf[0:b.n])
if n < b.n && err == nil {
err = io.ErrShortWrite
}
if err != nil {
if n > 0 && n < b.n {
copy(b.buf[0:b.n - n], b.buf[n:b.n])
}
b.n -= n
b.Error = err
return err
}
b.n = 0
return nil
}
func (b *Stream) WriteRaw(s string) {
for len(s) > b.Available() && b.Error == nil {
n := copy(b.buf[b.n:], s)
b.n += n
s = s[n:]
b.Flush()
}
if b.Error != nil {
return
}
n := copy(b.buf[b.n:], s)
b.n += n
}
func (stream *Stream) WriteString(s string) {
valLen := len(s)
toWriteLen := valLen
bufLengthMinusTwo := len(stream.buf) - 2 // make room for the quotes
if stream.n + toWriteLen > bufLengthMinusTwo {
toWriteLen = bufLengthMinusTwo - stream.n
}
if toWriteLen < 0 {
stream.Flush()
if stream.n + toWriteLen > bufLengthMinusTwo {
toWriteLen = bufLengthMinusTwo - stream.n
}
}
n := stream.n
stream.buf[n] = '"'
n++
// write string, the fast path, without utf8 and escape support
i := 0
for ; i < toWriteLen; i++ {
c := s[i]
if c > 31 && c != '"' && c != '\\' {
stream.buf[n] = c
n++
} else {
break;
}
}
if i == valLen {
stream.buf[n] = '"'
n++
stream.n = n
return
}
stream.n = n
// for the remaining parts, we process them char by char
stream.writeStringSlowPath(s, i, valLen);
stream.writeByte('"')
}
func (stream *Stream) writeStringSlowPath(s string, i int, valLen int) {
for ; i < valLen; i++ {
c := s[i]
switch (c) {
case '"':
stream.writeTwoBytes('\\', '"')
case '\\':
stream.writeTwoBytes('\\', '\\')
case '\b':
stream.writeTwoBytes('\\', 'b')
case '\f':
stream.writeTwoBytes('\\', 'f')
case '\n':
stream.writeTwoBytes('\\', 'n')
case '\r':
stream.writeTwoBytes('\\', 'r')
case '\t':
stream.writeTwoBytes('\\', 't')
default:
stream.writeByte(c);
}
}
}
func (stream *Stream) WriteNil() {
stream.writeFourBytes('n', 'u', 'l', 'l')
}
func (stream *Stream) WriteTrue() {
stream.writeFourBytes('t', 'r', 'u', 'e')
}
func (stream *Stream) WriteFalse() {
stream.writeFiveBytes('f', 'a', 'l', 's', 'e')
}
func (stream *Stream) WriteBool(val bool) {
if val {
stream.WriteTrue()
} else {
stream.WriteFalse()
}
}
func (stream *Stream) WriteObjectStart() {
stream.indention += stream.IndentionStep
stream.writeByte('{')
stream.writeIndention(0)
}
func (stream *Stream) WriteObjectField(field string) {
stream.WriteString(field)
stream.writeByte(':')
}
func (stream *Stream) WriteObjectEnd() {
stream.writeIndention(stream.IndentionStep)
stream.indention -= stream.IndentionStep
stream.writeByte('}')
}
func (stream *Stream) WriteEmptyObject() {
stream.writeByte('{')
stream.writeByte('}')
}
func (stream *Stream) WriteMore() {
stream.writeByte(',')
stream.writeIndention(0)
}
func (stream *Stream) WriteArrayStart() {
stream.indention += stream.IndentionStep
stream.writeByte('[')
stream.writeIndention(0)
}
func (stream *Stream) WriteEmptyArray() {
stream.writeByte('[')
stream.writeByte(']')
}
func (stream *Stream) WriteArrayEnd() {
stream.writeIndention(stream.IndentionStep)
stream.indention -= stream.IndentionStep
stream.writeByte(']')
}
func (stream *Stream) writeIndention(delta int) {
if (stream.indention == 0) {
return
}
stream.writeByte('\n')
toWrite := stream.indention - delta
i := 0
for {
for ; i < toWrite && stream.n < len(stream.buf); i++ {
stream.buf[stream.n] = ' '
stream.n ++
}
if i == toWrite {
break;
} else {
stream.Flush()
}
}
}
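The Stream type above is a purpose-built buffered writer: token helpers such as WriteObjectStart, WriteObjectField and WriteMore append bytes to an in-memory buffer, and writeIndention adds newlines and spaces only when IndentionStep is non-zero. A standalone sketch of the token-writer idea on top of strings.Builder (made-up names, not the library's API):

package main

import (
    "fmt"
    "strings"
)

// tokenWriter emits JSON punctuation and values piece by piece, the way
// the Stream helpers above do, but into a strings.Builder.
type tokenWriter struct {
    b strings.Builder
}

func (w *tokenWriter) objectStart()      { w.b.WriteByte('{') }
func (w *tokenWriter) objectEnd()        { w.b.WriteByte('}') }
func (w *tokenWriter) more()             { w.b.WriteByte(',') }
func (w *tokenWriter) field(name string) { fmt.Fprintf(&w.b, "%q:", name) }
func (w *tokenWriter) intValue(v int)    { fmt.Fprintf(&w.b, "%d", v) }

func main() {
    var w tokenWriter
    w.objectStart()
    w.field("a")
    w.intValue(1)
    w.more()
    w.field("b")
    w.intValue(2)
    w.objectEnd()
    fmt.Println(w.b.String()) // {"a":1,"b":2}
}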

@@ -1,71 +0,0 @@
package jsoniter
import (
"strconv"
)
var POW10 []uint64
func init() {
POW10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000}
}
func (stream *Stream) WriteFloat32(val float32) {
if val < 0 {
stream.writeByte('-')
val = -val
}
if val > 0x4ffffff {
stream.WriteRaw(strconv.FormatFloat(float64(val), 'f', -1, 32))
return
}
precision := 6
exp := uint64(1000000) // 6
lval := uint64(float64(val) * float64(exp) + 0.5)
stream.WriteUint64(lval / exp)
fval := lval % exp
if fval == 0 {
return
}
stream.writeByte('.')
if stream.Available() < 10 {
stream.Flush()
}
for p := precision - 1; p > 0 && fval < POW10[p]; p-- {
stream.writeByte('0')
}
stream.WriteUint64(fval)
for stream.buf[stream.n - 1] == '0' {
stream.n--
}
}
func (stream *Stream) WriteFloat64(val float64) {
if val < 0 {
stream.writeByte('-')
val = -val
}
if val > 0x4ffffff {
stream.WriteRaw(strconv.FormatFloat(val, 'f', -1, 64))
return
}
precision := 6
exp := uint64(1000000) // 6
lval := uint64(val * float64(exp) + 0.5)
stream.WriteUint64(lval / exp)
fval := lval % exp
if fval == 0 {
return
}
stream.writeByte('.')
if stream.Available() < 10 {
stream.Flush()
}
for p := precision - 1; p > 0 && fval < POW10[p]; p-- {
stream.writeByte('0')
}
stream.WriteUint64(fval)
for stream.buf[stream.n - 1] == '0' {
stream.n--
}
}
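
The fast path in WriteFloat32/WriteFloat64 above avoids strconv for small values by scaling to six decimal places, rounding, and trimming trailing zeros. A standalone sketch of that arithmetic (not the library API, just the idea):

package main

import "fmt"

// formatFixed6 mirrors the fast path above: scale by 1e6, round,
// emit integer and fractional parts, then trim trailing zeros.
func formatFixed6(val float64) string {
    exp := uint64(1000000)
    lval := uint64(val*float64(exp) + 0.5)
    out := fmt.Sprintf("%d", lval/exp)
    fval := lval % exp
    if fval == 0 {
        return out
    }
    frac := fmt.Sprintf("%06d", fval)
    for frac[len(frac)-1] == '0' {
        frac = frac[:len(frac)-1]
    }
    return out + "." + frac
}

func main() {
    fmt.Println(formatFixed6(12.345)) // 12.345
    fmt.Println(formatFixed6(0.25))   // 0.25
    fmt.Println(formatFixed6(3.0))    // 3
}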


@ -1,361 +0,0 @@
package jsoniter
var digits []uint8
var digitTens []uint8
var digitOnes []uint8
var DIGITS []uint32
func init() {
digits = []uint8{
'0', '1', '2', '3', '4', '5',
'6', '7', '8', '9', 'a', 'b',
'c', 'd', 'e', 'f', 'g', 'h',
'i', 'j', 'k', 'l', 'm', 'n',
'o', 'p', 'q', 'r', 's', 't',
'u', 'v', 'w', 'x', 'y', 'z',
}
digitTens = []uint8{
'0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
'1', '1', '1', '1', '1', '1', '1', '1', '1', '1',
'2', '2', '2', '2', '2', '2', '2', '2', '2', '2',
'3', '3', '3', '3', '3', '3', '3', '3', '3', '3',
'4', '4', '4', '4', '4', '4', '4', '4', '4', '4',
'5', '5', '5', '5', '5', '5', '5', '5', '5', '5',
'6', '6', '6', '6', '6', '6', '6', '6', '6', '6',
'7', '7', '7', '7', '7', '7', '7', '7', '7', '7',
'8', '8', '8', '8', '8', '8', '8', '8', '8', '8',
'9', '9', '9', '9', '9', '9', '9', '9', '9', '9',
}
digitOnes = []uint8{
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
}
DIGITS = make([]uint32, 1000)
for i := uint32(0); i < 1000; i++ {
DIGITS[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i % 10 + '0'
if i < 10 {
DIGITS[i] += 2 << 24
} else if i < 100 {
DIGITS[i] += 1 << 24
}
}
}
func writeFirstBuf(buf []byte, v uint32, n int) int {
start := v >> 24
if start == 0 {
buf[n] = byte(v >> 16)
n++
buf[n] = byte(v >> 8)
n++
} else if start == 1 {
buf[n] = byte(v >> 8)
n++
}
buf[n] = byte(v)
n++
return n
}
func writeBuf(buf []byte, v uint32, n int) {
buf[n] = byte(v >> 16)
buf[n + 1] = byte(v >> 8)
buf[n + 2] = byte(v)
}
func (stream *Stream) WriteUint8(val uint8) {
if stream.Available() < 3 {
stream.Flush()
}
stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n)
}
func (stream *Stream) WriteInt8(nval int8) {
if stream.Available() < 4 {
stream.Flush()
}
n := stream.n
var val uint8
if nval < 0 {
val = uint8(-nval)
stream.buf[n] = '-'
n++
} else {
val = uint8(nval)
}
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
}
func (stream *Stream) WriteUint16(val uint16) {
if stream.Available() < 5 {
stream.Flush()
}
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], stream.n)
return
}
r1 := val - q1 * 1000
n := writeFirstBuf(stream.buf, DIGITS[q1], stream.n)
writeBuf(stream.buf, DIGITS[r1], n)
stream.n = n + 3
return
}
func (stream *Stream) WriteInt16(nval int16) {
if stream.Available() < 6 {
stream.Flush()
}
n := stream.n
var val uint16
if nval < 0 {
val = uint16(-nval)
stream.buf[n] = '-'
n++
} else {
val = uint16(nval)
}
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return
}
r1 := val - q1 * 1000
n = writeFirstBuf(stream.buf, DIGITS[q1], n)
writeBuf(stream.buf, DIGITS[r1], n)
stream.n = n + 3
return
}
func (stream *Stream) WriteUint32(val uint32) {
if stream.Available() < 10 {
stream.Flush()
}
n := stream.n
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return
}
r1 := val - q1 * 1000
q2 := q1 / 1000
if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n)
writeBuf(stream.buf, DIGITS[r1], n)
stream.n = n + 3
return
}
r2 := q1 - q2 * 1000
q3 := q2 / 1000
if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n)
} else {
r3 := q2 - q3 * 1000
stream.buf[n] = byte(q3 + '0')
n++
writeBuf(stream.buf, DIGITS[r3], n)
n += 3
}
writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3)
stream.n = n + 6
}
func (stream *Stream) WriteInt32(nval int32) {
if stream.Available() < 11 {
stream.Flush()
}
n := stream.n
var val uint32
if nval < 0 {
val = uint32(-nval)
stream.buf[n] = '-'
n++
} else {
val = uint32(nval)
}
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return
}
r1 := val - q1 * 1000
q2 := q1 / 1000
if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n)
writeBuf(stream.buf, DIGITS[r1], n)
stream.n = n + 3
return
}
r2 := q1 - q2 * 1000
q3 := q2 / 1000
if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n)
} else {
r3 := q2 - q3 * 1000
stream.buf[n] = byte(q3 + '0')
n++
writeBuf(stream.buf, DIGITS[r3], n)
n += 3
}
writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3)
stream.n = n + 6
}
func (stream *Stream) WriteUint64(val uint64) {
if stream.Available() < 20 {
stream.Flush()
}
n := stream.n
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return
}
r1 := val - q1 * 1000
q2 := q1 / 1000
if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n)
writeBuf(stream.buf, DIGITS[r1], n)
stream.n = n + 3
return
}
r2 := q1 - q2 * 1000
q3 := q2 / 1000
if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n)
writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3)
stream.n = n + 6
return
}
r3 := q2 - q3 * 1000
q4 := q3 / 1000
if q4 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q3], n)
writeBuf(stream.buf, DIGITS[r3], n)
writeBuf(stream.buf, DIGITS[r2], n + 3)
writeBuf(stream.buf, DIGITS[r1], n + 6)
stream.n = n + 9
return
}
r4 := q3 - q4 * 1000
q5 := q4 / 1000
if q5 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q4], n)
writeBuf(stream.buf, DIGITS[r4], n)
writeBuf(stream.buf, DIGITS[r3], n + 3)
writeBuf(stream.buf, DIGITS[r2], n + 6)
writeBuf(stream.buf, DIGITS[r1], n + 9)
stream.n = n + 12
return
}
r5 := q4 - q5 * 1000
q6 := q5 / 1000
if q6 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q5], n)
} else {
n = writeFirstBuf(stream.buf, DIGITS[q6], n)
r6 := q5 - q6 * 1000
writeBuf(stream.buf, DIGITS[r6], n)
n += 3
}
writeBuf(stream.buf, DIGITS[r5], n)
writeBuf(stream.buf, DIGITS[r4], n + 3)
writeBuf(stream.buf, DIGITS[r3], n + 6)
writeBuf(stream.buf, DIGITS[r2], n + 9)
writeBuf(stream.buf, DIGITS[r1], n + 12)
stream.n = n + 15
}
func (stream *Stream) WriteInt64(nval int64) {
if stream.Available() < 20 {
stream.Flush()
}
n := stream.n
var val uint64
if nval < 0 {
val = uint64(-nval)
stream.buf[n] = '-'
n++
} else {
val = uint64(nval)
}
q1 := val / 1000
if q1 == 0 {
stream.n = writeFirstBuf(stream.buf, DIGITS[val], n)
return
}
r1 := val - q1 * 1000
q2 := q1 / 1000
if q2 == 0 {
n := writeFirstBuf(stream.buf, DIGITS[q1], n)
writeBuf(stream.buf, DIGITS[r1], n)
stream.n = n + 3
return
}
r2 := q1 - q2 * 1000
q3 := q2 / 1000
if q3 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q2], n)
writeBuf(stream.buf, DIGITS[r2], n)
writeBuf(stream.buf, DIGITS[r1], n + 3)
stream.n = n + 6
return
}
r3 := q2 - q3 * 1000
q4 := q3 / 1000
if q4 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q3], n)
writeBuf(stream.buf, DIGITS[r3], n)
writeBuf(stream.buf, DIGITS[r2], n + 3)
writeBuf(stream.buf, DIGITS[r1], n + 6)
stream.n = n + 9
return
}
r4 := q3 - q4 * 1000
q5 := q4 / 1000
if q5 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q4], n)
writeBuf(stream.buf, DIGITS[r4], n)
writeBuf(stream.buf, DIGITS[r3], n + 3)
writeBuf(stream.buf, DIGITS[r2], n + 6)
writeBuf(stream.buf, DIGITS[r1], n + 9)
stream.n = n + 12
return
}
r5 := q4 - q5 * 1000
q6 := q5 / 1000
if q6 == 0 {
n = writeFirstBuf(stream.buf, DIGITS[q5], n)
} else {
stream.buf[n] = byte(q6 + '0')
n++
r6 := q5 - q6 * 1000
writeBuf(stream.buf, DIGITS[r6], n)
n += 3
}
writeBuf(stream.buf, DIGITS[r5], n)
writeBuf(stream.buf, DIGITS[r4], n + 3)
writeBuf(stream.buf, DIGITS[r3], n + 6)
writeBuf(stream.buf, DIGITS[r2], n + 9)
writeBuf(stream.buf, DIGITS[r1], n + 12)
stream.n = n + 15
}
func (stream *Stream) WriteInt(val int) {
stream.WriteInt64(int64(val))
}
func (stream *Stream) WriteUint(val uint) {
stream.WriteUint64(uint64(val))
}
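
Each DIGITS entry above packs the three ASCII digits of a value in [0, 999] into the low three bytes of a uint32, with the top byte recording how many leading digits to skip. A standalone sketch of that packing and of the decode step performed by writeFirstBuf:

package main

import "fmt"

// packDigits builds a DIGITS-style entry: three ASCII bytes in the
// low 24 bits, plus a skip count for leading zeros in the top byte.
func packDigits(i uint32) uint32 {
    v := ((i/100 + '0') << 16) + ((i/10%10 + '0') << 8) + i%10 + '0'
    if i < 10 {
        v += 2 << 24
    } else if i < 100 {
        v += 1 << 24
    }
    return v
}

// writeFirst decodes an entry the way writeFirstBuf does,
// dropping the leading zero digits.
func writeFirst(v uint32) string {
    var buf []byte
    switch v >> 24 {
    case 0:
        buf = append(buf, byte(v>>16), byte(v>>8))
    case 1:
        buf = append(buf, byte(v>>8))
    }
    return string(append(buf, byte(v)))
}

func main() {
    fmt.Println(writeFirst(packDigits(7)))   // 7
    fmt.Println(writeFirst(packDigits(42)))  // 42
    fmt.Println(writeFirst(packDigits(311))) // 311
}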


@ -0,0 +1,7 @@
| json type \ dest type | bool | int | uint | float | string |
| --- | --- | --- | --- | --- | --- |
| number | positive => true <br/> negative => true <br/> zero => false | 23.2 => 23 <br/> -32.1 => -32 | 12.1 => 12 <br/> -12.1 => 0 | as normal | same as original |
| string | empty string => false <br/> string "0" => false <br/> other strings => true | "123.32" => 123 <br/> "-123.4" => -123 <br/> "123.23xxxw" => 123 <br/> "abcde12" => 0 <br/> "-32.1" => -32 | 13.2 => 13 <br/> -1.1 => 0 | 12.1 => 12.1 <br/> -12.3 => -12.3 <br/> 12.4xxa => 12.4 <br/> +1.1e2 => 110 | same as original |
| bool | true => true <br/> false => false | true => 1 <br/> false => 0 | true => 1 <br/> false => 0 | true => 1 <br/> false => 0 | true => "true" <br/> false => "false" |
| object | true | 0 | 0 | 0 | original json |
| array | empty array => false <br/> nonempty array => true | [] => 0 <br/> [1,2] => 1 | [] => 0 <br/> [1,2] => 1 | [] => 0 <br/> [1,2] => 1 | original json |
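
The table above describes jsoniter's loose (fuzzy) conversions when the JSON type does not match the destination type. A hedged sketch of two rows, assuming they are enabled by RegisterFuzzyDecoders from the companion github.com/json-iterator/go/extra package, which is not part of the files shown here:

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
    "github.com/json-iterator/go/extra"
)

func main() {
    // Assumption: this registers the loose string<->number decoders described above.
    extra.RegisterFuzzyDecoders()

    var n int
    if err := jsoniter.UnmarshalFromString(`"123.32"`, &n); err != nil {
        panic(err)
    }
    fmt.Println(n) // 123: string "123.32" decoded into an int, per the string row

    var s string
    if err := jsoniter.UnmarshalFromString(`12.5`, &s); err != nil {
        panic(err)
    }
    fmt.Println(s) // 12.5: number kept as its original text, per the number row
}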

go.mod

@ -0,0 +1,11 @@
module github.com/json-iterator/go
go 1.12
require (
github.com/davecgh/go-spew v1.1.1
github.com/google/gofuzz v1.0.0
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742
github.com/stretchr/testify v1.3.0
)

go.sum

@ -0,0 +1,14 @@
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=

iter.go

@ -0,0 +1,349 @@
package jsoniter
import (
"encoding/json"
"fmt"
"io"
)
// ValueType the type for JSON element
type ValueType int
const (
// InvalidValue invalid JSON element
InvalidValue ValueType = iota
// StringValue JSON element "string"
StringValue
// NumberValue JSON element 100 or 0.10
NumberValue
// NilValue JSON element null
NilValue
// BoolValue JSON element true or false
BoolValue
// ArrayValue JSON element []
ArrayValue
// ObjectValue JSON element {}
ObjectValue
)
var hexDigits []byte
var valueTypes []ValueType
func init() {
hexDigits = make([]byte, 256)
for i := 0; i < len(hexDigits); i++ {
hexDigits[i] = 255
}
for i := '0'; i <= '9'; i++ {
hexDigits[i] = byte(i - '0')
}
for i := 'a'; i <= 'f'; i++ {
hexDigits[i] = byte((i - 'a') + 10)
}
for i := 'A'; i <= 'F'; i++ {
hexDigits[i] = byte((i - 'A') + 10)
}
valueTypes = make([]ValueType, 256)
for i := 0; i < len(valueTypes); i++ {
valueTypes[i] = InvalidValue
}
valueTypes['"'] = StringValue
valueTypes['-'] = NumberValue
valueTypes['0'] = NumberValue
valueTypes['1'] = NumberValue
valueTypes['2'] = NumberValue
valueTypes['3'] = NumberValue
valueTypes['4'] = NumberValue
valueTypes['5'] = NumberValue
valueTypes['6'] = NumberValue
valueTypes['7'] = NumberValue
valueTypes['8'] = NumberValue
valueTypes['9'] = NumberValue
valueTypes['t'] = BoolValue
valueTypes['f'] = BoolValue
valueTypes['n'] = NilValue
valueTypes['['] = ArrayValue
valueTypes['{'] = ObjectValue
}
// Iterator is an io.Reader-like object with JSON-specific read functions.
// Errors are not returned as return values, but stored in the Error field of the iterator instance.
type Iterator struct {
cfg *frozenConfig
reader io.Reader
buf []byte
head int
tail int
depth int
captureStartedAt int
captured []byte
Error error
Attachment interface{} // open for customized decoder
}
// NewIterator creates an empty Iterator instance
func NewIterator(cfg API) *Iterator {
return &Iterator{
cfg: cfg.(*frozenConfig),
reader: nil,
buf: nil,
head: 0,
tail: 0,
depth: 0,
}
}
// Parse creates an Iterator instance from io.Reader
func Parse(cfg API, reader io.Reader, bufSize int) *Iterator {
return &Iterator{
cfg: cfg.(*frozenConfig),
reader: reader,
buf: make([]byte, bufSize),
head: 0,
tail: 0,
depth: 0,
}
}
// ParseBytes creates an Iterator instance from byte array
func ParseBytes(cfg API, input []byte) *Iterator {
return &Iterator{
cfg: cfg.(*frozenConfig),
reader: nil,
buf: input,
head: 0,
tail: len(input),
depth: 0,
}
}
// ParseString creates an Iterator instance from string
func ParseString(cfg API, input string) *Iterator {
return ParseBytes(cfg, []byte(input))
}
// Pool returns a pool that can provide more iterators with the same configuration
func (iter *Iterator) Pool() IteratorPool {
return iter.cfg
}
// Reset reuses the iterator instance with another reader as input
func (iter *Iterator) Reset(reader io.Reader) *Iterator {
iter.reader = reader
iter.head = 0
iter.tail = 0
iter.depth = 0
return iter
}
// ResetBytes reuses the iterator instance with another byte slice as input
func (iter *Iterator) ResetBytes(input []byte) *Iterator {
iter.reader = nil
iter.buf = input
iter.head = 0
iter.tail = len(input)
iter.depth = 0
return iter
}
// WhatIsNext gets the ValueType of the next JSON element without consuming it
func (iter *Iterator) WhatIsNext() ValueType {
valueType := valueTypes[iter.nextToken()]
iter.unreadByte()
return valueType
}
func (iter *Iterator) skipWhitespacesWithoutLoadMore() bool {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case ' ', '\n', '\t', '\r':
continue
}
iter.head = i
return false
}
return true
}
func (iter *Iterator) isObjectEnd() bool {
c := iter.nextToken()
if c == ',' {
return false
}
if c == '}' {
return true
}
iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c}))
return true
}
func (iter *Iterator) nextToken() byte {
// a variation of skip whitespaces, returning the next non-whitespace token
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case ' ', '\n', '\t', '\r':
continue
}
iter.head = i + 1
return c
}
if !iter.loadMore() {
return 0
}
}
}
// ReportError records an error on the iterator instance, together with the current position.
func (iter *Iterator) ReportError(operation string, msg string) {
if iter.Error != nil {
if iter.Error != io.EOF {
return
}
}
peekStart := iter.head - 10
if peekStart < 0 {
peekStart = 0
}
peekEnd := iter.head + 10
if peekEnd > iter.tail {
peekEnd = iter.tail
}
parsing := string(iter.buf[peekStart:peekEnd])
contextStart := iter.head - 50
if contextStart < 0 {
contextStart = 0
}
contextEnd := iter.head + 50
if contextEnd > iter.tail {
contextEnd = iter.tail
}
context := string(iter.buf[contextStart:contextEnd])
iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...",
operation, msg, iter.head-peekStart, parsing, context)
}
// CurrentBuffer returns the current buffer as a string for debugging purposes
func (iter *Iterator) CurrentBuffer() string {
peekStart := iter.head - 10
if peekStart < 0 {
peekStart = 0
}
return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head,
string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
}
func (iter *Iterator) readByte() (ret byte) {
if iter.head == iter.tail {
if iter.loadMore() {
ret = iter.buf[iter.head]
iter.head++
return ret
}
return 0
}
ret = iter.buf[iter.head]
iter.head++
return ret
}
func (iter *Iterator) loadMore() bool {
if iter.reader == nil {
if iter.Error == nil {
iter.head = iter.tail
iter.Error = io.EOF
}
return false
}
if iter.captured != nil {
iter.captured = append(iter.captured,
iter.buf[iter.captureStartedAt:iter.tail]...)
iter.captureStartedAt = 0
}
for {
n, err := iter.reader.Read(iter.buf)
if n == 0 {
if err != nil {
if iter.Error == nil {
iter.Error = err
}
return false
}
} else {
iter.head = 0
iter.tail = n
return true
}
}
}
func (iter *Iterator) unreadByte() {
if iter.Error != nil {
return
}
iter.head--
return
}
// Read reads the next JSON element as a generic interface{}.
func (iter *Iterator) Read() interface{} {
valueType := iter.WhatIsNext()
switch valueType {
case StringValue:
return iter.ReadString()
case NumberValue:
if iter.cfg.configBeforeFrozen.UseNumber {
return json.Number(iter.readNumberAsString())
}
return iter.ReadFloat64()
case NilValue:
iter.skipFourBytes('n', 'u', 'l', 'l')
return nil
case BoolValue:
return iter.ReadBool()
case ArrayValue:
arr := []interface{}{}
iter.ReadArrayCB(func(iter *Iterator) bool {
var elem interface{}
iter.ReadVal(&elem)
arr = append(arr, elem)
return true
})
return arr
case ObjectValue:
obj := map[string]interface{}{}
iter.ReadMapCB(func(Iter *Iterator, field string) bool {
var elem interface{}
iter.ReadVal(&elem)
obj[field] = elem
return true
})
return obj
default:
iter.ReportError("Read", fmt.Sprintf("unexpected value type: %v", valueType))
return nil
}
}
// limit maximum depth of nesting, as allowed by https://tools.ietf.org/html/rfc7159#section-9
const maxDepth = 10000
func (iter *Iterator) incrementDepth() (success bool) {
iter.depth++
if iter.depth <= maxDepth {
return true
}
iter.ReportError("incrementDepth", "exceeded max depth")
return false
}
func (iter *Iterator) decrementDepth() (success bool) {
iter.depth--
if iter.depth >= 0 {
return true
}
iter.ReportError("decrementDepth", "unexpected negative nesting")
return false
}
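
A minimal sketch of the constructors and Read defined above, walking arbitrary JSON without a target struct:

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    input := `{"name":"jsoniter","tags":["fast","compatible"]}`
    iter := jsoniter.ParseString(jsoniter.ConfigDefault, input)
    val := iter.Read() // objects become map[string]interface{}, arrays []interface{}
    if iter.Error != nil {
        panic(iter.Error)
    }
    fmt.Println(val) // map[name:jsoniter tags:[fast compatible]]
}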

iter_array.go

@ -0,0 +1,64 @@
package jsoniter
// ReadArray advances past one array delimiter and reports whether the array has more elements to read.
func (iter *Iterator) ReadArray() (ret bool) {
c := iter.nextToken()
switch c {
case 'n':
iter.skipThreeBytes('u', 'l', 'l')
return false // null
case '[':
c = iter.nextToken()
if c != ']' {
iter.unreadByte()
return true
}
return false
case ']':
return false
case ',':
return true
default:
iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c}))
return
}
}
// ReadArrayCB reads an array, invoking the callback once per element
func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
c := iter.nextToken()
if c == '[' {
if !iter.incrementDepth() {
return false
}
c = iter.nextToken()
if c != ']' {
iter.unreadByte()
if !callback(iter) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
for c == ',' {
if !callback(iter) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
}
if c != ']' {
iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
iter.decrementDepth()
return false
}
return iter.decrementDepth()
}
return iter.decrementDepth()
}
if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
return true // null
}
iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c}))
return false
}
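
ReadArray is meant to drive a loop: it reports true while elements remain and false once the closing bracket (or a null) is consumed. A minimal sketch:

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[1, 2, 3]`)
    sum := 0
    for iter.ReadArray() { // true before each remaining element
        sum += iter.ReadInt()
    }
    if iter.Error != nil {
        panic(iter.Error)
    }
    fmt.Println(sum) // 6
}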

iter_float.go

@ -0,0 +1,339 @@
package jsoniter
import (
"encoding/json"
"io"
"math/big"
"strconv"
"strings"
"unsafe"
)
var floatDigits []int8
const invalidCharForNumber = int8(-1)
const endOfNumber = int8(-2)
const dotInNumber = int8(-3)
func init() {
floatDigits = make([]int8, 256)
for i := 0; i < len(floatDigits); i++ {
floatDigits[i] = invalidCharForNumber
}
for i := int8('0'); i <= int8('9'); i++ {
floatDigits[i] = i - int8('0')
}
floatDigits[','] = endOfNumber
floatDigits[']'] = endOfNumber
floatDigits['}'] = endOfNumber
floatDigits[' '] = endOfNumber
floatDigits['\t'] = endOfNumber
floatDigits['\n'] = endOfNumber
floatDigits['.'] = dotInNumber
}
// ReadBigFloat reads a number as a big.Float
func (iter *Iterator) ReadBigFloat() (ret *big.Float) {
str := iter.readNumberAsString()
if iter.Error != nil && iter.Error != io.EOF {
return nil
}
prec := 64
if len(str) > prec {
prec = len(str)
}
val, _, err := big.ParseFloat(str, 10, uint(prec), big.ToZero)
if err != nil {
iter.Error = err
return nil
}
return val
}
// ReadBigInt reads a number as a big.Int
func (iter *Iterator) ReadBigInt() (ret *big.Int) {
str := iter.readNumberAsString()
if iter.Error != nil && iter.Error != io.EOF {
return nil
}
ret = big.NewInt(0)
var success bool
ret, success = ret.SetString(str, 10)
if !success {
iter.ReportError("ReadBigInt", "invalid big int")
return nil
}
return ret
}
// ReadFloat32 reads a float32
func (iter *Iterator) ReadFloat32() (ret float32) {
c := iter.nextToken()
if c == '-' {
return -iter.readPositiveFloat32()
}
iter.unreadByte()
return iter.readPositiveFloat32()
}
func (iter *Iterator) readPositiveFloat32() (ret float32) {
i := iter.head
// first char
if i == iter.tail {
return iter.readFloat32SlowPath()
}
c := iter.buf[i]
i++
ind := floatDigits[c]
switch ind {
case invalidCharForNumber:
return iter.readFloat32SlowPath()
case endOfNumber:
iter.ReportError("readFloat32", "empty number")
return
case dotInNumber:
iter.ReportError("readFloat32", "leading dot is invalid")
return
case 0:
if i == iter.tail {
return iter.readFloat32SlowPath()
}
c = iter.buf[i]
switch c {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
iter.ReportError("readFloat32", "leading zero is invalid")
return
}
}
value := uint64(ind)
// chars before dot
non_decimal_loop:
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c]
switch ind {
case invalidCharForNumber:
return iter.readFloat32SlowPath()
case endOfNumber:
iter.head = i
return float32(value)
case dotInNumber:
break non_decimal_loop
}
if value > uint64SafeToMultiple10 {
return iter.readFloat32SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
}
// chars after dot
if c == '.' {
i++
decimalPlaces := 0
if i == iter.tail {
return iter.readFloat32SlowPath()
}
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c]
switch ind {
case endOfNumber:
if decimalPlaces > 0 && decimalPlaces < len(pow10) {
iter.head = i
return float32(float64(value) / float64(pow10[decimalPlaces]))
}
// too many decimal places
return iter.readFloat32SlowPath()
case invalidCharForNumber, dotInNumber:
return iter.readFloat32SlowPath()
}
decimalPlaces++
if value > uint64SafeToMultiple10 {
return iter.readFloat32SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind)
}
}
return iter.readFloat32SlowPath()
}
func (iter *Iterator) readNumberAsString() (ret string) {
strBuf := [16]byte{}
str := strBuf[0:0]
load_loop:
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case '+', '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
str = append(str, c)
continue
default:
iter.head = i
break load_loop
}
}
if !iter.loadMore() {
break
}
}
if iter.Error != nil && iter.Error != io.EOF {
return
}
if len(str) == 0 {
iter.ReportError("readNumberAsString", "invalid number")
}
return *(*string)(unsafe.Pointer(&str))
}
func (iter *Iterator) readFloat32SlowPath() (ret float32) {
str := iter.readNumberAsString()
if iter.Error != nil && iter.Error != io.EOF {
return
}
errMsg := validateFloat(str)
if errMsg != "" {
iter.ReportError("readFloat32SlowPath", errMsg)
return
}
val, err := strconv.ParseFloat(str, 32)
if err != nil {
iter.Error = err
return
}
return float32(val)
}
// ReadFloat64 reads a float64
func (iter *Iterator) ReadFloat64() (ret float64) {
c := iter.nextToken()
if c == '-' {
return -iter.readPositiveFloat64()
}
iter.unreadByte()
return iter.readPositiveFloat64()
}
func (iter *Iterator) readPositiveFloat64() (ret float64) {
i := iter.head
// first char
if i == iter.tail {
return iter.readFloat64SlowPath()
}
c := iter.buf[i]
i++
ind := floatDigits[c]
switch ind {
case invalidCharForNumber:
return iter.readFloat64SlowPath()
case endOfNumber:
iter.ReportError("readFloat64", "empty number")
return
case dotInNumber:
iter.ReportError("readFloat64", "leading dot is invalid")
return
case 0:
if i == iter.tail {
return iter.readFloat64SlowPath()
}
c = iter.buf[i]
switch c {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
iter.ReportError("readFloat64", "leading zero is invalid")
return
}
}
value := uint64(ind)
// chars before dot
non_decimal_loop:
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c]
switch ind {
case invalidCharForNumber:
return iter.readFloat64SlowPath()
case endOfNumber:
iter.head = i
return float64(value)
case dotInNumber:
break non_decimal_loop
}
if value > uint64SafeToMultiple10 {
return iter.readFloat64SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
}
// chars after dot
if c == '.' {
i++
decimalPlaces := 0
if i == iter.tail {
return iter.readFloat64SlowPath()
}
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c]
switch ind {
case endOfNumber:
if decimalPlaces > 0 && decimalPlaces < len(pow10) {
iter.head = i
return float64(value) / float64(pow10[decimalPlaces])
}
// too many decimal places
return iter.readFloat64SlowPath()
case invalidCharForNumber, dotInNumber:
return iter.readFloat64SlowPath()
}
decimalPlaces++
if value > uint64SafeToMultiple10 {
return iter.readFloat64SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind)
}
}
return iter.readFloat64SlowPath()
}
func (iter *Iterator) readFloat64SlowPath() (ret float64) {
str := iter.readNumberAsString()
if iter.Error != nil && iter.Error != io.EOF {
return
}
errMsg := validateFloat(str)
if errMsg != "" {
iter.ReportError("readFloat64SlowPath", errMsg)
return
}
val, err := strconv.ParseFloat(str, 64)
if err != nil {
iter.Error = err
return
}
return val
}
func validateFloat(str string) string {
// strconv.ParseFloat is not validating `1.` or `1.e1`
if len(str) == 0 {
return "empty number"
}
if str[0] == '-' {
return "-- is not valid"
}
dotPos := strings.IndexByte(str, '.')
if dotPos != -1 {
if dotPos == len(str)-1 {
return "dot can not be last character"
}
switch str[dotPos+1] {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
default:
return "missing digit after dot"
}
}
return ""
}
// ReadNumber reads a json.Number
func (iter *Iterator) ReadNumber() (ret json.Number) {
return json.Number(iter.readNumberAsString())
}
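
The readers above take a fast integer-scaled path for short decimal numbers and fall back to strconv otherwise; ReadNumber keeps the original text. A minimal sketch:

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    iter := jsoniter.ParseString(jsoniter.ConfigDefault, `3.14159 `)
    fmt.Println(iter.ReadFloat64()) // 3.14159

    iter = jsoniter.ParseString(jsoniter.ConfigDefault, `12345.6789 `)
    num := iter.ReadNumber()  // json.Number, original text preserved
    fmt.Println(num.String()) // 12345.6789
}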

iter_int.go

@ -0,0 +1,345 @@
package jsoniter
import (
"math"
"strconv"
)
var intDigits []int8
const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1
const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1
func init() {
intDigits = make([]int8, 256)
for i := 0; i < len(intDigits); i++ {
intDigits[i] = invalidCharForNumber
}
for i := int8('0'); i <= int8('9'); i++ {
intDigits[i] = i - int8('0')
}
}
// ReadUint reads a uint
func (iter *Iterator) ReadUint() uint {
if strconv.IntSize == 32 {
return uint(iter.ReadUint32())
}
return uint(iter.ReadUint64())
}
// ReadInt reads an int
func (iter *Iterator) ReadInt() int {
if strconv.IntSize == 32 {
return int(iter.ReadInt32())
}
return int(iter.ReadInt64())
}
// ReadInt8 reads an int8
func (iter *Iterator) ReadInt8() (ret int8) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > math.MaxInt8+1 {
iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return -int8(val)
}
val := iter.readUint32(c)
if val > math.MaxInt8 {
iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return int8(val)
}
// ReadUint8 reads a uint8
func (iter *Iterator) ReadUint8() (ret uint8) {
val := iter.readUint32(iter.nextToken())
if val > math.MaxUint8 {
iter.ReportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return uint8(val)
}
// ReadInt16 reads an int16
func (iter *Iterator) ReadInt16() (ret int16) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > math.MaxInt16+1 {
iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return -int16(val)
}
val := iter.readUint32(c)
if val > math.MaxInt16 {
iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return int16(val)
}
// ReadUint16 reads a uint16
func (iter *Iterator) ReadUint16() (ret uint16) {
val := iter.readUint32(iter.nextToken())
if val > math.MaxUint16 {
iter.ReportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return uint16(val)
}
// ReadInt32 reads an int32
func (iter *Iterator) ReadInt32() (ret int32) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > math.MaxInt32+1 {
iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return -int32(val)
}
val := iter.readUint32(c)
if val > math.MaxInt32 {
iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return int32(val)
}
// ReadUint32 reads a uint32
func (iter *Iterator) ReadUint32() (ret uint32) {
return iter.readUint32(iter.nextToken())
}
func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind := intDigits[c]
if ind == 0 {
iter.assertInteger()
return 0 // single zero
}
if ind == invalidCharForNumber {
iter.ReportError("readUint32", "unexpected character: "+string([]byte{byte(ind)}))
return
}
value := uint32(ind)
if iter.tail-iter.head > 10 {
i := iter.head
ind2 := intDigits[iter.buf[i]]
if ind2 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
i++
ind3 := intDigits[iter.buf[i]]
if ind3 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10 + uint32(ind2)
}
//iter.head = i + 1
//value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
i++
ind4 := intDigits[iter.buf[i]]
if ind4 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100 + uint32(ind2)*10 + uint32(ind3)
}
i++
ind5 := intDigits[iter.buf[i]]
if ind5 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4)
}
i++
ind6 := intDigits[iter.buf[i]]
if ind6 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5)
}
i++
ind7 := intDigits[iter.buf[i]]
if ind7 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6)
}
i++
ind8 := intDigits[iter.buf[i]]
if ind8 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7)
}
i++
ind9 := intDigits[iter.buf[i]]
value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8)
iter.head = i
if ind9 == invalidCharForNumber {
iter.assertInteger()
return value
}
}
for {
for i := iter.head; i < iter.tail; i++ {
ind = intDigits[iter.buf[i]]
if ind == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
if value > uint32SafeToMultiply10 {
value2 := (value << 3) + (value << 1) + uint32(ind)
if value2 < value {
iter.ReportError("readUint32", "overflow")
return
}
value = value2
continue
}
value = (value << 3) + (value << 1) + uint32(ind)
}
if !iter.loadMore() {
iter.assertInteger()
return value
}
}
}
// ReadInt64 reads an int64
func (iter *Iterator) ReadInt64() (ret int64) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint64(iter.readByte())
if val > math.MaxInt64+1 {
iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
return
}
return -int64(val)
}
val := iter.readUint64(c)
if val > math.MaxInt64 {
iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
return
}
return int64(val)
}
// ReadUint64 reads a uint64
func (iter *Iterator) ReadUint64() uint64 {
return iter.readUint64(iter.nextToken())
}
func (iter *Iterator) readUint64(c byte) (ret uint64) {
ind := intDigits[c]
if ind == 0 {
iter.assertInteger()
return 0 // single zero
}
if ind == invalidCharForNumber {
iter.ReportError("readUint64", "unexpected character: "+string([]byte{byte(ind)}))
return
}
value := uint64(ind)
if iter.tail-iter.head > 10 {
i := iter.head
ind2 := intDigits[iter.buf[i]]
if ind2 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
i++
ind3 := intDigits[iter.buf[i]]
if ind3 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10 + uint64(ind2)
}
//iter.head = i + 1
//value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
i++
ind4 := intDigits[iter.buf[i]]
if ind4 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100 + uint64(ind2)*10 + uint64(ind3)
}
i++
ind5 := intDigits[iter.buf[i]]
if ind5 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000 + uint64(ind2)*100 + uint64(ind3)*10 + uint64(ind4)
}
i++
ind6 := intDigits[iter.buf[i]]
if ind6 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10000 + uint64(ind2)*1000 + uint64(ind3)*100 + uint64(ind4)*10 + uint64(ind5)
}
i++
ind7 := intDigits[iter.buf[i]]
if ind7 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100000 + uint64(ind2)*10000 + uint64(ind3)*1000 + uint64(ind4)*100 + uint64(ind5)*10 + uint64(ind6)
}
i++
ind8 := intDigits[iter.buf[i]]
if ind8 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000000 + uint64(ind2)*100000 + uint64(ind3)*10000 + uint64(ind4)*1000 + uint64(ind5)*100 + uint64(ind6)*10 + uint64(ind7)
}
i++
ind9 := intDigits[iter.buf[i]]
value = value*10000000 + uint64(ind2)*1000000 + uint64(ind3)*100000 + uint64(ind4)*10000 + uint64(ind5)*1000 + uint64(ind6)*100 + uint64(ind7)*10 + uint64(ind8)
iter.head = i
if ind9 == invalidCharForNumber {
iter.assertInteger()
return value
}
}
for {
for i := iter.head; i < iter.tail; i++ {
ind = intDigits[iter.buf[i]]
if ind == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
if value > uint64SafeToMultiple10 {
value2 := (value << 3) + (value << 1) + uint64(ind)
if value2 < value {
iter.ReportError("readUint64", "overflow")
return
}
value = value2
continue
}
value = (value << 3) + (value << 1) + uint64(ind)
}
if !iter.loadMore() {
iter.assertInteger()
return value
}
}
}
func (iter *Iterator) assertInteger() {
if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
iter.ReportError("assertInteger", "can not decode float as int")
}
}
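
The integer readers above refuse values that do not fit the destination width, recording an overflow error instead of silently wrapping. A minimal sketch:

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    iter := jsoniter.ParseString(jsoniter.ConfigDefault, `300 `)
    fmt.Println(iter.ReadUint8())  // 0
    fmt.Println(iter.Error != nil) // true: overflow recorded in iter.Error

    iter = jsoniter.ParseString(jsoniter.ConfigDefault, `300 `)
    fmt.Println(iter.ReadInt32()) // 300
    fmt.Println(iter.Error)       // <nil>
}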

iter_object.go

@ -0,0 +1,267 @@
package jsoniter
import (
"fmt"
"strings"
)
// ReadObject reads one field from the object.
// If the object has ended, it returns the empty string.
// Otherwise, it returns the field name.
func (iter *Iterator) ReadObject() (ret string) {
c := iter.nextToken()
switch c {
case 'n':
iter.skipThreeBytes('u', 'l', 'l')
return "" // null
case '{':
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field := iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
return field
}
if c == '}' {
return "" // end of object
}
iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c}))
return
case ',':
field := iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
return field
case '}':
return "" // end of object
default:
iter.ReportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c})))
return
}
}
// readFieldHash reads the next field name and returns its FNV-1a hash, lower-casing ASCII letters unless the configuration is case sensitive.
func (iter *Iterator) readFieldHash() int64 {
hash := int64(0x811c9dc5)
c := iter.nextToken()
if c != '"' {
iter.ReportError("readFieldHash", `expect ", but found `+string([]byte{c}))
return 0
}
for {
for i := iter.head; i < iter.tail; i++ {
// require ascii string and no escape
b := iter.buf[i]
if b == '\\' {
iter.head = i
for _, b := range iter.readStringSlowPath() {
if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
b += 'a' - 'A'
}
hash ^= int64(b)
hash *= 0x1000193
}
c = iter.nextToken()
if c != ':' {
iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
return 0
}
return hash
}
if b == '"' {
iter.head = i + 1
c = iter.nextToken()
if c != ':' {
iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
return 0
}
return hash
}
if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
b += 'a' - 'A'
}
hash ^= int64(b)
hash *= 0x1000193
}
if !iter.loadMore() {
iter.ReportError("readFieldHash", `incomplete field name`)
return 0
}
}
}
func calcHash(str string, caseSensitive bool) int64 {
if !caseSensitive {
str = strings.ToLower(str)
}
hash := int64(0x811c9dc5)
for _, b := range []byte(str) {
hash ^= int64(b)
hash *= 0x1000193
}
return int64(hash)
}
// ReadObjectCB reads an object with a callback; keys must be ASCII-only and the field name is not copied
func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
c := iter.nextToken()
var field string
if c == '{' {
if !iter.incrementDepth() {
return false
}
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field = iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
if !callback(iter, field) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
for c == ',' {
field = iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
if !callback(iter, field) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
}
if c != '}' {
iter.ReportError("ReadObjectCB", `object not ended with }`)
iter.decrementDepth()
return false
}
return iter.decrementDepth()
}
if c == '}' {
return iter.decrementDepth()
}
iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c}))
iter.decrementDepth()
return false
}
if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
return true // null
}
iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c}))
return false
}
// ReadMapCB reads a map with a callback; the key can be any string
func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
c := iter.nextToken()
if c == '{' {
if !iter.incrementDepth() {
return false
}
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field := iter.ReadString()
if iter.nextToken() != ':' {
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
iter.decrementDepth()
return false
}
if !callback(iter, field) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
for c == ',' {
field = iter.ReadString()
if iter.nextToken() != ':' {
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
iter.decrementDepth()
return false
}
if !callback(iter, field) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
}
if c != '}' {
iter.ReportError("ReadMapCB", `object not ended with }`)
iter.decrementDepth()
return false
}
return iter.decrementDepth()
}
if c == '}' {
return iter.decrementDepth()
}
iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
iter.decrementDepth()
return false
}
if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
return true // null
}
iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
return false
}
func (iter *Iterator) readObjectStart() bool {
c := iter.nextToken()
if c == '{' {
c = iter.nextToken()
if c == '}' {
return false
}
iter.unreadByte()
return true
} else if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
return false
}
iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c}))
return false
}
func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) {
str := iter.ReadStringAsSlice()
if iter.skipWhitespacesWithoutLoadMore() {
if ret == nil {
ret = make([]byte, len(str))
copy(ret, str)
}
if !iter.loadMore() {
return
}
}
if iter.buf[iter.head] != ':' {
iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]}))
return
}
iter.head++
if iter.skipWhitespacesWithoutLoadMore() {
if ret == nil {
ret = make([]byte, len(str))
copy(ret, str)
}
if !iter.loadMore() {
return
}
}
if ret == nil {
return str
}
return ret
}
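
ReadObject returns one field name per call and the empty string once the object is finished, so a small decoder is just a loop with a switch. A minimal sketch:

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"host":"localhost","port":8080}`)
    for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
        switch field {
        case "host":
            fmt.Println("host =", iter.ReadString()) // host = localhost
        case "port":
            fmt.Println("port =", iter.ReadInt()) // port = 8080
        default:
            iter.Skip() // ignore unknown fields
        }
    }
    if iter.Error != nil {
        panic(iter.Error)
    }
}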

iter_skip.go

@ -0,0 +1,130 @@
package jsoniter
import "fmt"
// ReadNil reads a JSON null and
// reports whether a null was read
func (iter *Iterator) ReadNil() (ret bool) {
c := iter.nextToken()
if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l') // null
return true
}
iter.unreadByte()
return false
}
// ReadBool reads a JSON boolean value
func (iter *Iterator) ReadBool() (ret bool) {
c := iter.nextToken()
if c == 't' {
iter.skipThreeBytes('r', 'u', 'e')
return true
}
if c == 'f' {
iter.skipFourBytes('a', 'l', 's', 'e')
return false
}
iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c}))
return
}
// SkipAndReturnBytes skips the next JSON element and returns its content as []byte.
// The []byte may be kept; it is a copy of the data.
func (iter *Iterator) SkipAndReturnBytes() []byte {
iter.startCapture(iter.head)
iter.Skip()
return iter.stopCapture()
}
// SkipAndAppendBytes skips next JSON element and appends its content to
// buffer, returning the result.
func (iter *Iterator) SkipAndAppendBytes(buf []byte) []byte {
iter.startCaptureTo(buf, iter.head)
iter.Skip()
return iter.stopCapture()
}
func (iter *Iterator) startCaptureTo(buf []byte, captureStartedAt int) {
if iter.captured != nil {
panic("already in capture mode")
}
iter.captureStartedAt = captureStartedAt
iter.captured = buf
}
func (iter *Iterator) startCapture(captureStartedAt int) {
iter.startCaptureTo(make([]byte, 0, 32), captureStartedAt)
}
func (iter *Iterator) stopCapture() []byte {
if iter.captured == nil {
panic("not in capture mode")
}
captured := iter.captured
remaining := iter.buf[iter.captureStartedAt:iter.head]
iter.captureStartedAt = -1
iter.captured = nil
return append(captured, remaining...)
}
// Skip skips over one JSON element and positions the iterator at the next one
func (iter *Iterator) Skip() {
c := iter.nextToken()
switch c {
case '"':
iter.skipString()
case 'n':
iter.skipThreeBytes('u', 'l', 'l') // null
case 't':
iter.skipThreeBytes('r', 'u', 'e') // true
case 'f':
iter.skipFourBytes('a', 'l', 's', 'e') // false
case '0':
iter.unreadByte()
iter.ReadFloat32()
case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9':
iter.skipNumber()
case '[':
iter.skipArray()
case '{':
iter.skipObject()
default:
iter.ReportError("Skip", fmt.Sprintf("do not know how to skip: %v", c))
return
}
}
func (iter *Iterator) skipFourBytes(b1, b2, b3, b4 byte) {
if iter.readByte() != b1 {
iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
return
}
if iter.readByte() != b2 {
iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
return
}
if iter.readByte() != b3 {
iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
return
}
if iter.readByte() != b4 {
iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
return
}
}
func (iter *Iterator) skipThreeBytes(b1, b2, b3 byte) {
if iter.readByte() != b1 {
iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
return
}
if iter.readByte() != b2 {
iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
return
}
if iter.readByte() != b3 {
iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
return
}
}
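
Skip walks over any value without decoding it, and SkipAndReturnBytes additionally captures a copy of the skipped bytes, which is useful for deferring the decoding of a sub-document. A minimal sketch:

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"meta":{"a":[1,2,3]},"id":7}`)
    for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
        if field == "meta" {
            raw := iter.SkipAndReturnBytes() // copy of the skipped element
            fmt.Println(string(raw))         // {"a":[1,2,3]}
        } else {
            iter.Skip()
        }
    }
    if iter.Error != nil {
        panic(iter.Error)
    }
}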


@ -1,55 +1,98 @@
//+build jsoniter_sloppy
package jsoniter
import "fmt"
// sloppy but faster implementation, do not validate the input json
// ReadNil reads a json object as nil and
// returns whether it's a nil or not
func (iter *Iterator) ReadNil() (ret bool) {
c := iter.nextToken()
if c == 'n' {
iter.skipFixedBytes(3) // null
return true
func (iter *Iterator) skipNumber() {
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case ' ', '\n', '\r', '\t', ',', '}', ']':
iter.head = i
return
}
}
if !iter.loadMore() {
return
}
}
iter.unreadByte()
return false
}
// ReadBool reads a json object as Bool
func (iter *Iterator) ReadBool() (ret bool) {
c := iter.nextToken()
if c == 't' {
iter.skipFixedBytes(3)
return true
}
if c == 'f' {
iter.skipFixedBytes(4)
return false
}
iter.reportError("ReadBool", "expect t or f")
return
}
// Skip skips a json object and positions to relatively the next json object
func (iter *Iterator) Skip() {
c := iter.nextToken()
switch c {
case '"':
iter.skipString()
case 'n', 't':
iter.skipFixedBytes(3) // null or true
case 'f':
iter.skipFixedBytes(4) // false
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
iter.skipUntilBreak()
case '[':
iter.skipArray()
case '{':
iter.skipObject()
default:
iter.reportError("Skip", fmt.Sprintf("do not know how to skip: %v", c))
func (iter *Iterator) skipArray() {
level := 1
if !iter.incrementDepth() {
return
}
for {
for i := iter.head; i < iter.tail; i++ {
switch iter.buf[i] {
case '"': // If inside string, skip it
iter.head = i + 1
iter.skipString()
i = iter.head - 1 // it will be i++ soon
case '[': // If open symbol, increase level
level++
if !iter.incrementDepth() {
return
}
case ']': // If close symbol, decrease level
level--
if !iter.decrementDepth() {
return
}
// If we have returned to the original level, we're done
if level == 0 {
iter.head = i + 1
return
}
}
}
if !iter.loadMore() {
iter.ReportError("skipObject", "incomplete array")
return
}
}
}
func (iter *Iterator) skipObject() {
level := 1
if !iter.incrementDepth() {
return
}
for {
for i := iter.head; i < iter.tail; i++ {
switch iter.buf[i] {
case '"': // If inside string, skip it
iter.head = i + 1
iter.skipString()
i = iter.head - 1 // it will be i++ soon
case '{': // If open symbol, increase level
level++
if !iter.incrementDepth() {
return
}
case '}': // If close symbol, decrease level
level--
if !iter.decrementDepth() {
return
}
// If we have returned to the original level, we're done
if level == 0 {
iter.head = i + 1
return
}
}
}
if !iter.loadMore() {
iter.ReportError("skipObject", "incomplete object")
return
}
}
}
func (iter *Iterator) skipString() {
@ -57,7 +100,7 @@ func (iter *Iterator) skipString() {
end, escaped := iter.findStringEnd()
if end == -1 {
if !iter.loadMore() {
iter.reportError("skipString", "incomplete string")
iter.ReportError("skipString", "incomplete string")
return
}
if escaped {
@ -118,90 +161,3 @@ func (iter *Iterator) findStringEnd() (int, bool) {
}
return -1, true // end with \
}
func (iter *Iterator) skipArray() {
level := 1
for {
for i := iter.head; i < iter.tail; i++ {
switch iter.buf[i] {
case '"': // If inside string, skip it
iter.head = i + 1
iter.skipString()
i = iter.head - 1 // it will be i++ soon
case '[': // If open symbol, increase level
level++
case ']': // If close symbol, increase level
level--
// If we have returned to the original level, we're done
if level == 0 {
iter.head = i + 1
return
}
}
}
if !iter.loadMore() {
iter.reportError("skipObject", "incomplete array")
return
}
}
}
func (iter *Iterator) skipObject() {
level := 1
for {
for i := iter.head; i < iter.tail; i++ {
switch iter.buf[i] {
case '"': // If inside string, skip it
iter.head = i + 1
iter.skipString()
i = iter.head - 1 // it will be i++ soon
case '{': // If open symbol, increase level
level++
case '}': // If close symbol, increase level
level--
// If we have returned to the original level, we're done
if level == 0 {
iter.head = i + 1
return
}
}
}
if !iter.loadMore() {
iter.reportError("skipObject", "incomplete object")
return
}
}
}
func (iter *Iterator) skipUntilBreak() {
// true, false, null, number
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case ' ', '\n', '\r', '\t', ',', '}', ']':
iter.head = i
return
}
}
if !iter.loadMore() {
return
}
}
}
func (iter *Iterator) skipFixedBytes(n int) {
iter.head += n;
if (iter.head >= iter.tail) {
more := iter.head - iter.tail;
if !iter.loadMore() {
if more > 0 {
iter.reportError("skipFixedBytes", "unexpected end");
}
return
}
iter.head += more;
}
}


@ -1,62 +1,64 @@
//+build jsoniter_sloppy
package jsoniter
import (
"github.com/stretchr/testify/require"
"io"
"testing"
"github.com/json-iterator/go/require"
)
func Test_string_end(t *testing.T) {
end, escaped := ParseString(`abc"`).findStringEnd()
end, escaped := ParseString(ConfigDefault, `abc"`).findStringEnd()
if end != 4 {
t.Fatal(end)
}
if escaped != false {
t.Fatal(escaped)
}
end, escaped = ParseString(`abc\\"`).findStringEnd()
end, escaped = ParseString(ConfigDefault, `abc\\"`).findStringEnd()
if end != 6 {
t.Fatal(end)
}
if escaped != true {
t.Fatal(escaped)
}
end, escaped = ParseString(`abc\\\\"`).findStringEnd()
end, escaped = ParseString(ConfigDefault, `abc\\\\"`).findStringEnd()
if end != 8 {
t.Fatal(end)
}
if escaped != true {
t.Fatal(escaped)
}
end, escaped = ParseString(`abc\"`).findStringEnd()
end, escaped = ParseString(ConfigDefault, `abc\"`).findStringEnd()
if end != -1 {
t.Fatal(end)
}
if escaped != false {
t.Fatal(escaped)
}
end, escaped = ParseString(`abc\`).findStringEnd()
end, escaped = ParseString(ConfigDefault, `abc\`).findStringEnd()
if end != -1 {
t.Fatal(end)
}
if escaped != true {
t.Fatal(escaped)
}
end, escaped = ParseString(`abc\\`).findStringEnd()
end, escaped = ParseString(ConfigDefault, `abc\\`).findStringEnd()
if end != -1 {
t.Fatal(end)
}
if escaped != false {
t.Fatal(escaped)
}
end, escaped = ParseString(`\\`).findStringEnd()
end, escaped = ParseString(ConfigDefault, `\\`).findStringEnd()
if end != -1 {
t.Fatal(end)
}
if escaped != false {
t.Fatal(escaped)
}
end, escaped = ParseString(`\`).findStringEnd()
end, escaped = ParseString(ConfigDefault, `\`).findStringEnd()
if end != -1 {
t.Fatal(end)
}
@ -91,54 +93,54 @@ func (reader *StagedReader) Read(p []byte) (n int, err error) {
func Test_skip_string(t *testing.T) {
should := require.New(t)
iter := ParseString(`"abc`)
iter := ParseString(ConfigDefault, `"abc`)
iter.skipString()
should.Equal(1, iter.head)
iter = ParseString(`\""abc`)
iter = ParseString(ConfigDefault, `\""abc`)
iter.skipString()
should.Equal(3, iter.head)
reader := &StagedReader{
r1: `abc`,
r2: `"`,
}
iter = Parse(reader, 4096)
iter = Parse(ConfigDefault, reader, 4096)
iter.skipString()
should.Equal(1, iter.head)
reader = &StagedReader{
r1: `abc`,
r2: `1"`,
}
iter = Parse(reader, 4096)
iter = Parse(ConfigDefault, reader, 4096)
iter.skipString()
should.Equal(2, iter.head)
reader = &StagedReader{
r1: `abc\`,
r2: `"`,
}
iter = Parse(reader, 4096)
iter = Parse(ConfigDefault, reader, 4096)
iter.skipString()
should.NotNil(iter.Error)
reader = &StagedReader{
r1: `abc\`,
r2: `""`,
}
iter = Parse(reader, 4096)
iter = Parse(ConfigDefault, reader, 4096)
iter.skipString()
should.Equal(2, iter.head)
}
func Test_skip_object(t *testing.T) {
iter := ParseString(`}`)
iter := ParseString(ConfigDefault, `}`)
iter.skipObject()
if iter.head != 1 {
t.Fatal(iter.head)
}
iter = ParseString(`a}`)
iter = ParseString(ConfigDefault, `a}`)
iter.skipObject()
if iter.head != 2 {
t.Fatal(iter.head)
}
iter = ParseString(`{}}a`)
iter = ParseString(ConfigDefault, `{}}a`)
iter.skipObject()
if iter.head != 3 {
t.Fatal(iter.head)
@ -147,12 +149,12 @@ func Test_skip_object(t *testing.T) {
r1: `{`,
r2: `}}a`,
}
iter = Parse(reader, 4096)
iter = Parse(ConfigDefault, reader, 4096)
iter.skipObject()
if iter.head != 2 {
t.Fatal(iter.head)
}
iter = ParseString(`"}"}a`)
iter = ParseString(ConfigDefault, `"}"}a`)
iter.skipObject()
if iter.head != 4 {
t.Fatal(iter.head)

iter_skip_strict.go

@ -0,0 +1,99 @@
//+build !jsoniter_sloppy
package jsoniter
import (
"fmt"
"io"
)
func (iter *Iterator) skipNumber() {
if !iter.trySkipNumber() {
iter.unreadByte()
if iter.Error != nil && iter.Error != io.EOF {
return
}
iter.ReadFloat64()
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = nil
iter.ReadBigFloat()
}
}
}
func (iter *Iterator) trySkipNumber() bool {
dotFound := false
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
case '.':
if dotFound {
iter.ReportError("validateNumber", `more than one dot found in number`)
return true // already failed
}
if i+1 == iter.tail {
return false
}
c = iter.buf[i+1]
switch c {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
default:
iter.ReportError("validateNumber", `missing digit after dot`)
return true // already failed
}
dotFound = true
default:
switch c {
case ',', ']', '}', ' ', '\t', '\n', '\r':
if iter.head == i {
return false // if - without following digits
}
iter.head = i
return true // must be valid
}
return false // may be invalid
}
}
return false
}
func (iter *Iterator) skipString() {
if !iter.trySkipString() {
iter.unreadByte()
iter.ReadString()
}
}
func (iter *Iterator) trySkipString() bool {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
if c == '"' {
iter.head = i + 1
return true // valid
} else if c == '\\' {
return false
} else if c < ' ' {
iter.ReportError("trySkipString",
fmt.Sprintf(`invalid control character found: %d`, c))
return true // already failed
}
}
return false
}
func (iter *Iterator) skipObject() {
iter.unreadByte()
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
iter.Skip()
return true
})
}
func (iter *Iterator) skipArray() {
iter.unreadByte()
iter.ReadArrayCB(func(iter *Iterator) bool {
iter.Skip()
return true
})
}

iter_str.go

@ -0,0 +1,215 @@
package jsoniter
import (
"fmt"
"unicode/utf16"
)
// ReadString reads a string from the iterator
func (iter *Iterator) ReadString() (ret string) {
c := iter.nextToken()
if c == '"' {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
if c == '"' {
ret = string(iter.buf[iter.head:i])
iter.head = i + 1
return ret
} else if c == '\\' {
break
} else if c < ' ' {
iter.ReportError("ReadString",
fmt.Sprintf(`invalid control character found: %d`, c))
return
}
}
return iter.readStringSlowPath()
} else if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
return ""
}
iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c}))
return
}
func (iter *Iterator) readStringSlowPath() (ret string) {
var str []byte
var c byte
for iter.Error == nil {
c = iter.readByte()
if c == '"' {
return string(str)
}
if c == '\\' {
c = iter.readByte()
str = iter.readEscapedChar(c, str)
} else {
str = append(str, c)
}
}
iter.ReportError("readStringSlowPath", "unexpected end of input")
return
}
func (iter *Iterator) readEscapedChar(c byte, str []byte) []byte {
switch c {
case 'u':
r := iter.readU4()
if utf16.IsSurrogate(r) {
c = iter.readByte()
if iter.Error != nil {
return nil
}
if c != '\\' {
iter.unreadByte()
str = appendRune(str, r)
return str
}
c = iter.readByte()
if iter.Error != nil {
return nil
}
if c != 'u' {
str = appendRune(str, r)
return iter.readEscapedChar(c, str)
}
r2 := iter.readU4()
if iter.Error != nil {
return nil
}
combined := utf16.DecodeRune(r, r2)
if combined == '\uFFFD' {
str = appendRune(str, r)
str = appendRune(str, r2)
} else {
str = appendRune(str, combined)
}
} else {
str = appendRune(str, r)
}
case '"':
str = append(str, '"')
case '\\':
str = append(str, '\\')
case '/':
str = append(str, '/')
case 'b':
str = append(str, '\b')
case 'f':
str = append(str, '\f')
case 'n':
str = append(str, '\n')
case 'r':
str = append(str, '\r')
case 't':
str = append(str, '\t')
default:
iter.ReportError("readEscapedChar",
`invalid escape char after \`)
return nil
}
return str
}
// ReadStringAsSlice reads a string from the iterator without copying it into string form.
// The []byte cannot be kept, as it will change after the next iterator call.
func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
c := iter.nextToken()
if c == '"' {
for i := iter.head; i < iter.tail; i++ {
// require ascii string and no escape
// for: field name, base64, number
if iter.buf[i] == '"' {
// fast path: reuse the underlying buffer
ret = iter.buf[iter.head:i]
iter.head = i + 1
return ret
}
}
readLen := iter.tail - iter.head
copied := make([]byte, readLen, readLen*2)
copy(copied, iter.buf[iter.head:iter.tail])
iter.head = iter.tail
for iter.Error == nil {
c := iter.readByte()
if c == '"' {
return copied
}
copied = append(copied, c)
}
return copied
}
iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c}))
return
}
func (iter *Iterator) readU4() (ret rune) {
for i := 0; i < 4; i++ {
c := iter.readByte()
if iter.Error != nil {
return
}
if c >= '0' && c <= '9' {
ret = ret*16 + rune(c-'0')
} else if c >= 'a' && c <= 'f' {
ret = ret*16 + rune(c-'a'+10)
} else if c >= 'A' && c <= 'F' {
ret = ret*16 + rune(c-'A'+10)
} else {
iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c}))
return
}
}
return ret
}
const (
t1 = 0x00 // 0000 0000
tx = 0x80 // 1000 0000
t2 = 0xC0 // 1100 0000
t3 = 0xE0 // 1110 0000
t4 = 0xF0 // 1111 0000
t5 = 0xF8 // 1111 1000
maskx = 0x3F // 0011 1111
mask2 = 0x1F // 0001 1111
mask3 = 0x0F // 0000 1111
mask4 = 0x07 // 0000 0111
rune1Max = 1<<7 - 1
rune2Max = 1<<11 - 1
rune3Max = 1<<16 - 1
surrogateMin = 0xD800
surrogateMax = 0xDFFF
maxRune = '\U0010FFFF' // Maximum valid Unicode code point.
runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character"
)
func appendRune(p []byte, r rune) []byte {
// Negative values are erroneous. Making it unsigned addresses the problem.
switch i := uint32(r); {
case i <= rune1Max:
p = append(p, byte(r))
return p
case i <= rune2Max:
p = append(p, t2|byte(r>>6))
p = append(p, tx|byte(r)&maskx)
return p
case i > maxRune, surrogateMin <= i && i <= surrogateMax:
r = runeError
fallthrough
case i <= rune3Max:
p = append(p, t3|byte(r>>12))
p = append(p, tx|byte(r>>6)&maskx)
p = append(p, tx|byte(r)&maskx)
return p
default:
p = append(p, t4|byte(r>>18))
p = append(p, tx|byte(r>>12)&maskx)
p = append(p, tx|byte(r>>6)&maskx)
p = append(p, tx|byte(r)&maskx)
return p
}
}

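A small illustration (not part of the diff) of what the slow path above produces: escaped characters go through readEscapedChar, surrogate pairs are combined with utf16.DecodeRune, and appendRune re-encodes the result as UTF-8. It assumes the single-argument ParseString helper used by the tests in this diff; the example name is made up:

// Illustration only. `\ud83d\ude00` is the surrogate pair for U+1F600,
// so ReadString returns it as a single UTF-8 encoded rune.
package jsoniter

import "fmt"

func Example_readStringEscapes() {
	iter := ParseString(`"tab:\t emoji:\ud83d\ude00"`)
	fmt.Printf("%q\n", iter.ReadString())
	// Output: "tab:\t emoji:😀"
}
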
18
jsoniter.go Normal file
View File

@ -0,0 +1,18 @@
// Package jsoniter implements encoding and decoding of JSON as defined in
// RFC 4627 and provides interfaces with syntax identical to the standard library's encoding/json.
// Converting from encoding/json to jsoniter requires no more than replacing the package
// in imports and in variable type declarations (if any).
// The jsoniter interfaces give 100% compatibility with code using the standard library.
//
// "JSON and Go"
// (https://golang.org/doc/articles/json_and_go.html)
// gives a description of how Marshal/Unmarshal operate
// between arbitrary or predefined json objects and bytes,
// and it applies to jsoniter.Marshal/Unmarshal as well.
//
// In addition, jsoniter.Iterator provides a different set of interfaces
// that iterate over given bytes/string/reader
// and yield parsed elements one by one.
// This set of interfaces reads input only as required and gives
// better performance.
package jsoniter

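A drop-in usage sketch for the package comment above (illustration only, not part of the diff; it assumes the repository is imported as jsoniter and uses the package-level Marshal/Unmarshal that the tests in this diff rely on):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type user struct {
	Name string
	Tags []string
}

func main() {
	// Marshal/Unmarshal mirror encoding/json, so switching the package is enough.
	data, err := jsoniter.Marshal(user{Name: "taowen", Tags: []string{"crazy", "hacker"}})
	if err != nil {
		panic(err)
	}
	var decoded user
	if err := jsoniter.Unmarshal(data, &decoded); err != nil {
		panic(err)
	}
	fmt.Println(string(data), decoded.Name)
}
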
View File

@ -1,69 +0,0 @@
package jsoniter

import (
	"reflect"
	"strconv"
	"testing"
	"time"
	"unsafe"
)

func Test_customize_type_decoder(t *testing.T) {
	RegisterTypeDecoder("time.Time", func(ptr unsafe.Pointer, iter *Iterator) {
		t, err := time.ParseInLocation("2006-01-02 15:04:05", iter.ReadString(), time.UTC)
		if err != nil {
			iter.Error = err
			return
		}
		*((*time.Time)(ptr)) = t
	})
	defer CleanDecoders()
	val := time.Time{}
	err := Unmarshal([]byte(`"2016-12-05 08:43:28"`), &val)
	if err != nil {
		t.Fatal(err)
	}
	year, month, day := val.Date()
	if year != 2016 || month != 12 || day != 5 {
		t.Fatal(val)
	}
}

type Tom struct {
	field1 string
}

func Test_customize_field_decoder(t *testing.T) {
	RegisterFieldDecoder("jsoniter.Tom", "field1", func(ptr unsafe.Pointer, iter *Iterator) {
		*((*string)(ptr)) = strconv.Itoa(iter.ReadInt())
	})
	defer CleanDecoders()
	tom := Tom{}
	err := Unmarshal([]byte(`{"field1": 100}`), &tom)
	if err != nil {
		t.Fatal(err)
	}
}

type TestObject1 struct {
	field1 string
}

func Test_customize_field_by_extension(t *testing.T) {
	RegisterExtension(func(type_ reflect.Type, field *reflect.StructField) ([]string, DecoderFunc) {
		if type_.String() == "jsoniter.TestObject1" && field.Name == "field1" {
			return []string{"field-1"}, func(ptr unsafe.Pointer, iter *Iterator) {
				*((*string)(ptr)) = strconv.Itoa(iter.ReadInt())
			}
		}
		return nil, nil
	})
	obj := TestObject1{}
	err := Unmarshal([]byte(`{"field-1": 100}`), &obj)
	if err != nil {
		t.Fatal(err)
	}
	if obj.field1 != "100" {
		t.Fatal(obj.field1)
	}
}

View File

@ -1,39 +0,0 @@
package jsoniter

import (
	"fmt"
	"testing"
)

func Test_bind_api_demo(t *testing.T) {
	iter := ParseString(`[0,1,2,3]`)
	val := []int{}
	iter.ReadVal(&val)
	fmt.Println(val[3])
}

func Test_iterator_api_demo(t *testing.T) {
	iter := ParseString(`[0,1,2,3]`)
	total := 0
	for iter.ReadArray() {
		total += iter.ReadInt()
	}
	fmt.Println(total)
}

type User struct {
	userID int
	name   string
	tags   []string
}

func Test_iterator_and_bind_api(t *testing.T) {
	iter := ParseString(`[123, {"name": "taowen", "tags": ["crazy", "hacker"]}]`)
	user := User{}
	iter.ReadArray()
	user.userID = iter.ReadInt()
	iter.ReadArray()
	iter.ReadVal(&user)
	iter.ReadArray() // array end
	fmt.Println(user)
}

View File

@ -1,94 +0,0 @@
package jsoniter

import (
	"testing"
	"github.com/json-iterator/go/require"
	"unsafe"
)

func Test_write_array_of_interface(t *testing.T) {
	should := require.New(t)
	array := []interface{}{"hello"}
	str, err := MarshalToString(array)
	should.Nil(err)
	should.Equal(`["hello"]`, str)
}

func Test_write_map_of_interface(t *testing.T) {
	should := require.New(t)
	val := map[string]interface{}{"hello": "world"}
	str, err := MarshalToString(val)
	should.Nil(err)
	should.Equal(`{"hello":"world"}`, str)
}

func Test_write_map_of_interface_in_struct(t *testing.T) {
	type TestObject struct {
		Field map[string]interface{}
	}
	should := require.New(t)
	val := TestObject{map[string]interface{}{"hello": "world"}}
	str, err := MarshalToString(val)
	should.Nil(err)
	should.Equal(`{"Field":{"hello":"world"}}`, str)
}

func Test_write_map_of_interface_in_struct_with_two_fields(t *testing.T) {
	type TestObject struct {
		Field  map[string]interface{}
		Field2 string
	}
	should := require.New(t)
	val := TestObject{map[string]interface{}{"hello": "world"}, ""}
	str, err := MarshalToString(val)
	should.Nil(err)
	should.Equal(`{"Field":{"hello":"world"},"Field2":""}`, str)
}

type MyInterface interface {
	Hello() string
}

type MyString string

func (ms MyString) Hello() string {
	return string(ms)
}

func Test_write_map_of_custom_interface(t *testing.T) {
	should := require.New(t)
	myStr := MyString("world")
	should.Equal("world", myStr.Hello())
	val := map[string]MyInterface{"hello": myStr}
	str, err := MarshalToString(val)
	should.Nil(err)
	should.Equal(`{"hello":"world"}`, str)
}

func Test_write_interface(t *testing.T) {
	should := require.New(t)
	var val interface{}
	val = "hello"
	str, err := MarshalToString(val)
	should.Nil(err)
	should.Equal(`"hello"`, str)
}

func Test_read_interface(t *testing.T) {
	should := require.New(t)
	var val interface{}
	err := UnmarshalFromString(`"hello"`, &val)
	should.Nil(err)
	should.Equal("hello", val)
}

func Test_read_custom_interface(t *testing.T) {
	should := require.New(t)
	var val MyInterface
	RegisterTypeDecoder("jsoniter.MyInterface", func(ptr unsafe.Pointer, iter *Iterator) {
		*((*MyInterface)(ptr)) = MyString(iter.ReadString())
	})
	err := UnmarshalFromString(`"hello"`, &val)
	should.Nil(err)
	should.Equal("hello", val.Hello())
}

View File

@ -1,84 +0,0 @@
package jsoniter

import (
	"bytes"
	"io"
	"testing"
)

func Test_read_by_one(t *testing.T) {
	iter := Parse(bytes.NewBufferString("abc"), 1)
	b := iter.readByte()
	if iter.Error != nil {
		t.Fatal(iter.Error)
	}
	if b != 'a' {
		t.Fatal(b)
	}
	iter.unreadByte()
	if iter.Error != nil {
		t.Fatal(iter.Error)
	}
	iter.unreadByte()
	if iter.Error == nil {
		t.FailNow()
	}
	iter.Error = nil
	b = iter.readByte()
	if iter.Error != nil {
		t.Fatal(iter.Error)
	}
	if b != 'a' {
		t.Fatal(b)
	}
}

func Test_read_by_two(t *testing.T) {
	iter := Parse(bytes.NewBufferString("abc"), 2)
	b := iter.readByte()
	if iter.Error != nil {
		t.Fatal(iter.Error)
	}
	if b != 'a' {
		t.Fatal(b)
	}
	b = iter.readByte()
	if iter.Error != nil {
		t.Fatal(iter.Error)
	}
	if b != 'b' {
		t.Fatal(b)
	}
	iter.unreadByte()
	if iter.Error != nil {
		t.Fatal(iter.Error)
	}
	iter.unreadByte()
	if iter.Error != nil {
		t.Fatal(iter.Error)
	}
	b = iter.readByte()
	if iter.Error != nil {
		t.Fatal(iter.Error)
	}
	if b != 'a' {
		t.Fatal(b)
	}
}

func Test_read_until_eof(t *testing.T) {
	iter := Parse(bytes.NewBufferString("abc"), 2)
	iter.readByte()
	iter.readByte()
	b := iter.readByte()
	if iter.Error != nil {
		t.Fatal(iter.Error)
	}
	if b != 'c' {
		t.Fatal(b)
	}
	iter.readByte()
	if iter.Error != io.EOF {
		t.Fatal(iter.Error)
	}
}

View File

@ -1,49 +0,0 @@
package jsoniter

import (
	"encoding/json"
	"io/ioutil"
	"os"
	"testing"
)

//func Test_large_file(t *testing.T) {
//	file, err := os.Open("/tmp/large-file.json")
//	if err != nil {
//		t.Fatal(err)
//	}
//	iter := Parse(file, 4096)
//	count := 0
//	for iter.ReadArray() {
//		iter.Skip()
//		count++
//	}
//	if count != 11351 {
//		t.Fatal(count)
//	}
//}

func Benchmark_jsoniter_large_file(b *testing.B) {
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		file, _ := os.Open("/tmp/large-file.json")
		iter := Parse(file, 4096)
		count := 0
		for iter.ReadArray() {
			iter.Skip()
			count++
		}
		file.Close()
	}
}

func Benchmark_json_large_file(b *testing.B) {
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		file, _ := os.Open("/tmp/large-file.json")
		bytes, _ := ioutil.ReadAll(file)
		file.Close()
		result := []struct{}{}
		json.Unmarshal(bytes, &result)
	}
}

View File

@ -1,58 +0,0 @@
package jsoniter

import (
	"testing"
	"github.com/json-iterator/go/require"
)

func Test_read_map(t *testing.T) {
	should := require.New(t)
	iter := ParseString(`{"hello": "world"}`)
	m := map[string]string{"1": "2"}
	iter.ReadVal(&m)
	copy(iter.buf, []byte{0, 0, 0, 0, 0, 0})
	should.Equal(map[string]string{"1": "2", "hello": "world"}, m)
}

func Test_read_map_of_interface(t *testing.T) {
	should := require.New(t)
	iter := ParseString(`{"hello": "world"}`)
	m := map[string]interface{}{"1": "2"}
	iter.ReadVal(&m)
	should.Equal(map[string]interface{}{"1": "2", "hello": "world"}, m)
	iter = ParseString(`{"hello": "world"}`)
	should.Equal(map[string]interface{}{"hello": "world"}, iter.Read())
}

func Test_wrap_map(t *testing.T) {
	should := require.New(t)
	any := Wrap(map[string]string{"Field1": "hello"})
	should.Equal("hello", any.Get("Field1").ToString())
	any = Wrap(map[string]string{"Field1": "hello"})
	should.Equal(1, any.Size())
	any = Wrap(map[string]string{"Field1": "hello"})
	vals := map[string]string{}
	var k string
	var v Any
	for next, hasNext := any.IterateObject(); hasNext; {
		k, v, hasNext = next()
		if v.ValueType() == String {
			vals[k] = v.ToString()
		}
	}
	should.Equal(map[string]string{"Field1": "hello"}, vals)
}

func Test_map_wrapper_any_get_all(t *testing.T) {
	should := require.New(t)
	any := Wrap(map[string][]int{"Field1": []int{1, 2}})
	should.Equal(`{"Field1":1}`, any.Get('*', 0).ToString())
}

func Test_write_val_map(t *testing.T) {
	should := require.New(t)
	val := map[string]string{"1": "2"}
	str, err := MarshalToString(val)
	should.Nil(err)
	should.Equal(`{"1":"2"}`, str)
}

View File

@ -1,88 +0,0 @@
package jsoniter

import (
	"encoding/json"
	"reflect"
	"testing"
)

type Level1 struct {
	Hello []Level2
}

type Level2 struct {
	World string
}

func Test_nested(t *testing.T) {
	iter := ParseString(`{"hello": [{"world": "value1"}, {"world": "value2"}]}`)
	l1 := Level1{}
	for l1Field := iter.ReadObject(); l1Field != ""; l1Field = iter.ReadObject() {
		switch l1Field {
		case "hello":
			l2Array := []Level2{}
			for iter.ReadArray() {
				l2 := Level2{}
				for l2Field := iter.ReadObject(); l2Field != ""; l2Field = iter.ReadObject() {
					switch l2Field {
					case "world":
						l2.World = iter.ReadString()
					default:
						iter.reportError("bind l2", "unexpected field: "+l2Field)
					}
				}
				l2Array = append(l2Array, l2)
			}
			l1.Hello = l2Array
		default:
			iter.reportError("bind l1", "unexpected field: "+l1Field)
		}
	}
	if !reflect.DeepEqual(l1, Level1{
		Hello: []Level2{
			{World: "value1"},
			{World: "value2"},
		},
	}) {
		t.Fatal(l1)
	}
}

func Benchmark_jsoniter_nested(b *testing.B) {
	for n := 0; n < b.N; n++ {
		iter := ParseString(`{"hello": [{"world": "value1"}, {"world": "value2"}]}`)
		l1 := Level1{}
		for l1Field := iter.ReadObject(); l1Field != ""; l1Field = iter.ReadObject() {
			switch l1Field {
			case "hello":
				l1.Hello = readLevel1Hello(iter)
			default:
				iter.Skip()
			}
		}
	}
}

func readLevel1Hello(iter *Iterator) []Level2 {
	l2Array := make([]Level2, 0, 2)
	for iter.ReadArray() {
		l2 := Level2{}
		for l2Field := iter.ReadObject(); l2Field != ""; l2Field = iter.ReadObject() {
			switch l2Field {
			case "world":
				l2.World = iter.ReadString()
			default:
				iter.Skip()
			}
		}
		l2Array = append(l2Array, l2)
	}
	return l2Array
}

func Benchmark_json_nested(b *testing.B) {
	for n := 0; n < b.N; n++ {
		l1 := Level1{}
		json.Unmarshal([]byte(`{"hello": [{"world": "value1"}, {"world": "value2"}]}`), &l1)
	}
}

View File

@ -1,82 +0,0 @@
package jsoniter

import (
	"testing"
	"github.com/json-iterator/go/require"
	"bytes"
)

func Test_read_null(t *testing.T) {
	should := require.New(t)
	iter := ParseString(`null`)
	should.True(iter.ReadNil())
	iter = ParseString(`null`)
	should.Nil(iter.Read())
	iter = ParseString(`null`)
	any, err := UnmarshalAnyFromString(`null`)
	should.Nil(err)
	should.Equal(0, any.ToInt())
	should.Equal(float64(0), any.ToFloat64())
	should.Equal("", any.ToString())
	should.False(any.ToBool())
}

func Test_write_null(t *testing.T) {
	should := require.New(t)
	buf := &bytes.Buffer{}
	stream := NewStream(buf, 4096)
	stream.WriteNil()
	stream.Flush()
	should.Nil(stream.Error)
	should.Equal("null", buf.String())
}

func Test_encode_null(t *testing.T) {
	should := require.New(t)
	str, err := MarshalToString(nil)
	should.Nil(err)
	should.Equal("null", str)
}

func Test_decode_null_object(t *testing.T) {
	iter := ParseString(`[null,"a"]`)
	iter.ReadArray()
	if iter.ReadObject() != "" {
		t.FailNow()
	}
	iter.ReadArray()
	if iter.ReadString() != "a" {
		t.FailNow()
	}
}

func Test_decode_null_array(t *testing.T) {
	iter := ParseString(`[null,"a"]`)
	iter.ReadArray()
	if iter.ReadArray() != false {
		t.FailNow()
	}
	iter.ReadArray()
	if iter.ReadString() != "a" {
		t.FailNow()
	}
}

func Test_decode_null_string(t *testing.T) {
	should := require.New(t)
	iter := ParseString(`[null,"a"]`)
	should.True(iter.ReadArray())
	should.True(iter.ReadNil())
	should.True(iter.ReadArray())
	should.Equal("a", iter.ReadString())
}

func Test_decode_null_skip(t *testing.T) {
	iter := ParseString(`[null,"a"]`)
	iter.ReadArray()
	iter.Skip()
	iter.ReadArray()
	if iter.ReadString() != "a" {
		t.FailNow()
	}
}

View File

@ -1,254 +0,0 @@
package jsoniter

import (
	"encoding/json"
	"testing"
	"github.com/json-iterator/go/require"
	"bytes"
)

func Test_empty_object(t *testing.T) {
	should := require.New(t)
	iter := ParseString(`{}`)
	field := iter.ReadObject()
	should.Equal("", field)
	iter = ParseString(`{}`)
	iter.ReadObjectCB(func(iter *Iterator, field string) bool {
		should.FailNow("should not call")
		return true
	})
}

func Test_one_field(t *testing.T) {
	should := require.New(t)
	iter := ParseString(`{"a": "b"}`)
	field := iter.ReadObject()
	should.Equal("a", field)
	value := iter.ReadString()
	should.Equal("b", value)
	field = iter.ReadObject()
	should.Equal("", field)
	iter = ParseString(`{"a": "b"}`)
	should.True(iter.ReadObjectCB(func(iter *Iterator, field string) bool {
		should.Equal("a", field)
		return true
	}))
}

func Test_two_field(t *testing.T) {
	should := require.New(t)
	iter := ParseString(`{ "a": "b" , "c": "d" }`)
	field := iter.ReadObject()
	should.Equal("a", field)
	value := iter.ReadString()
	should.Equal("b", value)
	field = iter.ReadObject()
	should.Equal("c", field)
	value = iter.ReadString()
	should.Equal("d", value)
	field = iter.ReadObject()
	should.Equal("", field)
	iter = ParseString(`{"field1": "1", "field2": 2}`)
	for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
		switch field {
		case "field1":
			iter.ReadString()
		case "field2":
			iter.ReadInt64()
		default:
			iter.reportError("bind object", "unexpected field")
		}
	}
}

func Test_read_object_as_any(t *testing.T) {
	should := require.New(t)
	any, err := UnmarshalAnyFromString(`{"a":"b","c":"d"}`)
	should.Nil(err)
	should.Equal(`{"a":"b","c":"d"}`, any.ToString())
	// partial parse
	should.Equal("b", any.Get("a").ToString())
	should.Equal("d", any.Get("c").ToString())
	should.Equal(2, len(any.Keys()))
	any, err = UnmarshalAnyFromString(`{"a":"b","c":"d"}`)
	// full parse
	should.Equal(2, len(any.Keys()))
	should.Equal(2, any.Size())
	should.True(any.ToBool())
	should.Equal(1, any.ToInt())
}

func Test_object_any_lazy_iterator(t *testing.T) {
	should := require.New(t)
	any, err := UnmarshalAnyFromString(`{"a":"b","c":"d"}`)
	should.Nil(err)
	// iterator parse
	vals := map[string]string{}
	var k string
	var v Any
	next, hasNext := any.IterateObject()
	should.True(hasNext)
	k, v, hasNext = next()
	should.True(hasNext)
	vals[k] = v.ToString()
	// trigger full parse
	should.Equal(2, len(any.Keys()))
	k, v, hasNext = next()
	should.False(hasNext)
	vals[k] = v.ToString()
	should.Equal(map[string]string{"a": "b", "c": "d"}, vals)
	vals = map[string]string{}
	for next, hasNext := any.IterateObject(); hasNext; k, v, hasNext = next() {
		vals[k] = v.ToString()
	}
	should.Equal(map[string]string{"a": "b", "c": "d"}, vals)
}

func Test_object_any_with_two_lazy_iterators(t *testing.T) {
	should := require.New(t)
	any, err := UnmarshalAnyFromString(`{"a":"b","c":"d","e":"f"}`)
	should.Nil(err)
	var k string
	var v Any
	next1, hasNext1 := any.IterateObject()
	next2, hasNext2 := any.IterateObject()
	should.True(hasNext1)
	k, v, hasNext1 = next1()
	should.True(hasNext1)
	should.Equal("a", k)
	should.Equal("b", v.ToString())
	should.True(hasNext2)
	k, v, hasNext2 = next2()
	should.True(hasNext2)
	should.Equal("a", k)
	should.Equal("b", v.ToString())
	k, v, hasNext1 = next1()
	should.True(hasNext1)
	should.Equal("c", k)
	should.Equal("d", v.ToString())
	k, v, hasNext2 = next2()
	should.True(hasNext2)
	should.Equal("c", k)
	should.Equal("d", v.ToString())
}

func Test_object_lazy_any_get(t *testing.T) {
	should := require.New(t)
	any, err := UnmarshalAnyFromString(`{"a":{"b":{"c":"d"}}}`)
	should.Nil(err)
	should.Equal("d", any.Get("a", "b", "c").ToString())
}

func Test_object_lazy_any_get_all(t *testing.T) {
	should := require.New(t)
	any, err := UnmarshalAnyFromString(`{"a":[0],"b":[1]}`)
	should.Nil(err)
	should.Equal(`{"a":0,"b":1}`, any.Get('*', 0).ToString())
}

func Test_object_lazy_any_get_invalid(t *testing.T) {
	should := require.New(t)
	any, err := UnmarshalAnyFromString(`{}`)
	should.Nil(err)
	should.Equal(Invalid, any.Get("a", "b", "c").ValueType())
	should.Equal(Invalid, any.Get(1).ValueType())
}

func Test_object_lazy_any_set(t *testing.T) {
	should := require.New(t)
	any, err := UnmarshalAnyFromString(`{"a":{"b":{"c":"d"}}}`)
	should.Nil(err)
	any.GetObject()["a"] = WrapInt64(1)
	str, err := MarshalToString(any)
	should.Nil(err)
	should.Equal(`{"a":1}`, str)
}

func Test_wrap_object(t *testing.T) {
	should := require.New(t)
	type TestObject struct {
		Field1 string
		field2 string
	}
	any := Wrap(TestObject{"hello", "world"})
	should.Equal("hello", any.Get("Field1").ToString())
	any = Wrap(TestObject{"hello", "world"})
	should.Equal(2, any.Size())
	any = Wrap(TestObject{"hello", "world"})
	vals := map[string]string{}
	var k string
	var v Any
	for next, hasNext := any.IterateObject(); hasNext; {
		k, v, hasNext = next()
		if v.ValueType() == String {
			vals[k] = v.ToString()
		}
	}
	should.Equal(map[string]string{"Field1": "hello"}, vals)
}

func Test_object_wrapper_any_get_all(t *testing.T) {
	should := require.New(t)
	type TestObject struct {
		Field1 []int
		Field2 []int
	}
	any := Wrap(TestObject{[]int{1, 2}, []int{3, 4}})
	should.Equal(`{"Field2":3,"Field1":1}`, any.Get('*', 0).ToString())
}

func Test_write_object(t *testing.T) {
	should := require.New(t)
	buf := &bytes.Buffer{}
	stream := NewStream(buf, 4096)
	stream.IndentionStep = 2
	stream.WriteObjectStart()
	stream.WriteObjectField("hello")
	stream.WriteInt(1)
	stream.WriteMore()
	stream.WriteObjectField("world")
	stream.WriteInt(2)
	stream.WriteObjectEnd()
	stream.Flush()
	should.Nil(stream.Error)
	should.Equal("{\n \"hello\":1,\n \"world\":2\n}", buf.String())
}

func Benchmark_jsoniter_object(b *testing.B) {
	type TestObj struct {
		Field1 string
		Field2 uint64
	}
	for n := 0; n < b.N; n++ {
		iter := ParseString(`{"field1": "1", "field2": 2}`)
		obj := TestObj{}
		for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
			switch field {
			case "field1":
				obj.Field1 = iter.ReadString()
			case "field2":
				obj.Field2 = iter.ReadUint64()
			default:
				iter.reportError("bind object", "unexpected field")
			}
		}
	}
}

func Benchmark_json_object(b *testing.B) {
	type TestObj struct {
		Field1 string
		Field2 uint64
	}
	for n := 0; n < b.N; n++ {
		result := TestObj{}
		json.Unmarshal([]byte(`{"field1": "1", "field2": 2}`), &result)
	}
}

View File

@ -1,45 +0,0 @@
package jsoniter

import (
	"testing"
	"github.com/json-iterator/go/require"
)

func Test_encode_optional_int_pointer(t *testing.T) {
	should := require.New(t)
	var ptr *int
	str, err := MarshalToString(ptr)
	should.Nil(err)
	should.Equal("null", str)
	val := 100
	ptr = &val
	str, err = MarshalToString(ptr)
	should.Nil(err)
	should.Equal("100", str)
}

func Test_decode_struct_with_optional_field(t *testing.T) {
	should := require.New(t)
	type TestObject struct {
		field1 *string
		field2 *string
	}
	obj := TestObject{}
	UnmarshalFromString(`{"field1": null, "field2": "world"}`, &obj)
	should.Nil(obj.field1)
	should.Equal("world", *obj.field2)
}

func Test_encode_struct_with_optional_field(t *testing.T) {
	should := require.New(t)
	type TestObject struct {
		field1 *string
		field2 *string
	}
	obj := TestObject{}
	world := "world"
	obj.field2 = &world
	str, err := MarshalToString(obj)
	should.Nil(err)
	should.Equal(`{"field1":null,"field2":"world"}`, str)
}

Some files were not shown because too many files have changed in this diff Show More