mirror of https://github.com/json-iterator/go.git synced 2025-06-03 22:27:26 +02:00

Compare commits


136 Commits

Author SHA1 Message Date
Tao Wen
71ac16282d
Merge pull request #635 from molon/fix-tests
encoding/json: Compatibility optimization
2022-09-16 07:37:16 +08:00
molon
720cd423d0 encoding/json: use standard ES6 formatting for numbers during marshal
92b3e3651d
2022-09-15 22:10:44 +08:00
molon
9b00d50ac0 Revert "std: float64/32 clean up e-09 to e-9"
This reverts commit 10b9ac0971bab50fe39f3afe993c39306ce27213.
2022-09-15 22:09:27 +08:00
molon
ffc487c633 encoding/json: allow non-string type keys for (un-)marshal
ffbd31e9f7
2022-09-15 22:07:27 +08:00
molon
10b9ac0971 std: float64/32 clean up e-09 to e-9 2022-09-15 21:13:32 +08:00
Tao Wen
bf8b920736
Merge pull request #628 from HassanBaker/upgrade-stretchr
bumped stretchr to v1.8.0 to get rid of yaml v2 vuln
2022-08-05 15:01:11 +08:00
Hassan Baker
a7b8414362 bumped stretchr to v1.8.0 to get rid of yaml v2 vuln
Signed-off-by: Hassan Baker <me@hassanbaker.dev>
2022-08-04 17:25:33 +01:00
Tao Wen
024077e996 fix go 1.18 compatibility 2021-09-11 10:17:26 +08:00
Tao Wen
08b5e5796b
notice incompatibility 2021-09-10 23:54:13 +08:00
Tao Wen
c6661824eb
Merge pull request #562 from runzhi/master
fix: invalid unicode while binary to string
2021-07-22 09:00:26 +08:00
origuo
3c55efb1a1 fix: invalid unicode while binary to string 2021-07-19 13:58:16 +08:00
Allen
e6b9536d36
Merge pull request #513 from AllenX2018/fix-issue-510
fix issue 510
2020-11-18 09:31:58 +08:00
AllenX2018
a3465d79a9 fix issue 510 2020-11-17 17:37:55 +08:00
Allen
9b79a3e192
fix issue 508 (#512) 2020-11-14 09:45:31 +08:00
fishyww
1779031cda fix issue 508 2020-11-13 15:58:42 +08:00
Ivan Boyarkin
6821bec9fa
jsoniter: Fix errors during reading integers from chunked io.Reader (#477)
This commit fixes a bug in the Iterator.assertInteger method when the following
conditions are met:
- the Iterator reads data from an `io.Reader`,
- the expected value is `0` (zero),
- `Iterator.tail == Iterator.head + 1`,
- `Iterator.tail < len(Iterator.buf)`,
- the value in the buffer after `Iterator.tail` is left over from the previous read and contains a '.' character.

A typical error the user can see is:
- assertInteger: can not decode float as int, error found in #X byte of ...

A regression test was added to check the correct behaviour.

Fixes #476
2020-08-06 09:14:08 +08:00
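A minimal reproduction-style sketch of the streaming scenario described above (not the project's actual regression test; the input and buffer layout are illustrative assumptions, since hitting the bug depends on exact buffer boundaries):

```go
package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Two whitespace-separated JSON values streamed from an io.Reader.
	// After decoding the leading 0, the iterator's buffer can still hold
	// the '.' of the next value, which is what tripped assertInteger.
	dec := jsoniter.NewDecoder(strings.NewReader("0 0.1"))

	var i int
	fmt.Println(dec.Decode(&i), i) // want: <nil> 0, not "can not decode float as int"

	var f float64
	fmt.Println(dec.Decode(&f), f) // want: <nil> 0.1
}
```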
Nikolay Dubina
9461257643
fixed null in number json tag string (#480) 2020-08-03 10:12:41 +08:00
AllenX2018
5bce16d299 fix issue #469 2020-07-21 17:07:23 +08:00
yjh
a1ca083078
update readme (#464) 2020-06-08 10:58:30 +08:00
allen
cd6773e694 remove quotation check for key when decoding map
we don't need to check whether the key is surrounded by quotation marks.
In fact, the key might not be a string if we register an extension to
customize the map key encoder/decoder. It may be an integer, float, or
even a struct.
2020-05-31 18:18:48 +08:00
AllenX2018
55287ed53a disable map_key_test temporarily
- disable this test until golang issues 38105 & 38940 are fixed
2020-05-08 15:08:25 +08:00
Sai To Yeung
8961be9c21
Map keys of custom types should serialize using MarshalText when available (#461)
* Map keys of custom types should serialize/deserialize using MarshalText/UnmarshalText when available

- this brings marshaling/unmarshaling behavior in line with encoding/json
- in general, any type that implements the interfaces from the encoding package (TextUnmarshaler, TextMarshaler, etc.) should use the provided method when available
2020-05-08 10:21:59 +08:00
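For reference, the encoding/json behaviour being matched looks like the sketch below, using a hypothetical Celsius key type that implements encoding.TextMarshaler and encoding.TextUnmarshaler (the type and values are illustrative, not from the PR):

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// Celsius is a hypothetical custom map-key type.
type Celsius float64

func (c Celsius) MarshalText() ([]byte, error) {
	return []byte(strconv.FormatFloat(float64(c), 'f', 1, 64)), nil
}

func (c *Celsius) UnmarshalText(text []byte) error {
	v, err := strconv.ParseFloat(string(text), 64)
	*c = Celsius(v)
	return err
}

func main() {
	// Map keys that implement TextMarshaler are serialized via MarshalText.
	out, _ := json.Marshal(map[Celsius]string{21.5: "comfortable"})
	fmt.Println(string(out)) // {"21.5":"comfortable"}

	// And round-tripped back through UnmarshalText.
	var m map[Celsius]string
	_ = json.Unmarshal(out, &m)
	fmt.Println(m[21.5]) // comfortable
}
```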
Allen
0f8241d334
Merge pull request #441 from robfig/flush2
(*Stream).WriteMore: do not Flush
2020-04-29 14:50:44 +08:00
AllenX2018
53b9d06ba7 temporarily comment out some test cases to fix the CI failure since go1.14 was released 2020-03-30 16:42:45 +08:00
Allen
1f7ee05ef8
Merge pull request #433 from AllenX2018/fix-anonymous-struct-error-message
fix issue #421
2020-03-30 16:26:14 +08:00
Allen
b22f393858
Merge pull request #450 from fantastao/fix_any_str_panic
fix any str ToInt64 ToUint64 panic
2020-03-26 19:37:32 +08:00
陶磊
bede7b9e40 fix any str ToInt64 ToUint64 panic
Change-Id: Ic1690713d96940258811cdc149b1604128aa91a2
2020-03-26 18:17:29 +08:00
AllenX2018
58aeb59006 fix issue #449 2020-03-26 14:46:47 +08:00
Allen
7acbb404a4
Merge pull request #432 from AllenX2018/fix-raw-message-integer-issue
fix issue #389 #411
2020-02-06 09:56:04 +08:00
Allen
69f2a91ff4
Merge pull request #429 from AllenX2018/fix-typo
fix issue #326
2020-02-05 09:51:54 +08:00
Rob Figueiredo
11a37a0774 (*Stream).WriteMore: remove implicit Flush
I believe that WriteMore should not call Flush for these reasons:

1. This is surprising for users because of inconsistency. Why call Flush in
   WriteMore and not in WriteObjectEnd?

2. It is not necessary; callers are free to call Flush if their use case demands
   it.

3. It harms performance in the common case by flushing the buffer much more
   frequently than it needs to be flushed.

The stream benchmark shows a 7% benefit to removing the Flush call, and I
observed a similar speedup in my real-world use case.

    benchmark                                        old ns/op     new ns/op     delta
    Benchmark_encode_string_with_SetEscapeHTML-8     442           437           -1.13%
    Benchmark_jsoniter_large_file-8                  21222         21062         -0.75%
    Benchmark_json_large_file-8                      40187         40266         +0.20%
    Benchmark_stream_encode_big_object-8             8611          7956          -7.61%

    benchmark                                        old allocs     new allocs     delta
    Benchmark_encode_string_with_SetEscapeHTML-8     6              6              +0.00%
    Benchmark_jsoniter_large_file-8                  78             78             +0.00%
    Benchmark_json_large_file-8                      13             13             +0.00%
    Benchmark_stream_encode_big_object-8             0              0              +0.00%

    benchmark                                        old bytes     new bytes     delta
    Benchmark_encode_string_with_SetEscapeHTML-8     760           760           +0.00%
    Benchmark_jsoniter_large_file-8                  4920          4920          +0.00%
    Benchmark_json_large_file-8                      6640          6640          +0.00%
    Benchmark_stream_encode_big_object-8             0             0             +0.00%

Backwards compatibility - I believe there is little to no risk that this breaks
callers. WriteMore does not leave the JSON in a valid state, so it must be
followed by other Write* methods. To get the finished JSON out, the caller must
already be calling Flush.
2020-01-17 22:14:50 -05:00
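As a usage sketch of what the change means for callers (the field names and buffer size below are illustrative assumptions): build the JSON with the Stream API, then flush explicitly once the document is complete.

```go
package main

import (
	"fmt"
	"os"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	stream := jsoniter.NewStream(jsoniter.ConfigDefault, os.Stdout, 512)

	stream.WriteObjectStart()
	stream.WriteObjectField("id")
	stream.WriteInt(1)
	stream.WriteMore() // writes only the ',' separator; no implicit Flush anymore
	stream.WriteObjectField("name")
	stream.WriteString("example")
	stream.WriteObjectEnd()

	// The caller decides when to flush the buffered JSON to the io.Writer.
	if err := stream.Flush(); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
```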
Rob Figueiredo
91f4a6405d (*Stream).Flush: reset buffer to beginning
Previously it would append to the end of the buffer instead of reusing the
now-free space.

Benchmark demonstrates the improvement, run with -benchtime=10s

    benchmark                                        old ns/op     new ns/op     delta
    Benchmark_encode_string_with_SetEscapeHTML-8     447           442           -1.12%
    Benchmark_jsoniter_large_file-8                  20998         21222         +1.07%
    Benchmark_json_large_file-8                      39593         40187         +1.50%
    Benchmark_stream_encode_big_object-8             10787         8611          -20.17%

    benchmark                                        old allocs     new allocs     delta
    Benchmark_encode_string_with_SetEscapeHTML-8     6              6              +0.00%
    Benchmark_jsoniter_large_file-8                  78             78             +0.00%
    Benchmark_json_large_file-8                      13             13             +0.00%
    Benchmark_stream_encode_big_object-8             31             0              -100.00%

    benchmark                                        old bytes     new bytes     delta
    Benchmark_encode_string_with_SetEscapeHTML-8     760           760           +0.00%
    Benchmark_jsoniter_large_file-8                  4920          4920          +0.00%
    Benchmark_json_large_file-8                      6640          6640          +0.00%
    Benchmark_stream_encode_big_object-8             10056         0             -100.00%

Fixes #438
2020-01-17 21:57:59 -05:00
Rob Figueiredo
a54d350455 benchmarks: add benchmark for Stream 2020-01-17 16:50:46 -05:00
allen
6f4c196d95 add more testcase 2020-01-16 17:17:18 +08:00
allen
8302a17e8c fix issue #421 2020-01-15 20:21:20 +08:00
allen
3987001e27 fix issue #389 #411 2020-01-14 14:14:02 +08:00
allen
78d9e97b7a fix issue #326 2020-01-10 16:11:04 +08:00
Tao Wen
49c900ee46
Merge pull request #427 from AllenX2018/fix-typo
fix the error message typo of ReadObjectCB & ReadMapCb function
2020-01-06 10:38:24 +08:00
allen
9c0685d8d3 fix the error message typo of ReadObjectCB & ReadMapCb function 2020-01-03 15:31:12 +08:00
Tao Wen
acfec88f7a
Merge pull request #422 from JensErat/map-invalid-type
pass nested error in compatible configuration, fixes #388
2019-12-21 11:10:28 +08:00
Tao Wen
e88512faf8
Merge pull request #423 from vano144/fix-attachments-on-stream
fix nil attachment on stream in custom encoder on sorted map
2019-12-21 11:09:53 +08:00
Tao Wen
b681149eae
Merge pull request #424 from aaronbee/sortKeysMapAllocations
Reduce allocations in sortKeysMapEncoder
2019-12-21 11:09:15 +08:00
Tao Wen
d1af7639b3
Merge pull request #425 from liggitt/default-max-depth
Revert "Merge pull request #418 from bbrks/configurable_maxDepth"
2019-12-21 11:04:54 +08:00
Jordan Liggitt
7c9f8c2d20 Revert "Merge pull request #418 from bbrks/configurable_maxDepth"
This reverts commit 44a7e7340d23d2e5d5d942966eb683436c027da0, reversing
changes made to dc11f49689fd1c9a6de20749def70bd889bf0d42.
2019-12-19 19:06:29 -05:00
Aaron Beitch
f814d6c0f1 Reduce allocations in sortKeysMapEncoder
Use one buffer for all values.
2019-12-03 11:55:47 -08:00
Ivan Romanov
aba8654400 fix nil attachment on stream in custom encoder on sorted map 2019-11-28 17:39:42 +03:00
Jens Erat
a1c9557592
pass nested error in compatible configuration
When invalid types inside a map were marshalled (in general, whenever
sorted maps are configured), the error message was not propagated out
of the map's `subStream`.

Also fix and re-enable the channel test, which now resembles the
behavior of `encoding/json` and tests both default and compatible
configurations.

Signed-off-by: Jens Erat <email@jenserat.de>
2019-11-22 16:56:59 +01:00
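A hedged sketch of the kind of case this fixes: marshalling a map that contains an unsupported value (here a channel) with a sorted-map configuration should surface the nested error instead of swallowing it (the exact error text is not guaranteed):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// ConfigCompatibleWithStandardLibrary enables SortMapKeys, so map values
	// are encoded through a sub-stream whose error must be propagated.
	var json = jsoniter.ConfigCompatibleWithStandardLibrary

	_, err := json.Marshal(map[string]interface{}{"bad": make(chan int)})
	fmt.Println(err) // expect a non-nil "unsupported type"-style error
}
```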
Tao Wen
44a7e7340d
Merge pull request #418 from bbrks/configurable_maxDepth
Add MaxDepth as a config option
2019-11-12 22:47:28 +08:00
Ben Brooks
2834c7e43c Remove large test values that fail on 32-bit architectures 2019-11-11 16:35:50 +00:00
Ben Brooks
d296277d5c Adds MaxDepth config option
Defaults to 10,000 to match the existing maxDepth constant everywhere,
except when using `ConfigCompatibleWithStandardLibrary` - which retains
the limitless depth (and causes a stack overflow).

Added tests for the new config, and also up to jsoniter's stack overflow limit.
2019-11-11 16:13:59 +00:00
Tao Wen
dc11f49689
Merge pull request #416 from jarredhawkins/issue-415
Ignore unnamed literals in structs
2019-10-30 08:35:33 +08:00
Jarred Hawkins
83f7b825b3 Unnamed struct literals 2019-10-28 23:05:10 -07:00
Tao Wen
03217c3e97
Merge pull request #410 from liggitt/stack
Limit nesting depth
2019-10-12 21:07:04 +08:00
Tao Wen
908eaed151
Merge pull request #408 from onelrdm/master
skip - tag before splitting parts
2019-10-12 21:05:04 +08:00
Jordan Liggitt
eec24895fe Limit nesting depth 2019-10-08 11:17:01 -04:00
onelrdm
1ba732a07d skip - tag before splitting parts 2019-09-28 17:17:44 +08:00
Tao Wen
819acad769
Merge pull request #398 from teou/master
use json.Marshaler then trim the last '\n' in reflect_marshaler
2019-09-23 14:02:24 +08:00
Tao Wen
695ec2b83b
Merge pull request #406 from bbrks/fix_nil_map_encoding
Fixes #405 - Encode nil map into null
2019-09-23 13:59:22 +08:00
Ben Brooks
028e2ef2bd Fixes #405 - Encode nil map into null 2019-09-19 13:11:30 +01:00
liyibo [李一博]
976454858b use json.Marshaler then trim the last '\n' in reflect_marshaler
N/A
2019-08-14 10:10:02 +08:00
Tao Wen
27518f6661
Merge pull request #373 from ernado/append-skip
fix #372: add AppendSkip iterator method
2019-06-22 00:12:01 +08:00
Tao Wen
94869abf43
Merge pull request #368 from alextomaili/fix-memory-allocation-overhead
allocate string for error description only if it is really required
2019-06-22 00:11:00 +08:00
Aleksandr Razumov
459f0e30ae
fix #37: add SkipAndAppendBytes iterator method 2019-06-10 12:40:05 +03:00
Tim Hockin
0039f4ac3d
Merge pull request #371 from nikhita/byte-base64-encode
Don't marshal empty byte or uint8 slice as null
2019-06-03 19:06:22 +02:00
Nikhita Raghunath
fb5614a4ca Don't marshal empty byte or uint8 slice as null
A []byte or []uint8 value is encoded as a base64-encoded string. Per this, a non-nil
empty slice should not get marshalled as null, but rather as "".

This restores compatibility with the standard library.
2019-06-03 16:19:17 +05:30
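A quick illustration of the standard-library behaviour this restores (nil slice vs. non-nil empty slice):

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	type payload struct {
		Data []byte `json:"data"`
	}

	a, _ := json.Marshal(payload{Data: nil})      // nil slice
	b, _ := json.Marshal(payload{Data: []byte{}}) // non-nil empty slice

	fmt.Println(string(a)) // {"data":null}
	fmt.Println(string(b)) // {"data":""}
}
```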
alextomaili
f71b9090aa allocate string for error description only if it is really required 2019-05-27 03:02:21 +03:00
Tao Wen
08047c174c fix #365, return error for +inf -inf and NaN 2019-05-23 13:57:43 +08:00
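A small sketch of the behaviour this commit targets; assuming the fix, marshalling non-finite floats should fail the same way encoding/json does (exact error wording not guaranteed):

```go
package main

import (
	"fmt"
	"math"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var json = jsoniter.ConfigCompatibleWithStandardLibrary

	for _, v := range []float64{math.Inf(1), math.Inf(-1), math.NaN()} {
		_, err := json.Marshal(v)
		fmt.Println(err) // expect a non-nil "unsupported value"-style error
	}
}
```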
Tao Wen
68347ec4d6
Merge pull request #366 from stephen-obashitech/master
Fix typo in UnmarshalFromString documentation
2019-05-22 13:56:51 +08:00
Stephen Brown
0fd91468bb
Fix typo in UnmarshalFromString documentation 2019-05-21 12:48:31 +01:00
Tao Wen
1bc9828b4f
Merge pull request #361 from lggomez/master
Add go module definition
2019-05-19 23:23:02 +08:00
Luis GG
24c3d57281 Add go module definition 2019-04-25 17:40:48 -03:00
Tao Wen
0ff49de124 update README 2019-03-06 22:29:09 +08:00
Tao Wen
5bc9320502
Merge pull request #316 from proemergotech/master
fix #308 do NOT skip embedded structs without tag when OnlyTaggedField is set to true
2019-02-08 15:56:42 +08:00
Tao Wen
f64ce68b6e
Merge pull request #338 from dvrkps/master
Clean go vet error and ineffassign warnings.
2019-01-14 23:53:30 +08:00
Tao Wen
2d42ff74dd
Merge pull request #337 from denverdino/fix-encode-with-MarshalJSON
Fix the incompatible encoding #336
2019-01-14 23:52:16 +08:00
Davor Kapsa
3a023a5fbc clean readPositiveFloat64 2019-01-10 18:00:15 +01:00
Davor Kapsa
16aef10b2b clean readPositiveFloat32 2019-01-10 17:55:28 +01:00
Davor Kapsa
ae4c002f78 rename ExampleMyKey 2019-01-10 17:51:25 +01:00
Li Yi
e4aa2ec063 Fix the incompatible encoding 2019-01-03 18:19:22 +08:00
Tao Wen
d05f387f50 fix #317, try parse as BigFloat if overflow 2018-11-12 14:45:56 +08:00
Adam Kiss
a9403d25cd fix #308 do NOT skip embedded structs without tag when OnlyTaggedField is set to true 2018-10-29 12:00:51 +01:00
Tao Wen
05d041de10 fix #313 support json marshaller type as map key 2018-10-24 23:28:41 +08:00
Tao Wen
5916df66b3 fix #311 handle nil any 2018-10-24 21:05:37 +08:00
Tao Wen
2433035e51
Merge pull request #304 from Quasilyte/quasilyte/emptyFallthrough
use multi-value case clause instead of fallthrough
2018-09-14 09:48:43 +08:00
Tao Wen
6dfc0bf2dd
Merge pull request #305 from Quasilyte/quasilyte/assignOp
simplify `x = x <op> y` to `x <op>= y`
2018-09-14 09:48:25 +08:00
Tao Wen
b9be8dd373
Merge pull request #306 from Quasilyte/quasilyte/underef
remove redundant dereferencing expressions
2018-09-14 09:47:54 +08:00
Tao Wen
b8d78b6aaf
Merge pull request #307 from Quasilyte/quasilyte/commentedOutCode
any_tests: remove commented-out code
2018-09-14 09:47:25 +08:00
Iskander Sharipov
7109b5e7dd any_tests: remove commented-out code
Found using https://go-critic.github.io/overview#commentedOutCode-ref
2018-09-13 21:57:53 +03:00
Iskander Sharipov
4cc76529e8 remove redundant dereferencing expressions
Found using https://go-critic.github.io/overview#underef-ref
2018-09-13 21:51:59 +03:00
Iskander Sharipov
c5ddac9dc3 simplify x = x <op> y to x <op>= y
Found using https://go-critic.github.io/overview#assignOp-ref
2018-09-13 21:48:13 +03:00
Iskander Sharipov
f76d712086 use multi-value case clause instead of fallthrough
Found using https://go-critic.github.io/overview#emptyFallthrough-ref
2018-09-13 21:43:37 +03:00
Tao Wen
1624edc445 fix #295 decoder more was not compatible with standard library 2018-08-06 14:07:27 +08:00
Tao Wen
5d789e5e02 fix #291 omit empty was not handled properly for json raw message 2018-08-06 13:58:33 +08:00
Tao Wen
0260c89b54 fix #286 calcHash should use byte not rune to calc hash 2018-08-06 13:23:06 +08:00
Tao Wen
10a568c511 fix #293 copy extensions 2018-07-22 11:51:51 +08:00
Tao Wen
ab8a2e0c74 fix #276 allow rename when set naming strategy 2018-07-01 15:16:28 +08:00
Tao Wen
2fbdfbb595 merge 2018-07-01 13:06:34 +08:00
Tao Wen
720ab8dc7f add tests for #283 2018-07-01 13:05:25 +08:00
Tim Hockin
f2b4162afb
Merge pull request #285 from nikhita/fix-case-sensitivity
Fix case sensitivity
2018-06-12 13:28:35 -07:00
Nikhita Raghunath
3830516ed0 Fix case sensitivity for nested fields 2018-06-12 11:27:24 +05:30
Tao Wen
7cceb6c2e3
Merge pull request #282 from caesarxuchao/optional-case-sensitivity
Make case sensitivity optional
2018-06-10 17:13:47 +08:00
Chao Xu
b92cf78708 Make case sensitivity optional. Fix
https://github.com/kubernetes/kubernetes/issues/64612
2018-06-07 21:01:05 -07:00
Tao Wen
8744d7c5c7 \n should not be ignored in base64 decode 2018-05-26 09:43:29 +08:00
Tao Wen
37cc313d18 fix #274, unescape before base64 decode 2018-05-26 09:38:52 +08:00
Tao Wen
2ddf6d7582
Merge pull request #266 from ceshihao/fix_base64_with_whitespace
fix base64 contains newline case
2018-04-24 08:46:23 +08:00
ceshihao
6a6742f0a2 fix base64 contains newline characters \r or \n 2018-04-23 23:10:55 +08:00
Tao Wen
6c702ce12a fix #264 check io.EOF when test decoder.More 2018-04-20 16:10:56 +08:00
Tao Wen
f88871b601 fix #263, support empty string as 0 in fuzz mode 2018-04-18 16:34:54 +08:00
Tao Wen
f246f80f14 fix #260, support rename for extra.SupportPrivateFields 2018-04-18 16:28:55 +08:00
Tao Wen
51dd70305b add more test for #252 2018-04-18 16:22:47 +08:00
Tao Wen
a949c42748 fix #261 should load from reader 2018-04-18 16:11:14 +08:00
Tao Wen
f89479f5c0
Merge pull request #257 from ash2k/release-writer
Release writer to enable GC
2018-04-08 08:25:46 +08:00
Mikhail Mazurskiy
b858ec296c Release writer to enable GC 2018-04-07 21:40:08 +10:00
Tao Wen
885a41a0a6 Merge branch 'master' of https://github.com/json-iterator/go 2018-04-03 13:41:12 +08:00
Tao Wen
9e9a97040e always benchmark yourself 2018-04-03 13:41:01 +08:00
Tao Wen
fb4d53e4cc
Merge pull request #255 from bboreham/error-test
Add a test for input errors, and fix one bug that it finds
2018-04-02 13:50:44 +08:00
Bryan Boreham
b53656d459 Check that a struct ends with closing brace 2018-04-01 22:02:44 +00:00
Bryan Boreham
8f27a81d90 Add a test for input errors
Send various malformed JSON strings into the decoder for each type,
and check we get an error each time.
2018-04-01 22:01:21 +00:00
Tao Wen
4930b053b8 explicit test for case sensitivity for #252 2018-03-24 22:38:32 +08:00
Tao Wen
06e0f9391e fix #250 case insensitive field match 2018-03-20 21:43:30 +08:00
Tao Wen
ca39e5af3e support encoding map[interface{}]interface{} 2018-03-15 21:28:16 +08:00
Tao Wen
39acec93e0 expose DecoderOf and EncoderOf 2018-03-14 23:18:20 +08:00
Tao Wen
25fa392355 fix #245, always reuse existing value even UseNumber 2018-03-01 19:23:20 +08:00
Tao Wen
d51e841de0 fix build on 1.8 2018-02-28 20:29:23 +08:00
Tao Wen
3353055b2a use concurrent.Map for 1.8 support 2018-02-28 17:11:57 +08:00
Tao Wen
455b3f8bb8 move reflect2 from plz to modern-go 2018-02-28 17:09:30 +08:00
Tao Wen
2a93f9003e fix #244 use BinaryAsStringExtension to make []byte output pretty; the output is valid JSON, but it can not be decoded by other JSON codecs, as they decode the escape \x01 to the literal text \x01, which is not the original input 2018-02-27 12:40:48 +08:00
Tao Wen
9472474ffd test []byte behavior 2018-02-27 12:04:11 +08:00
Tao Wen
ad83167dc6 fix #243 fuzzy decoder should take null as valid input 2018-02-26 23:22:22 +08:00
Tao Wen
fff342fd04 gofmt 2018-02-24 22:04:41 +08:00
Tao Wen
8d6662b81b fix #242 add CreateMapKeyEncoder and CreateMapKeyDecoder to extension spi 2018-02-24 22:04:11 +08:00
Tao Wen
a377e2656b add map key example 2018-02-23 18:20:14 +08:00
Tao Wen
0ac74bba4a upgrade reflect2 2018-02-23 08:20:31 +08:00
Tao Wen
ebe943a4a6 fix #241, support 32bit platform 2018-02-23 08:12:45 +08:00
Tao Wen
414d0307c9 fix struct decoder report error 2018-02-22 13:30:59 +08:00
Tao Wen
86e9fd72bc update pkg 2018-02-22 11:48:56 +08:00
107 changed files with 2828 additions and 662 deletions

Gopkg.lock (generated)

@ -2,32 +2,20 @@
[[projects]]
name = "github.com/davecgh/go-spew"
packages = ["spew"]
revision = "346938d642f2ec3594ed81d874461961cd0faa76"
version = "v1.1.0"
[[projects]]
branch = "master"
name = "github.com/google/gofuzz"
name = "github.com/modern-go/concurrent"
packages = ["."]
revision = "24818f796faf91cd76ec7bddd72458fbced7a6c1"
revision = "e0a39a4cb4216ea8db28e22a69f4ec25610d513a"
version = "1.0.0"
[[projects]]
name = "github.com/pmezard/go-difflib"
packages = ["difflib"]
revision = "792786c7400a136282c1664665ae0a8db921c6c2"
version = "v1.0.0"
[[projects]]
name = "github.com/stretchr/testify"
packages = ["assert","require"]
revision = "69483b4bd14f5845b5a1e55bca19e954e827f1d0"
version = "v1.1.4"
name = "github.com/modern-go/reflect2"
packages = ["."]
revision = "4b7aa43c6742a2c18fdef89dd197aaae7dac7ccd"
version = "1.0.1"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "f8b7cf3941d3792cbbd570bb53c093adaf774334d1162c651565c97a58dc9d09"
inputs-digest = "ea54a775e5a354cb015502d2e7aa4b74230fc77e894f34a838b268c25ec8eeb8"
solver-name = "gps-cdcl"
solver-version = 1


@ -19,15 +19,8 @@
# name = "github.com/x/y"
# version = "2.4.0"
ignored = ["github.com/davecgh/go-spew*","github.com/google/gofuzz*","github.com/stretchr/testify*"]
[[constraint]]
name = "github.com/davecgh/go-spew"
version = "1.1.0"
[[constraint]]
branch = "master"
name = "github.com/google/gofuzz"
[[constraint]]
name = "github.com/stretchr/testify"
version = "1.1.4"
name = "github.com/modern-go/reflect2"
version = "1.0.1"


@ -1,5 +1,5 @@
[![Sourcegraph](https://sourcegraph.com/github.com/json-iterator/go/-/badge.svg)](https://sourcegraph.com/github.com/json-iterator/go?badge)
[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/json-iterator/go)
[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](https://pkg.go.dev/github.com/json-iterator/go)
[![Build Status](https://travis-ci.org/json-iterator/go.svg?branch=master)](https://travis-ci.org/json-iterator/go)
[![codecov](https://codecov.io/gh/json-iterator/go/branch/master/graph/badge.svg)](https://codecov.io/gh/json-iterator/go)
[![rcard](https://goreportcard.com/badge/github.com/json-iterator/go)](https://goreportcard.com/report/github.com/json-iterator/go)
@ -8,12 +8,6 @@
A high-performance 100% compatible drop-in replacement of "encoding/json"
You can also use thrift like JSON using [thrift-iterator](https://github.com/thrift-iterator/go)
```
Go developers, please join us: DiDi Chuxing Platform Technology Department, taowen@didichuxing.com
```
# Benchmark
![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png)
@ -22,14 +16,17 @@ Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/githu
Raw Result (easyjson requires static code generation)
| | ns/op | allocation bytes | allocation times |
| --- | --- | --- | --- |
| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
| | ns/op | allocation bytes | allocation times |
| --------------- | ----------- | ---------------- | ---------------- |
| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
Always benchmark with your own workload.
The result depends heavily on the data input.
# Usage
@ -42,10 +39,10 @@ import "encoding/json"
json.Marshal(&data)
```
with
with
```go
import "github.com/json-iterator/go"
import jsoniter "github.com/json-iterator/go"
var json = jsoniter.ConfigCompatibleWithStandardLibrary
json.Marshal(&data)
@ -61,7 +58,7 @@ json.Unmarshal(input, &data)
with
```go
import "github.com/json-iterator/go"
import jsoniter "github.com/json-iterator/go"
var json = jsoniter.ConfigCompatibleWithStandardLibrary
json.Unmarshal(input, &data)
@ -79,10 +76,10 @@ go get github.com/json-iterator/go
Contributors
* [thockin](https://github.com/thockin)
* [mattn](https://github.com/mattn)
* [cch123](https://github.com/cch123)
* [Oleg Shaldybin](https://github.com/olegshaldybin)
* [Jason Toffaletti](https://github.com/toffaletti)
- [thockin](https://github.com/thockin)
- [mattn](https://github.com/mattn)
- [cch123](https://github.com/cch123)
- [Oleg Shaldybin](https://github.com/olegshaldybin)
- [Jason Toffaletti](https://github.com/toffaletti)
Report issue or pull request, or email taowen@gmail.com, or [![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby)


@ -16,7 +16,7 @@ func Unmarshal(data []byte, v interface{}) error {
return ConfigDefault.Unmarshal(data, v)
}
// UnmarshalFromString convenient method to read from string instead of []byte
// UnmarshalFromString is a convenient method to read from string instead of []byte
func UnmarshalFromString(str string, v interface{}) error {
return ConfigDefault.UnmarshalFromString(str, v)
}
@ -77,7 +77,16 @@ func (adapter *Decoder) Decode(obj interface{}) error {
// More is there more?
func (adapter *Decoder) More() bool {
return adapter.iter.head != adapter.iter.tail
iter := adapter.iter
if iter.Error != nil {
return false
}
c := iter.nextToken()
if c == 0 {
return false
}
iter.unreadByte()
return c != ']' && c != '}'
}
// Buffered remaining buffer
@ -91,7 +100,7 @@ func (adapter *Decoder) Buffered() io.Reader {
func (adapter *Decoder) UseNumber() {
cfg := adapter.iter.cfg.configBeforeFrozen
cfg.UseNumber = true
adapter.iter.cfg = cfg.frozeWithCacheReuse()
adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
}
// DisallowUnknownFields causes the Decoder to return an error when the destination
@ -100,7 +109,7 @@ func (adapter *Decoder) UseNumber() {
func (adapter *Decoder) DisallowUnknownFields() {
cfg := adapter.iter.cfg.configBeforeFrozen
cfg.DisallowUnknownFields = true
adapter.iter.cfg = cfg.frozeWithCacheReuse()
adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
}
// NewEncoder same as json.NewEncoder
@ -125,14 +134,14 @@ func (adapter *Encoder) Encode(val interface{}) error {
func (adapter *Encoder) SetIndent(prefix, indent string) {
config := adapter.stream.cfg.configBeforeFrozen
config.IndentionStep = len(indent)
adapter.stream.cfg = config.frozeWithCacheReuse()
adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
}
// SetEscapeHTML escape html by default, set to false to disable
func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
config := adapter.stream.cfg.configBeforeFrozen
config.EscapeHTML = escapeHTML
adapter.stream.cfg = config.frozeWithCacheReuse()
adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
}
// Valid reports whether data is a valid JSON encoding.

any.go

@ -3,10 +3,11 @@ package jsoniter
import (
"errors"
"fmt"
"github.com/modern-go/reflect2"
"io"
"reflect"
"strconv"
"unsafe"
"github.com/v2pro/plz/reflect2"
)
// Any generic object representation.
@ -101,6 +102,9 @@ func Wrap(val interface{}) Any {
case reflect.String:
return WrapString(val.(string))
case reflect.Int:
if strconv.IntSize == 32 {
return WrapInt32(int32(val.(int)))
}
return WrapInt64(int64(val.(int)))
case reflect.Int8:
return WrapInt32(int32(val.(int8)))
@ -111,7 +115,15 @@ func Wrap(val interface{}) Any {
case reflect.Int64:
return WrapInt64(val.(int64))
case reflect.Uint:
if strconv.IntSize == 32 {
return WrapUint32(uint32(val.(uint)))
}
return WrapUint64(uint64(val.(uint)))
case reflect.Uintptr:
if ptrSize == 32 {
return WrapUint32(uint32(val.(uintptr)))
}
return WrapUint64(uint64(val.(uintptr)))
case reflect.Uint8:
return WrapUint32(uint32(val.(uint8)))
case reflect.Uint16:
@ -300,6 +312,10 @@ func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
any := *(*Any)(ptr)
if any == nil {
stream.WriteNil()
return
}
any.WriteTo(stream)
}


@ -64,7 +64,6 @@ func (any *stringAny) ToInt64() int64 {
flag := 1
startPos := 0
endPos := 0
if any.val[0] == '+' || any.val[0] == '-' {
startPos = 1
}
@ -73,6 +72,7 @@ func (any *stringAny) ToInt64() int64 {
flag = -1
}
endPos := startPos
for i := startPos; i < len(any.val); i++ {
if any.val[i] >= '0' && any.val[i] <= '9' {
endPos = i + 1
@ -98,7 +98,6 @@ func (any *stringAny) ToUint64() uint64 {
}
startPos := 0
endPos := 0
if any.val[0] == '-' {
return 0
@ -107,6 +106,7 @@ func (any *stringAny) ToUint64() uint64 {
startPos = 1
}
endPos := startPos
for i := startPos; i < len(any.val); i++ {
if any.val[i] >= '0' && any.val[i] <= '9' {
endPos = i + 1


@ -3,8 +3,8 @@ package any_tests
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_read_empty_array_as_any(t *testing.T) {


@ -4,8 +4,8 @@ import (
"fmt"
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
var boolConvertMap = map[string]bool{


@ -3,8 +3,8 @@ package any_tests
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
var floatConvertMap = map[string]float64{
@ -82,10 +82,8 @@ func Test_read_float_to_any(t *testing.T) {
should := require.New(t)
any := jsoniter.WrapFloat64(12.3)
anyFloat64 := float64(12.3)
//negaAnyFloat64 := float64(-1.1)
any2 := jsoniter.WrapFloat64(-1.1)
should.Equal(float64(12.3), any.ToFloat64())
//should.Equal("12.3", any.ToString())
should.True(any.ToBool())
should.Equal(float32(anyFloat64), any.ToFloat32())
should.Equal(int(anyFloat64), any.ToInt())


@ -4,8 +4,8 @@ import (
"fmt"
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
var intConvertMap = map[string]int{


@ -1,9 +1,9 @@
package any_tests
import (
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"testing"
"github.com/json-iterator/go"
)
func Test_wrap_map(t *testing.T) {


@ -1,9 +1,9 @@
package any_tests
import (
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"testing"
"github.com/json-iterator/go"
)
func Test_read_null_as_any(t *testing.T) {


@ -3,8 +3,8 @@ package any_tests
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_read_object_as_any(t *testing.T) {
@ -118,6 +118,4 @@ func Test_object_wrapper_any_get_all(t *testing.T) {
should.Contains(any.Keys(), "Field1")
should.Contains(any.Keys(), "Field2")
should.NotContains(any.Keys(), "Field3")
//should.Contains(any.GetObject()["Field1"].GetArray()[0], 1)
}
}


@ -3,29 +3,29 @@ package any_tests
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
var stringConvertMap = map[string]string{
"null": "",
"321.1": "321.1",
`"1.1"`: "1.1",
`"-123.1"`: "-123.1",
"0.0": "0.0",
"0": "0",
`"0"`: "0",
`"0.0"`: "0.0",
`"00.0"`: "00.0",
"true": "true",
"false": "false",
`"true"`: "true",
`"false"`: "false",
`"true123"`: "true123",
`"+1"`: "+1",
"[]": "[]",
"[1,2]": "[1,2]",
"{}": "{}",
"null": "",
"321.1": "321.1",
`"1.1"`: "1.1",
`"-123.1"`: "-123.1",
"0.0": "0.0",
"0": "0",
`"0"`: "0",
`"0.0"`: "0.0",
`"00.0"`: "00.0",
"true": "true",
"false": "false",
`"true"`: "true",
`"false"`: "false",
`"true123"`: "true123",
`"+1"`: "+1",
"[]": "[]",
"[1,2]": "[1,2]",
"{}": "{}",
`{"a":1, "stream":true}`: `{"a":1, "stream":true}`,
}


@ -3,8 +3,8 @@ package any_tests
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
// if must be valid is useless, just drop this test


@ -3,8 +3,8 @@ package any_tests
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_wrap_and_valuetype_everything(t *testing.T) {


@ -1,10 +1,11 @@
package test
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"encoding/json"
"testing"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_use_number_for_unmarshal(t *testing.T) {
@ -23,7 +24,6 @@ func Test_customize_float_marshal(t *testing.T) {
should.Equal("1.234568", str)
}
func Test_customize_tag_key(t *testing.T) {
type TestObject struct {
@ -45,4 +45,185 @@ func Test_read_large_number_as_interface(t *testing.T) {
output, err := jsoniter.MarshalToString(val)
should.Nil(err)
should.Equal(`123456789123456789123456789`, output)
}
}
type caseSensitiveStruct struct {
A string `json:"a"`
B string `json:"b,omitempty"`
C *C `json:"C,omitempty"`
}
type C struct {
D int64 `json:"D,omitempty"`
E *E `json:"e,omitempty"`
}
type E struct {
F string `json:"F,omitempty"`
}
func Test_CaseSensitive(t *testing.T) {
should := require.New(t)
testCases := []struct {
input string
expectedOutput string
caseSensitive bool
}{
{
input: `{"A":"foo","B":"bar"}`,
expectedOutput: `{"a":"foo","b":"bar"}`,
caseSensitive: false,
},
{
input: `{"a":"foo","b":"bar"}`,
expectedOutput: `{"a":"foo","b":"bar"}`,
caseSensitive: true,
},
{
input: `{"a":"foo","b":"bar","C":{"D":10}}`,
expectedOutput: `{"a":"foo","b":"bar","C":{"D":10}}`,
caseSensitive: true,
},
{
input: `{"a":"foo","B":"bar","c":{"d":10}}`,
expectedOutput: `{"a":"foo"}`,
caseSensitive: true,
},
{
input: `{"a":"foo","C":{"d":10}}`,
expectedOutput: `{"a":"foo","C":{}}`,
caseSensitive: true,
},
{
input: `{"a":"foo","C":{"D":10,"e":{"f":"baz"}}}`,
expectedOutput: `{"a":"foo","C":{"D":10,"e":{}}}`,
caseSensitive: true,
},
{
input: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
expectedOutput: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
caseSensitive: true,
},
{
input: `{"A":"foo","c":{"d":10,"E":{"f":"baz"}}}`,
expectedOutput: `{"a":"foo","C":{"D":10,"e":{"F":"baz"}}}`,
caseSensitive: false,
},
}
for _, tc := range testCases {
val := caseSensitiveStruct{}
err := jsoniter.Config{CaseSensitive: tc.caseSensitive}.Froze().UnmarshalFromString(tc.input, &val)
should.Nil(err)
output, err := jsoniter.MarshalToString(val)
should.Nil(err)
should.Equal(tc.expectedOutput, output)
}
}
type structWithElevenFields struct {
A string `json:"A,omitempty"`
B string `json:"B,omitempty"`
C string `json:"C,omitempty"`
D string `json:"d,omitempty"`
E string `json:"e,omitempty"`
F string `json:"f,omitempty"`
G string `json:"g,omitempty"`
H string `json:"h,omitempty"`
I string `json:"i,omitempty"`
J string `json:"j,omitempty"`
K string `json:"k,omitempty"`
}
func Test_CaseSensitive_MoreThanTenFields(t *testing.T) {
should := require.New(t)
testCases := []struct {
input string
expectedOutput string
caseSensitive bool
}{
{
input: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6","g":"7","h":"8","i":"9","j":"10","k":"11"}`,
expectedOutput: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6","g":"7","h":"8","i":"9","j":"10","k":"11"}`,
caseSensitive: true,
},
{
input: `{"a":"1","b":"2","c":"3","D":"4","E":"5","F":"6"}`,
expectedOutput: `{"A":"1","B":"2","C":"3","d":"4","e":"5","f":"6"}`,
caseSensitive: false,
},
{
input: `{"A":"1","b":"2","d":"4","E":"5"}`,
expectedOutput: `{"A":"1","d":"4"}`,
caseSensitive: true,
},
}
for _, tc := range testCases {
val := structWithElevenFields{}
err := jsoniter.Config{CaseSensitive: tc.caseSensitive}.Froze().UnmarshalFromString(tc.input, &val)
should.Nil(err)
output, err := jsoniter.MarshalToString(val)
should.Nil(err)
should.Equal(tc.expectedOutput, output)
}
}
type onlyTaggedFieldStruct struct {
A string `json:"a"`
B string
FSimpl F `json:"f_simpl"`
ISimpl I
FPtr *F `json:"f_ptr"`
IPtr *I
F
*I
}
type F struct {
G string `json:"g"`
H string
}
type I struct {
J string `json:"j"`
K string
}
func Test_OnlyTaggedField(t *testing.T) {
should := require.New(t)
obj := onlyTaggedFieldStruct{
A: "a",
B: "b",
FSimpl: F{G: "g", H: "h"},
ISimpl: I{J: "j", K: "k"},
FPtr: &F{G: "g", H: "h"},
IPtr: &I{J: "j", K: "k"},
F: F{G: "g", H: "h"},
I: &I{J: "j", K: "k"},
}
output, err := jsoniter.Config{OnlyTaggedField: true}.Froze().Marshal(obj)
should.Nil(err)
m := make(map[string]interface{})
err = jsoniter.Unmarshal(output, &m)
should.Nil(err)
should.Equal(map[string]interface{}{
"a": "a",
"f_simpl": map[string]interface{}{
"g": "g",
},
"f_ptr": map[string]interface{}{
"g": "g",
},
"g": "g",
"j": "j",
}, m)
}


@ -2,11 +2,11 @@ package test
import (
"bytes"
"github.com/stretchr/testify/require"
"testing"
"github.com/json-iterator/go"
"io/ioutil"
"encoding/json"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"io/ioutil"
"testing"
)
func Test_disallowUnknownFields(t *testing.T) {
@ -18,7 +18,6 @@ func Test_disallowUnknownFields(t *testing.T) {
should.Error(decoder.Decode(&obj))
}
func Test_new_decoder(t *testing.T) {
should := require.New(t)
decoder1 := json.NewDecoder(bytes.NewBufferString(`[1][2]`))
@ -57,3 +56,9 @@ func Test_use_number(t *testing.T) {
should.Nil(decoder2.Decode(&obj2))
should.Equal(json.Number("123"), obj2)
}
func Test_decoder_more(t *testing.T) {
should := require.New(t)
decoder := jsoniter.NewDecoder(bytes.NewBufferString("abcde"))
should.True(decoder.More())
}


@ -8,8 +8,8 @@ import (
"testing"
"unicode/utf8"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_new_encoder(t *testing.T) {


@ -1,11 +1,11 @@
package test
import (
"testing"
"github.com/stretchr/testify/require"
"bytes"
"github.com/json-iterator/go"
"encoding/json"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"testing"
)
// Standard Encoder has trailing newline.
@ -17,4 +17,4 @@ func TestEncoderHasTrailingNewline(t *testing.T) {
stdenc := json.NewEncoder(&stdbuf)
stdenc.Encode(1)
should.Equal(stdbuf.Bytes(), buf.Bytes())
}
}


@ -2,9 +2,9 @@ package test
import (
"encoding/json"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"testing"
"github.com/json-iterator/go"
)
func Test_marshal_indent(t *testing.T) {


@ -0,0 +1,47 @@
package test
import (
"bytes"
"encoding/json"
"testing"
jsoniter "github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
var marshalConfig = jsoniter.Config{
EscapeHTML: false,
SortMapKeys: true,
ValidateJsonRawMessage: true,
}.Froze()
type Container struct {
Bar interface{}
}
func (c *Container) MarshalJSON() ([]byte, error) {
return marshalConfig.Marshal(&c.Bar)
}
func TestEncodeEscape(t *testing.T) {
should := require.New(t)
container := &Container{
Bar: []string{"123<ab>", "ooo"},
}
out, err := marshalConfig.Marshal(container)
should.Nil(err)
bufout := string(out)
var stdbuf bytes.Buffer
stdenc := json.NewEncoder(&stdbuf)
stdenc.SetEscapeHTML(false)
err = stdenc.Encode(container)
should.Nil(err)
stdout := string(stdbuf.Bytes())
if stdout[len(stdout)-1:] == "\n" {
stdout = stdout[:len(stdout)-1]
}
should.Equal(stdout, bufout)
}


@ -0,0 +1,36 @@
package test
import (
"bytes"
"encoding/json"
"github.com/json-iterator/go"
"testing"
"github.com/stretchr/testify/require"
)
type Foo struct {
Bar interface{}
}
func (f Foo) MarshalJSON() ([]byte, error) {
var buf bytes.Buffer
err := json.NewEncoder(&buf).Encode(f.Bar)
return buf.Bytes(), err
}
// Standard Encoder has trailing newline.
func TestEncodeMarshalJSON(t *testing.T) {
foo := Foo {
Bar: 123,
}
should := require.New(t)
var buf, stdbuf bytes.Buffer
enc := jsoniter.ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
enc.Encode(foo)
stdenc := json.NewEncoder(&stdbuf)
stdenc.Encode(foo)
should.Equal(stdbuf.Bytes(), buf.Bytes())
}


@ -2,10 +2,10 @@ package test
import (
"encoding/json"
"github.com/json-iterator/go"
"io/ioutil"
"os"
"testing"
"github.com/json-iterator/go"
)
//func Test_large_file(t *testing.T) {

benchmarks/stream_test.go (new file)

@ -0,0 +1,128 @@
package test
import (
"bytes"
"strconv"
"testing"
jsoniter "github.com/json-iterator/go"
)
func Benchmark_stream_encode_big_object(b *testing.B) {
var buf bytes.Buffer
var stream = jsoniter.NewStream(jsoniter.ConfigDefault, &buf, 100)
for i := 0; i < b.N; i++ {
buf.Reset()
stream.Reset(&buf)
encodeObject(stream)
if stream.Error != nil {
b.Errorf("error: %+v", stream.Error)
}
}
}
func TestEncodeObject(t *testing.T) {
var stream = jsoniter.NewStream(jsoniter.ConfigDefault, nil, 100)
encodeObject(stream)
if stream.Error != nil {
t.Errorf("error encoding a test object: %+v", stream.Error)
return
}
var m = make(map[string]interface{})
if err := jsoniter.Unmarshal(stream.Buffer(), &m); err != nil {
t.Errorf("error unmarshaling a test object: %+v", err)
return
}
}
func encodeObject(stream *jsoniter.Stream) {
stream.WriteObjectStart()
stream.WriteObjectField("objectId")
stream.WriteUint64(8838243212)
stream.WriteMore()
stream.WriteObjectField("name")
stream.WriteString("Jane Doe")
stream.WriteMore()
stream.WriteObjectField("address")
stream.WriteObjectStart()
for i, field := range addressFields {
if i != 0 {
stream.WriteMore()
}
stream.WriteObjectField(field.key)
stream.WriteString(field.val)
}
stream.WriteMore()
stream.WriteObjectField("geo")
{
stream.WriteObjectStart()
stream.WriteObjectField("latitude")
stream.WriteFloat64(-154.550817)
stream.WriteMore()
stream.WriteObjectField("longitude")
stream.WriteFloat64(-84.176159)
stream.WriteObjectEnd()
}
stream.WriteObjectEnd()
stream.WriteMore()
stream.WriteObjectField("specialties")
stream.WriteArrayStart()
for i, s := range specialties {
if i != 0 {
stream.WriteMore()
}
stream.WriteString(s)
}
stream.WriteArrayEnd()
stream.WriteMore()
for i, text := range longText {
if i != 0 {
stream.WriteMore()
}
stream.WriteObjectField("longText" + strconv.Itoa(i))
stream.WriteString(text)
}
for i := 0; i < 25; i++ {
num := i * 18328
stream.WriteMore()
stream.WriteObjectField("integerField" + strconv.Itoa(i))
stream.WriteInt64(int64(num))
}
stream.WriteObjectEnd()
}
type field struct{ key, val string }
var (
addressFields = []field{
{"address1", "123 Example St"},
{"address2", "Apartment 5D, Suite 3"},
{"city", "Miami"},
{"state", "FL"},
{"postalCode", "33133"},
{"country", "US"},
}
specialties = []string{
"Web Design",
"Go Programming",
"Tennis",
"Cycling",
"Mixed martial arts",
}
longText = []string{
`Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.`,
`Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?`,
`But I must explain to you how all this mistaken idea of denouncing pleasure and praising pain was born and I will give you a complete account of the system, and expound the actual teachings of the great explorer of the truth, the master-builder of human happiness. No one rejects, dislikes, or avoids pleasure itself, because it is pleasure, but because those who do not know how to pursue pleasure rationally encounter consequences that are extremely painful. Nor again is there anyone who loves or pursues or desires to obtain pain of itself, because it is pain, but because occasionally circumstances occur in which toil and pain can procure him some great pleasure. To take a trivial example, which of us ever undertakes laborious physical exercise, except to obtain some advantage from it? But who has any right to find fault with a man who chooses to enjoy a pleasure that has no annoying consequences, or one who avoids a pain that produces no resultant pleasure?`,
`At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat.`,
`On the other hand, we denounce with righteous indignation and dislike men who are so beguiled and demoralized by the charms of pleasure of the moment, so blinded by desire, that they cannot foresee the pain and trouble that are bound to ensue; and equal blame belongs to those who fail in their duty through weakness of will, which is the same as saying through shrinking from toil and pain. These cases are perfectly simple and easy to distinguish. In a free hour, when our power of choice is untrammelled and when nothing prevents our being able to do what we like best, every pleasure is to be welcomed and every pain avoided. But in certain circumstances and owing to the claims of duty or the obligations of business it will frequently occur that pleasures have to be repudiated and annoyances accepted. The wise man therefore always holds in these matters to this principle of selection: he rejects pleasures to secure other greater pleasures, or else he endures pains to avoid worse pains.`,
}
)

config.go

@ -3,9 +3,12 @@ package jsoniter
import (
"encoding/json"
"io"
"unsafe"
"github.com/v2pro/plz/reflect2"
"reflect"
"sync"
"unsafe"
"github.com/modern-go/concurrent"
"github.com/modern-go/reflect2"
)
// Config customize how the API should behave.
@ -21,6 +24,7 @@ type Config struct {
OnlyTaggedField bool
ValidateJsonRawMessage bool
ObjectFieldMustBeSimpleString bool
CaseSensitive bool
}
// API the public interface of this package.
@ -38,6 +42,8 @@ type API interface {
NewDecoder(reader io.Reader) *Decoder
Valid(data []byte) bool
RegisterExtension(extension Extension)
DecoderOf(typ reflect2.Type) ValDecoder
EncoderOf(typ reflect2.Type) ValEncoder
}
// ConfigDefault the default API
@ -59,6 +65,66 @@ var ConfigFastest = Config{
ObjectFieldMustBeSimpleString: true, // do not unescape object field
}.Froze()
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
objectFieldMustBeSimpleString bool
onlyTaggedField bool
disallowUnknownFields bool
decoderCache *concurrent.Map
encoderCache *concurrent.Map
encoderExtension Extension
decoderExtension Extension
extraExtensions []Extension
streamPool *sync.Pool
iteratorPool *sync.Pool
caseSensitive bool
}
func (cfg *frozenConfig) initCache() {
cfg.decoderCache = concurrent.NewMap()
cfg.encoderCache = concurrent.NewMap()
}
func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
cfg.decoderCache.Store(cacheKey, decoder)
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
cfg.encoderCache.Store(cacheKey, encoder)
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
decoder, found := cfg.decoderCache.Load(cacheKey)
if found {
return decoder.(ValDecoder)
}
return nil
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
encoder, found := cfg.encoderCache.Load(cacheKey)
if found {
return encoder.(ValEncoder)
}
return nil
}
var cfgCache = concurrent.NewMap()
func getFrozenConfigFromCache(cfg Config) *frozenConfig {
obj, found := cfgCache.Load(cfg)
if found {
return obj.(*frozenConfig)
}
return nil
}
func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
cfgCache.Store(cfg, frozenConfig)
}
// Froze forge API from config
func (cfg Config) Froze() API {
api := &frozenConfig{
@ -67,6 +133,7 @@ func (cfg Config) Froze() API {
objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
onlyTaggedField: cfg.OnlyTaggedField,
disallowUnknownFields: cfg.DisallowUnknownFields,
caseSensitive: cfg.CaseSensitive,
}
api.streamPool = &sync.Pool{
New: func() interface{} {
@ -93,22 +160,21 @@ func (cfg Config) Froze() API {
if cfg.ValidateJsonRawMessage {
api.validateJsonRawMessage(encoderExtension)
}
if len(encoderExtension) > 0 {
api.extensions = append(api.extensions, encoderExtension)
}
if len(decoderExtension) > 0 {
api.extensions = append(api.extensions, decoderExtension)
}
api.encoderExtension = encoderExtension
api.decoderExtension = decoderExtension
api.configBeforeFrozen = cfg
return api
}
func (cfg Config) frozeWithCacheReuse() *frozenConfig {
func (cfg Config) frozeWithCacheReuse(extraExtensions []Extension) *frozenConfig {
api := getFrozenConfigFromCache(cfg)
if api != nil {
return api
}
api = cfg.Froze().(*frozenConfig)
for _, extension := range extraExtensions {
api.RegisterExtension(extension)
}
addFrozenConfigToCache(cfg, api)
return api
}
@ -117,15 +183,15 @@ func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
rawMessage := *(*json.RawMessage)(ptr)
iter := cfg.BorrowIterator([]byte(rawMessage))
defer cfg.ReturnIterator(iter)
iter.Read()
if iter.Error != nil {
if iter.Error != nil && iter.Error != io.EOF {
stream.WriteRaw("null")
} else {
cfg.ReturnIterator(iter)
stream.WriteRaw(string(rawMessage))
}
}, func(ptr unsafe.Pointer) bool {
return false
return len(*((*json.RawMessage)(ptr))) == 0
}}
extension[reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()] = encoder
extension[reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()] = encoder
@ -133,6 +199,11 @@ func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
func (cfg *frozenConfig) useNumber(extension DecoderExtension) {
extension[reflect2.TypeOfPtr((*interface{})(nil)).Elem()] = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
exitingValue := *((*interface{})(ptr))
if exitingValue != nil && reflect.TypeOf(exitingValue).Kind() == reflect.Ptr {
iter.ReadVal(exitingValue)
return
}
if iter.WhatIsNext() == NumberValue {
*((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
} else {
@ -149,7 +220,9 @@ func (cfg *frozenConfig) getTagKey() string {
}
func (cfg *frozenConfig) RegisterExtension(extension Extension) {
cfg.extensions = append(cfg.extensions, extension)
cfg.extraExtensions = append(cfg.extraExtensions, extension)
copied := cfg.configBeforeFrozen
cfg.configBeforeFrozen = copied
}
type lossyFloat32Encoder struct {
@ -244,7 +317,7 @@ func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]
}
newCfg := cfg.configBeforeFrozen
newCfg.IndentionStep = len(indent)
return newCfg.frozeWithCacheReuse().Marshal(v)
return newCfg.frozeWithCacheReuse(cfg.extraExtensions).Marshal(v)
}
func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error {


@ -1,64 +0,0 @@
//+build go1.9
package jsoniter
import (
"sync"
)
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
objectFieldMustBeSimpleString bool
onlyTaggedField bool
disallowUnknownFields bool
decoderCache sync.Map
encoderCache sync.Map
extensions []Extension
streamPool *sync.Pool
iteratorPool *sync.Pool
}
func (cfg *frozenConfig) initCache() {
cfg.decoderCache = sync.Map{}
cfg.encoderCache = sync.Map{}
}
func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
cfg.decoderCache.Store(cacheKey, decoder)
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
cfg.encoderCache.Store(cacheKey, encoder)
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
decoder, found := cfg.decoderCache.Load(cacheKey)
if found {
return decoder.(ValDecoder)
}
return nil
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
encoder, found := cfg.encoderCache.Load(cacheKey)
if found {
return encoder.(ValEncoder)
}
return nil
}
var cfgCache = &sync.Map{}
func getFrozenConfigFromCache(cfg Config) *frozenConfig {
obj, found := cfgCache.Load(cfg)
if found {
return obj.(*frozenConfig)
}
return nil
}
func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
cfgCache.Store(cfg, frozenConfig)
}


@ -1,70 +0,0 @@
//+build !go1.9
package jsoniter
import (
"sync"
)
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
objectFieldMustBeSimpleString bool
onlyTaggedField bool
disallowUnknownFields bool
cacheLock *sync.RWMutex
decoderCache map[uintptr]ValDecoder
encoderCache map[uintptr]ValEncoder
extensions []Extension
streamPool *sync.Pool
iteratorPool *sync.Pool
}
func (cfg *frozenConfig) initCache() {
cfg.cacheLock = &sync.RWMutex{}
cfg.decoderCache = map[uintptr]ValDecoder{}
cfg.encoderCache = map[uintptr]ValEncoder{}
}
func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
cfg.cacheLock.Lock()
cfg.decoderCache[cacheKey] = decoder
cfg.cacheLock.Unlock()
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
cfg.cacheLock.Lock()
cfg.encoderCache[cacheKey] = encoder
cfg.cacheLock.Unlock()
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
cfg.cacheLock.RLock()
decoder, _ := cfg.decoderCache[cacheKey].(ValDecoder)
cfg.cacheLock.RUnlock()
return decoder
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
cfg.cacheLock.RLock()
encoder, _ := cfg.encoderCache[cacheKey].(ValEncoder)
cfg.cacheLock.RUnlock()
return encoder
}
var cfgCacheLock = &sync.RWMutex{}
var cfgCache = map[Config]*frozenConfig{}
func getFrozenConfigFromCache(cfg Config) *frozenConfig {
cfgCacheLock.RLock()
frozenConfig := cfgCache[cfg]
cfgCacheLock.RUnlock()
return frozenConfig
}
func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
cfgCacheLock.Lock()
cfgCache[cfg] = frozenConfig
cfgCacheLock.Unlock()
}


@ -3,6 +3,7 @@ package jsoniter
import (
"fmt"
"os"
"strings"
)
func ExampleMarshal() {
@ -93,3 +94,28 @@ func ExampleGet() {
// Output:
// Crimson
}
func ExampleMyKey() {
hello := MyKey("hello")
output, _ := Marshal(map[*MyKey]string{&hello: "world"})
fmt.Println(string(output))
obj := map[*MyKey]string{}
Unmarshal(output, &obj)
for k, v := range obj {
fmt.Println(*k, v)
}
// Output:
// {"Hello":"world"}
// Hel world
}
type MyKey string
func (m *MyKey) MarshalText() ([]byte, error) {
return []byte(strings.Replace(string(*m), "h", "H", -1)), nil
}
func (m *MyKey) UnmarshalText(text []byte) error {
*m = MyKey(text[:3])
return nil
}


@ -1,12 +1,14 @@
package test
import (
"testing"
"unsafe"
"time"
"bytes"
"fmt"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"strconv"
"testing"
"time"
"unsafe"
)
func Test_customize_type_decoder(t *testing.T) {
@ -46,6 +48,38 @@ func Test_customize_byte_array_encoder(t *testing.T) {
should.Equal(`"abc"`, str)
}
type CustomEncoderAttachmentTestStruct struct {
Value int32 `json:"value"`
}
type CustomEncoderAttachmentTestStructEncoder struct {}
func (c *CustomEncoderAttachmentTestStructEncoder) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
attachVal, ok := stream.Attachment.(int)
stream.WriteRaw(`"`)
stream.WriteRaw(fmt.Sprintf("%t %d", ok, attachVal))
stream.WriteRaw(`"`)
}
func (c *CustomEncoderAttachmentTestStructEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return false
}
func Test_custom_encoder_attachment(t *testing.T) {
jsoniter.RegisterTypeEncoder("test.CustomEncoderAttachmentTestStruct", &CustomEncoderAttachmentTestStructEncoder{})
expectedValue := 17
should := require.New(t)
buf := &bytes.Buffer{}
stream := jsoniter.NewStream(jsoniter.Config{SortMapKeys: true}.Froze(), buf, 4096)
stream.Attachment = expectedValue
val := map[string]CustomEncoderAttachmentTestStruct{"a": {}}
stream.WriteVal(val)
stream.Flush()
should.Nil(stream.Error)
should.Equal("{\"a\":\"true 17\"}", buf.String())
}
func Test_customize_field_decoder(t *testing.T) {
type Tom struct {
field1 string
@ -61,7 +95,6 @@ func Test_customize_field_decoder(t *testing.T) {
}
}
func Test_recursive_empty_interface_customization(t *testing.T) {
t.Skip()
var obj interface{}
@ -98,4 +131,93 @@ func Test_read_custom_interface(t *testing.T) {
err := jsoniter.UnmarshalFromString(`"hello"`, &val)
should.Nil(err)
should.Equal("hello", val.Hello())
}
}
const flow1 = `
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}`
const flow2 = `
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
{"A":"hello"}
`
type (
Type1 struct {
A string
}
Type2 struct {
A string
}
)
func (t *Type2) UnmarshalJSON(data []byte) error {
return nil
}
func (t *Type2) MarshalJSON() ([]byte, error) {
return nil, nil
}
func TestType1NoFinalLF(t *testing.T) {
reader := bytes.NewReader([]byte(flow1))
dec := jsoniter.NewDecoder(reader)
i := 0
for dec.More() {
data := &Type1{}
if err := dec.Decode(data); err != nil {
t.Errorf("at %v got %v", i, err)
}
i++
}
}
func TestType1FinalLF(t *testing.T) {
reader := bytes.NewReader([]byte(flow2))
dec := jsoniter.NewDecoder(reader)
i := 0
for dec.More() {
data := &Type1{}
if err := dec.Decode(data); err != nil {
t.Errorf("at %v got %v", i, err)
}
i++
}
}
func TestType2NoFinalLF(t *testing.T) {
reader := bytes.NewReader([]byte(flow1))
dec := jsoniter.NewDecoder(reader)
i := 0
for dec.More() {
data := &Type2{}
if err := dec.Decode(data); err != nil {
t.Errorf("at %v got %v", i, err)
}
i++
}
}
func TestType2FinalLF(t *testing.T) {
reader := bytes.NewReader([]byte(flow2))
dec := jsoniter.NewDecoder(reader)
i := 0
for dec.More() {
data := &Type2{}
if err := dec.Decode(data); err != nil {
t.Errorf("at %v got %v", i, err)
}
i++
}
}


@ -1,11 +1,13 @@
package test
import (
"unsafe"
"github.com/json-iterator/go"
"github.com/modern-go/reflect2"
"github.com/stretchr/testify/require"
"reflect"
"strconv"
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"unsafe"
)
type TestObject1 struct {
@ -46,6 +48,53 @@ func Test_customize_field_by_extension(t *testing.T) {
should.Equal(`{"field-1":100}`, str)
}
func Test_customize_map_key_encoder(t *testing.T) {
should := require.New(t)
cfg := jsoniter.Config{}.Froze()
cfg.RegisterExtension(&testMapKeyExtension{})
m := map[int]int{1: 2}
output, err := cfg.MarshalToString(m)
should.NoError(err)
should.Equal(`{"2":2}`, output)
m = map[int]int{}
should.NoError(cfg.UnmarshalFromString(output, &m))
should.Equal(map[int]int{1: 2}, m)
}
type testMapKeyExtension struct {
jsoniter.DummyExtension
}
func (extension *testMapKeyExtension) CreateMapKeyEncoder(typ reflect2.Type) jsoniter.ValEncoder {
if typ.Kind() == reflect.Int {
return &funcEncoder{
fun: func(ptr unsafe.Pointer, stream *jsoniter.Stream) {
stream.WriteRaw(`"`)
stream.WriteInt(*(*int)(ptr) + 1)
stream.WriteRaw(`"`)
},
}
}
return nil
}
func (extension *testMapKeyExtension) CreateMapKeyDecoder(typ reflect2.Type) jsoniter.ValDecoder {
if typ.Kind() == reflect.Int {
return &funcDecoder{
fun: func(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
i, err := strconv.Atoi(iter.ReadString())
if err != nil {
iter.ReportError("read map key", err.Error())
return
}
i--
*(*int)(ptr) = i
},
}
}
return nil
}
type funcDecoder struct {
fun jsoniter.DecoderFunc
}


@ -0,0 +1,238 @@
package extra
import (
"github.com/json-iterator/go"
"github.com/modern-go/reflect2"
"unicode/utf8"
"unsafe"
)
// safeSet holds the value true if the ASCII character with the given array
// position can be represented inside a JSON string without any further
// escaping.
//
// All values are true except for the ASCII control characters (0-31), the
// double quote ("), and the backslash character ("\").
var safeSet = [utf8.RuneSelf]bool{
' ': true,
'!': true,
'"': false,
'#': true,
'$': true,
'%': true,
'&': true,
'\'': true,
'(': true,
')': true,
'*': true,
'+': true,
',': true,
'-': true,
'.': true,
'/': true,
'0': true,
'1': true,
'2': true,
'3': true,
'4': true,
'5': true,
'6': true,
'7': true,
'8': true,
'9': true,
':': true,
';': true,
'<': true,
'=': true,
'>': true,
'?': true,
'@': true,
'A': true,
'B': true,
'C': true,
'D': true,
'E': true,
'F': true,
'G': true,
'H': true,
'I': true,
'J': true,
'K': true,
'L': true,
'M': true,
'N': true,
'O': true,
'P': true,
'Q': true,
'R': true,
'S': true,
'T': true,
'U': true,
'V': true,
'W': true,
'X': true,
'Y': true,
'Z': true,
'[': true,
'\\': false,
']': true,
'^': true,
'_': true,
'`': true,
'a': true,
'b': true,
'c': true,
'd': true,
'e': true,
'f': true,
'g': true,
'h': true,
'i': true,
'j': true,
'k': true,
'l': true,
'm': true,
'n': true,
'o': true,
'p': true,
'q': true,
'r': true,
's': true,
't': true,
'u': true,
'v': true,
'w': true,
'x': true,
'y': true,
'z': true,
'{': true,
'|': true,
'}': true,
'~': true,
'\u007f': true,
}
var binaryType = reflect2.TypeOfPtr((*[]byte)(nil)).Elem()
type BinaryAsStringExtension struct {
jsoniter.DummyExtension
}
func (extension *BinaryAsStringExtension) CreateEncoder(typ reflect2.Type) jsoniter.ValEncoder {
if typ == binaryType {
return &binaryAsStringCodec{}
}
return nil
}
func (extension *BinaryAsStringExtension) CreateDecoder(typ reflect2.Type) jsoniter.ValDecoder {
if typ == binaryType {
return &binaryAsStringCodec{}
}
return nil
}
type binaryAsStringCodec struct {
}
func (codec *binaryAsStringCodec) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
rawBytes := iter.ReadStringAsSlice()
bytes := make([]byte, 0, len(rawBytes))
for i := 0; i < len(rawBytes); i++ {
b := rawBytes[i]
if b == '\\' {
b2 := rawBytes[i+1]
if b2 != '\\' {
iter.ReportError("decode binary as string", `\\x is only supported escape`)
return
}
b3 := rawBytes[i+2]
if b3 != 'x' {
iter.ReportError("decode binary as string", `\\x is only supported escape`)
return
}
b4 := rawBytes[i+3]
b5 := rawBytes[i+4]
i += 4
b = readHex(iter, b4, b5)
}
bytes = append(bytes, b)
}
*(*[]byte)(ptr) = bytes
}
func (codec *binaryAsStringCodec) IsEmpty(ptr unsafe.Pointer) bool {
return len(*((*[]byte)(ptr))) == 0
}
func (codec *binaryAsStringCodec) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
newBuffer := writeBytes(stream.Buffer(), *(*[]byte)(ptr))
stream.SetBuffer(newBuffer)
}
func readHex(iter *jsoniter.Iterator, b1, b2 byte) byte {
var ret byte
if b1 >= '0' && b1 <= '9' {
ret = b1 - '0'
} else if b1 >= 'a' && b1 <= 'f' {
ret = b1 - 'a' + 10
} else {
iter.ReportError("read hex", "expects 0~9 or a~f, but found "+string([]byte{b1}))
return 0
}
ret *= 16
if b2 >= '0' && b2 <= '9' {
ret += b2 - '0'
} else if b2 >= 'a' && b2 <= 'f' {
ret += b2 - 'a' + 10
} else {
iter.ReportError("read hex", "expects 0~9 or a~f, but found "+string([]byte{b2}))
return 0
}
return ret
}
var hex = "0123456789abcdef"
func writeBytes(space []byte, s []byte) []byte {
space = append(space, '"')
// write string, the fast path, without utf8 and escape support
var i int
var c byte
for i, c = range s {
if c < utf8.RuneSelf && safeSet[c] {
space = append(space, c)
} else {
break
}
}
if i == len(s)-1 {
space = append(space, '"')
return space
}
return writeBytesSlowPath(space, s[i:])
}
func writeBytesSlowPath(space []byte, s []byte) []byte {
start := 0
// for the remaining parts, we process them char by char
var i int
var b byte
for i, b = range s {
if b >= utf8.RuneSelf {
space = append(space, '\\', '\\', 'x', hex[b>>4], hex[b&0xF])
start = i + 1
continue
}
if safeSet[b] {
continue
}
if start < i {
space = append(space, s[start:i]...)
}
space = append(space, '\\', '\\', 'x', hex[b>>4], hex[b&0xF])
start = i + 1
}
if start < len(s) {
space = append(space, s[start:]...)
}
return append(space, '"')
}


@ -0,0 +1,32 @@
package extra
import (
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"testing"
)
func init() {
jsoniter.RegisterExtension(&BinaryAsStringExtension{})
}
func TestBinaryAsStringCodec(t *testing.T) {
t.Run("safe set", func(t *testing.T) {
should := require.New(t)
output, err := jsoniter.Marshal([]byte("hello"))
should.NoError(err)
should.Equal(`"hello"`, string(output))
var val []byte
should.NoError(jsoniter.Unmarshal(output, &val))
should.Equal(`hello`, string(val))
})
t.Run("non safe set", func(t *testing.T) {
should := require.New(t)
output, err := jsoniter.Marshal([]byte{1, 2, 3, 23})
should.NoError(err)
should.Equal(`"\\x01\\x02\\x03\\x17"`, string(output))
var val []byte
should.NoError(jsoniter.Unmarshal(output, &val))
should.Equal([]byte{1, 2, 3, 23}, val)
})
}


@ -9,7 +9,7 @@ import (
"unsafe"
"github.com/json-iterator/go"
"github.com/v2pro/plz/reflect2"
"github.com/modern-go/reflect2"
)
const maxUint = ^uint(0)
@ -183,6 +183,9 @@ func (decoder *fuzzyStringDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Ite
*((*string)(ptr)) = string(number)
case jsoniter.StringValue:
*((*string)(ptr)) = iter.ReadString()
case jsoniter.NilValue:
iter.Skip()
*((*string)(ptr)) = ""
default:
iter.ReportError("fuzzyStringDecoder", "not number or string")
}
@ -208,9 +211,15 @@ func (decoder *fuzzyIntegerDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.It
} else {
str = "0"
}
case jsoniter.NilValue:
iter.Skip()
str = "0"
default:
iter.ReportError("fuzzyIntegerDecoder", "not number or string")
}
if len(str) == 0 {
str = "0"
}
newIter := iter.Pool().BorrowIterator([]byte(str))
defer iter.Pool().ReturnIterator(newIter)
isFloat := strings.IndexByte(str, '.') != -1
@ -244,6 +253,9 @@ func (decoder *fuzzyFloat32Decoder) Decode(ptr unsafe.Pointer, iter *jsoniter.It
} else {
*((*float32)(ptr)) = 0
}
case jsoniter.NilValue:
iter.Skip()
*((*float32)(ptr)) = 0
default:
iter.ReportError("fuzzyFloat32Decoder", "not number or string")
}
@ -273,7 +285,10 @@ func (decoder *fuzzyFloat64Decoder) Decode(ptr unsafe.Pointer, iter *jsoniter.It
} else {
*((*float64)(ptr)) = 0
}
case jsoniter.NilValue:
iter.Skip()
*((*float64)(ptr)) = 0
default:
iter.ReportError("fuzzyFloat32Decoder", "not number or string")
iter.ReportError("fuzzyFloat64Decoder", "not number or string")
}
}


@ -37,6 +37,8 @@ func Test_any_to_int64(t *testing.T) {
should.Equal(int64(10), val)
should.Nil(jsoniter.UnmarshalFromString(`10`, &val))
should.Equal(int64(10), val)
should.Nil(jsoniter.UnmarshalFromString(`""`, &val))
should.Equal(int64(0), val)
// bool part
should.Nil(jsoniter.UnmarshalFromString(`false`, &val))
@ -357,3 +359,35 @@ func Test_bad_case(t *testing.T) {
should := require.New(t)
should.Nil(err)
}
func Test_null_to_string(t *testing.T) {
should := require.New(t)
body := []byte(`null`)
var message string
err := jsoniter.Unmarshal(body, &message)
should.NoError(err)
}
func Test_null_to_int(t *testing.T) {
should := require.New(t)
body := []byte(`null`)
var message int
err := jsoniter.Unmarshal(body, &message)
should.NoError(err)
}
func Test_null_to_float32(t *testing.T) {
should := require.New(t)
body := []byte(`null`)
var message float32
err := jsoniter.Unmarshal(body, &message)
should.NoError(err)
}
func Test_null_to_float64(t *testing.T) {
should := require.New(t)
body := []byte(`null`)
var message float64
err := jsoniter.Unmarshal(body, &message)
should.NoError(err)
}


@ -2,6 +2,7 @@ package extra
import (
"github.com/json-iterator/go"
"strings"
"unicode"
)
@ -17,6 +18,19 @@ type namingStrategyExtension struct {
func (extension *namingStrategyExtension) UpdateStructDescriptor(structDescriptor *jsoniter.StructDescriptor) {
for _, binding := range structDescriptor.Fields {
if unicode.IsLower(rune(binding.Field.Name()[0])) || binding.Field.Name()[0] == '_' {
continue
}
tag, hastag := binding.Field.Tag().Lookup("json")
if hastag {
tagParts := strings.Split(tag, ",")
if tagParts[0] == "-" {
continue // hidden field
}
if tagParts[0] != "" {
continue // field explicitly named
}
}
binding.ToNames = []string{extension.translate(binding.Field.Name())}
binding.FromNames = []string{extension.translate(binding.Field.Name())}
}


@ -21,3 +21,46 @@ func Test_lower_case_with_underscores(t *testing.T) {
should.Nil(err)
should.Equal(`{"user_name":"taowen","first_language":"Chinese"}`, string(output))
}
func Test_set_naming_strategy_with_overrides(t *testing.T) {
should := require.New(t)
SetNamingStrategy(LowerCaseWithUnderscores)
output, err := jsoniter.Marshal(struct {
UserName string `json:"UserName"`
FirstLanguage string
}{
UserName: "taowen",
FirstLanguage: "Chinese",
})
should.Nil(err)
should.Equal(`{"UserName":"taowen","first_language":"Chinese"}`, string(output))
}
func Test_set_naming_strategy_with_omitempty(t *testing.T) {
should := require.New(t)
SetNamingStrategy(LowerCaseWithUnderscores)
output, err := jsoniter.Marshal(struct {
UserName string
FirstLanguage string `json:",omitempty"`
}{
UserName: "taowen",
})
should.Nil(err)
should.Equal(`{"user_name":"taowen"}`, string(output))
}
func Test_set_naming_strategy_with_private_field(t *testing.T) {
should := require.New(t)
SetNamingStrategy(LowerCaseWithUnderscores)
output, err := jsoniter.Marshal(struct {
UserName string
userId int
_UserAge int
}{
UserName: "allen",
userId: 100,
_UserAge: 30,
})
should.Nil(err)
should.Equal(`{"user_name":"allen"}`, string(output))
}


@ -2,6 +2,7 @@ package extra
import (
"github.com/json-iterator/go"
"strings"
"unicode"
)
@ -18,8 +19,36 @@ func (extension *privateFieldsExtension) UpdateStructDescriptor(structDescriptor
for _, binding := range structDescriptor.Fields {
isPrivate := unicode.IsLower(rune(binding.Field.Name()[0]))
if isPrivate {
binding.FromNames = []string{binding.Field.Name()}
binding.ToNames = []string{binding.Field.Name()}
tag, hastag := binding.Field.Tag().Lookup("json")
if !hastag {
binding.FromNames = []string{binding.Field.Name()}
binding.ToNames = []string{binding.Field.Name()}
continue
}
tagParts := strings.Split(tag, ",")
names := calcFieldNames(binding.Field.Name(), tagParts[0], tag)
binding.FromNames = names
binding.ToNames = names
}
}
}
func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string {
// ignore?
if wholeTag == "-" {
return []string{}
}
// rename?
var fieldNames []string
if tagProvidedFieldName == "" {
fieldNames = []string{originalFieldName}
} else {
fieldNames = []string{tagProvidedFieldName}
}
// private?
isNotExported := unicode.IsLower(rune(originalFieldName[0]))
if isNotExported {
fieldNames = []string{}
}
return fieldNames
}
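
The tag rules calcFieldNames applies here ("-" hides a field, a non-empty tag name renames it, unexported names are dropped unless this extension restores them) mirror encoding/json. A small hedged illustration of the default behaviour, with made-up struct and field names:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type Sample struct {
	Renamed string `json:"nick"` // the tag name wins over the Go field name
	Hidden  string `json:"-"`    // "-" drops the field entirely
	private string               // unexported fields are not serialized by default
}

func main() {
	out, _ := jsoniter.Marshal(Sample{Renamed: "a", Hidden: "b", private: "c"})
	fmt.Println(string(out)) // {"nick":"a"}
}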

go.mod (new file)

@ -0,0 +1,11 @@
module github.com/json-iterator/go
go 1.12
require (
github.com/davecgh/go-spew v1.1.1
github.com/google/gofuzz v1.0.0
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421
github.com/modern-go/reflect2 v1.0.2
github.com/stretchr/testify v1.8.0
)

go.sum (new file)

@ -0,0 +1,21 @@
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

iter.go

@ -74,6 +74,7 @@ type Iterator struct {
buf []byte
head int
tail int
depth int
captureStartedAt int
captured []byte
Error error
@ -88,6 +89,7 @@ func NewIterator(cfg API) *Iterator {
buf: nil,
head: 0,
tail: 0,
depth: 0,
}
}
@ -99,6 +101,7 @@ func Parse(cfg API, reader io.Reader, bufSize int) *Iterator {
buf: make([]byte, bufSize),
head: 0,
tail: 0,
depth: 0,
}
}
@ -110,6 +113,7 @@ func ParseBytes(cfg API, input []byte) *Iterator {
buf: input,
head: 0,
tail: len(input),
depth: 0,
}
}
@ -128,6 +132,7 @@ func (iter *Iterator) Reset(reader io.Reader) *Iterator {
iter.reader = reader
iter.head = 0
iter.tail = 0
iter.depth = 0
return iter
}
@ -137,6 +142,7 @@ func (iter *Iterator) ResetBytes(input []byte) *Iterator {
iter.buf = input
iter.head = 0
iter.tail = len(input)
iter.depth = 0
return iter
}
@ -320,3 +326,24 @@ func (iter *Iterator) Read() interface{} {
return nil
}
}
// limit maximum depth of nesting, as allowed by https://tools.ietf.org/html/rfc7159#section-9
const maxDepth = 10000
func (iter *Iterator) incrementDepth() (success bool) {
iter.depth++
if iter.depth <= maxDepth {
return true
}
iter.ReportError("incrementDepth", "exceeded max depth")
return false
}
func (iter *Iterator) decrementDepth() (success bool) {
iter.depth--
if iter.depth >= 0 {
return true
}
iter.ReportError("decrementDepth", "unexpected negative nesting")
return false
}
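
A minimal sketch of how the 10000-level cap above surfaces to callers, assuming the exported Unmarshal entry point; incrementDepth reports "exceeded max depth" once the limit is crossed:

package main

import (
	"fmt"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// 10001 nested arrays is one level past maxDepth (10000).
	deep := strings.Repeat("[", 10001) + strings.Repeat("]", 10001)
	var v interface{}
	err := jsoniter.Unmarshal([]byte(deep), &v)
	fmt.Println(err != nil) // true: the error mentions "exceeded max depth"
}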


@ -28,26 +28,32 @@ func (iter *Iterator) ReadArray() (ret bool) {
func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
c := iter.nextToken()
if c == '[' {
if !iter.incrementDepth() {
return false
}
c = iter.nextToken()
if c != ']' {
iter.unreadByte()
if !callback(iter) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
for c == ',' {
if !callback(iter) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
}
if c != ']' {
iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
iter.decrementDepth()
return false
}
return true
return iter.decrementDepth()
}
return true
return iter.decrementDepth()
}
if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
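
For reference, a small usage sketch of ReadArrayCB as wired above: the callback runs once per element, returning false stops the iteration, and the depth counters guarantee the brackets balance. Input and values here are illustrative:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[1, 2, 3]`)
	sum := 0
	ok := iter.ReadArrayCB(func(it *jsoniter.Iterator) bool {
		sum += it.ReadInt() // consume one element per callback
		return true         // keep iterating
	})
	fmt.Println(ok, sum) // true 6
}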


@ -77,14 +77,12 @@ func (iter *Iterator) ReadFloat32() (ret float32) {
}
func (iter *Iterator) readPositiveFloat32() (ret float32) {
value := uint64(0)
c := byte(' ')
i := iter.head
// first char
if i == iter.tail {
return iter.readFloat32SlowPath()
}
c = iter.buf[i]
c := iter.buf[i]
i++
ind := floatDigits[c]
switch ind {
@ -107,7 +105,7 @@ func (iter *Iterator) readPositiveFloat32() (ret float32) {
return
}
}
value = uint64(ind)
value := uint64(ind)
// chars before dot
non_decimal_loop:
for ; i < iter.tail; i++ {
@ -145,9 +143,7 @@ non_decimal_loop:
}
// too many decimal places
return iter.readFloat32SlowPath()
case invalidCharForNumber:
fallthrough
case dotInNumber:
case invalidCharForNumber, dotInNumber:
return iter.readFloat32SlowPath()
}
decimalPlaces++
@ -218,14 +214,12 @@ func (iter *Iterator) ReadFloat64() (ret float64) {
}
func (iter *Iterator) readPositiveFloat64() (ret float64) {
value := uint64(0)
c := byte(' ')
i := iter.head
// first char
if i == iter.tail {
return iter.readFloat64SlowPath()
}
c = iter.buf[i]
c := iter.buf[i]
i++
ind := floatDigits[c]
switch ind {
@ -248,7 +242,7 @@ func (iter *Iterator) readPositiveFloat64() (ret float64) {
return
}
}
value = uint64(ind)
value := uint64(ind)
// chars before dot
non_decimal_loop:
for ; i < iter.tail; i++ {
@ -286,9 +280,7 @@ non_decimal_loop:
}
// too many decimal places
return iter.readFloat64SlowPath()
case invalidCharForNumber:
fallthrough
case dotInNumber:
case invalidCharForNumber, dotInNumber:
return iter.readFloat64SlowPath()
}
decimalPlaces++
@ -296,6 +288,9 @@ non_decimal_loop:
return iter.readFloat64SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind)
if value > maxFloat64 {
return iter.readFloat64SlowPath()
}
}
}
return iter.readFloat64SlowPath()
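
The maxFloat64 guard added above falls back to the slow path once the accumulated integer part exceeds 1<<53 - 1, the largest integer a float64 mantissa represents exactly. A quick illustration of that precision cliff in plain Go, independent of jsoniter:

package main

import "fmt"

func main() {
	const maxExact = 1<<53 - 1 // 9007199254740991
	fmt.Println(uint64(float64(maxExact)) == maxExact) // true: still exact
	var a, b float64 = maxExact + 1, maxExact + 2
	fmt.Println(a == b) // true: 2^53+1 rounds back to 2^53, precision is lost
}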


@ -9,6 +9,7 @@ var intDigits []int8
const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1
const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1
const maxFloat64 = 1<<53 - 1
func init() {
intDigits = make([]int8, 256)
@ -22,11 +23,17 @@ func init() {
// ReadUint reads a uint
func (iter *Iterator) ReadUint() uint {
if strconv.IntSize == 32 {
return uint(iter.ReadUint32())
}
return uint(iter.ReadUint64())
}
// ReadInt reads an int
func (iter *Iterator) ReadInt() int {
if strconv.IntSize == 32 {
return int(iter.ReadInt32())
}
return int(iter.ReadInt64())
}
@ -333,7 +340,7 @@ func (iter *Iterator) readUint64(c byte) (ret uint64) {
}
func (iter *Iterator) assertInteger() {
if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
if iter.head < iter.tail && iter.buf[iter.head] == '.' {
iter.ReportError("assertInteger", "can not decode float as int")
}
}


@ -2,7 +2,7 @@ package jsoniter
import (
"fmt"
"unicode"
"strings"
)
// ReadObject read one field from object.
@ -60,7 +60,7 @@ func (iter *Iterator) readFieldHash() int64 {
if b == '\\' {
iter.head = i
for _, b := range iter.readStringSlowPath() {
if 'A' <= b && b <= 'Z' {
if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
b += 'a' - 'A'
}
hash ^= int64(b)
@ -82,7 +82,7 @@ func (iter *Iterator) readFieldHash() int64 {
}
return hash
}
if 'A' <= b && b <= 'Z' {
if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
b += 'a' - 'A'
}
hash ^= int64(b)
@ -95,10 +95,13 @@ func (iter *Iterator) readFieldHash() int64 {
}
}
func calcHash(str string) int64 {
func calcHash(str string, caseSensitive bool) int64 {
if !caseSensitive {
str = strings.ToLower(str)
}
hash := int64(0x811c9dc5)
for _, b := range str {
hash ^= int64(unicode.ToLower(b))
for _, b := range []byte(str) {
hash ^= int64(b)
hash *= 0x1000193
}
return int64(hash)
@ -109,6 +112,9 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
c := iter.nextToken()
var field string
if c == '{' {
if !iter.incrementDepth() {
return false
}
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
@ -118,6 +124,7 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
if !callback(iter, field) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
@ -128,20 +135,23 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
if !callback(iter, field) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
}
if c != '}' {
iter.ReportError("ReadObjectCB", `object not ended with }`)
iter.decrementDepth()
return false
}
return true
return iter.decrementDepth()
}
if c == '}' {
return true
return iter.decrementDepth()
}
iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c}))
iter.ReportError("ReadObjectCB", `expect " after {, but found `+string([]byte{c}))
iter.decrementDepth()
return false
}
if c == 'n' {
@ -156,15 +166,20 @@ func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
c := iter.nextToken()
if c == '{' {
if !iter.incrementDepth() {
return false
}
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field := iter.ReadString()
if iter.nextToken() != ':' {
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
iter.decrementDepth()
return false
}
if !callback(iter, field) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
@ -172,23 +187,27 @@ func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
field = iter.ReadString()
if iter.nextToken() != ':' {
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
iter.decrementDepth()
return false
}
if !callback(iter, field) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
}
if c != '}' {
iter.ReportError("ReadMapCB", `object not ended with }`)
iter.decrementDepth()
return false
}
return true
return iter.decrementDepth()
}
if c == '}' {
return true
return iter.decrementDepth()
}
iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
iter.ReportError("ReadMapCB", `expect " after {, but found `+string([]byte{c}))
iter.decrementDepth()
return false
}
if c == 'n' {
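
The caseSensitive checks threaded through readFieldHash and calcHash above are driven by the Config option; a brief hedged sketch of the observable difference, with illustrative field and key names:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type user struct {
	Name string
}

func main() {
	var a, b user
	// The default config folds case, so the lower-case key still fills Name.
	jsoniter.ConfigDefault.UnmarshalFromString(`{"name":"x"}`, &a)
	// With CaseSensitive enabled the lower-case key no longer matches.
	jsoniter.Config{CaseSensitive: true}.Froze().UnmarshalFromString(`{"name":"x"}`, &b)
	fmt.Println(a.Name, b.Name == "") // x true
}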


@ -37,17 +37,24 @@ func (iter *Iterator) SkipAndReturnBytes() []byte {
return iter.stopCapture()
}
type captureBuffer struct {
startedAt int
captured []byte
// SkipAndAppendBytes skips the next JSON element and appends its content to
// the given buffer, returning the result.
func (iter *Iterator) SkipAndAppendBytes(buf []byte) []byte {
iter.startCaptureTo(buf, iter.head)
iter.Skip()
return iter.stopCapture()
}
func (iter *Iterator) startCapture(captureStartedAt int) {
func (iter *Iterator) startCaptureTo(buf []byte, captureStartedAt int) {
if iter.captured != nil {
panic("already in capture mode")
}
iter.captureStartedAt = captureStartedAt
iter.captured = make([]byte, 0, 32)
iter.captured = buf
}
func (iter *Iterator) startCapture(captureStartedAt int) {
iter.startCaptureTo(make([]byte, 0, 32), captureStartedAt)
}
func (iter *Iterator) stopCapture() []byte {
@ -58,13 +65,7 @@ func (iter *Iterator) stopCapture() []byte {
remaining := iter.buf[iter.captureStartedAt:iter.head]
iter.captureStartedAt = -1
iter.captured = nil
if len(captured) == 0 {
copied := make([]byte, len(remaining))
copy(copied, remaining)
return copied
}
captured = append(captured, remaining...)
return captured
return append(captured, remaining...)
}
// Skip skips a JSON value and positions the iterator at the next one
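
A small usage sketch of the new SkipAndAppendBytes helper: it captures the raw bytes of the next value into a caller-supplied buffer instead of allocating a fresh slice each time. The input here is illustrative:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"a":[1,2,3],"b":true}`)
	var raw []byte
	iter.ReadObjectCB(func(it *jsoniter.Iterator, field string) bool {
		raw = it.SkipAndAppendBytes(raw) // append each field's raw JSON
		return true
	})
	fmt.Println(string(raw)) // [1,2,3]true
}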


@ -22,6 +22,9 @@ func (iter *Iterator) skipNumber() {
func (iter *Iterator) skipArray() {
level := 1
if !iter.incrementDepth() {
return
}
for {
for i := iter.head; i < iter.tail; i++ {
switch iter.buf[i] {
@ -31,8 +34,14 @@ func (iter *Iterator) skipArray() {
i = iter.head - 1 // it will be i++ soon
case '[': // If open symbol, increase level
level++
if !iter.incrementDepth() {
return
}
case ']': // If close symbol, decrease level
level--
if !iter.decrementDepth() {
return
}
// If we have returned to the original level, we're done
if level == 0 {
@ -50,6 +59,10 @@ func (iter *Iterator) skipArray() {
func (iter *Iterator) skipObject() {
level := 1
if !iter.incrementDepth() {
return
}
for {
for i := iter.head; i < iter.tail; i++ {
switch iter.buf[i] {
@ -59,8 +72,14 @@ func (iter *Iterator) skipObject() {
i = iter.head - 1 // it will be i++ soon
case '{': // If open symbol, increase level
level++
if !iter.incrementDepth() {
return
}
case '}': // If close symbol, decrease level
level--
if !iter.decrementDepth() {
return
}
// If we have returned to the original level, we're done
if level == 0 {


@ -2,12 +2,22 @@
package jsoniter
import "fmt"
import (
"fmt"
"io"
)
func (iter *Iterator) skipNumber() {
if !iter.trySkipNumber() {
iter.unreadByte()
iter.ReadFloat32()
if iter.Error != nil && iter.Error != io.EOF {
return
}
iter.ReadFloat64()
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = nil
iter.ReadBigFloat()
}
}
}


@ -3,9 +3,10 @@ package misc_tests
import (
"bytes"
"encoding/json"
"github.com/stretchr/testify/require"
"testing"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_empty_array(t *testing.T) {
@ -157,6 +158,27 @@ func Test_encode_byte_array(t *testing.T) {
should.Equal(`"AQID"`, string(bytes))
}
func Test_encode_empty_byte_array(t *testing.T) {
should := require.New(t)
bytes, err := json.Marshal([]byte{})
should.Nil(err)
should.Equal(`""`, string(bytes))
bytes, err = jsoniter.Marshal([]byte{})
should.Nil(err)
should.Equal(`""`, string(bytes))
}
func Test_encode_nil_byte_array(t *testing.T) {
should := require.New(t)
var nilSlice []byte
bytes, err := json.Marshal(nilSlice)
should.Nil(err)
should.Equal(`null`, string(bytes))
bytes, err = jsoniter.Marshal(nilSlice)
should.Nil(err)
should.Equal(`null`, string(bytes))
}
func Test_decode_byte_array_from_base64(t *testing.T) {
should := require.New(t)
data := []byte{}
@ -168,6 +190,17 @@ func Test_decode_byte_array_from_base64(t *testing.T) {
should.Equal([]byte{1, 2, 3}, data)
}
func Test_decode_byte_array_from_base64_with_newlines(t *testing.T) {
should := require.New(t)
data := []byte{}
err := json.Unmarshal([]byte(`"A\rQ\nID"`), &data)
should.Nil(err)
should.Equal([]byte{1, 2, 3}, data)
err = jsoniter.Unmarshal([]byte(`"A\rQ\nID"`), &data)
should.Nil(err)
should.Equal([]byte{1, 2, 3}, data)
}
func Test_decode_byte_array_from_array(t *testing.T) {
should := require.New(t)
data := []byte{}


@ -4,8 +4,8 @@ import (
"bytes"
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_true(t *testing.T) {
@ -44,4 +44,4 @@ func Test_write_val_bool(t *testing.T) {
should.Equal(stream.Buffered(), 0)
should.Nil(stream.Error)
should.Equal("true", buf.String())
}
}


@ -2,10 +2,11 @@ package misc_tests
import (
"encoding/json"
"math"
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_read_big_float(t *testing.T) {
@ -77,6 +78,26 @@ func Test_read_number(t *testing.T) {
should.Equal(`92233720368547758079223372036854775807`, string(val))
}
func Test_encode_inf(t *testing.T) {
should := require.New(t)
_, err := json.Marshal(math.Inf(1))
should.Error(err)
_, err = jsoniter.Marshal(float32(math.Inf(1)))
should.Error(err)
_, err = jsoniter.Marshal(math.Inf(-1))
should.Error(err)
}
func Test_encode_nan(t *testing.T) {
should := require.New(t)
_, err := json.Marshal(math.NaN())
should.Error(err)
_, err = jsoniter.Marshal(float32(math.NaN()))
should.Error(err)
_, err = jsoniter.Marshal(math.NaN())
should.Error(err)
}
func Benchmark_jsoniter_float(b *testing.B) {
b.ReportAllocs()
input := []byte(`1.1123,`)


@ -5,12 +5,14 @@ package misc_tests
import (
"bytes"
"encoding/json"
"io"
"io/ioutil"
"math/rand"
"strconv"
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_read_uint64_invalid(t *testing.T) {
@ -70,6 +72,95 @@ func Test_float_as_int(t *testing.T) {
should.NotNil(jsoniter.Unmarshal([]byte(`1.1`), &i))
}
// chunkedData is an io.Reader that returns a random amount of data in the range [1, chunkedData.chunkSize].
// It simulates chunked data from an HTTP server, as commonly produced by the net/http package.
type chunkedData struct {
chunkSize int
data []byte
head int
}
// Read implements io.Reader, returning a random amount of data in the range [1, chunkedData.chunkSize].
func (c *chunkedData) Read(p []byte) (n int, err error) {
to := c.head + int(rand.Int31n(int32(c.chunkSize))+1)
// copy does not copy more data than p can consume
n = copy(p, c.data[c.head:to])
c.head = c.head + n
if c.head >= len(c.data) {
err = io.EOF
}
return n, err
}
// TestIterator_ReadInt_chunkedInput validates the behaviour of the Iterator.ReadInt() method where:
// - it reads data from an io.Reader,
// - the expected value is 0 (zero)
// - Iterator.tail == Iterator.head
// - Iterator.tail < len(Iterator.buf)
// - the value in the buffer after Iterator.tail remains from a previous read and contains a '.' character.
func TestIterator_ReadInt_chunkedInput(t *testing.T) {
should := require.New(t)
data := &chunkedData{
data: jsonFloatIntArray(t, 10),
}
// because this test relies on the randomness of chunkedData, we run multiple iterations to
// be sure that we hit the required case.
for data.chunkSize = 3; data.chunkSize <= len(data.data); data.chunkSize++ {
data.head = 0
iter := jsoniter.Parse(jsoniter.ConfigDefault, data, data.chunkSize)
i := 0
for iter.ReadArray() {
// every even item is float, let's just skip it.
if i%2 == 0 {
iter.Skip()
i++
continue
}
should.Zero(iter.ReadInt())
should.NoError(iter.Error)
i++
}
}
}
// jsonFloatIntArray generates a JSON array in which every
// - even item is the float 0.1
// - odd item is the integer 0
//
// e.g. [0.1, 0, 0.1, 0]
func jsonFloatIntArray(t *testing.T, numberOfItems int) []byte {
t.Helper()
numbers := make([]jsoniter.Any, numberOfItems)
for i := range numbers {
switch i % 2 {
case 0:
numbers[i] = jsoniter.WrapFloat64(0.1)
default:
numbers[i] = jsoniter.WrapInt64(0)
}
}
fixture, err := jsoniter.ConfigFastest.Marshal(numbers)
if err != nil {
panic(err)
}
b := &bytes.Buffer{}
require.NoError(
t,
json.Compact(b, fixture),
"json should be compactable",
)
return b.Bytes()
}
func Benchmark_jsoniter_encode_int(b *testing.B) {
stream := jsoniter.NewStream(jsoniter.ConfigDefault, ioutil.Discard, 64)
for n := 0; n < b.N; n++ {


@ -2,10 +2,10 @@ package misc_tests
import (
"encoding/json"
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"io"
"testing"
)
func Test_nil_non_empty_interface(t *testing.T) {
@ -114,7 +114,7 @@ func Test_overwrite_interface_value_with_nil(t *testing.T) {
err := json.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
should.NoError(err)
should.Equal(42, (*(wrapper.Payload.(*Payload))).Value)
should.Equal(42, wrapper.Payload.(*Payload).Value)
err = json.Unmarshal([]byte(`{"payload": null}`), &wrapper)
should.NoError(err)
@ -128,7 +128,7 @@ func Test_overwrite_interface_value_with_nil(t *testing.T) {
err = jsoniter.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
should.Equal(nil, err)
should.Equal(42, (*(wrapper.Payload.(*Payload))).Value)
should.Equal(42, wrapper.Payload.(*Payload).Value)
err = jsoniter.Unmarshal([]byte(`{"payload": null}`), &wrapper)
should.Equal(nil, err)


@ -7,8 +7,8 @@ import (
"strconv"
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_bad_case(t *testing.T) {


@ -5,9 +5,9 @@ import (
"math/big"
"testing"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"strings"
"github.com/json-iterator/go"
)
func Test_decode_TextMarshaler_key_map(t *testing.T) {
@ -31,3 +31,22 @@ func Test_read_map_with_reader(t *testing.T) {
should.Equal(m2, m1)
should.Equal("1.0.76", m1["note"].(map[string]interface{})["CoreServices"].(map[string]interface{})["version_name"])
}
func Test_map_eface_of_eface(t *testing.T) {
should := require.New(t)
json := jsoniter.ConfigCompatibleWithStandardLibrary
output, err := json.MarshalToString(map[interface{}]interface{}{
"1": 2,
3: "4",
})
should.NoError(err)
should.Equal(`{"1":2,"3":"4"}`, output)
}
func Test_encode_nil_map(t *testing.T) {
should := require.New(t)
var nilMap map[string]string
output, err := jsoniter.MarshalToString(nilMap)
should.NoError(err)
should.Equal(`null`, output)
}


@ -2,9 +2,10 @@ package misc_tests
import (
"encoding/json"
"reflect"
"testing"
"github.com/json-iterator/go"
"reflect"
"strings"
"testing"
)
type Level1 struct {
@ -15,6 +16,243 @@ type Level2 struct {
World string
}
func Test_deep_nested(t *testing.T) {
type unstructured interface{}
testcases := []struct {
name string
data []byte
expectError string
}{
{
name: "array under maxDepth",
data: []byte(`{"a":` + strings.Repeat(`[`, 10000-1) + strings.Repeat(`]`, 10000-1) + `}`),
expectError: "",
},
{
name: "array over maxDepth",
data: []byte(`{"a":` + strings.Repeat(`[`, 10000) + strings.Repeat(`]`, 10000) + `}`),
expectError: "max depth",
},
{
name: "object under maxDepth",
data: []byte(`{"a":` + strings.Repeat(`{"a":`, 10000-1) + `0` + strings.Repeat(`}`, 10000-1) + `}`),
expectError: "",
},
{
name: "object over maxDepth",
data: []byte(`{"a":` + strings.Repeat(`{"a":`, 10000) + `0` + strings.Repeat(`}`, 10000) + `}`),
expectError: "max depth",
},
}
targets := []struct {
name string
new func() interface{}
}{
{
name: "unstructured",
new: func() interface{} {
var v interface{}
return &v
},
},
{
name: "typed named field",
new: func() interface{} {
v := struct {
A interface{} `json:"a"`
}{}
return &v
},
},
{
name: "typed missing field",
new: func() interface{} {
v := struct {
B interface{} `json:"b"`
}{}
return &v
},
},
{
name: "typed 1 field",
new: func() interface{} {
v := struct {
A interface{} `json:"a"`
}{}
return &v
},
},
{
name: "typed 2 field",
new: func() interface{} {
v := struct {
A interface{} `json:"a"`
B interface{} `json:"b"`
}{}
return &v
},
},
{
name: "typed 3 field",
new: func() interface{} {
v := struct {
A interface{} `json:"a"`
B interface{} `json:"b"`
C interface{} `json:"c"`
}{}
return &v
},
},
{
name: "typed 4 field",
new: func() interface{} {
v := struct {
A interface{} `json:"a"`
B interface{} `json:"b"`
C interface{} `json:"c"`
D interface{} `json:"d"`
}{}
return &v
},
},
{
name: "typed 5 field",
new: func() interface{} {
v := struct {
A interface{} `json:"a"`
B interface{} `json:"b"`
C interface{} `json:"c"`
D interface{} `json:"d"`
E interface{} `json:"e"`
}{}
return &v
},
},
{
name: "typed 6 field",
new: func() interface{} {
v := struct {
A interface{} `json:"a"`
B interface{} `json:"b"`
C interface{} `json:"c"`
D interface{} `json:"d"`
E interface{} `json:"e"`
F interface{} `json:"f"`
}{}
return &v
},
},
{
name: "typed 7 field",
new: func() interface{} {
v := struct {
A interface{} `json:"a"`
B interface{} `json:"b"`
C interface{} `json:"c"`
D interface{} `json:"d"`
E interface{} `json:"e"`
F interface{} `json:"f"`
G interface{} `json:"g"`
}{}
return &v
},
},
{
name: "typed 8 field",
new: func() interface{} {
v := struct {
A interface{} `json:"a"`
B interface{} `json:"b"`
C interface{} `json:"c"`
D interface{} `json:"d"`
E interface{} `json:"e"`
F interface{} `json:"f"`
G interface{} `json:"g"`
H interface{} `json:"h"`
}{}
return &v
},
},
{
name: "typed 9 field",
new: func() interface{} {
v := struct {
A interface{} `json:"a"`
B interface{} `json:"b"`
C interface{} `json:"c"`
D interface{} `json:"d"`
E interface{} `json:"e"`
F interface{} `json:"f"`
G interface{} `json:"g"`
H interface{} `json:"h"`
I interface{} `json:"i"`
}{}
return &v
},
},
{
name: "typed 10 field",
new: func() interface{} {
v := struct {
A interface{} `json:"a"`
B interface{} `json:"b"`
C interface{} `json:"c"`
D interface{} `json:"d"`
E interface{} `json:"e"`
F interface{} `json:"f"`
G interface{} `json:"g"`
H interface{} `json:"h"`
I interface{} `json:"i"`
J interface{} `json:"j"`
}{}
return &v
},
},
{
name: "typed 11 field",
new: func() interface{} {
v := struct {
A interface{} `json:"a"`
B interface{} `json:"b"`
C interface{} `json:"c"`
D interface{} `json:"d"`
E interface{} `json:"e"`
F interface{} `json:"f"`
G interface{} `json:"g"`
H interface{} `json:"h"`
I interface{} `json:"i"`
J interface{} `json:"j"`
K interface{} `json:"k"`
}{}
return &v
},
},
}
for _, tc := range testcases {
t.Run(tc.name, func(t *testing.T) {
for _, target := range targets {
t.Run(target.name, func(t *testing.T) {
err := jsoniter.Unmarshal(tc.data, target.new())
if len(tc.expectError) == 0 {
if err != nil {
t.Errorf("unexpected error: %v", err)
}
} else {
if err == nil {
t.Errorf("expected error, got none")
} else if !strings.Contains(err.Error(), tc.expectError) {
t.Errorf("expected error containing '%s', got: %v", tc.expectError, err)
}
}
})
}
})
}
}
func Test_nested(t *testing.T) {
iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"hello": [{"world": "value1"}, {"world": "value2"}]}`)
l1 := Level1{}


@ -5,8 +5,8 @@ import (
"io"
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_read_null(t *testing.T) {


@ -2,12 +2,13 @@ package misc_tests
import (
"bytes"
"reflect"
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"time"
"github.com/stretchr/testify/require"
"strings"
"time"
)
func Test_empty_object(t *testing.T) {
@ -129,4 +130,243 @@ func Test_reader_and_load_more(t *testing.T) {
decoder := jsoniter.ConfigCompatibleWithStandardLibrary.NewDecoder(reader)
obj := TestObject{}
should.Nil(decoder.Decode(&obj))
}
}
func Test_unmarshal_into_existing_value(t *testing.T) {
should := require.New(t)
type TestObject struct {
Field1 int
Field2 interface{}
}
var obj TestObject
m := map[string]interface{}{}
obj.Field2 = &m
cfg := jsoniter.Config{UseNumber: true}.Froze()
err := cfg.Unmarshal([]byte(`{"Field1":1,"Field2":{"k":"v"}}`), &obj)
should.NoError(err)
should.Equal(map[string]interface{}{
"k": "v",
}, m)
}
// for issue421
func Test_unmarshal_anonymous_struct_invalid(t *testing.T) {
should := require.New(t)
t0 := struct {
Field1 string
}{}
cfg := jsoniter.ConfigCompatibleWithStandardLibrary
err := cfg.UnmarshalFromString(`{"Field1":`, &t0)
should.NotNil(err)
should.NotContains(err.Error(), reflect.TypeOf(t0).String())
cfgCaseSensitive := jsoniter.Config{
CaseSensitive: true,
}.Froze()
type TestObject1 struct {
Field1 struct {
InnerField1 string
}
}
t1 := TestObject1{}
err = cfgCaseSensitive.UnmarshalFromString(`{"Field1":{"InnerField1"`, &t1)
should.NotNil(err)
should.NotContains(err.Error(), reflect.TypeOf(t1.Field1).String())
should.Contains(err.Error(), reflect.TypeOf(t1).String())
type TestObject2 struct {
Field1 int
Field2 struct {
InnerField1 string
InnerField2 string
}
}
t2 := TestObject2{}
err = cfgCaseSensitive.UnmarshalFromString(`{"Field2":{"InnerField2"`, &t2)
should.NotNil(err)
should.NotContains(err.Error(), reflect.TypeOf(t2.Field2).String())
should.Contains(err.Error(), reflect.TypeOf(t2).String())
type TestObject3 struct {
Field1 int
Field2 int
Field3 struct {
InnerField1 string
InnerField2 string
InnerField3 string
}
}
t3 := TestObject3{}
err = cfgCaseSensitive.UnmarshalFromString(`{"Field3":{"InnerField3"`, &t3)
should.NotNil(err)
should.NotContains(err.Error(), reflect.TypeOf(t3.Field3).String())
should.Contains(err.Error(), reflect.TypeOf(t3).String())
type TestObject4 struct {
Field1 int
Field2 int
Field3 int
Field4 struct {
InnerField1 string
InnerField2 string
InnerField3 string
InnerField4 string
}
}
t4 := TestObject4{}
err = cfgCaseSensitive.UnmarshalFromString(`{"Field4":{"InnerField4"`, &t4)
should.NotNil(err)
should.NotContains(err.Error(), reflect.TypeOf(t4.Field4).String())
should.Contains(err.Error(), reflect.TypeOf(t4).String())
type TestObject5 struct {
Field1 int
Field2 int
Field3 int
Field4 int
Field5 struct {
InnerField1 string
InnerField2 string
InnerField3 string
InnerField4 string
InnerField5 string
}
}
t5 := TestObject5{}
err = cfgCaseSensitive.UnmarshalFromString(`{"Field5":{"InnerField5"`, &t5)
should.NotNil(err)
should.NotContains(err.Error(), reflect.TypeOf(t5.Field5).String())
should.Contains(err.Error(), reflect.TypeOf(t5).String())
type TestObject6 struct {
Field1 int
Field2 int
Field3 int
Field4 int
Field5 int
Field6 struct {
InnerField1 string
InnerField2 string
InnerField3 string
InnerField4 string
InnerField5 string
InnerField6 string
}
}
t6 := TestObject6{}
err = cfgCaseSensitive.UnmarshalFromString(`{"Field6":{"InnerField6"`, &t6)
should.NotNil(err)
should.NotContains(err.Error(), reflect.TypeOf(t6.Field6).String())
should.Contains(err.Error(), reflect.TypeOf(t6).String())
type TestObject7 struct {
Field1 int
Field2 int
Field3 int
Field4 int
Field5 int
Field6 int
Field7 struct {
InnerField1 string
InnerField2 string
InnerField3 string
InnerField4 string
InnerField5 string
InnerField6 string
InnerField7 string
}
}
t7 := TestObject7{}
err = cfgCaseSensitive.UnmarshalFromString(`{"Field7":{"InnerField7"`, &t7)
should.NotNil(err)
should.NotContains(err.Error(), reflect.TypeOf(t7.Field7).String())
should.Contains(err.Error(), reflect.TypeOf(t7).String())
type TestObject8 struct {
Field1 int
Field2 int
Field3 int
Field4 int
Field5 int
Field6 int
Field7 int
Field8 struct {
InnerField1 string
InnerField2 string
InnerField3 string
InnerField4 string
InnerField5 string
InnerField6 string
InnerField7 string
InnerField8 string
}
}
t8 := TestObject8{}
err = cfgCaseSensitive.UnmarshalFromString(`{"Field8":{"InnerField8"`, &t8)
should.NotNil(err)
should.NotContains(err.Error(), reflect.TypeOf(t8.Field8).String())
should.Contains(err.Error(), reflect.TypeOf(t8).String())
type TestObject9 struct {
Field1 int
Field2 int
Field3 int
Field4 int
Field5 int
Field6 int
Field7 int
Field8 int
Field9 struct {
InnerField1 string
InnerField2 string
InnerField3 string
InnerField4 string
InnerField5 string
InnerField6 string
InnerField7 string
InnerField8 string
InnerField9 string
}
}
t9 := TestObject9{}
err = cfgCaseSensitive.UnmarshalFromString(`{"Field9":{"InnerField9"`, &t9)
should.NotNil(err)
should.NotContains(err.Error(), reflect.TypeOf(t9.Field9).String())
should.Contains(err.Error(), reflect.TypeOf(t9).String())
type TestObject10 struct {
Field1 int
Field2 int
Field3 int
Field4 int
Field5 int
Field6 int
Field7 int
Field8 int
Field9 int
Field10 struct {
InnerField1 string
InnerField2 string
InnerField3 string
InnerField4 string
InnerField5 string
InnerField6 string
InnerField7 string
InnerField8 string
InnerField9 string
InnerField10 string
}
}
t10 := TestObject10{}
err = cfgCaseSensitive.UnmarshalFromString(`{"Field10":{"InnerField10"`, &t10)
should.NotNil(err)
should.NotContains(err.Error(), reflect.TypeOf(t10.Field10).String())
should.Contains(err.Error(), reflect.TypeOf(t10).String())
err = cfg.UnmarshalFromString(`{"Field10":{"InnerField10"`, &t10)
should.NotNil(err)
should.NotContains(err.Error(), reflect.TypeOf(t10.Field10).String())
should.Contains(err.Error(), reflect.TypeOf(t10).String())
}


@ -2,10 +2,10 @@ package misc_tests
import (
"encoding/json"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"strings"
"testing"
"github.com/json-iterator/go"
)
func Test_jsoniter_RawMessage(t *testing.T) {
@ -42,21 +42,40 @@ func Test_marshal_invalid_json_raw_message(t *testing.T) {
should.Nil(aouterr)
}
func Test_marshal_nil_json_raw_message(t *testing.T) {
type A struct {
Nil1 jsoniter.RawMessage `json:"raw1"`
Nil2 json.RawMessage `json:"raw2"`
}
a := A{}
should := require.New(t)
aout, aouterr := jsoniter.Marshal(&a)
should.Equal(`{"raw1":null,"raw2":null}`, string(aout))
should.Nil(aouterr)
a.Nil1 = []byte(`Any`)
a.Nil2 = []byte(`Any`)
should.Nil(jsoniter.Unmarshal(aout, &a))
should.Nil(a.Nil1)
should.Nil(a.Nil2)
}
func Test_raw_message_memory_not_copied_issue(t *testing.T) {
jsonStream := `{"name":"xxxxx","bundle_id":"com.zonst.majiang","app_platform":"ios","app_category":"100103", "budget_day":1000,"bidding_min":1,"bidding_max":2,"bidding_type":"CPM", "freq":{"open":true,"type":"day","num":100},"speed":1, "targeting":{"vendor":{"open":true,"list":["zonst"]}, "geo_code":{"open":true,"list":["156110100"]},"app_category":{"open":true,"list":["100101"]}, "day_parting":{"open":true,"list":["100409","100410"]},"device_type":{"open":true,"list":["ipad"]}, "os_version":{"open":true,"list":[10]},"carrier":{"open":true,"list":["mobile"]}, "network":{"open":true,"list":["4G"]}},"url":{"tracking_imp_url":"http://www.baidu.com", "tracking_clk_url":"http://www.baidu.com","jump_url":"http://www.baidu.com","deep_link_url":"http://www.baidu.com"}}`
type IteratorObject struct {
Name *string `json:"name"`
BundleId *string `json:"bundle_id"`
AppCategory *string `json:"app_category"`
AppPlatform *string `json:"app_platform"`
BudgetDay *float32 `json:"budget_day"`
BiddingMax *float32 `json:"bidding_max"`
BiddingMin *float32 `json:"bidding_min"`
BiddingType *string `json:"bidding_type"`
Name *string `json:"name"`
BundleId *string `json:"bundle_id"`
AppCategory *string `json:"app_category"`
AppPlatform *string `json:"app_platform"`
BudgetDay *float32 `json:"budget_day"`
BiddingMax *float32 `json:"bidding_max"`
BiddingMin *float32 `json:"bidding_min"`
BiddingType *string `json:"bidding_type"`
Freq *jsoniter.RawMessage `json:"freq"`
Targeting *jsoniter.RawMessage `json:"targeting"`
Url *jsoniter.RawMessage `json:"url"`
Speed *int `json:"speed" db:"speed"`
Speed *int `json:"speed" db:"speed"`
}
obj := &IteratorObject{}


@ -23,6 +23,7 @@ func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
}
func (cfg *frozenConfig) ReturnStream(stream *Stream) {
stream.out = nil
stream.Error = nil
stream.Attachment = nil
cfg.streamPool.Put(stream)
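
ReturnStream now also clears Error and Attachment so a pooled Stream cannot leak state into its next borrower. A brief usage sketch of the pooling API; the value written is illustrative:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	cfg := jsoniter.ConfigDefault
	stream := cfg.BorrowStream(nil) // nil writer: output stays in the internal buffer
	stream.WriteVal(map[string]int{"a": 1})
	out := append([]byte(nil), stream.Buffer()...) // copy before the buffer is reused
	cfg.ReturnStream(stream)
	fmt.Println(string(out)) // {"a":1}
}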


@ -4,7 +4,8 @@ import (
"fmt"
"reflect"
"unsafe"
"github.com/v2pro/plz/reflect2"
"github.com/modern-go/reflect2"
)
// ValDecoder is an internal type registered to cache as needed.
@ -40,6 +41,14 @@ type ctx struct {
decoders map[reflect2.Type]ValDecoder
}
func (b *ctx) caseSensitive() bool {
if b.frozenConfig == nil {
// default is case-insensitive
return false
}
return b.frozenConfig.caseSensitive
}
func (b *ctx) append(prefix string) *ctx {
return &ctx{
frozenConfig: b.frozenConfig,
@ -51,11 +60,12 @@ func (b *ctx) append(prefix string) *ctx {
// ReadVal copies the underlying JSON into the given Go value, same as json.Unmarshal
func (iter *Iterator) ReadVal(obj interface{}) {
depth := iter.depth
cacheKey := reflect2.RTypeOf(obj)
decoder := iter.cfg.getDecoderFromCache(cacheKey)
if decoder == nil {
typ := reflect2.TypeOf(obj)
if typ.Kind() != reflect.Ptr {
if typ == nil || typ.Kind() != reflect.Ptr {
iter.ReportError("ReadVal", "can only unmarshal into pointer")
return
}
@ -67,6 +77,10 @@ func (iter *Iterator) ReadVal(obj interface{}) {
return
}
decoder.Decode(ptr, iter)
if iter.depth != depth {
iter.ReportError("ReadVal", "unexpected mismatched nesting")
return
}
}
// WriteVal copies the given Go value into the underlying JSON, same as json.Marshal
@ -111,7 +125,8 @@ func decoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
for _, extension := range extensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
for _, extension := range ctx.extensions {
decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
for _, extension := range ctx.extraExtensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
return decoder
@ -213,7 +228,8 @@ func encoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
for _, extension := range extensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
for _, extension := range ctx.extensions {
encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
for _, extension := range ctx.extraExtensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
return encoder
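
ReadVal now rejects nil and non-pointer targets up front and verifies that decoding left the nesting depth balanced. A short usage sketch with illustrative input:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"hello":"world"}`)
	m := map[string]string{}
	iter.ReadVal(&m) // a pointer is required, mirroring json.Unmarshal
	fmt.Println(m["hello"], iter.Error == nil) // world true
}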


@ -2,9 +2,9 @@ package jsoniter
import (
"fmt"
"github.com/modern-go/reflect2"
"io"
"unsafe"
"github.com/v2pro/plz/reflect2"
)
func decoderOfArray(ctx *ctx, typ reflect2.Type) ValDecoder {


@ -1,9 +1,9 @@
package jsoniter
import (
"github.com/v2pro/plz/reflect2"
"unsafe"
"github.com/modern-go/reflect2"
"reflect"
"unsafe"
)
type dynamicEncoder struct {


@ -2,12 +2,12 @@ package jsoniter
import (
"fmt"
"github.com/modern-go/reflect2"
"reflect"
"sort"
"strings"
"unicode"
"unsafe"
"github.com/v2pro/plz/reflect2"
)
var typeDecoders = map[string]ValDecoder{}
@ -18,8 +18,8 @@ var extensions = []Extension{}
// StructDescriptor describes how we should encode/decode the struct
type StructDescriptor struct {
Type reflect2.Type
Fields []*Binding
Type reflect2.Type
Fields []*Binding
}
// GetField gets one field from the descriptor by its name.
@ -47,6 +47,8 @@ type Binding struct {
// Can also rename fields by UpdateStructDescriptor.
type Extension interface {
UpdateStructDescriptor(structDescriptor *StructDescriptor)
CreateMapKeyDecoder(typ reflect2.Type) ValDecoder
CreateMapKeyEncoder(typ reflect2.Type) ValEncoder
CreateDecoder(typ reflect2.Type) ValDecoder
CreateEncoder(typ reflect2.Type) ValEncoder
DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder
@ -61,6 +63,16 @@ type DummyExtension struct {
func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
}
// CreateMapKeyDecoder No-op
func (extension *DummyExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
return nil
}
// CreateMapKeyEncoder No-op
func (extension *DummyExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
return nil
}
// CreateDecoder No-op
func (extension *DummyExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
return nil
@ -97,6 +109,16 @@ func (extension EncoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
return extension[typ]
}
// CreateMapKeyDecoder No-op
func (extension EncoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
return nil
}
// CreateMapKeyEncoder No-op
func (extension EncoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
return nil
}
// DecorateDecoder No-op
func (extension EncoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
return decoder
@ -113,6 +135,16 @@ type DecoderExtension map[reflect2.Type]ValDecoder
func (extension DecoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
}
// CreateMapKeyDecoder No-op
func (extension DecoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
return nil
}
// CreateMapKeyEncoder No-op
func (extension DecoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
return nil
}
// CreateDecoder get decoder from map
func (extension DecoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
return extension[typ]
@ -214,7 +246,8 @@ func getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
for _, extension := range extensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
for _, extension := range ctx.extensions {
decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
for _, extension := range ctx.extraExtensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
}
@ -227,14 +260,18 @@ func _getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
return decoder
}
}
for _, extension := range ctx.extensions {
decoder := ctx.decoderExtension.CreateDecoder(typ)
if decoder != nil {
return decoder
}
for _, extension := range ctx.extraExtensions {
decoder := extension.CreateDecoder(typ)
if decoder != nil {
return decoder
}
}
typeName := typ.String()
decoder := typeDecoders[typeName]
decoder = typeDecoders[typeName]
if decoder != nil {
return decoder
}
@ -254,7 +291,8 @@ func getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
for _, extension := range extensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
for _, extension := range ctx.extensions {
encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
for _, extension := range ctx.extraExtensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
}
@ -268,14 +306,18 @@ func _getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
return encoder
}
}
for _, extension := range ctx.extensions {
encoder := ctx.encoderExtension.CreateEncoder(typ)
if encoder != nil {
return encoder
}
for _, extension := range ctx.extraExtensions {
encoder := extension.CreateEncoder(typ)
if encoder != nil {
return encoder
}
}
typeName := typ.String()
encoder := typeEncoders[typeName]
encoder = typeEncoders[typeName]
if encoder != nil {
return encoder
}
@ -296,13 +338,13 @@ func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
for i := 0; i < structType.NumField(); i++ {
field := structType.Field(i)
tag, hastag := field.Tag().Lookup(ctx.getTagKey())
if ctx.onlyTaggedField && !hastag {
if ctx.onlyTaggedField && !hastag && !field.Anonymous() {
continue
}
if tag == "-" || field.Name() == "_" {
continue
}
tagParts := strings.Split(tag, ",")
if tag == "-" {
continue
}
if field.Anonymous() && (tag == "" || tagParts[0] == "") {
if field.Type().Kind() == reflect.Struct {
structDescriptor := describeStruct(ctx, field.Type())
@ -355,13 +397,15 @@ func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
}
func createStructDescriptor(ctx *ctx, typ reflect2.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor {
structDescriptor := &StructDescriptor{
Type: typ,
Fields: bindings,
Type: typ,
Fields: bindings,
}
for _, extension := range extensions {
extension.UpdateStructDescriptor(structDescriptor)
}
for _, extension := range ctx.extensions {
ctx.encoderExtension.UpdateStructDescriptor(structDescriptor)
ctx.decoderExtension.UpdateStructDescriptor(structDescriptor)
for _, extension := range ctx.extraExtensions {
extension.UpdateStructDescriptor(structDescriptor)
}
processTags(structDescriptor, ctx.frozenConfig)
@ -431,7 +475,7 @@ func calcFieldNames(originalFieldName string, tagProvidedFieldName string, whole
fieldNames = []string{tagProvidedFieldName}
}
// private?
isNotExported := unicode.IsLower(rune(originalFieldName[0]))
isNotExported := unicode.IsLower(rune(originalFieldName[0])) || originalFieldName[0] == '_'
if isNotExported {
fieldNames = []string{}
}
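The CreateMapKeyDecoder/CreateMapKeyEncoder hooks threaded through this file let an extension registered on a frozen config take over map key handling. A rough usage sketch, not part of this diff — the extension type, the "k<N>" key format, and the int-only check are illustrative assumptions:

package main

import (
	"fmt"
	"reflect"
	"strconv"
	"unsafe"

	jsoniter "github.com/json-iterator/go"
	"github.com/modern-go/reflect2"
)

// intKeyEncoder is a hypothetical ValEncoder that writes int map keys as "k<N>".
type intKeyEncoder struct{}

func (enc *intKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool { return false }

func (enc *intKeyEncoder) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
	// JSON object keys must be strings, so emit a quoted value.
	stream.WriteString("k" + strconv.Itoa(*(*int)(ptr)))
}

// mapKeyExtension overrides only the new map key hook; the embedded
// DummyExtension supplies no-ops for the rest of the Extension interface.
type mapKeyExtension struct {
	jsoniter.DummyExtension
}

func (ext *mapKeyExtension) CreateMapKeyEncoder(typ reflect2.Type) jsoniter.ValEncoder {
	if typ.Kind() == reflect.Int {
		return &intKeyEncoder{}
	}
	return nil
}

func main() {
	api := jsoniter.Config{SortMapKeys: true}.Froze()
	api.RegisterExtension(&mapKeyExtension{})
	out, _ := api.Marshal(map[int]string{1: "a", 2: "b"})
	fmt.Println(string(out)) // {"k1":"a","k2":"b"}
}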


@ -2,9 +2,9 @@ package jsoniter
import (
"encoding/json"
"github.com/modern-go/reflect2"
"strconv"
"unsafe"
"github.com/v2pro/plz/reflect2"
)
type Number string
@ -109,4 +109,4 @@ func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
return len(*((*Number)(ptr))) == 0
}
}


@ -1,9 +1,9 @@
package jsoniter
import (
"unsafe"
"encoding/json"
"github.com/v2pro/plz/reflect2"
"github.com/modern-go/reflect2"
"unsafe"
)
var jsonRawMessageType = reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()
@ -33,11 +33,19 @@ type jsonRawMessageCodec struct {
}
func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*json.RawMessage)(ptr)) = json.RawMessage(iter.SkipAndReturnBytes())
if iter.ReadNil() {
*((*json.RawMessage)(ptr)) = nil
} else {
*((*json.RawMessage)(ptr)) = iter.SkipAndReturnBytes()
}
}
func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
if *((*json.RawMessage)(ptr)) == nil {
stream.WriteNil()
} else {
stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
}
}
func (codec *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
@ -48,13 +56,21 @@ type jsoniterRawMessageCodec struct {
}
func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*RawMessage)(ptr)) = RawMessage(iter.SkipAndReturnBytes())
if iter.ReadNil() {
*((*RawMessage)(ptr)) = nil
} else {
*((*RawMessage)(ptr)) = iter.SkipAndReturnBytes()
}
}
func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteRaw(string(*((*RawMessage)(ptr))))
if *((*RawMessage)(ptr)) == nil {
stream.WriteNil()
} else {
stream.WriteRaw(string(*((*RawMessage)(ptr))))
}
}
func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
return len(*((*RawMessage)(ptr))) == 0
}
}
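The nil checks above make json.RawMessage round-trip through JSON null instead of emitting an empty token. A minimal sketch of the resulting behaviour (the payload type is illustrative):

package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type payload struct {
	Raw json.RawMessage `json:"raw"`
}

func main() {
	// A nil RawMessage encodes as null.
	out, _ := jsoniter.Marshal(payload{})
	fmt.Println(string(out)) // {"raw":null}

	// And "null" decodes back to a nil RawMessage.
	var p payload
	_ = jsoniter.Unmarshal([]byte(`{"raw":null}`), &p)
	fmt.Println(p.Raw == nil) // true
}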


@ -1,11 +1,13 @@
package jsoniter
import (
"fmt"
"io"
"reflect"
"sort"
"unsafe"
"github.com/v2pro/plz/reflect2"
"fmt"
"github.com/modern-go/reflect2"
)
func decoderOfMap(ctx *ctx, typ reflect2.Type) ValDecoder {
@ -38,6 +40,43 @@ func encoderOfMap(ctx *ctx, typ reflect2.Type) ValEncoder {
}
func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
decoder := ctx.decoderExtension.CreateMapKeyDecoder(typ)
if decoder != nil {
return decoder
}
for _, extension := range ctx.extraExtensions {
decoder := extension.CreateMapKeyDecoder(typ)
if decoder != nil {
return decoder
}
}
ptrType := reflect2.PtrTo(typ)
if ptrType.Implements(unmarshalerType) {
return &referenceDecoder{
&unmarshalerDecoder{
valType: ptrType,
},
}
}
if typ.Implements(unmarshalerType) {
return &unmarshalerDecoder{
valType: typ,
}
}
if ptrType.Implements(textUnmarshalerType) {
return &referenceDecoder{
&textUnmarshalerDecoder{
valType: ptrType,
},
}
}
if typ.Implements(textUnmarshalerType) {
return &textUnmarshalerDecoder{
valType: typ,
}
}
switch typ.Kind() {
case reflect.String:
return decoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
@ -52,24 +91,36 @@ func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
typ = reflect2.DefaultTypeOfKind(typ.Kind())
return &numericMapKeyDecoder{decoderOfType(ctx, typ)}
default:
ptrType := reflect2.PtrTo(typ)
if ptrType.Implements(textMarshalerType) {
return &referenceDecoder{
&textUnmarshalerDecoder{
valType: ptrType,
},
}
}
if typ.Implements(textMarshalerType) {
return &textUnmarshalerDecoder{
valType: typ,
}
}
return &lazyErrorDecoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
}
}
func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
encoder := ctx.encoderExtension.CreateMapKeyEncoder(typ)
if encoder != nil {
return encoder
}
for _, extension := range ctx.extraExtensions {
encoder := extension.CreateMapKeyEncoder(typ)
if encoder != nil {
return encoder
}
}
if typ.Kind() != reflect.String {
if typ == textMarshalerType {
return &directTextMarshalerEncoder{
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
}
}
if typ.Implements(textMarshalerType) {
return &textMarshalerEncoder{
valType: typ,
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
}
}
}
switch typ.Kind() {
case reflect.String:
return encoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
@ -84,16 +135,8 @@ func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
typ = reflect2.DefaultTypeOfKind(typ.Kind())
return &numericMapKeyEncoder{encoderOfType(ctx, typ)}
default:
if typ == textMarshalerType {
return &directTextMarshalerEncoder{
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
}
}
if typ.Implements(textMarshalerType) {
return &textMarshalerEncoder{
valType: typ,
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
}
if typ.Kind() == reflect.Interface {
return &dynamicMapKeyEncoder{ctx, typ}
}
return &lazyErrorEncoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
}
@ -127,10 +170,6 @@ func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if c == '}' {
return
}
if c != '"' {
iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c}))
return
}
iter.unreadByte()
key := decoder.keyType.UnsafeNew()
decoder.keyDecoder.Decode(key, iter)
@ -191,6 +230,21 @@ func (encoder *numericMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return false
}
type dynamicMapKeyEncoder struct {
ctx *ctx
valType reflect2.Type
}
func (encoder *dynamicMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
obj := encoder.valType.UnsafeIndirect(ptr)
encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).Encode(reflect2.PtrOf(obj), stream)
}
func (encoder *dynamicMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
obj := encoder.valType.UnsafeIndirect(ptr)
return encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).IsEmpty(reflect2.PtrOf(obj))
}
type mapEncoder struct {
mapType *reflect2.UnsafeMapType
keyEncoder ValEncoder
@ -198,6 +252,10 @@ type mapEncoder struct {
}
func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *(*unsafe.Pointer)(ptr) == nil {
stream.WriteNil()
return
}
stream.WriteObjectStart()
iter := encoder.mapType.UnsafeIterate(ptr)
for i := 0; iter.HasNext(); i++ {
@ -235,13 +293,17 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteObjectStart()
mapIter := encoder.mapType.UnsafeIterate(ptr)
subStream := stream.cfg.BorrowStream(nil)
subStream.Attachment = stream.Attachment
subIter := stream.cfg.BorrowIterator(nil)
keyValues := encodedKeyValues{}
for mapIter.HasNext() {
subStream.buf = make([]byte, 0, 64)
key, elem := mapIter.UnsafeNext()
subStreamIndex := subStream.Buffered()
encoder.keyEncoder.Encode(key, subStream)
encodedKey := subStream.Buffer()
if subStream.Error != nil && subStream.Error != io.EOF && stream.Error == nil {
stream.Error = subStream.Error
}
encodedKey := subStream.Buffer()[subStreamIndex:]
subIter.ResetBytes(encodedKey)
decodedKey := subIter.ReadString()
if stream.indention > 0 {
@ -252,7 +314,7 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
encoder.elemEncoder.Encode(elem, subStream)
keyValues = append(keyValues, encodedKV{
key: decodedKey,
keyValue: subStream.Buffer(),
keyValue: subStream.Buffer()[subStreamIndex:],
})
}
sort.Sort(keyValues)
@ -262,6 +324,9 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
}
stream.Write(keyValue.keyValue)
}
if subStream.Error != nil && stream.Error == nil {
stream.Error = subStream.Error
}
stream.WriteObjectEnd()
stream.cfg.ReturnStream(subStream)
stream.cfg.ReturnIterator(subIter)
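With the TextMarshaler/TextUnmarshaler branches added to decoderOfMapKey and encoderOfMapKey, a custom key type should be serialized through its text methods, as encoding/json does. A small sketch — the userID type and its "user-<N>" format are made up for illustration:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type userID int

func (u userID) MarshalText() ([]byte, error) {
	return []byte(fmt.Sprintf("user-%d", u)), nil
}

func (u *userID) UnmarshalText(b []byte) error {
	_, err := fmt.Sscanf(string(b), "user-%d", (*int)(u))
	return err
}

func main() {
	out, _ := jsoniter.Marshal(map[userID]string{7: "alice"})
	fmt.Println(string(out)) // {"user-7":"alice"}

	var m map[userID]string
	_ = jsoniter.Unmarshal([]byte(`{"user-7":"alice"}`), &m)
	fmt.Println(m[7]) // alice
}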


@ -1,10 +1,11 @@
package jsoniter
import (
"github.com/v2pro/plz/reflect2"
"unsafe"
"encoding"
"encoding/json"
"unsafe"
"github.com/modern-go/reflect2"
)
var marshalerType = reflect2.TypeOfPtr((*json.Marshaler)(nil)).Elem()
@ -98,6 +99,12 @@ func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if err != nil {
stream.Error = err
} else {
// html escape was already done by jsoniter
// but the extra '\n' should be trimmed
l := len(bytes)
if l > 0 && bytes[l-1] == '\n' {
bytes = bytes[:l-1]
}
stream.Write(bytes)
}
}


@ -3,10 +3,14 @@ package jsoniter
import (
"encoding/base64"
"reflect"
"strconv"
"unsafe"
"github.com/v2pro/plz/reflect2"
"github.com/modern-go/reflect2"
)
const ptrSize = 32 << uintptr(^uintptr(0)>>63)
func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder {
if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 {
sliceDecoder := decoderOfSlice(ctx, typ)
@ -24,7 +28,10 @@ func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder {
if typeName != "int" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
}
return &intCodec{}
if strconv.IntSize == 32 {
return &int32Codec{}
}
return &int64Codec{}
case reflect.Int8:
if typeName != "int8" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
@ -49,7 +56,10 @@ func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder {
if typeName != "uint" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
}
return &uintCodec{}
if strconv.IntSize == 32 {
return &uint32Codec{}
}
return &uint64Codec{}
case reflect.Uint8:
if typeName != "uint8" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
@ -69,7 +79,10 @@ func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder {
if typeName != "uintptr" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
}
return &uintptrCodec{}
if ptrSize == 32 {
return &uint32Codec{}
}
return &uint64Codec{}
case reflect.Uint64:
if typeName != "uint64" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
@ -110,7 +123,10 @@ func createDecoderOfNative(ctx *ctx, typ reflect2.Type) ValDecoder {
if typeName != "int" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
}
return &intCodec{}
if strconv.IntSize == 32 {
return &int32Codec{}
}
return &int64Codec{}
case reflect.Int8:
if typeName != "int8" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
@ -135,7 +151,10 @@ func createDecoderOfNative(ctx *ctx, typ reflect2.Type) ValDecoder {
if typeName != "uint" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
}
return &uintCodec{}
if strconv.IntSize == 32 {
return &uint32Codec{}
}
return &uint64Codec{}
case reflect.Uint8:
if typeName != "uint8" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
@ -155,7 +174,10 @@ func createDecoderOfNative(ctx *ctx, typ reflect2.Type) ValDecoder {
if typeName != "uintptr" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
}
return &uintptrCodec{}
if ptrSize == 32 {
return &uint32Codec{}
}
return &uint64Codec{}
case reflect.Uint64:
if typeName != "uint64" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
@ -196,40 +218,6 @@ func (codec *stringCodec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*string)(ptr)) == ""
}
type intCodec struct {
}
func (codec *intCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.ReadNil() {
*((*int)(ptr)) = iter.ReadInt()
}
}
func (codec *intCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt(*((*int)(ptr)))
}
func (codec *intCodec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*int)(ptr)) == 0
}
type uintptrCodec struct {
}
func (codec *uintptrCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.ReadNil() {
*((*uintptr)(ptr)) = uintptr(iter.ReadUint64())
}
}
func (codec *uintptrCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint64(uint64(*((*uintptr)(ptr))))
}
func (codec *uintptrCodec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*uintptr)(ptr)) == 0
}
type int8Codec struct {
}
@ -298,24 +286,6 @@ func (codec *int64Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*int64)(ptr)) == 0
}
type uintCodec struct {
}
func (codec *uintCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.ReadNil() {
*((*uint)(ptr)) = iter.ReadUint()
return
}
}
func (codec *uintCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint(*((*uint)(ptr)))
}
func (codec *uintCodec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*uint)(ptr)) == 0
}
type uint8Codec struct {
}
@ -436,7 +406,7 @@ func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool {
}
type base64Codec struct {
sliceType *reflect2.UnsafeSliceType
sliceType *reflect2.UnsafeSliceType
sliceDecoder ValDecoder
}
@ -447,16 +417,11 @@ func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
}
switch iter.WhatIsNext() {
case StringValue:
encoding := base64.StdEncoding
src := iter.SkipAndReturnBytes()
src = src[1: len(src)-1]
decodedLen := encoding.DecodedLen(len(src))
dst := make([]byte, decodedLen)
len, err := encoding.Decode(dst, src)
src := iter.ReadString()
dst, err := base64.StdEncoding.DecodeString(src)
if err != nil {
iter.ReportError("decode base64", err.Error())
} else {
dst = dst[:len]
codec.sliceType.UnsafeSet(ptr, unsafe.Pointer(&dst))
}
case ArrayValue:
@ -467,17 +432,19 @@ func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
}
func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
src := *((*[]byte)(ptr))
if len(src) == 0 {
if codec.sliceType.UnsafeIsNil(ptr) {
stream.WriteNil()
return
}
src := *((*[]byte)(ptr))
encoding := base64.StdEncoding
stream.writeByte('"')
size := encoding.EncodedLen(len(src))
buf := make([]byte, size)
encoding.Encode(buf, src)
stream.buf = append(stream.buf, buf...)
if len(src) != 0 {
size := encoding.EncodedLen(len(src))
buf := make([]byte, size)
encoding.Encode(buf, src)
stream.buf = append(stream.buf, buf...)
}
stream.writeByte('"')
}
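The UnsafeIsNil check above distinguishes a nil byte slice from an empty one: nil encodes as null, an allocated-but-empty slice encodes as "", and non-empty data is base64 encoded, matching encoding/json. A quick sketch:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var nilBytes []byte
	out, _ := jsoniter.Marshal(nilBytes)
	fmt.Println(string(out)) // null

	out, _ = jsoniter.Marshal([]byte{})
	fmt.Println(string(out)) // ""

	out, _ = jsoniter.Marshal([]byte("hi"))
	fmt.Println(string(out)) // "aGk="
}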


@ -1,18 +1,14 @@
package jsoniter
import (
"reflect"
"github.com/modern-go/reflect2"
"unsafe"
"github.com/v2pro/plz/reflect2"
)
func decoderOfOptional(ctx *ctx, typ reflect2.Type) ValDecoder {
ptrType := typ.(*reflect2.UnsafePtrType)
elemType := ptrType.Elem()
decoder := decoderOfType(ctx, elemType)
if ctx.prefix == "" && elemType.Kind() == reflect.Ptr {
return &dereferenceDecoder{elemType, decoder}
}
return &OptionalDecoder{elemType, decoder}
}


@ -2,9 +2,9 @@ package jsoniter
import (
"fmt"
"github.com/modern-go/reflect2"
"io"
"unsafe"
"github.com/v2pro/plz/reflect2"
)
func decoderOfSlice(ctx *ctx, typ reflect2.Type) ValDecoder {


@ -5,7 +5,8 @@ import (
"io"
"strings"
"unsafe"
"github.com/v2pro/plz/reflect2"
"github.com/modern-go/reflect2"
)
func decoderOfStruct(ctx *ctx, typ reflect2.Type) ValDecoder {
@ -31,6 +32,15 @@ func decoderOfStruct(ctx *ctx, typ reflect2.Type) ValDecoder {
for k, binding := range bindings {
fields[k] = binding.Decoder.(*structFieldDecoder)
}
if !ctx.caseSensitive() {
for k, binding := range bindings {
if _, found := fields[strings.ToLower(k)]; !found {
fields[strings.ToLower(k)] = binding.Decoder.(*structFieldDecoder)
}
}
}
return createStructDecoder(ctx, typ, fields)
}
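The lower-cased fallback table built above is what gives jsoniter its default case-insensitive field matching; a config with CaseSensitive set skips it. A short sketch of the difference (struct and field names are illustrative):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type user struct {
	Name string `json:"name"`
}

func main() {
	// Default config: "NAME" still matches the "name" field through the
	// lower-cased fallback.
	var u user
	_ = jsoniter.Unmarshal([]byte(`{"NAME":"a"}`), &u)
	fmt.Println(u.Name) // a

	// CaseSensitive config: only an exact match is accepted, so the field
	// stays at its zero value.
	strict := jsoniter.Config{CaseSensitive: true}.Froze()
	u = user{}
	_ = strict.Unmarshal([]byte(`{"NAME":"b"}`), &u)
	fmt.Println(u.Name == "") // true
}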
@ -41,12 +51,13 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
knownHash := map[int64]struct{}{
0: {},
}
switch len(fields) {
case 0:
return &skipObjectDecoder{typ}
case 1:
for fieldName, fieldDecoder := range fields {
fieldHash := calcHash(fieldName)
fieldHash := calcHash(fieldName, ctx.caseSensitive())
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields, false}
@ -60,7 +71,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
var fieldDecoder1 *structFieldDecoder
var fieldDecoder2 *structFieldDecoder
for fieldName, fieldDecoder := range fields {
fieldHash := calcHash(fieldName)
fieldHash := calcHash(fieldName, ctx.caseSensitive())
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields, false}
@ -83,7 +94,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
var fieldDecoder2 *structFieldDecoder
var fieldDecoder3 *structFieldDecoder
for fieldName, fieldDecoder := range fields {
fieldHash := calcHash(fieldName)
fieldHash := calcHash(fieldName, ctx.caseSensitive())
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields, false}
@ -114,7 +125,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
var fieldDecoder3 *structFieldDecoder
var fieldDecoder4 *structFieldDecoder
for fieldName, fieldDecoder := range fields {
fieldHash := calcHash(fieldName)
fieldHash := calcHash(fieldName, ctx.caseSensitive())
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields, false}
@ -151,7 +162,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
var fieldDecoder4 *structFieldDecoder
var fieldDecoder5 *structFieldDecoder
for fieldName, fieldDecoder := range fields {
fieldHash := calcHash(fieldName)
fieldHash := calcHash(fieldName, ctx.caseSensitive())
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields, false}
@ -194,7 +205,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
var fieldDecoder5 *structFieldDecoder
var fieldDecoder6 *structFieldDecoder
for fieldName, fieldDecoder := range fields {
fieldHash := calcHash(fieldName)
fieldHash := calcHash(fieldName, ctx.caseSensitive())
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields, false}
@ -243,7 +254,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
var fieldDecoder6 *structFieldDecoder
var fieldDecoder7 *structFieldDecoder
for fieldName, fieldDecoder := range fields {
fieldHash := calcHash(fieldName)
fieldHash := calcHash(fieldName, ctx.caseSensitive())
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields, false}
@ -298,7 +309,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
var fieldDecoder7 *structFieldDecoder
var fieldDecoder8 *structFieldDecoder
for fieldName, fieldDecoder := range fields {
fieldHash := calcHash(fieldName)
fieldHash := calcHash(fieldName, ctx.caseSensitive())
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields, false}
@ -359,7 +370,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
var fieldDecoder8 *structFieldDecoder
var fieldDecoder9 *structFieldDecoder
for fieldName, fieldDecoder := range fields {
fieldHash := calcHash(fieldName)
fieldHash := calcHash(fieldName, ctx.caseSensitive())
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields, false}
@ -426,7 +437,7 @@ func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structF
var fieldDecoder9 *structFieldDecoder
var fieldDecoder10 *structFieldDecoder
for fieldName, fieldDecoder := range fields {
fieldHash := calcHash(fieldName)
fieldHash := calcHash(fieldName, ctx.caseSensitive())
_, known := knownHash[fieldHash]
if known {
return &generalStructDecoder{typ, fields, false}
@ -489,13 +500,20 @@ func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
if !iter.readObjectStart() {
return
}
decoder.decodeOneField(ptr, iter)
for iter.nextToken() == ',' {
if !iter.incrementDepth() {
return
}
var c byte
for c = ','; c == ','; c = iter.nextToken() {
decoder.decodeOneField(ptr, iter)
}
if iter.Error != nil && iter.Error != io.EOF {
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
if c != '}' {
iter.ReportError("struct Decode", `expect }, but found `+string([]byte{c}))
}
iter.decrementDepth()
}
func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *Iterator) {
@ -505,19 +523,19 @@ func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *It
fieldBytes := iter.ReadStringAsSlice()
field = *(*string)(unsafe.Pointer(&fieldBytes))
fieldDecoder = decoder.fields[field]
if fieldDecoder == nil {
if fieldDecoder == nil && !iter.cfg.caseSensitive {
fieldDecoder = decoder.fields[strings.ToLower(field)]
}
} else {
field = iter.ReadString()
fieldDecoder = decoder.fields[field]
if fieldDecoder == nil {
if fieldDecoder == nil && !iter.cfg.caseSensitive {
fieldDecoder = decoder.fields[strings.ToLower(field)]
}
}
if fieldDecoder == nil {
msg := "found unknown field: " + field
if decoder.disallowUnknownFields {
msg := "found unknown field: " + field
iter.ReportError("ReadObject", msg)
}
c := iter.nextToken()
@ -557,6 +575,9 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
if !iter.readObjectStart() {
return
}
if !iter.incrementDepth() {
return
}
for {
if iter.readFieldHash() == decoder.fieldHash {
decoder.fieldDecoder.Decode(ptr, iter)
@ -567,9 +588,10 @@ func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator)
break
}
}
if iter.Error != nil && iter.Error != io.EOF {
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
iter.decrementDepth()
}
type twoFieldsStructDecoder struct {
@ -584,6 +606,9 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
if !iter.readObjectStart() {
return
}
if !iter.incrementDepth() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
@ -597,9 +622,10 @@ func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
break
}
}
if iter.Error != nil && iter.Error != io.EOF {
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
iter.decrementDepth()
}
type threeFieldsStructDecoder struct {
@ -616,6 +642,9 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
if !iter.readObjectStart() {
return
}
if !iter.incrementDepth() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
@ -631,9 +660,10 @@ func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
break
}
}
if iter.Error != nil && iter.Error != io.EOF {
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
iter.decrementDepth()
}
type fourFieldsStructDecoder struct {
@ -652,6 +682,9 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
if !iter.readObjectStart() {
return
}
if !iter.incrementDepth() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
@ -669,9 +702,10 @@ func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
break
}
}
if iter.Error != nil && iter.Error != io.EOF {
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
iter.decrementDepth()
}
type fiveFieldsStructDecoder struct {
@ -692,6 +726,9 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
if !iter.readObjectStart() {
return
}
if !iter.incrementDepth() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
@ -711,9 +748,10 @@ func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
break
}
}
if iter.Error != nil && iter.Error != io.EOF {
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
iter.decrementDepth()
}
type sixFieldsStructDecoder struct {
@ -736,6 +774,9 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
if !iter.readObjectStart() {
return
}
if !iter.incrementDepth() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
@ -757,9 +798,10 @@ func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
break
}
}
if iter.Error != nil && iter.Error != io.EOF {
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
iter.decrementDepth()
}
type sevenFieldsStructDecoder struct {
@ -784,6 +826,9 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
if !iter.readObjectStart() {
return
}
if !iter.incrementDepth() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
@ -807,9 +852,10 @@ func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
break
}
}
if iter.Error != nil && iter.Error != io.EOF {
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
iter.decrementDepth()
}
type eightFieldsStructDecoder struct {
@ -836,6 +882,9 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
if !iter.readObjectStart() {
return
}
if !iter.incrementDepth() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
@ -861,9 +910,10 @@ func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterat
break
}
}
if iter.Error != nil && iter.Error != io.EOF {
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
iter.decrementDepth()
}
type nineFieldsStructDecoder struct {
@ -892,6 +942,9 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
if !iter.readObjectStart() {
return
}
if !iter.incrementDepth() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
@ -919,9 +972,10 @@ func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
break
}
}
if iter.Error != nil && iter.Error != io.EOF {
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
iter.decrementDepth()
}
type tenFieldsStructDecoder struct {
@ -952,6 +1006,9 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
if !iter.readObjectStart() {
return
}
if !iter.incrementDepth() {
return
}
for {
switch iter.readFieldHash() {
case decoder.fieldHash1:
@ -981,9 +1038,10 @@ func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator
break
}
}
if iter.Error != nil && iter.Error != io.EOF {
if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
}
iter.decrementDepth()
}
type structFieldDecoder struct {
@ -995,7 +1053,7 @@ func (decoder *structFieldDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
fieldPtr := decoder.field.UnsafeGet(ptr)
decoder.fieldDecoder.Decode(fieldPtr, iter)
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%s: %s", decoder.field.Name, iter.Error.Error())
iter.Error = fmt.Errorf("%s: %s", decoder.field.Name(), iter.Error.Error())
}
}
@ -1017,6 +1075,11 @@ type stringModeNumberDecoder struct {
}
func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.WhatIsNext() == NilValue {
decoder.elemDecoder.Decode(ptr, iter)
return
}
c := iter.nextToken()
if c != '"' {
iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
@ -1031,4 +1094,4 @@ func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterato
iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
return
}
}
}
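The NilValue check added to stringModeNumberDecoder means a `,string`-tagged numeric field now accepts JSON null (leaving the zero value) instead of reporting an expect-quote error. A minimal sketch (the order type is illustrative):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type order struct {
	Qty int `json:"qty,string"` // normally expects a quoted number, e.g. "3"
}

func main() {
	var o order
	err := jsoniter.Unmarshal([]byte(`{"qty":null}`), &o)
	fmt.Println(err, o.Qty) // <nil> 0

	err = jsoniter.Unmarshal([]byte(`{"qty":"3"}`), &o)
	fmt.Println(err, o.Qty) // <nil> 3
}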


@ -2,10 +2,10 @@ package jsoniter
import (
"fmt"
"github.com/modern-go/reflect2"
"io"
"reflect"
"unsafe"
"github.com/v2pro/plz/reflect2"
)
func encoderOfStruct(ctx *ctx, typ reflect2.Type) ValEncoder {
@ -47,38 +47,12 @@ func encoderOfStruct(ctx *ctx, typ reflect2.Type) ValEncoder {
}
func createCheckIsEmpty(ctx *ctx, typ reflect2.Type) checkIsEmpty {
encoder := createEncoderOfNative(ctx, typ)
if encoder != nil {
return encoder
}
kind := typ.Kind()
switch kind {
case reflect.String:
return &stringCodec{}
case reflect.Int:
return &intCodec{}
case reflect.Int8:
return &int8Codec{}
case reflect.Int16:
return &int16Codec{}
case reflect.Int32:
return &int32Codec{}
case reflect.Int64:
return &int64Codec{}
case reflect.Uint:
return &uintCodec{}
case reflect.Uint8:
return &uint8Codec{}
case reflect.Uint16:
return &uint16Codec{}
case reflect.Uint32:
return &uint32Codec{}
case reflect.Uintptr:
return &uintptrCodec{}
case reflect.Uint64:
return &uint64Codec{}
case reflect.Float32:
return &float32Codec{}
case reflect.Float64:
return &float64Codec{}
case reflect.Bool:
return &boolCodec{}
case reflect.Interface:
return &dynamicEncoder{typ}
case reflect.Struct:
@ -226,6 +200,7 @@ type stringModeStringEncoder struct {
func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
tempStream := encoder.cfg.BorrowStream(nil)
tempStream.Attachment = stream.Attachment
defer encoder.cfg.ReturnStream(tempStream)
encoder.elemEncoder.Encode(ptr, tempStream)
stream.WriteString(string(tempStream.Buffer()))


@ -5,8 +5,8 @@ import (
"encoding/json"
"testing"
"github.com/stretchr/testify/require"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_skip_number_in_array(t *testing.T) {
@ -105,6 +105,15 @@ func Test_skip_and_return_bytes_with_reader(t *testing.T) {
should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(skipped))
}
func Test_append_skip_and_return_bytes_with_reader(t *testing.T) {
should := require.New(t)
iter := jsoniter.Parse(jsoniter.ConfigDefault, bytes.NewBufferString(`[ {"a" : [{"stream": "c"}], "d": 102 }, "stream"]`), 4)
iter.ReadArray()
buf := make([]byte, 0, 1024)
buf = iter.SkipAndAppendBytes(buf)
should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(buf))
}
func Test_skip_empty(t *testing.T) {
should := require.New(t)
should.NotNil(jsoniter.Get([]byte("")).LastError())


@ -6,8 +6,8 @@ import (
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"io"
"testing"
"reflect"
"testing"
)
type testCase struct {


@ -14,4 +14,4 @@ func init() {
`"\t"`, // valid
},
})
}
}


@ -13,7 +13,7 @@ func init() {
`{"hello":{}}`, // valid
`{"hello":{}}}`, // invalid
`{"hello": { "hello": 1}}`, // valid
`{abc}`, // invalid
`{abc}`, // invalid
},
})
}
}


@ -55,6 +55,11 @@ func (stream *Stream) Buffer() []byte {
return stream.buf
}
// SetBuffer allows to append to the internal buffer directly
func (stream *Stream) SetBuffer(buf []byte) {
stream.buf = buf
}
// Write writes the contents of p into the buffer.
// It returns the number of bytes written.
// If nn < len(p), it also returns an error explaining
@ -98,14 +103,14 @@ func (stream *Stream) Flush() error {
if stream.Error != nil {
return stream.Error
}
n, err := stream.out.Write(stream.buf)
_, err := stream.out.Write(stream.buf)
if err != nil {
if stream.Error == nil {
stream.Error = err
}
return err
}
stream.buf = stream.buf[n:]
stream.buf = stream.buf[:0]
return nil
}
@ -172,7 +177,6 @@ func (stream *Stream) WriteEmptyObject() {
func (stream *Stream) WriteMore() {
stream.writeByte(',')
stream.writeIndention(0)
stream.Flush()
}
// WriteArrayStart write [ with possible indention
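The new SetBuffer hook pairs with Buffer to let callers append raw bytes to the stream's internal buffer and hand it back, while the reworked Flush resets that buffer with buf[:0] so it is reused rather than re-sliced. A rough sketch of SetBuffer, with the appended fragment purely illustrative:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	stream := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 64)
	stream.WriteObjectStart()
	stream.WriteObjectField("pre")
	stream.WriteInt(1)

	// Append pre-rendered JSON directly to the internal buffer.
	buf := stream.Buffer()
	buf = append(buf, `,"raw":[1,2,3]`...)
	stream.SetBuffer(buf)

	stream.WriteObjectEnd()
	fmt.Println(string(stream.Buffer())) // {"pre":1,"raw":[1,2,3]}
}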


@ -1,6 +1,7 @@
package jsoniter
import (
"fmt"
"math"
"strconv"
)
@ -13,6 +14,10 @@ func init() {
// WriteFloat32 write float32 to stream
func (stream *Stream) WriteFloat32(val float32) {
if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
stream.Error = fmt.Errorf("unsupported value: %f", val)
return
}
abs := math.Abs(float64(val))
fmt := byte('f')
// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
@ -22,10 +27,22 @@ func (stream *Stream) WriteFloat32(val float32) {
}
}
stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 32)
if fmt == 'e' {
// clean up e-09 to e-9
n := len(stream.buf)
if n >= 4 && stream.buf[n-4] == 'e' && stream.buf[n-3] == '-' && stream.buf[n-2] == '0' {
stream.buf[n-2] = stream.buf[n-1]
stream.buf = stream.buf[:n-1]
}
}
}
// WriteFloat32Lossy write float32 to stream with ONLY 6 digits precision although much much faster
func (stream *Stream) WriteFloat32Lossy(val float32) {
if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
stream.Error = fmt.Errorf("unsupported value: %f", val)
return
}
if val < 0 {
stream.writeByte('-')
val = -val
@ -54,6 +71,10 @@ func (stream *Stream) WriteFloat32Lossy(val float32) {
// WriteFloat64 write float64 to stream
func (stream *Stream) WriteFloat64(val float64) {
if math.IsInf(val, 0) || math.IsNaN(val) {
stream.Error = fmt.Errorf("unsupported value: %f", val)
return
}
abs := math.Abs(val)
fmt := byte('f')
// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
@ -63,10 +84,22 @@ func (stream *Stream) WriteFloat64(val float64) {
}
}
stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 64)
if fmt == 'e' {
// clean up e-09 to e-9
n := len(stream.buf)
if n >= 4 && stream.buf[n-4] == 'e' && stream.buf[n-3] == '-' && stream.buf[n-2] == '0' {
stream.buf[n-2] = stream.buf[n-1]
stream.buf = stream.buf[:n-1]
}
}
}
// WriteFloat64Lossy write float64 to stream with ONLY 6 digits precision although much much faster
func (stream *Stream) WriteFloat64Lossy(val float64) {
if math.IsInf(val, 0) || math.IsNaN(val) {
stream.Error = fmt.Errorf("unsupported value: %f", val)
return
}
if val < 0 {
stream.writeByte('-')
val = -val
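With the guards above, Inf and NaN now surface an error instead of producing invalid JSON, and the exponent cleanup writes 1e-7 rather than 1e-07 (as the float tests further down assert). A quick sketch:

package main

import (
	"fmt"
	"math"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Inf/NaN are not representable in JSON, so Marshal reports an error.
	_, err := jsoniter.Marshal(math.Inf(1))
	fmt.Println(err != nil) // true

	// Small magnitudes switch to exponent form without the padded zero.
	out, _ := jsoniter.Marshal(0.0000001)
	fmt.Println(string(out)) // 1e-7
}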


@ -17,16 +17,16 @@ func init() {
func writeFirstBuf(space []byte, v uint32) []byte {
start := v >> 24
if start == 0 {
space = append(space, byte(v >> 16), byte(v >> 8))
space = append(space, byte(v>>16), byte(v>>8))
} else if start == 1 {
space = append(space, byte(v >> 8))
space = append(space, byte(v>>8))
}
space = append(space, byte(v))
return space
}
func writeBuf(buf []byte, v uint32) []byte {
return append(buf, byte(v >> 16), byte(v >> 8), byte(v))
return append(buf, byte(v>>16), byte(v>>8), byte(v))
}
// WriteUint8 write uint8 to stream
@ -91,7 +91,7 @@ func (stream *Stream) WriteUint32(val uint32) {
stream.buf = writeFirstBuf(stream.buf, digits[q2])
} else {
r3 := q2 - q3*1000
stream.buf = append(stream.buf, byte(q3 + '0'))
stream.buf = append(stream.buf, byte(q3+'0'))
stream.buf = writeBuf(stream.buf, digits[r3])
}
stream.buf = writeBuf(stream.buf, digits[r2])


@ -1,8 +1,9 @@
package jsoniter
import (
"github.com/stretchr/testify/require"
"testing"
"github.com/stretchr/testify/require"
)
func Test_writeByte_should_grow_buffer(t *testing.T) {
@ -62,8 +63,24 @@ func (w *NopWriter) Write(p []byte) (n int, err error) {
}
func Test_flush_buffer_should_stop_grow_buffer(t *testing.T) {
// Stream an array of a zillion zeros.
writer := new(NopWriter)
NewEncoder(writer).Encode(make([]int, 10000000))
stream := NewStream(ConfigDefault, writer, 512)
stream.WriteArrayStart()
for i := 0; i < 10000000; i++ {
stream.WriteInt(0)
stream.WriteMore()
stream.Flush()
}
stream.WriteInt(0)
stream.WriteArrayEnd()
// Confirm that the buffer didn't have to grow.
should := require.New(t)
should.Equal(8, writer.bufferSize)
// 512 is the internal buffer size set in NewEncoder
//
// Flush is called after each array element, so only the first 8 bytes of it
// is ever used, and it is never extended. Capacity remains 512.
should.Equal(512, writer.bufferSize)
}


@ -60,4 +60,4 @@ func init() {
}
type structEmpty struct{}
type arrayAlis [4]stringAlias
type arrayAlis [4]stringAlias


@ -1,8 +1,12 @@
// +build go1.15
// remove these tests temporarily until https://github.com/golang/go/issues/38105 and
// https://github.com/golang/go/issues/38940 is fixed
package test
import (
"strings"
"encoding"
"strings"
)
func init() {
@ -26,7 +30,6 @@ func (k *stringKeyType) UnmarshalText(text []byte) error {
var _ encoding.TextMarshaler = stringKeyType("")
var _ encoding.TextUnmarshaler = new(stringKeyType)
type structKeyType struct {
X string
}
@ -41,4 +44,4 @@ func (k *structKeyType) UnmarshalText(text []byte) error {
}
var _ encoding.TextMarshaler = structKeyType{}
var _ encoding.TextUnmarshaler = &structKeyType{}
var _ encoding.TextUnmarshaler = &structKeyType{}


@ -3,8 +3,8 @@ package test
import (
"bytes"
"encoding/base64"
"strings"
"encoding/json"
"strings"
)
type StringMarshaler string
@ -49,4 +49,4 @@ var _ json.Unmarshaler = new(StringMarshaler)
func init() {
testCases = append(testCases, (*StringMarshaler)(nil))
}
}


@ -1,10 +1,10 @@
package test
import (
"strings"
"encoding/base64"
"bytes"
"encoding/base64"
"encoding/json"
"strings"
)
type structMarshaler struct {


@ -91,7 +91,6 @@ func (p *jsonMarshaler) UnmarshalJSON(input []byte) error {
return nil
}
type jsonMarshalerMap map[int]int
func (p *jsonMarshalerMap) MarshalJSON() ([]byte, error) {
@ -117,11 +116,10 @@ func (p *textMarshaler) UnmarshalText(input []byte) error {
type textMarshalerMap map[int]int
func (p *textMarshalerMap) MarshalText() ([]byte, error) {
return []byte(`{}`), nil
}
func (p *textMarshalerMap) UnmarshalText(input []byte) error {
return nil
}
}


@ -60,6 +60,7 @@ func init() {
(*SameLevel2NoTags)(nil),
(*SameLevel2Tagged)(nil),
(*EmbeddedPtr)(nil),
(*UnnamedLiteral)(nil),
)
}
@ -230,4 +231,8 @@ type EmbeddedPtrOption struct {
type EmbeddedPtr struct {
EmbeddedPtrOption `json:","`
}
}
type UnnamedLiteral struct {
_ struct{}
}


@ -121,10 +121,11 @@ func init() {
F1 int32 `json:"F1"`
F2 int32 `json:"F2,string"`
})(nil),
(*struct {
F1 string `json:"F1"`
F2 string `json:"F2,string"`
})(nil),
// remove temporarily until https://github.com/golang/go/issues/38126 is fixed
// (*struct {
// F1 string `json:"F1"`
// F2 string `json:"F2,string"`
// })(nil),
(*struct {
F1 uint8 `json:"F1"`
F2 uint8 `json:"F2,string"`
@ -145,6 +146,9 @@ func init() {
(*struct {
Field bool `json:",omitempty,string"`
})(nil),
(*struct {
Field bool `json:"中文"`
})(nil),
)
}
@ -192,12 +196,12 @@ type StringFieldNameE struct {
}
type StringFieldName struct {
F1 string `json:"F1"`
F2 string `json:"f2"`
F3 string `json:"-"`
F4 string `json:"-,"`
F5 string `json:","`
F6 string `json:""`
F1 string `json:"F1"`
F2 string `json:"f2"`
F3 string `json:"-"`
F4 string `json:"-,"`
F5 string `json:","`
F6 string `json:""`
StringFieldNameE `json:"e"`
}


@ -1,10 +1,10 @@
package test
import (
"strings"
"encoding"
"bytes"
"encoding"
"encoding/base64"
"strings"
)
func init() {


@ -2,9 +2,9 @@ package test
import (
"bytes"
"encoding"
"encoding/base64"
"strings"
"encoding"
)
func init() {
@ -66,4 +66,4 @@ func (m *structTextMarshaler) UnmarshalText(text []byte) error {
var _ encoding.TextMarshaler = structTextMarshaler{}
var _ encoding.TextUnmarshaler = &structTextMarshaler{}
type structTextMarshalerAlias structTextMarshaler
type structTextMarshalerAlias structTextMarshaler


@ -1,15 +1,15 @@
package test
import (
"testing"
"reflect"
"fmt"
"github.com/google/gofuzz"
"strings"
"github.com/json-iterator/go"
"encoding/json"
"bytes"
"encoding/json"
"fmt"
"github.com/davecgh/go-spew/spew"
"github.com/google/gofuzz"
"github.com/json-iterator/go"
"reflect"
"strings"
"testing"
)
var testCases []interface{}


@ -8,13 +8,13 @@ func init() {
[2]*float64{},
)
unmarshalCases = append(unmarshalCases, unmarshalCase{
ptr: (*[0]int)(nil),
ptr: (*[0]int)(nil),
input: `[1]`,
}, unmarshalCase{
ptr: (*[1]int)(nil),
ptr: (*[1]int)(nil),
input: `[2]`,
}, unmarshalCase{
ptr: (*[1]int)(nil),
ptr: (*[1]int)(nil),
input: `[]`,
})
}


@ -8,16 +8,16 @@ func init() {
return &val
}
unmarshalCases = append(unmarshalCases, unmarshalCase{
ptr: (**interface{})(nil),
ptr: (**interface{})(nil),
input: `"hello"`,
}, unmarshalCase{
ptr: (**interface{})(nil),
ptr: (**interface{})(nil),
input: `1e1`,
}, unmarshalCase{
ptr: (**interface{})(nil),
ptr: (**interface{})(nil),
input: `1.0e1`,
}, unmarshalCase{
ptr: (*[]interface{})(nil),
ptr: (*[]interface{})(nil),
input: `[1.0e1]`,
}, unmarshalCase{
ptr: (*struct {
@ -66,12 +66,12 @@ func init() {
}{"hello"},
struct {
Field interface{}
}{struct{
}{struct {
field chan int
}{}},
struct {
Field interface{}
}{struct{
}{struct {
Field *int
}{pInt(100)}},
)

value_tests/error_test.go (new file)

@ -0,0 +1,36 @@
package test
import (
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"reflect"
"testing"
)
func Test_errorInput(t *testing.T) {
for _, testCase := range unmarshalCases {
if testCase.obj != nil {
continue
}
valType := reflect.TypeOf(testCase.ptr).Elem()
t.Run(valType.String(), func(t *testing.T) {
for _, data := range []string{
`x`,
`n`,
`nul`,
`{x}`,
`{"x"}`,
`{"x": "y"x}`,
`{"x": "y"`,
`{"x": "y", "a"}`,
`[`,
`[{"x": "y"}`,
} {
ptrVal := reflect.New(valType)
ptr := ptrVal.Interface()
err := jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(data), ptr)
require.Error(t, err, "on input %q", data)
}
})
}
}


@ -1,13 +1,14 @@
package test
import (
"fmt"
"testing"
"github.com/stretchr/testify/require"
"strconv"
"bytes"
"github.com/json-iterator/go"
"encoding/json"
"fmt"
"strconv"
"testing"
jsoniter "github.com/json-iterator/go"
"github.com/stretchr/testify/require"
)
func Test_read_float(t *testing.T) {
@ -50,7 +51,6 @@ func Test_read_float(t *testing.T) {
}
}
func Test_write_float32(t *testing.T) {
vals := []float32{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff,
-0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
@ -89,7 +89,7 @@ func Test_write_float32(t *testing.T) {
stream = jsoniter.NewStream(jsoniter.ConfigDefault, nil, 0)
stream.WriteFloat32(float32(0.0000001))
should.Equal("1e-07", string(stream.Buffer()))
should.Equal("1e-7", string(stream.Buffer()))
}
func Test_write_float64(t *testing.T) {
@ -126,5 +126,5 @@ func Test_write_float64(t *testing.T) {
stream = jsoniter.NewStream(jsoniter.ConfigDefault, nil, 0)
stream.WriteFloat64(float64(0.0000001))
should.Equal("1e-07", string(stream.Buffer()))
}
should.Equal("1e-7", string(stream.Buffer()))
}


@ -17,7 +17,7 @@ func init() {
pCloser2("hello"),
)
unmarshalCases = append(unmarshalCases, unmarshalCase{
ptr: (*[]io.Closer)(nil),
ptr: (*[]io.Closer)(nil),
input: "[null]",
}, unmarshalCase{
obj: func() interface{} {


@ -1,12 +1,12 @@
package test
import (
"bytes"
"fmt"
"github.com/json-iterator/go"
"github.com/stretchr/testify/require"
"strconv"
"fmt"
"testing"
"bytes"
"github.com/json-iterator/go"
)
func init() {
@ -158,7 +158,6 @@ func Test_read_int64(t *testing.T) {
}
}
func Test_write_uint8(t *testing.T) {
vals := []uint8{0, 1, 11, 111, 255}
for _, val := range vals {
@ -417,4 +416,4 @@ func Test_write_int64(t *testing.T) {
stream.Flush()
should.Nil(stream.Error)
should.Equal("a4294967295", buf.String())
}
}


@ -3,11 +3,11 @@ package test
import (
"bytes"
"encoding/json"
"github.com/json-iterator/go"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"io"
"testing"
"github.com/json-iterator/go"
)
func Test_missing_object_end(t *testing.T) {
@ -103,18 +103,44 @@ func Test_invalid_float(t *testing.T) {
}
func Test_chan(t *testing.T) {
t.Skip("do not support chan")
type TestObject struct {
MyChan chan bool
MyField int
}
should := require.New(t)
obj := TestObject{}
str, err := json.Marshal(obj)
should.Nil(err)
should.Equal(``, str)
t.Run("Encode channel", func(t *testing.T) {
should := require.New(t)
str, err := jsoniter.Marshal(obj)
should.NotNil(err)
should.Nil(str)
})
t.Run("Encode channel using compatible configuration", func(t *testing.T) {
should := require.New(t)
str, err := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(obj)
should.NotNil(err)
should.Nil(str)
})
}
func Test_invalid_in_map(t *testing.T) {
testMap := map[string]interface{}{"chan": make(chan interface{})}
t.Run("Encode map with invalid content", func(t *testing.T) {
should := require.New(t)
str, err := jsoniter.Marshal(testMap)
should.NotNil(err)
should.Nil(str)
})
t.Run("Encode map with invalid content using compatible configuration", func(t *testing.T) {
should := require.New(t)
str, err := jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(testMap)
should.NotNil(err)
should.Nil(str)
})
}
func Test_invalid_number(t *testing.T) {
@ -223,4 +249,14 @@ func Test_EmptyInput(t *testing.T) {
if err == nil {
t.Errorf("Expected error")
}
}
}
type Foo struct {
A jsoniter.Any
}
func Test_nil_any(t *testing.T) {
should := require.New(t)
data, _ := jsoniter.Marshal(&Foo{})
should.Equal(`{"A":null}`, string(data))
}


@ -1,8 +1,10 @@
package test
import (
"math/big"
"encoding/json"
"fmt"
"math/big"
"time"
)
func init() {
@ -26,17 +28,37 @@ func init() {
},
nilMap,
&nilMap,
map[string]*json.RawMessage{"hello":pRawMessage(json.RawMessage("[]"))},
map[string]*json.RawMessage{"hello": pRawMessage(json.RawMessage("[]"))},
map[Date]bool{{}: true},
map[Date2]bool{{}: true},
map[customKey]string{customKey(1): "bar"},
)
unmarshalCases = append(unmarshalCases, unmarshalCase{
ptr: (*map[string]string)(nil),
ptr: (*map[string]string)(nil),
input: `{"k\"ey": "val"}`,
}, unmarshalCase{
ptr: (*map[string]string)(nil),
ptr: (*map[string]string)(nil),
input: `null`,
}, unmarshalCase{
ptr: (*map[string]*json.RawMessage)(nil),
ptr: (*map[string]*json.RawMessage)(nil),
input: "{\"test\":[{\"key\":\"value\"}]}",
}, unmarshalCase{
ptr: (*map[Date]bool)(nil),
input: `{
"2018-12-12": true,
"2018-12-13": true,
"2018-12-14": true
}`,
}, unmarshalCase{
ptr: (*map[Date2]bool)(nil),
input: `{
"2018-12-12": true,
"2018-12-13": true,
"2018-12-14": true
}`,
}, unmarshalCase{
ptr: (*map[customKey]string)(nil),
input: `{"foo": "bar"}`,
})
}
@ -49,3 +71,62 @@ type MyString string
func (ms MyString) Hello() string {
return string(ms)
}
type Date struct {
time.Time
}
func (d *Date) UnmarshalJSON(b []byte) error {
dateStr := string(b) // something like `"2017-08-20"`
if dateStr == "null" {
return nil
}
t, err := time.Parse(`"2006-01-02"`, dateStr)
if err != nil {
return fmt.Errorf("cant parse date: %#v", err)
}
d.Time = t
return nil
}
func (d *Date) MarshalJSON() ([]byte, error) {
return []byte(d.Time.Format("2006-01-02")), nil
}
type Date2 struct {
time.Time
}
func (d Date2) UnmarshalJSON(b []byte) error {
dateStr := string(b) // something like `"2017-08-20"`
if dateStr == "null" {
return nil
}
t, err := time.Parse(`"2006-01-02"`, dateStr)
if err != nil {
return fmt.Errorf("cant parse date: %#v", err)
}
d.Time = t
return nil
}
func (d Date2) MarshalJSON() ([]byte, error) {
return []byte(d.Time.Format("2006-01-02")), nil
}
type customKey int32
func (c customKey) MarshalText() ([]byte, error) {
return []byte("foo"), nil
}
func (c *customKey) UnmarshalText(value []byte) error {
*c = 1
return nil
}


@ -1,8 +1,8 @@
package test
import (
"encoding/json"
"encoding"
"encoding/json"
)
func init() {
@ -17,20 +17,20 @@ func init() {
tmOfStructInt{},
&tm2,
map[tmOfStruct]int{
tmOfStruct{}: 100,
{}: 100,
},
map[*tmOfStruct]int{
&tmOfStruct{}: 100,
{}: 100,
},
map[encoding.TextMarshaler]int{
tm1: 100,
},
)
unmarshalCases = append(unmarshalCases, unmarshalCase{
ptr: (*tmOfMap)(nil),
ptr: (*tmOfMap)(nil),
input: `"{1:2}"`,
}, unmarshalCase{
ptr: (*tmOfMapPtr)(nil),
ptr: (*tmOfMapPtr)(nil),
input: `"{1:2}"`,
})
}
@ -47,7 +47,6 @@ func (q *jmOfStruct) UnmarshalJSON(value []byte) error {
return nil
}
type tmOfStruct struct {
F2 chan []byte
}

Some files were not shown because too many files have changed in this diff.