Adding upstream version 0.10.5.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent 104c0c203d
commit e733edafba
141 changed files with 102352 additions and 0 deletions
32  .codecov.yml  Normal file
@@ -0,0 +1,32 @@
codecov:
  require_ci_to_pass: yes

coverage:
  precision: 2
  round: down
  range: "70...100"

  status:
    project:
      default:
        target: 70%
        threshold: 2%
    patch: off
    changes: no

parsers:
  gcov:
    branch_detection:
      conditional: yes
      loop: yes
      method: no
      macro: no

comment:
  layout: "header,diff"
  behavior: default
  require_changes: no

ignore:
  - internal/encoder/vm_color
  - internal/encoder/vm_color_indent

1  .github/FUNDING.yml  vendored  Normal file
@@ -0,0 +1 @@
github: [goccy]

92  .github/workflows/go.yml  vendored  Normal file
@@ -0,0 +1,92 @@
|
|||
name: Go
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
jobs:
|
||||
build:
|
||||
name: Build on limited environment
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: build
|
||||
run: docker compose run go-json
|
||||
|
||||
test:
|
||||
name: Test
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ "ubuntu-latest", "macos-latest", "windows-latest" ]
|
||||
go-version: [ "1.19", "1.20", "1.21" ]
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: setup Go ${{ matrix.go-version }}
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ${{ matrix.go-version }}
|
||||
- name: checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: simple test
|
||||
run: go test -v ./... -count=1
|
||||
- name: test with GC pressure
|
||||
run: go test -v ./... -count=1
|
||||
env:
|
||||
GOGC: 1
|
||||
- name: test with race detector
|
||||
run: go test -v -race ./... -count=1
|
||||
|
||||
lint:
|
||||
name: Lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: setup Go
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: '1.21'
|
||||
- name: lint
|
||||
run: |
|
||||
make lint
|
||||
bench:
|
||||
name: Benchmark
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: setup Go
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: '1.21'
|
||||
- name: checkout ( feature )
|
||||
uses: actions/checkout@v3
|
||||
- name: run benchmark ( feature )
|
||||
run: cd benchmarks && go test -bench GoJson | tee $HOME/new.txt
|
||||
- name: install benchstat
|
||||
run: go install golang.org/x/perf/cmd/benchstat@latest
|
||||
- name: checkout ( master )
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: master
|
||||
- name: run benchmark ( master )
|
||||
run: cd benchmarks && go test -bench GoJson | tee $HOME/old.txt
|
||||
- name: compare benchmark results
|
||||
run: benchstat $HOME/old.txt $HOME/new.txt
|
||||
|
||||
coverage:
|
||||
name: Coverage
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: setup Go
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: '1.21'
|
||||
- name: measure coverage
|
||||
run: make cover
|
||||
- uses: codecov/codecov-action@v4
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
verbose: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
2  .gitignore  vendored  Normal file
@@ -0,0 +1,2 @@
cover.html
cover.out

86  .golangci.yml  Normal file
@@ -0,0 +1,86 @@
|
|||
run:
|
||||
skip-files:
|
||||
- encode_optype.go
|
||||
- ".*_test\\.go$"
|
||||
|
||||
linters-settings:
|
||||
govet:
|
||||
enable-all: true
|
||||
disable:
|
||||
- shadow
|
||||
|
||||
linters:
|
||||
enable-all: true
|
||||
disable:
|
||||
- dogsled
|
||||
- dupl
|
||||
- exhaustive
|
||||
- exhaustivestruct
|
||||
- errorlint
|
||||
- forbidigo
|
||||
- funlen
|
||||
- gci
|
||||
- gochecknoglobals
|
||||
- gochecknoinits
|
||||
- gocognit
|
||||
- gocritic
|
||||
- gocyclo
|
||||
- godot
|
||||
- godox
|
||||
- goerr113
|
||||
- gofumpt
|
||||
- gomnd
|
||||
- gosec
|
||||
- ifshort
|
||||
- lll
|
||||
- makezero
|
||||
- nakedret
|
||||
- nestif
|
||||
- nlreturn
|
||||
- paralleltest
|
||||
- testpackage
|
||||
- thelper
|
||||
- wrapcheck
|
||||
- interfacer
|
||||
- lll
|
||||
- nakedret
|
||||
- nestif
|
||||
- nlreturn
|
||||
- testpackage
|
||||
- wsl
|
||||
- varnamelen
|
||||
- nilnil
|
||||
- ireturn
|
||||
- govet
|
||||
- forcetypeassert
|
||||
- cyclop
|
||||
- containedctx
|
||||
- revive
|
||||
- nosnakecase
|
||||
- exhaustruct
|
||||
- depguard
|
||||
|
||||
issues:
|
||||
exclude-rules:
|
||||
# not needed
|
||||
- path: /*.go
|
||||
text: "ST1003: should not use underscores in package names"
|
||||
linters:
|
||||
- stylecheck
|
||||
- path: /*.go
|
||||
text: "don't use an underscore in package name"
|
||||
linters:
|
||||
- golint
|
||||
- path: rtype.go
|
||||
linters:
|
||||
- golint
|
||||
- stylecheck
|
||||
- path: error.go
|
||||
linters:
|
||||
- staticcheck
|
||||
|
||||
# Maximum issues count per one linter. Set to 0 to disable. Default is 50.
|
||||
max-issues-per-linter: 0
|
||||
|
||||
# Maximum count of issues with the same text. Set to 0 to disable. Default is 3.
|
||||
max-same-issues: 0
|
425  CHANGELOG.md  Normal file
@@ -0,0 +1,425 @@
|
|||
# v0.10.2 - 2023/03/20
|
||||
|
||||
### New features
|
||||
|
||||
* Support DebugDOT option for debugging encoder ( #440 )
|
||||
|
||||
### Fix bugs
|
||||
|
||||
* Fix combination of embedding structure and omitempty option ( #442 )
|
||||
|
||||
# v0.10.1 - 2023/03/13
|
||||
|
||||
### Fix bugs
|
||||
|
||||
* Fix checkptr error for array decoder ( #415 )
|
||||
* Fix added buffer size check when decoding key ( #430 )
|
||||
* Fix handling of anonymous fields other than struct ( #431 )
|
||||
* Fix to not optimize when lower conversion can't handle byte-by-byte ( #432 )
|
||||
* Fix a problem that MarshalIndent does not work when UnorderedMap is specified ( #435 )
|
||||
* Fix mapDecoder.DecodeStream() for empty objects containing whitespace ( #425 )
|
||||
* Fix an issue that could not set the correct NextField for fields in the embedded structure ( #438 )
|
||||
|
||||
# v0.10.0 - 2022/11/29
|
||||
|
||||
### New features
|
||||
|
||||
* Support JSON Path ( #250 )
|
||||
|
||||
### Fix bugs
|
||||
|
||||
* Fix marshaler for map's key ( #409 )
|
||||
|
||||
# v0.9.11 - 2022/08/18
|
||||
|
||||
### Fix bugs
|
||||
|
||||
* Fix unexpected behavior when buffer ends with backslash ( #383 )
|
||||
* Fix stream decoding of escaped character ( #387 )
|
||||
|
||||
# v0.9.10 - 2022/07/15
|
||||
|
||||
### Fix bugs
|
||||
|
||||
* Fix boundary exception of type caching ( #382 )
|
||||
|
||||
# v0.9.9 - 2022/07/15
|
||||
|
||||
### Fix bugs
|
||||
|
||||
* Fix encoding of directed interface with typed nil ( #377 )
|
||||
* Fix embedded primitive type encoding using alias ( #378 )
|
||||
* Fix slice/array type encoding with types implementing MarshalJSON ( #379 )
|
||||
* Fix unicode decoding when the expected buffer state is not met after reading ( #380 )
|
||||
|
||||
# v0.9.8 - 2022/06/30
|
||||
|
||||
### Fix bugs
|
||||
|
||||
* Fix decoding of surrogate-pair ( #365 )
|
||||
* Fix handling of embedded primitive type ( #366 )
|
||||
* Add validation of escape sequence for decoder ( #367 )
|
||||
* Fix stream tokenizing respecting UseNumber ( #369 )
|
||||
* Fix encoding when struct pointer type that implements Marshal JSON is embedded ( #375 )
|
||||
|
||||
### Improve performance
|
||||
|
||||
* Improve performance of linkRecursiveCode ( #368 )
|
||||
|
||||
# v0.9.7 - 2022/04/22
|
||||
|
||||
### Fix bugs
|
||||
|
||||
#### Encoder
|
||||
|
||||
* Add filtering process for encoding on slow path ( #355 )
|
||||
* Fix encoding of interface{} with pointer type ( #363 )
|
||||
|
||||
#### Decoder
|
||||
|
||||
* Fix map key decoder that implements UnmarshalJSON ( #353 )
|
||||
* Fix decoding of []uint8 type ( #361 )
|
||||
|
||||
### New features
|
||||
|
||||
* Add DebugWith option for encoder ( #356 )
|
||||
|
||||
# v0.9.6 - 2022/03/22
|
||||
|
||||
### Fix bugs
|
||||
|
||||
* Correct the handling of the minimum value of int type for decoder ( #344 )
|
||||
* Fix bugs of stream decoder's bufferSize ( #349 )
|
||||
* Add a guard to use typeptr more safely ( #351 )
|
||||
|
||||
### Improve decoder performance
|
||||
|
||||
* Improve escapeString's performance ( #345 )
|
||||
|
||||
### Others
|
||||
|
||||
* Update go version for CI ( #347 )
|
||||
|
||||
# v0.9.5 - 2022/03/04
|
||||
|
||||
### Fix bugs
|
||||
|
||||
* Fix panic when decoding time.Time with context ( #328 )
|
||||
* Fix reading the next character in buffer to nul consideration ( #338 )
|
||||
* Fix incorrect handling on skipValue ( #341 )
|
||||
|
||||
### Improve decoder performance
|
||||
|
||||
* Improve performance when a payload contains escape sequence ( #334 )
|
||||
|
||||
# v0.9.4 - 2022/01/21
|
||||
|
||||
* Fix IsNilForMarshaler for string type with omitempty ( #323 )
|
||||
* Fix the case where the embedded field is at the end ( #326 )
|
||||
|
||||
# v0.9.3 - 2022/01/14
|
||||
|
||||
* Fix logic of removing struct field for decoder ( #322 )
|
||||
|
||||
# v0.9.2 - 2022/01/14
|
||||
|
||||
* Add invalid decoder to delay type error judgment at decode ( #321 )
|
||||
|
||||
# v0.9.1 - 2022/01/11
|
||||
|
||||
* Fix encoding of MarshalText/MarshalJSON operation with head offset ( #319 )
|
||||
|
||||
# v0.9.0 - 2022/01/05
|
||||
|
||||
### New feature
|
||||
|
||||
* Supports dynamic filtering of struct fields ( #314 )
|
||||
|
||||
### Improve encoding performance
|
||||
|
||||
* Improve map encoding performance ( #310 )
|
||||
* Optimize encoding path for escaped string ( #311 )
|
||||
* Add encoding option for performance ( #312 )
|
||||
|
||||
### Fix bugs
|
||||
|
||||
* Fix panic at encoding map value on 1.18 ( #310 )
|
||||
* Fix MarshalIndent for interface type ( #317 )
|
||||
|
||||
# v0.8.1 - 2021/12/05
|
||||
|
||||
* Fix operation conversion from PtrHead to Head in Recursive type ( #305 )
|
||||
|
||||
# v0.8.0 - 2021/12/02
|
||||
|
||||
* Fix embedded field conflict behavior ( #300 )
|
||||
* Refactor compiler for encoder ( #301 #302 )
|
||||
|
||||
# v0.7.10 - 2021/10/16
|
||||
|
||||
* Fix conversion from pointer to uint64 ( #294 )
|
||||
|
||||
# v0.7.9 - 2021/09/28
|
||||
|
||||
* Fix encoding of nil value about interface type that has method ( #291 )
|
||||
|
||||
# v0.7.8 - 2021/09/01
|
||||
|
||||
* Fix mapassign_faststr for indirect struct type ( #283 )
|
||||
* Fix encoding of not empty interface type ( #284 )
|
||||
* Fix encoding of empty struct interface type ( #286 )
|
||||
|
||||
# v0.7.7 - 2021/08/25
|
||||
|
||||
* Fix invalid utf8 on stream decoder ( #279 )
|
||||
* Fix buffer length bug on string stream decoder ( #280 )
|
||||
|
||||
Thank you @orisano !!
|
||||
|
||||
# v0.7.6 - 2021/08/13
|
||||
|
||||
* Fix nil slice assignment ( #276 )
|
||||
* Improve error message ( #277 )
|
||||
|
||||
# v0.7.5 - 2021/08/12
|
||||
|
||||
* Fix encoding of embedded struct with tags ( #265 )
|
||||
* Fix encoding of embedded struct that isn't first field ( #272 )
|
||||
* Fix decoding of binary type with escaped char ( #273 )
|
||||
|
||||
# v0.7.4 - 2021/07/06
|
||||
|
||||
* Fix encoding of indirect layout structure ( #264 )
|
||||
|
||||
# v0.7.3 - 2021/06/29
|
||||
|
||||
* Fix encoding of pointer type in empty interface ( #262 )
|
||||
|
||||
# v0.7.2 - 2021/06/26
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Add decoder for func type to fix decoding of nil function value ( #257 )
|
||||
* Fix stream decoding of []byte type ( #258 )
|
||||
|
||||
### Performance
|
||||
|
||||
* Improve decoding performance of map[string]interface{} type ( use `mapassign_faststr` ) ( #256 )
|
||||
* Improve encoding performance of empty interface type ( remove recursive calling of `vm.Run` ) ( #259 )
|
||||
|
||||
### Benchmark
|
||||
|
||||
* Add bytedance/sonic as benchmark target ( #254 )
|
||||
|
||||
# v0.7.1 - 2021/06/18
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Fix error when unmarshal empty array ( #253 )
|
||||
|
||||
# v0.7.0 - 2021/06/12
|
||||
|
||||
### Support context for MarshalJSON and UnmarshalJSON ( #248 )
|
||||
|
||||
* json.MarshalContext(context.Context, interface{}, ...json.EncodeOption) ([]byte, error)
|
||||
* json.NewEncoder(io.Writer).EncodeContext(context.Context, interface{}, ...json.EncodeOption) error
|
||||
* json.UnmarshalContext(context.Context, []byte, interface{}, ...json.DecodeOption) error
|
||||
* json.NewDecoder(io.Reader).DecodeContext(context.Context, interface{}) error
|
||||
|
||||
```go
|
||||
type MarshalerContext interface {
|
||||
MarshalJSON(context.Context) ([]byte, error)
|
||||
}
|
||||
|
||||
type UnmarshalerContext interface {
|
||||
UnmarshalJSON(context.Context, []byte) error
|
||||
}
|
||||
```
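For illustration, here is a minimal sketch of how the context-aware APIs above could be used; the `User` type is hypothetical, and the manual string concatenation assumes `Name` needs no escaping.

```go
package main

import (
	"context"
	"fmt"

	"github.com/goccy/go-json"
)

// User implements the MarshalerContext interface shown above (hypothetical example type).
type User struct {
	Name string
}

func (u *User) MarshalJSON(ctx context.Context) ([]byte, error) {
	// Request-scoped values can be read from ctx here.
	return []byte(`{"name":"` + u.Name + `"}`), nil
}

func main() {
	b, err := json.MarshalContext(context.Background(), &User{Name: "alice"})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // {"name":"alice"}
}
```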
|
||||
|
||||
### Add DecodeFieldPriorityFirstWin option ( #242 )
|
||||
|
||||
In the default behavior, go-json, like encoding/json, reflects the result of the last occurrence when a field with the same name exists more than once. New options have been added to change this behavior: the `json.DecodeFieldPriorityFirstWin` option reflects the result of the first occurrence instead. This behavior also has a performance advantage, since the remaining input can be skipped once all fields have been evaluated.
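A short sketch of the option in use, assuming it is passed through `UnmarshalWithOption` ( the duplicated-key payload is only for illustration ):

```go
package main

import (
	"fmt"

	"github.com/goccy/go-json"
)

func main() {
	// "a" appears twice; by default the last value (2) wins,
	// with DecodeFieldPriorityFirstWin the first value (1) wins.
	data := []byte(`{"a":1,"a":2}`)

	var v struct {
		A int `json:"a"`
	}
	if err := json.UnmarshalWithOption(data, &v, json.DecodeFieldPriorityFirstWin()); err != nil {
		panic(err)
	}
	fmt.Println(v.A) // 1
}
```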
|
||||
|
||||
### Fix encoder
|
||||
|
||||
* Fix indent number contains recursive type ( #249 )
|
||||
* Fix encoding of using empty interface as map key ( #244 )
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Fix decoding fields containing escaped characters ( #237 )
|
||||
|
||||
### Refactor
|
||||
|
||||
* Move some tests to subdirectory ( #243 )
|
||||
* Refactor package layout for decoder ( #238 )
|
||||
|
||||
# v0.6.1 - 2021/06/02
|
||||
|
||||
### Fix encoder
|
||||
|
||||
* Fix value of totalLength for encoding ( #236 )
|
||||
|
||||
# v0.6.0 - 2021/06/01
|
||||
|
||||
### Support Colorize option for encoding (#233)
|
||||
|
||||
```go
|
||||
b, err := json.MarshalWithOption(v, json.Colorize(json.DefaultColorScheme))
|
||||
if err != nil {
|
||||
...
|
||||
}
|
||||
fmt.Println(string(b)) // print colored json
|
||||
```
|
||||
|
||||
### Refactor
|
||||
|
||||
* Fix opcode layout - Adjust memory layout of the opcode to 128 bytes in a 64-bit environment ( #230 )
|
||||
* Refactor encode option ( #231 )
|
||||
* Refactor escape string ( #232 )
|
||||
|
||||
# v0.5.1 - 2021/5/20
|
||||
|
||||
### Optimization
|
||||
|
||||
* Add type addrShift to enable bigger encoder/decoder cache ( #213 )
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Keep original reference of slice element ( #229 )
|
||||
|
||||
### Refactor
|
||||
|
||||
* Refactor Debug mode for encoding ( #226 )
|
||||
* Generate VM sources for encoding ( #227 )
|
||||
* Refactor validator for null/true/false for decoding ( #221 )
|
||||
|
||||
# v0.5.0 - 2021/5/9
|
||||
|
||||
### Supports using omitempty and string tags at the same time ( #216 )
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Fix stream decoder for unicode char ( #215 )
|
||||
* Fix decoding of slice element ( #219 )
|
||||
* Fix calculating of buffer length for stream decoder ( #220 )
|
||||
|
||||
### Refactor
|
||||
|
||||
* replace skipWhiteSpace goto by loop ( #212 )
|
||||
|
||||
# v0.4.14 - 2021/5/4
|
||||
|
||||
### Benchmark
|
||||
|
||||
* Add valyala/fastjson to benchmark ( #193 )
|
||||
* Add benchmark task for CI ( #211 )
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Fix decoding of slice with unmarshal json type ( #198 )
|
||||
* Fix decoding of null value for interface type that does not implement Unmarshaler ( #205 )
|
||||
* Fix decoding of null value to []byte by json.Unmarshal ( #206 )
|
||||
* Fix decoding of backslash char at the end of string ( #207 )
|
||||
* Fix stream decoder for null/true/false value ( #208 )
|
||||
* Fix stream decoder for slow reader ( #211 )
|
||||
|
||||
### Performance
|
||||
|
||||
* If cap of slice is enough, reuse slice data for compatibility with encoding/json ( #200 )
|
||||
|
||||
# v0.4.13 - 2021/4/20
|
||||
|
||||
### Fix json.Compact and json.Indent
|
||||
|
||||
* Support validation the input buffer for json.Compact and json.Indent ( #189 )
|
||||
* Optimize json.Compact and json.Indent ( improve memory footprint ) ( #190 )
|
||||
|
||||
# v0.4.12 - 2021/4/15
|
||||
|
||||
### Fix encoder
|
||||
|
||||
* Fix unnecessary indent for empty slice type ( #181 )
|
||||
* Fix encoding of omitempty feature for the slice or interface type ( #183 )
|
||||
* Fix encoding custom types zero values with omitempty when marshaller exists ( #187 )
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Fix decoder for invalid top level value ( #184 )
|
||||
* Fix decoder for invalid number value ( #185 )
|
||||
|
||||
# v0.4.11 - 2021/4/3
|
||||
|
||||
* Improve decoder performance for interface type
|
||||
|
||||
# v0.4.10 - 2021/4/2
|
||||
|
||||
### Fix encoder
|
||||
|
||||
* Fixed a bug when encoding slice and map containing recursive structures
|
||||
* Fixed a logic to determine if indirect reference
|
||||
|
||||
# v0.4.9 - 2021/3/29
|
||||
|
||||
### Add debug mode
|
||||
|
||||
If you use `json.MarshalWithOption(v, json.Debug())` and a `panic` occurs inside `go-json`, debug information is printed to the console.
|
||||
|
||||
### Support a new feature to compatible with encoding/json
|
||||
|
||||
- invalid UTF-8 is coerced to valid UTF-8 ( without performance down )
|
||||
|
||||
### Fix encoder
|
||||
|
||||
- Fixed handling of MarshalJSON of function type
|
||||
|
||||
### Fix decoding of slice of pointer type
|
||||
|
||||
If a pointer value already exists, go-json uses it ( this behavior is necessary to support prioritizing pre-filled values ). However, since slices are reused internally, there was a bug where the previous element's pointer value was referenced. Since the pointer value does not need to be consulted in advance for slice elements, each slice element is now explicitly initialized to `nil`.
|
||||
|
||||
# v0.4.8 - 2021/3/21
|
||||
|
||||
### Reduce memory usage at compile time
|
||||
|
||||
* go-json used to require about 2GB of memory at compile time; it can now be compiled with less than about 550MB.
|
||||
|
||||
### Fix any encoder's bug
|
||||
|
||||
* Add many test cases for encoder
|
||||
* Fix composite type ( slice/array/map )
|
||||
* Fix pointer types
|
||||
* Fix encoding of MarshalJSON or MarshalText or json.Number type
|
||||
|
||||
### Refactor encoder
|
||||
|
||||
* Change package layout for reducing memory usage at compile
|
||||
* Remove anonymous and only operation
|
||||
* Remove root property from encodeCompileContext and opcode
|
||||
|
||||
### Fix CI
|
||||
|
||||
* Add Go 1.16
|
||||
* Remove Go 1.13
|
||||
* Fix `make cover` task
|
||||
|
||||
### Number/Delim/Token/RawMessage use the types defined in encoding/json by type alias
|
||||
|
||||
# v0.4.7 - 2021/02/22
|
||||
|
||||
### Fix decoder
|
||||
|
||||
* Fix decoding of deep recursive structure
|
||||
* Fix decoding of embedded unexported pointer field
|
||||
* Fix invalid test case
|
||||
* Fix decoding of invalid value
|
||||
* Fix decoding of prefilled value
|
||||
* Fix not being able to return UnmarshalTypeError when it should be returned
|
||||
* Fix decoding of null value
|
||||
* Fix decoding of type of null string
|
||||
* Use pre allocated pointer if exists it at decoding
|
||||
|
||||
### Reduce memory usage at compile
|
||||
|
||||
* Integrate int/int8/int16/int32/int64 and uint/uint8/uint16/uint32/uint64 operation to reduce memory usage at compile
|
||||
|
||||
### Remove unnecessary optype
|
21  LICENSE  Normal file
@@ -0,0 +1,21 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2020 Masaaki Goshima
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
39  Makefile  Normal file
@@ -0,0 +1,39 @@
|
|||
PKG := github.com/goccy/go-json
|
||||
|
||||
BIN_DIR := $(CURDIR)/bin
|
||||
PKGS := $(shell go list ./... | grep -v internal/cmd|grep -v test)
|
||||
COVER_PKGS := $(foreach pkg,$(PKGS),$(subst $(PKG),.,$(pkg)))
|
||||
|
||||
COMMA := ,
|
||||
EMPTY :=
|
||||
SPACE := $(EMPTY) $(EMPTY)
|
||||
COVERPKG_OPT := $(subst $(SPACE),$(COMMA),$(COVER_PKGS))
|
||||
|
||||
$(BIN_DIR):
|
||||
@mkdir -p $(BIN_DIR)
|
||||
|
||||
.PHONY: cover
|
||||
cover:
|
||||
go test -coverpkg=$(COVERPKG_OPT) -coverprofile=cover.out ./...
|
||||
|
||||
.PHONY: cover-html
|
||||
cover-html: cover
|
||||
go tool cover -html=cover.out
|
||||
|
||||
.PHONY: lint
|
||||
lint: golangci-lint
|
||||
$(BIN_DIR)/golangci-lint run
|
||||
|
||||
golangci-lint: | $(BIN_DIR)
|
||||
@{ \
|
||||
set -e; \
|
||||
GOLANGCI_LINT_TMP_DIR=$$(mktemp -d); \
|
||||
cd $$GOLANGCI_LINT_TMP_DIR; \
|
||||
go mod init tmp; \
|
||||
GOBIN=$(BIN_DIR) go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.54.2; \
|
||||
rm -rf $$GOLANGCI_LINT_TMP_DIR; \
|
||||
}
|
||||
|
||||
.PHONY: generate
|
||||
generate:
|
||||
go generate ./internal/...
|
529  README.md  Normal file
@@ -0,0 +1,529 @@
|
|||
# go-json
|
||||
|
||||

|
||||
[](https://pkg.go.dev/github.com/goccy/go-json?tab=doc)
|
||||
[](https://codecov.io/gh/goccy/go-json)
|
||||
|
||||
Fast JSON encoder/decoder compatible with encoding/json for Go
|
||||
|
||||
<img width="400px" src="https://user-images.githubusercontent.com/209884/92572337-42b42900-f2bf-11ea-973a-c74a359553a5.png"></img>
|
||||
|
||||
# Roadmap
|
||||
|
||||
```
|
||||
* version ( expected release date )
|
||||
|
||||
* v0.9.0
|
||||
|
|
||||
| while maintaining compatibility with encoding/json, we will add convenient APIs
|
||||
|
|
||||
v
|
||||
* v1.0.0
|
||||
```
|
||||
|
||||
We are accepting requests for features that will be implemented between v0.9.0 and v1.0.0.
|
||||
If you have the API you need, please submit your issue [here](https://github.com/goccy/go-json/issues).
|
||||
|
||||
# Features
|
||||
|
||||
- Drop-in replacement of `encoding/json`
|
||||
- Fast ( See [Benchmark section](https://github.com/goccy/go-json#benchmarks) )
|
||||
- Flexible customization with options
|
||||
- Coloring the encoded string
|
||||
- Can propagate context.Context to `MarshalJSON` or `UnmarshalJSON`
|
||||
- Can dynamically filter the fields of the structure type-safely
|
||||
|
||||
# Installation
|
||||
|
||||
```
|
||||
go get github.com/goccy/go-json
|
||||
```
|
||||
|
||||
# How to use
|
||||
|
||||
Replace import statement from `encoding/json` to `github.com/goccy/go-json`
|
||||
|
||||
```
|
||||
-import "encoding/json"
|
||||
+import "github.com/goccy/go-json"
|
||||
```
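After swapping the import, existing code keeps working unchanged. A minimal sketch ( the `Person` type is only an example ):

```go
package main

import (
	"fmt"

	"github.com/goccy/go-json"
)

type Person struct {
	Name string `json:"name"`
	Age  int    `json:"age"`
}

func main() {
	b, err := json.Marshal(&Person{Name: "alice", Age: 20})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // {"name":"alice","age":20}

	var p Person
	if err := json.Unmarshal(b, &p); err != nil {
		panic(err)
	}
	fmt.Println(p.Name, p.Age) // alice 20
}
```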
|
||||
|
||||
# JSON library comparison
|
||||
|
||||
| name | encoder | decoder | compatible with `encoding/json` |
|
||||
| :----: | :------: | :-----: | :-----------------------------: |
|
||||
| encoding/json | yes | yes | N/A |
|
||||
| [json-iterator/go](https://github.com/json-iterator/go) | yes | yes | partial |
|
||||
| [easyjson](https://github.com/mailru/easyjson) | yes | yes | no |
|
||||
| [gojay](https://github.com/francoispqt/gojay) | yes | yes | no |
|
||||
| [segmentio/encoding/json](https://github.com/segmentio/encoding/tree/master/json) | yes | yes | partial |
|
||||
| [jettison](https://github.com/wI2L/jettison) | yes | no | no |
|
||||
| [simdjson-go](https://github.com/minio/simdjson-go) | no | yes | no |
|
||||
| goccy/go-json | yes | yes | yes |
|
||||
|
||||
- `json-iterator/go` isn't compatible with `encoding/json` in many ways ( e.g. https://github.com/json-iterator/go/issues/229 ), and these incompatibilities have remained unaddressed for a long time.
- `segmentio/encoding/json` has good encoder support, but some decoder APIs, such as `Token` ( streaming decode ), are not supported.
|
||||
|
||||
## Other libraries
|
||||
|
||||
- [jingo](https://github.com/bet365/jingo)
|
||||
|
||||
I tried to benchmark it, but it didn't work.
It also seems to panic when it receives an unexpected value, because there is no error handling...
|
||||
|
||||
- [ffjson](https://github.com/pquerna/ffjson)
|
||||
|
||||
Benchmarking gave very slow results.
It seems to assume that the user manages the buffer pool correctly.
Development also appears to have stopped already.
|
||||
|
||||
# Benchmarks
|
||||
|
||||
```
|
||||
$ cd benchmarks
|
||||
$ go test -bench .
|
||||
```
|
||||
|
||||
## Encode
|
||||
|
||||
<img width="700px" src="https://user-images.githubusercontent.com/209884/107126758-0845cb00-68f5-11eb-8db7-086fcf9bcfaa.png"></img>
|
||||
<img width="700px" src="https://user-images.githubusercontent.com/209884/107126757-07ad3480-68f5-11eb-87aa-858cc5eacfcb.png"></img>
|
||||
|
||||
## Decode
|
||||
|
||||
<img width="700" alt="" src="https://user-images.githubusercontent.com/209884/107979944-bd1d6d80-7002-11eb-944b-9d17b6674e3f.png">
|
||||
<img width="700" alt="" src="https://user-images.githubusercontent.com/209884/107979931-b989e680-7002-11eb-87a0-66fc22d90dd4.png">
|
||||
<img width="700" alt="" src="https://user-images.githubusercontent.com/209884/107979940-bc84d700-7002-11eb-9647-869bbc25c9d9.png">
|
||||
|
||||
|
||||
# Fuzzing
|
||||
|
||||
[go-json-fuzz](https://github.com/goccy/go-json-fuzz) is the repository for fuzzing tests.
|
||||
If you run the test in this repository and find a bug, please commit to corpus to go-json-fuzz and report the issue to [go-json](https://github.com/goccy/go-json/issues).
|
||||
|
||||
# How it works
|
||||
|
||||
`go-json` is very fast in both encoding and decoding compared to other libraries.
|
||||
It would be easier to gain performance through automatic code generation or a dedicated interface, but `go-json` deliberately sticks to compatibility with `encoding/json` and to a simple interface. Despite this constraint, we are developing it with the aim of being the fastest library.
|
||||
|
||||
Here, we explain the various speed-up techniques implemented by `go-json`.
|
||||
|
||||
## Basic technique
|
||||
|
||||
The techniques listed here are the ones used by most of the libraries listed above.
|
||||
|
||||
### Buffer reuse
|
||||
|
||||
Since the only value required for the result of `json.Marshal(interface{}) ([]byte, error)` is `[]byte`, the only value that must be allocated during encoding is the return value `[]byte` .
|
||||
|
||||
Also, as the number of allocations increases, the performance will be affected, so the number of allocations should be kept as low as possible when creating `[]byte`.
|
||||
|
||||
Therefore, there is a technique to reduce the number of times a new buffer must be allocated by reusing the buffer used for the previous encoding by using `sync.Pool`.
|
||||
|
||||
Finally, by allocating a buffer as long as the encoded result and copying the contents into it, in theory only one allocation is needed.
|
||||
|
||||
```go
|
||||
type buffer struct {
|
||||
data []byte
|
||||
}
|
||||
|
||||
var bufPool = sync.Pool{
|
||||
New: func() interface{} {
|
||||
return &buffer{data: make([]byte, 0, 1024)}
|
||||
},
|
||||
}
|
||||
|
||||
buf := bufPool.Get().(*buffer)
|
||||
data := encode(buf.data) // reuse buf.data
|
||||
|
||||
newBuf := make([]byte, len(data))
|
||||
copy(newBuf, data) // copy into a fresh buffer so the pooled buffer can be reused
|
||||
|
||||
buf.data = data
|
||||
bufPool.Put(buf)
|
||||
```
|
||||
|
||||
### Elimination of reflection
|
||||
|
||||
As you know, the reflection operation is very slow.
|
||||
|
||||
Therefore, using the fact that the address where the type information is stored is fixed for each binary ( we call this address `typeptr` ), we can use that address to dispatch to a pre-built, optimized routine.
|
||||
|
||||
For example, you can obtain the address of the type information from an `interface{}` as follows, and use that address to call a routine that does not rely on reflection.

To process without reflection, a pointer ( `unsafe.Pointer` ) to where the value is stored is passed along with it.
|
||||
|
||||
```go
|
||||
|
||||
type emptyInterface struct {
|
||||
typ unsafe.Pointer
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
var typeToEncoder = map[uintptr]func(unsafe.Pointer)([]byte, error){}
|
||||
|
||||
func Marshal(v interface{}) ([]byte, error) {
|
||||
iface := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
typeptr := uintptr(iface.typ)
|
||||
if enc, exists := typeToEncoder[typeptr]; exists {
|
||||
return enc(iface.ptr)
|
||||
}
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
※ In reality, `typeToEncoder` can be referenced by multiple goroutines, so exclusive control is required.
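A minimal sketch of that exclusive control, guarding the cache map with a `sync.RWMutex`; this is only an illustration of the note above, not the synchronization actually used by `go-json` ( the faster approaches it uses are described in the later sections ).

```go
// Assumes "sync" and "unsafe" are imported.
var (
	typeToEncoderMu sync.RWMutex
	typeToEncoder   = map[uintptr]func(unsafe.Pointer) ([]byte, error){}
)

func lookupEncoder(typeptr uintptr) (func(unsafe.Pointer) ([]byte, error), bool) {
	typeToEncoderMu.RLock()
	enc, exists := typeToEncoder[typeptr]
	typeToEncoderMu.RUnlock()
	return enc, exists
}

func storeEncoder(typeptr uintptr, enc func(unsafe.Pointer) ([]byte, error)) {
	typeToEncoderMu.Lock()
	typeToEncoder[typeptr] = enc
	typeToEncoderMu.Unlock()
}
```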
|
||||
|
||||
## Unique speed-up technique
|
||||
|
||||
## Encoder
|
||||
|
||||
### Do not escape arguments of `Marshal`
|
||||
|
||||
`json.Marshal` and `json.Unmarshal` receive an `interface{}` value and determine its type dynamically in order to process it.
Normally you would use the `reflect` package for that dynamic type determination, but since `reflect.Type` is defined as an `interface`, calling a method of `reflect.Type` causes the argument to escape.
|
||||
|
||||
Therefore, the arguments for `Marshal` and `Unmarshal` are always escaped to the heap.
|
||||
However, `go-json` can use the feature of `reflect.Type` while avoiding escaping.
|
||||
|
||||
`reflect.Type` is defined as `interface`, but in reality `reflect.Type` is implemented only by the structure `rtype` defined in the `reflect` package.
|
||||
For this reason, in practice a `reflect.Type` value is always a `*reflect.rtype`.

Therefore, by handling `*reflect.rtype` ( the implementation of `reflect.Type` ) directly, escaping can be avoided, because the code works with a concrete `struct` instead of an `interface`.
|
||||
|
||||
The technique for working with `*reflect.rtype` directly from `go-json` is implemented at [rtype.go](https://github.com/goccy/go-json/blob/master/internal/runtime/rtype.go)
|
||||
|
||||
Also, the same technique is cut out as a library ( https://github.com/goccy/go-reflect )
|
||||
|
||||
Initially this feature was the default behavior of `go-json`.
|
||||
But after careful testing, I found that when a large value is passed to `json.Marshal()` and the argument cannot be placed on the stack, it is not properly escaped to the heap ( a bug in the Go compiler ).
|
||||
|
||||
Therefore, this feature is provided as an **optional** one until this issue is resolved.
|
||||
|
||||
To use it, call the `NoEscape` variant, e.g. `MarshalNoEscape()`.
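A short usage sketch, assuming the `MarshalNoEscape` variant mentioned above ( the struct is illustrative ):

```go
v := struct {
	Name string `json:"name"`
}{Name: "alice"}

// MarshalNoEscape keeps the argument from escaping to the heap,
// which helps when the value is small enough to live on the stack.
b, err := json.MarshalNoEscape(v)
if err != nil {
	panic(err)
}
fmt.Println(string(b)) // {"name":"alice"}
```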
|
||||
|
||||
### Encoding using opcode sequence
|
||||
|
||||
I explained that you can use `typeptr` to call a pre-built process from type information.
|
||||
|
||||
In other libraries, this dedicated process is dispatched through a function call ( for example an anonymous function ), but function calls are inherently slow and should be avoided as much as possible.

Therefore, `go-json` adopts an instruction-based execution model, the same approach used to implement virtual machines for programming languages.

The first time a type is encoded, go-json builds the opcode ( instruction ) sequence required to encode it. From the second time onward, it uses `typeptr` to fetch the cached, pre-built opcode sequence and encodes based on it. An example of an opcode sequence is shown below.
|
||||
|
||||
```go
|
||||
json.Marshal(struct{
|
||||
X int `json:"x"`
|
||||
Y string `json:"y"`
|
||||
}{X: 1, Y: "hello"})
|
||||
```
|
||||
|
||||
When encoding a structure like the one above, create a sequence of opcodes like this:
|
||||
|
||||
```
|
||||
- opStructFieldHead ( `{` )
|
||||
- opStructFieldInt ( `"x": 1,` )
|
||||
- opStructFieldString ( `"y": "hello"` )
|
||||
- opStructEnd ( `}` )
|
||||
- opEnd
|
||||
```
|
||||
|
||||
※ When each operation is processed, it emits the characters shown on the right.
|
||||
|
||||
In addition, each opcode is managed by the following structure ( pseudocode ).
|
||||
|
||||
```go
|
||||
type opType int
|
||||
const (
|
||||
opStructFieldHead opType = iota
|
||||
opStructFieldInt
|
||||
opStructFieldString
|
||||
opStructEnd
|
||||
opEnd
|
||||
)
|
||||
type opcode struct {
	op     opType
	key    []byte
	offset uintptr // field offset used by the encode loop below
	next   *opcode
}
|
||||
```
|
||||
|
||||
The process of encoding using the opcode sequence is roughly implemented as follows.
|
||||
|
||||
```go
|
||||
func encode(code *opcode, b []byte, p unsafe.Pointer) ([]byte, error) {
|
||||
for {
|
||||
switch code.op {
|
||||
case opStructFieldHead:
|
||||
b = append(b, '{')
|
||||
code = code.next
|
||||
case opStructFieldInt:
|
||||
b = append(b, code.key...)
|
||||
b = appendInt(b, *(*int)(unsafe.Pointer(uintptr(p)+code.offset)))
|
||||
code = code.next
|
||||
case opStructFieldString:
|
||||
b = append(b, code.key...)
|
||||
b = appendString(b, *(*string)(unsafe.Pointer(uintptr(p)+code.offset)))
|
||||
code = code.next
|
||||
case opStructEnd:
|
||||
b = append(b, '}')
|
||||
code = code.next
|
||||
case opEnd:
|
||||
goto END
|
||||
}
|
||||
}
|
||||
END:
|
||||
return b, nil
|
||||
}
|
||||
```
|
||||
|
||||
In this way, encoding proceeds by walking the linked list of opcodes inside one large `switch-case`, avoiding unnecessary function calls.
|
||||
|
||||
### Opcode sequence optimization
|
||||
|
||||
One of the advantages of encoding using the opcode sequence is the ease of optimization.
|
||||
The opcode sequence mentioned above is actually converted into the following optimized operations and used.
|
||||
|
||||
```
|
||||
- opStructFieldHeadInt ( `{"x": 1,` )
|
||||
- opStructEndString ( `"y": "hello"}` )
|
||||
- opEnd
|
||||
```
|
||||
|
||||
It has been reduced from 5 opcodes to 3 opcodes !
|
||||
Reducing the number of opcodes means reducing the number of `switch-case` branches.
|
||||
In other words, the closer the number of operations is to 1, the faster the processing can be performed.
|
||||
|
||||
In `go-json`, the opcode sequence itself is optimized to reduce the number of opcodes as shown above, and encoding is further sped up by preparing opcodes for optimized paths.
|
||||
|
||||
### Change recursive call from CALL to JMP
|
||||
|
||||
Recursive processing is required during encoding if the type is defined recursively as follows:
|
||||
|
||||
```go
|
||||
type T struct {
|
||||
X int
|
||||
U *U
|
||||
}
|
||||
|
||||
type U struct {
|
||||
T *T
|
||||
}
|
||||
|
||||
b, err := json.Marshal(&T{
|
||||
X: 1,
|
||||
U: &U{
|
||||
T: &T{
|
||||
X: 2,
|
||||
},
|
||||
},
|
||||
})
|
||||
fmt.Println(string(b)) // {"X":1,"U":{"T":{"X":2,"U":null}}}
|
||||
```
|
||||
|
||||
In `go-json`, recursive processing is handled by the `opStructFieldRecursive` operation type.
|
||||
|
||||
In this operation, after acquiring the opcode sequence used for the recursive processing, the function is **not** called recursively; instead, the necessary values are saved and execution simply moves on to the next operation.
|
||||
|
||||
The technique of implementing recursive processing with the `JMP` operation while avoiding the `CALL` operation is a famous technique for implementing a high-speed virtual machine.
|
||||
|
||||
For more details, please refer to [the article](https://engineering.mercari.com/blog/entry/1599563768-081104c850) ( but Japanese only ).
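As a rough, hypothetical sketch in terms of the pseudocode `encode` loop above: the recursive opcode pushes a return point onto an explicit stack and jumps into the cached sequence instead of calling `encode` again. None of these names are the actual identifiers used by `go-json`.

```go
// Hypothetical frame saved instead of performing a native CALL.
type frame struct {
	ret *opcode        // opcode to resume at once the recursion finishes
	ptr unsafe.Pointer // base pointer of the caller's value
}

var callStack []frame

// Extra cases inside the big switch of the pseudocode encode loop:
//
//	case opStructFieldRecursive:
//		callStack = append(callStack, frame{ret: code.next, ptr: p})
//		p = resolveRecursivePtr(p, code) // hypothetical helper
//		code = code.recursiveHead        // JMP into the cached sequence
//	case opRecursiveEnd:
//		top := callStack[len(callStack)-1]
//		callStack = callStack[:len(callStack)-1]
//		p, code = top.ptr, top.ret // "return" without a function call
```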
|
||||
|
||||
### Dispatch by typeptr from map to slice
|
||||
|
||||
When retrieving the data cached from the type information by `typeptr`, we would normally use a map.
A map requires exclusive control, so a naive implementation uses `sync.Map`.
|
||||
|
||||
However, this is slow, so it's a good idea to use the `atomic` package for exclusive control as implemented by `segmentio/encoding/json` ( https://github.com/segmentio/encoding/blob/master/json/codec.go#L41-L55 ).
|
||||
|
||||
This implementation makes writes slower in exchange for fast reads, which suits the nature of the library: the same type is encoded far more often than a new type is registered.
|
||||
|
||||
However, profiling showed that `runtime.mapaccess2` still accounted for a significant share of the execution time, so I considered whether the lookup could be changed from a map to a slice.
|
||||
|
||||
There is an API named `typelinks` defined in the `runtime` package that the `reflect` package uses internally.
|
||||
This allows you to get all the type information defined in the binary at runtime.
|
||||
|
||||
Because all type information can be acquired, a slice can be constructed in advance that is large enough for every type, and lookups by `typeptr` can then be performed without worrying about out-of-range access.
|
||||
|
||||
However, if there is too much type information, this uses a lot of memory, so by default this optimization is only used when the slice fits within **2MiB**.
|
||||
|
||||
If this approach is not available, it will fall back to the `atomic` based process described above.
|
||||
|
||||
If you want to know more, please refer to the implementation [here](https://github.com/goccy/go-json/blob/master/internal/runtime/type.go#L36-L100)
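A simplified, hypothetical sketch of the slice-based dispatch: since `typelinks` exposes the full range of type addresses, the cache can be a slice indexed by the offset from the smallest `typeptr`, turning the lookup into a plain slice access with no hashing and no locking on the read path. The names below are illustrative, not the ones used by `go-json`.

```go
// Assumes "unsafe" is imported.
type encoderFunc func(unsafe.Pointer) ([]byte, error)

var (
	baseTypeAddr  uintptr       // smallest type address reported by typelinks
	typeAddrShift uintptr       // common alignment shift of type addresses
	cachedEncoder []encoderFunc // sized from (maxTypeAddr-baseTypeAddr)>>typeAddrShift + 1
)

func lookupEncoderByAddr(typeptr uintptr) encoderFunc {
	// The index is derived directly from the type address.
	return cachedEncoder[(typeptr-baseTypeAddr)>>typeAddrShift]
}

func storeEncoderByAddr(typeptr uintptr, enc encoderFunc) {
	cachedEncoder[(typeptr-baseTypeAddr)>>typeAddrShift] = enc
}
```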
|
||||
|
||||
## Decoder
|
||||
|
||||
### Dispatch by typeptr from map to slice
|
||||
|
||||
Like the encoder, the decoder also uses typeptr to call the dedicated process.
|
||||
|
||||
### Faster termination character inspection using NUL character
|
||||
|
||||
In order to decode, you have to traverse the input buffer one character at a time.
Checking on every iteration whether the cursor has reached the end of the buffer is slow.
|
||||
|
||||
- `buf` : a `[]byte` variable holding the input passed to the decoder
- `cursor` : an `int64` variable holding the current read position
|
||||
|
||||
```go
|
||||
buflen := len(buf)
|
||||
for ; cursor < buflen; cursor++ { // compare cursor and buflen at all times, it is so slow.
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\r', '\t':
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Therefore, by adding the `NUL` (`\000`) character to the end of the read buffer as shown below, it is possible to check the termination character at the same time as other characters.
|
||||
|
||||
```go
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\r', '\t':
|
||||
case '\000':
|
||||
return nil
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
```
|
||||
|
||||
### Use Boundary Check Elimination
|
||||
|
||||
Due to the `NUL` character optimization, the Go compiler does a boundary check every time, even though `buf[cursor]` does not cause out-of-range access.
|
||||
|
||||
Therefore, `go-json` eliminates the bounds check in hot spots by fetching characters through pointer arithmetic, as in the following code.
|
||||
|
||||
```go
|
||||
func char(ptr unsafe.Pointer, offset int64) byte {
|
||||
return *(*byte)(unsafe.Pointer(uintptr(ptr) + uintptr(offset)))
|
||||
}
|
||||
|
||||
p := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||
for {
|
||||
switch char(p, cursor) {
|
||||
case ' ', '\n', '\r', '\t':
|
||||
case '\000':
|
||||
return nil
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
```
|
||||
|
||||
### Checking the existence of fields of struct using Bitmaps
|
||||
|
||||
Profiling showed that, during struct decoding, the field-lookup process was taking a long time.
|
||||
|
||||
For example, consider decoding a string like `{"a":1,"b":2,"c":3}` into the following structure:
|
||||
|
||||
```go
|
||||
type T struct {
|
||||
A int `json:"a"`
|
||||
B int `json:"b"`
|
||||
C int `json:"c"`
|
||||
}
|
||||
```
|
||||
|
||||
It turned out that a lot of decoding time is spent obtaining the decoder that corresponds to a field from its field name, as shown below.
|
||||
|
||||
```go
|
||||
fieldName := decodeKey(buf, cursor) // "a" or "b" or "c"
|
||||
decoder, exists := fieldToDecoderMap[fieldName] // so slow
|
||||
if exists {
|
||||
decoder(buf, cursor)
|
||||
} else {
|
||||
skipValue(buf, cursor)
|
||||
}
|
||||
```
|
||||
|
||||
To improve this, `json-iterator/go` is optimized so that the lookup is branched with a `switch-case` when the structure has 10 or fewer fields ( `switch-case` is faster than a map ). However, because a value hashed with the FNV algorithm is used for the branching, there is a risk of hash collisions. `gojay`, on the other hand, handles this part quickly by having the library user write the `switch-case` themselves.
|
||||
|
||||
|
||||
`go-json` implements a new approach that differs from both of these; I call it **bitmap field optimization**.
|
||||
|
||||
The range of values a single character can take is representable by `[256]byte`. Also, if the structure has 8 or fewer fields, an `int8` can represent the state of each field.
In other words, it has the following structure.
|
||||
|
||||
- Base ( 8bit ): `00000000`
|
||||
- Key "a": `00000001` ( assign key "a" to the first bit )
|
||||
- Key "b": `00000010` ( assign key "b" to the second bit )
|
||||
- Key "c": `00000100` ( assign key "c" to the third bit )
|
||||
|
||||
The bitmap structure looks like this:
|
||||
|
||||
```
|
||||
| key index(0) |
|
||||
------------------------
|
||||
0 | 00000000 |
|
||||
1 | 00000000 |
|
||||
~~ | |
|
||||
97 (a) | 00000001 |
|
||||
98 (b) | 00000010 |
|
||||
99 (c) | 00000100 |
|
||||
~~ | |
|
||||
255 | 00000000 |
|
||||
```
|
||||
|
||||
You can think of this as a bitmap with a height of `256` and a width equal to the maximum field-name length.
In other words, it can be represented by the following type.
|
||||
|
||||
```go
|
||||
[maxFieldKeyLength][256]int8
|
||||
```
|
||||
|
||||
When decoding each character of a key, check whether it can still match a field by consulting the pre-built bitmap, as follows.
|
||||
|
||||
```go
|
||||
var curBit int8 = math.MaxInt8 // 11111111
|
||||
|
||||
c := char(buf, cursor)
|
||||
bit := bitmap[keyIdx][c]
|
||||
curBit &= bit
|
||||
if curBit == 0 {
|
||||
// not found field
|
||||
}
|
||||
```
|
||||
|
||||
If `curBit` is still not `0` at the end of the key string, the key may match one of the fields.
However, if the decoded key is shorter than the field name, this can be a false hit:
|
||||
|
||||
- input: `{"a":1}`
|
||||
```go
|
||||
type T struct {
|
||||
X int `json:"abc"`
|
||||
}
|
||||
```
|
||||
※ Since `a` is shorter than `abc`, decoding can reach the end of the key without `curBit` becoming 0.
|
||||
|
||||
This case is easy to handle: comparing the length of the decoded key `a` with the length of the field name `abc` tells you whether it was a real hit.
|
||||
|
||||
Finally, calculate the position of the bit where `1` is set and get the corresponding value, and you're done.
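For example, a small sketch of that last step using `math/bits` ( the per-field decoder table is hypothetical ):

```go
// Assumes "math/bits" is imported and curBit has a single surviving bit.
fieldIdx := bits.TrailingZeros8(uint8(curBit)) // 0 for "a", 1 for "b", 2 for "c"
fieldDecoder := fieldDecoders[fieldIdx]        // hypothetical per-field decoder table
fieldDecoder(buf, cursor)
```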
|
||||
|
||||
Using this technique, field lookups are possible with only bitwise operations and access to slices.
|
||||
|
||||
`go-json` uses a similar technique for structs with 9 to 16 fields; in that case the bitmap is built as a `[maxKeyLen][256]int16`.
|
||||
|
||||
Currently, to save memory, this optimization is also skipped when the maximum field-name length is long ( specifically, 64 bytes or more ), in addition to the limit on the number of fields.
|
||||
|
||||
### Others
|
||||
|
||||
I have done a lot of other optimizations. I will find time to write about them. If you have any questions about what's written here or other optimizations, please visit the `#go-json` channel on `gophers.slack.com` .
|
||||
|
||||
## Reference
|
||||
|
||||
The following articles tell the story of go-json ( Japanese only ).
|
||||
|
||||
- https://speakerdeck.com/goccy/zui-su-falsejsonraiburariwoqiu-mete
|
||||
- https://engineering.mercari.com/blog/entry/1599563768-081104c850/
|
||||
|
||||
# Looking for Sponsors
|
||||
|
||||
I'm looking for sponsors for this library. It is being developed as a personal project in my spare time. If you want quick responses or problem resolution when using this library in your project, please register as a [sponsor](https://github.com/sponsors/goccy); I will cooperate as much as possible. Of course, this library is MIT licensed, so you can use it freely for free.
|
||||
|
||||
# License
|
||||
|
||||
MIT
|
642  benchmarks/bench_test.go  Normal file
@@ -0,0 +1,642 @@
|
|||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Large data benchmark.
|
||||
// The JSON data is a summary of agl's changes in the
|
||||
// go, webkit, and chromium open source projects.
|
||||
// We benchmark converting between the JSON form
|
||||
// and in-memory data structures.
|
||||
|
||||
package benchmark
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
stdjson "encoding/json"
|
||||
|
||||
"github.com/goccy/go-json"
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
segmentiojson "github.com/segmentio/encoding/json"
|
||||
"github.com/wI2L/jettison"
|
||||
)
|
||||
|
||||
type codeResponse struct {
|
||||
Tree *codeNode `json:"tree"`
|
||||
Username string `json:"username"`
|
||||
}
|
||||
|
||||
type codeNode struct {
|
||||
Name string `json:"name"`
|
||||
Kids []*codeNode `json:"kids"`
|
||||
CLWeight float64 `json:"cl_weight"`
|
||||
Touches int `json:"touches"`
|
||||
MinT int64 `json:"min_t"`
|
||||
MaxT int64 `json:"max_t"`
|
||||
MeanT int64 `json:"mean_t"`
|
||||
}
|
||||
|
||||
var codeJSON []byte
|
||||
var codeStruct codeResponse
|
||||
|
||||
func codeInit() {
|
||||
f, err := os.Open("testdata/code.json.gz")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer f.Close()
|
||||
gz, err := gzip.NewReader(f)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
data, err := io.ReadAll(gz)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
codeJSON = data
|
||||
|
||||
if err := stdjson.Unmarshal(codeJSON, &codeStruct); err != nil {
|
||||
panic("unmarshal code.json: " + err.Error())
|
||||
}
|
||||
{
|
||||
stdjsonbytes, err := stdjson.Marshal(&codeStruct)
|
||||
if err != nil {
|
||||
panic("marshal code.json: " + err.Error())
|
||||
}
|
||||
jsonbytes, err := json.Marshal(&codeStruct)
|
||||
if err != nil {
|
||||
panic("marshal code.json: " + err.Error())
|
||||
}
|
||||
if len(stdjsonbytes) != len(jsonbytes) {
|
||||
panic(fmt.Sprintf("stdjson = %d but go-json = %d", len(stdjsonbytes), len(jsonbytes)))
|
||||
}
|
||||
}
|
||||
if _, err := json.Marshal(&codeStruct); err != nil {
|
||||
panic("marshal code.json: " + err.Error())
|
||||
}
|
||||
if !bytes.Equal(data, codeJSON) {
|
||||
println("different lengths", len(data), len(codeJSON))
|
||||
for i := 0; i < len(data) && i < len(codeJSON); i++ {
|
||||
if data[i] != codeJSON[i] {
|
||||
println("re-marshal: changed at byte", i)
|
||||
println("orig: ", string(codeJSON[i-10:i+10]))
|
||||
println("new: ", string(data[i-10:i+10]))
|
||||
break
|
||||
}
|
||||
}
|
||||
panic("re-marshal code.json: different result")
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_EncodeBigData_GoJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
if codeJSON == nil {
|
||||
b.StopTimer()
|
||||
codeInit()
|
||||
b.StartTimer()
|
||||
}
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
enc := json.NewEncoder(io.Discard)
|
||||
for pb.Next() {
|
||||
if err := enc.Encode(&codeStruct); err != nil {
|
||||
b.Fatal("Encode:", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
b.SetBytes(int64(len(codeJSON)))
|
||||
}
|
||||
|
||||
func Benchmark_EncodeBigData_EncodingJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
if codeJSON == nil {
|
||||
b.StopTimer()
|
||||
codeInit()
|
||||
b.StartTimer()
|
||||
}
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
enc := stdjson.NewEncoder(io.Discard)
|
||||
for pb.Next() {
|
||||
if err := enc.Encode(&codeStruct); err != nil {
|
||||
b.Fatal("Encode:", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
b.SetBytes(int64(len(codeJSON)))
|
||||
}
|
||||
|
||||
func Benchmark_EncodeBigData_JsonIter(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
if codeJSON == nil {
|
||||
b.StopTimer()
|
||||
codeInit()
|
||||
b.StartTimer()
|
||||
}
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
enc := json.NewEncoder(io.Discard)
|
||||
for pb.Next() {
|
||||
if err := enc.Encode(&codeStruct); err != nil {
|
||||
b.Fatal("Encode:", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
b.SetBytes(int64(len(codeJSON)))
|
||||
}
|
||||
|
||||
func Benchmark_EncodeBigData_SegmentioJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
if codeJSON == nil {
|
||||
b.StopTimer()
|
||||
codeInit()
|
||||
b.StartTimer()
|
||||
}
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
enc := segmentiojson.NewEncoder(io.Discard)
|
||||
for pb.Next() {
|
||||
if err := enc.Encode(&codeStruct); err != nil {
|
||||
b.Fatal("Encode:", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
b.SetBytes(int64(len(codeJSON)))
|
||||
}
|
||||
|
||||
func Benchmark_MarshalBigData_GoJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
if codeJSON == nil {
|
||||
b.StopTimer()
|
||||
codeInit()
|
||||
b.StartTimer()
|
||||
}
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
if _, err := json.Marshal(&codeStruct); err != nil {
|
||||
b.Fatal("Marshal:", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
b.SetBytes(int64(len(codeJSON)))
|
||||
}
|
||||
|
||||
func Benchmark_MarshalBigData_EncodingJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
if codeJSON == nil {
|
||||
b.StopTimer()
|
||||
codeInit()
|
||||
b.StartTimer()
|
||||
}
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
if _, err := stdjson.Marshal(&codeStruct); err != nil {
|
||||
b.Fatal("Marshal:", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
b.SetBytes(int64(len(codeJSON)))
|
||||
}
|
||||
|
||||
func Benchmark_MarshalBigData_JsonIter(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
if codeJSON == nil {
|
||||
b.StopTimer()
|
||||
codeInit()
|
||||
b.StartTimer()
|
||||
}
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
if _, err := json.Marshal(&codeStruct); err != nil {
|
||||
b.Fatal("Marshal:", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
b.SetBytes(int64(len(codeJSON)))
|
||||
}
|
||||
|
||||
func Benchmark_MarshalBigData_Jettison(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
if codeJSON == nil {
|
||||
b.StopTimer()
|
||||
codeInit()
|
||||
b.StartTimer()
|
||||
}
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
if _, err := jettison.Marshal(&codeStruct); err != nil {
|
||||
b.Fatal("Marshal:", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
b.SetBytes(int64(len(codeJSON)))
|
||||
}
|
||||
|
||||
func Benchmark_MarshalBigData_SegmentioJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
if codeJSON == nil {
|
||||
b.StopTimer()
|
||||
codeInit()
|
||||
b.StartTimer()
|
||||
}
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
if _, err := segmentiojson.Marshal(&codeStruct); err != nil {
|
||||
b.Fatal("Marshal:", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
b.SetBytes(int64(len(codeJSON)))
|
||||
}
|
||||
|
||||
func benchMarshalBytes(n int, marshaler func(interface{}) ([]byte, error)) func(*testing.B) {
|
||||
sample := []byte("hello world")
|
||||
// Use a struct pointer, to avoid an allocation when passing it as an
|
||||
// interface parameter to Marshal.
|
||||
v := &struct {
|
||||
Bytes []byte
|
||||
}{
|
||||
bytes.Repeat(sample, (n/len(sample))+1)[:n],
|
||||
}
|
||||
return func(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := marshaler(v); err != nil {
|
||||
b.Fatal("Marshal:", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_MarshalBytes_EncodingJson(b *testing.B) {
|
||||
// 32 fits within encodeState.scratch.
|
||||
b.Run("32", benchMarshalBytes(32, stdjson.Marshal))
|
||||
// 256 doesn't fit in encodeState.scratch, but is small enough to
|
||||
// allocate and avoid the slower base64.NewEncoder.
|
||||
b.Run("256", benchMarshalBytes(256, stdjson.Marshal))
|
||||
// 4096 is large enough that we want to avoid allocating for it.
|
||||
b.Run("4096", benchMarshalBytes(4096, stdjson.Marshal))
|
||||
}
|
||||
|
||||
func Benchmark_MarshalBytes_JsonIter(b *testing.B) {
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
// 32 fits within encodeState.scratch.
|
||||
b.Run("32", benchMarshalBytes(32, json.Marshal))
|
||||
// 256 doesn't fit in encodeState.scratch, but is small enough to
|
||||
// allocate and avoid the slower base64.NewEncoder.
|
||||
b.Run("256", benchMarshalBytes(256, json.Marshal))
|
||||
// 4096 is large enough that we want to avoid allocating for it.
|
||||
b.Run("4096", benchMarshalBytes(4096, json.Marshal))
|
||||
}
|
||||
|
||||
func Benchmark_MarshalBytes_GoJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
// 32 fits within encodeState.scratch.
|
||||
b.Run("32", benchMarshalBytes(32, json.Marshal))
|
||||
// 256 doesn't fit in encodeState.scratch, but is small enough to
|
||||
// allocate and avoid the slower base64.NewEncoder.
|
||||
b.Run("256", benchMarshalBytes(256, json.Marshal))
|
||||
// 4096 is large enough that we want to avoid allocating for it.
|
||||
b.Run("4096", benchMarshalBytes(4096, json.Marshal))
|
||||
}
|
||||
|
||||
func Benchmark_EncodeRawMessage_EncodingJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
|
||||
m := struct {
|
||||
A int
|
||||
B json.RawMessage
|
||||
}{}
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
enc := stdjson.NewEncoder(io.Discard)
|
||||
for pb.Next() {
|
||||
if err := enc.Encode(&m); err != nil {
|
||||
b.Fatal("Encode:", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func Benchmark_EncodeRawMessage_JsonIter(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
|
||||
m := struct {
|
||||
A int
|
||||
B json.RawMessage
|
||||
}{}
|
||||
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
enc := json.NewEncoder(io.Discard)
|
||||
for pb.Next() {
|
||||
if err := enc.Encode(&m); err != nil {
|
||||
b.Fatal("Encode:", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func Benchmark_EncodeRawMessage_GoJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
|
||||
m := struct {
|
||||
A int
|
||||
B json.RawMessage
|
||||
}{}
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
enc := json.NewEncoder(io.Discard)
|
||||
for pb.Next() {
|
||||
if err := enc.Encode(&m); err != nil {
|
||||
b.Fatal("Encode:", err)
|
||||
}
|
||||
}
|
||||
})
|
||||
}

func Benchmark_MarshalString_EncodingJson(b *testing.B) {
	b.ReportAllocs()
	j := struct {
		Bar string `json:"bar,string"`
	}{
		Bar: `foobar`,
	}
	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			if _, err := stdjson.Marshal(&j); err != nil {
				b.Fatal(err)
			}
		}
	})
}

func Benchmark_MarshalString_JsonIter(b *testing.B) {
	b.ReportAllocs()
	j := struct {
		Bar string `json:"bar,string"`
	}{
		Bar: `foobar`,
	}
	var json = jsoniter.ConfigCompatibleWithStandardLibrary
	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			if _, err := json.Marshal(&j); err != nil {
				b.Fatal(err)
			}
		}
	})
}

func Benchmark_MarshalString_GoJson(b *testing.B) {
	b.ReportAllocs()
	j := struct {
		Bar string `json:"bar,string"`
	}{
		Bar: `foobar`,
	}
	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			if _, err := json.Marshal(&j); err != nil {
				b.Fatal(err)
			}
		}
	})
}
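The `json:"bar,string"` tag used above asks the encoder to wrap the value in an extra layer of JSON string encoding, which exercises a different (and typically slower) code path than a plain string field. A minimal standalone sketch of what that option does with the standard library:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	j := struct {
		Bar string `json:"bar,string"`
	}{Bar: "foobar"}

	out, err := json.Marshal(&j)
	if err != nil {
		panic(err)
	}
	// The ",string" option quotes the already-quoted value:
	// {"bar":"\"foobar\""}
	fmt.Println(string(out))
}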

func BenchmarkCodeDecoder(b *testing.B) {
	b.ReportAllocs()
	if codeJSON == nil {
		b.StopTimer()
		codeInit()
		b.StartTimer()
	}
	b.RunParallel(func(pb *testing.PB) {
		var buf bytes.Buffer
		dec := json.NewDecoder(&buf)
		var r codeResponse
		for pb.Next() {
			buf.Write(codeJSON)
			// hide EOF
			buf.WriteByte('\n')
			buf.WriteByte('\n')
			buf.WriteByte('\n')
			if err := dec.Decode(&r); err != nil {
				if err != io.EOF {
					b.Fatal("Decode:", err)
				}
			}
		}
	})
	b.SetBytes(int64(len(codeJSON)))
}

func BenchmarkUnicodeDecoder(b *testing.B) {
	b.ReportAllocs()
	j := []byte(`"\uD83D\uDE01"`)
	b.SetBytes(int64(len(j)))
	r := bytes.NewReader(j)
	dec := json.NewDecoder(r)
	var out string
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		if err := dec.Decode(&out); err != nil {
			if err != io.EOF {
				b.Fatal("Decode:", err)
			}
		}
		r.Seek(0, 0)
	}
}
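The fixture `"\uD83D\uDE01"` is a UTF-16 surrogate pair, so this benchmark measures the decoder's escape-sequence handling rather than plain string copying. A quick standalone sketch of the expected result with the standard library:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	var out string
	if err := json.Unmarshal([]byte(`"\uD83D\uDE01"`), &out); err != nil {
		panic(err)
	}
	// The surrogate pair decodes to a single rune: U+1F601 (😁)
	fmt.Printf("%s %U\n", out, []rune(out))
}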

func BenchmarkDecoderStream(b *testing.B) {
	b.ReportAllocs()
	b.StopTimer()
	var buf bytes.Buffer
	dec := json.NewDecoder(&buf)
	buf.WriteString(`"` + strings.Repeat("x", 1000000) + `"` + "\n\n\n")
	var x interface{}
	if err := dec.Decode(&x); err != nil {
		b.Fatal("Decode:", err)
	}
	ones := strings.Repeat(" 1\n", 300000) + "\n\n\n"
	b.StartTimer()
	for i := 0; i < b.N; i++ {
		if i%300000 == 0 {
			buf.WriteString(ones)
		}
		x = nil
		if err := dec.Decode(&x); err != nil || x != 1.0 {
			if err != io.EOF {
				b.Fatalf("Decode: %v after %d", err, i)
			}
		}
	}
}

func BenchmarkCodeUnmarshal(b *testing.B) {
	b.ReportAllocs()
	if codeJSON == nil {
		b.StopTimer()
		codeInit()
		b.StartTimer()
	}
	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			var r codeResponse
			if err := json.Unmarshal(codeJSON, &r); err != nil {
				b.Fatal("Unmarshal:", err)
			}
		}
	})
	b.SetBytes(int64(len(codeJSON)))
}

func BenchmarkCodeUnmarshalReuse(b *testing.B) {
	b.ReportAllocs()
	if codeJSON == nil {
		b.StopTimer()
		codeInit()
		b.StartTimer()
	}
	b.RunParallel(func(pb *testing.PB) {
		var r codeResponse
		for pb.Next() {
			if err := json.Unmarshal(codeJSON, &r); err != nil {
				b.Fatal("Unmarshal:", err)
			}
		}
	})
	b.SetBytes(int64(len(codeJSON)))
}

func BenchmarkUnmarshalString(b *testing.B) {
	b.ReportAllocs()
	data := []byte(`"hello, world"`)
	b.RunParallel(func(pb *testing.PB) {
		var s string
		for pb.Next() {
			if err := json.Unmarshal(data, &s); err != nil {
				b.Fatal("Unmarshal:", err)
			}
		}
	})
}

func BenchmarkUnmarshalFloat64(b *testing.B) {
	b.ReportAllocs()
	data := []byte(`3.14`)
	b.RunParallel(func(pb *testing.PB) {
		var f float64
		for pb.Next() {
			if err := json.Unmarshal(data, &f); err != nil {
				b.Fatal("Unmarshal:", err)
			}
		}
	})
}

func BenchmarkUnmarshalInt64(b *testing.B) {
	b.ReportAllocs()
	data := []byte(`3`)
	b.RunParallel(func(pb *testing.PB) {
		var x int64
		for pb.Next() {
			if err := json.Unmarshal(data, &x); err != nil {
				b.Fatal("Unmarshal:", err)
			}
		}
	})
}

func BenchmarkIssue10335(b *testing.B) {
	b.ReportAllocs()
	j := []byte(`{"a":{ }}`)
	b.RunParallel(func(pb *testing.PB) {
		var s struct{}
		for pb.Next() {
			if err := json.Unmarshal(j, &s); err != nil {
				b.Fatal(err)
			}
		}
	})
}

func BenchmarkUnmapped(b *testing.B) {
	b.ReportAllocs()
	j := []byte(`{"s": "hello", "y": 2, "o": {"x": 0}, "a": [1, 99, {"x": 1}]}`)
	b.RunParallel(func(pb *testing.PB) {
		var s struct{}
		for pb.Next() {
			if err := json.Unmarshal(j, &s); err != nil {
				b.Fatal(err)
			}
		}
	})
}

func Benchmark_Compact_EncodingJson(b *testing.B) {
	b.ReportAllocs()
	if codeJSON == nil {
		b.StopTimer()
		codeInit()
		b.StartTimer()
	}
	for i := 0; i < b.N; i++ {
		var buf bytes.Buffer
		if err := stdjson.Compact(&buf, codeJSON); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Compact_GoJson(b *testing.B) {
	b.ReportAllocs()
	if codeJSON == nil {
		b.StopTimer()
		codeInit()
		b.StartTimer()
	}
	for i := 0; i < b.N; i++ {
		var buf bytes.Buffer
		if err := json.Compact(&buf, codeJSON); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Indent_EncodingJson(b *testing.B) {
	b.ReportAllocs()
	if codeJSON == nil {
		b.StopTimer()
		codeInit()
		b.StartTimer()
	}
	for i := 0; i < b.N; i++ {
		var buf bytes.Buffer
		if err := stdjson.Indent(&buf, codeJSON, "-", " "); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Indent_GoJson(b *testing.B) {
	b.ReportAllocs()
	if codeJSON == nil {
		b.StopTimer()
		codeInit()
		b.StartTimer()
	}
	for i := 0; i < b.N; i++ {
		var buf bytes.Buffer
		if err := json.Indent(&buf, codeJSON, "-", " "); err != nil {
			b.Fatal(err)
		}
	}
}
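Compact and Indent operate on already-encoded JSON rather than on Go values, so the benchmarks above isolate whitespace handling from reflection. A small standalone usage sketch with the standard library (go-json exposes the same signatures, as these benchmarks suggest):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func main() {
	src := []byte(`{ "a": 1,  "b": [ 2, 3 ] }`)

	var compacted bytes.Buffer
	if err := json.Compact(&compacted, src); err != nil {
		panic(err)
	}
	fmt.Println(compacted.String()) // {"a":1,"b":[2,3]}

	var indented bytes.Buffer
	if err := json.Indent(&indented, src, "", "  "); err != nil {
		panic(err)
	}
	fmt.Println(indented.String())
}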
489 benchmarks/decode_test.go Normal file
@@ -0,0 +1,489 @@
package benchmark

import (
	"bytes"
	"encoding/json"
	"testing"

	gojay "github.com/francoispqt/gojay"
	gojson "github.com/goccy/go-json"
	jsoniter "github.com/json-iterator/go"
	segmentiojson "github.com/segmentio/encoding/json"
	fastjson "github.com/valyala/fastjson"
)

func Benchmark_Decode_SmallStruct_Unmarshal_EncodingJson(b *testing.B) {
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		result := SmallPayload{}
		if err := json.Unmarshal(SmallFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_SmallStruct_Unmarshal_FastJson(b *testing.B) {
	smallFixture := string(SmallFixture)
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		var p fastjson.Parser
		if _, err := p.Parse(smallFixture); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_SmallStruct_Unmarshal_SegmentioJson(b *testing.B) {
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		result := SmallPayload{}
		if err := segmentiojson.Unmarshal(SmallFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_SmallStruct_Unmarshal_JsonIter(b *testing.B) {
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		result := SmallPayload{}
		if err := jsoniter.Unmarshal(SmallFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_SmallStruct_Unmarshal_GoJay(b *testing.B) {
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		result := SmallPayload{}
		if err := gojay.UnmarshalJSONObject(SmallFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_SmallStruct_Unmarshal_GoJayUnsafe(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		result := SmallPayload{}
		if err := gojay.Unsafe.UnmarshalJSONObject(SmallFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_SmallStruct_Unmarshal_GoJson(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		result := SmallPayload{}
		if err := gojson.Unmarshal(SmallFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_SmallStruct_Unmarshal_GoJsonNoEscape(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		result := SmallPayload{}
		if err := gojson.UnmarshalNoEscape(SmallFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_SmallStruct_Stream_EncodingJson(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(SmallFixture)
	for i := 0; i < b.N; i++ {
		result := SmallPayload{}
		reader.Reset(SmallFixture)
		if err := json.NewDecoder(reader).Decode(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_SmallStruct_Stream_SegmentioJson(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(SmallFixture)
	for i := 0; i < b.N; i++ {
		result := SmallPayload{}
		reader.Reset(SmallFixture)
		if err := segmentiojson.NewDecoder(reader).Decode(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_SmallStruct_Stream_JsonIter(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(SmallFixture)
	for i := 0; i < b.N; i++ {
		result := SmallPayload{}
		reader.Reset(SmallFixture)
		if err := jsoniter.NewDecoder(reader).Decode(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_SmallStruct_Stream_GoJay(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(SmallFixture)
	for n := 0; n < b.N; n++ {
		reader.Reset(SmallFixture)
		result := SmallPayload{}
		if err := gojay.NewDecoder(reader).DecodeObject(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_SmallStruct_Stream_GoJson(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(SmallFixture)
	for i := 0; i < b.N; i++ {
		result := SmallPayload{}
		reader.Reset(SmallFixture)
		if err := gojson.NewDecoder(reader).Decode(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_MediumStruct_Unmarshal_EncodingJson(b *testing.B) {
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		result := MediumPayload{}
		if err := json.Unmarshal(MediumFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_MediumStruct_Unmarshal_FastJson(b *testing.B) {
	mediumFixture := string(MediumFixture)
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		var p fastjson.Parser
		if _, err := p.Parse(mediumFixture); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_MediumStruct_Unmarshal_SegmentioJson(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		result := MediumPayload{}
		if err := segmentiojson.Unmarshal(MediumFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_MediumStruct_Unmarshal_JsonIter(b *testing.B) {
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		result := MediumPayload{}
		if err := jsoniter.Unmarshal(MediumFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_MediumStruct_Unmarshal_GoJay(b *testing.B) {
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		result := MediumPayload{}
		if err := gojay.UnmarshalJSONObject(MediumFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_MediumStruct_Unmarshal_GoJayUnsafe(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		result := MediumPayload{}
		if err := gojay.Unsafe.UnmarshalJSONObject(MediumFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_MediumStruct_Unmarshal_GoJson(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		result := MediumPayload{}
		if err := gojson.Unmarshal(MediumFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_MediumStruct_Unmarshal_GoJsonNoEscape(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		result := MediumPayload{}
		if err := gojson.UnmarshalNoEscape(MediumFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_MediumStruct_Stream_EncodingJson(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(MediumFixture)
	for i := 0; i < b.N; i++ {
		result := MediumPayload{}
		reader.Reset(MediumFixture)
		if err := json.NewDecoder(reader).Decode(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_MediumStruct_Stream_SegmentioJson(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(MediumFixture)
	for n := 0; n < b.N; n++ {
		reader.Reset(MediumFixture)
		result := MediumPayload{}
		if err := segmentiojson.NewDecoder(reader).Decode(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_MediumStruct_Stream_JsonIter(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(MediumFixture)
	for i := 0; i < b.N; i++ {
		result := MediumPayload{}
		reader.Reset(MediumFixture)
		if err := jsoniter.NewDecoder(reader).Decode(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_MediumStruct_Stream_GoJay(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(MediumFixture)
	for n := 0; n < b.N; n++ {
		reader.Reset(MediumFixture)
		result := MediumPayload{}
		if err := gojay.NewDecoder(reader).DecodeObject(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_MediumStruct_Stream_GoJson(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(MediumFixture)
	for i := 0; i < b.N; i++ {
		result := MediumPayload{}
		reader.Reset(MediumFixture)
		if err := gojson.NewDecoder(reader).Decode(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Unmarshal_EncodingJson(b *testing.B) {
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		result := LargePayload{}
		if err := json.Unmarshal(LargeFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Unmarshal_FastJson(b *testing.B) {
	largeFixture := string(LargeFixture)
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		var p fastjson.Parser
		if _, err := p.Parse(largeFixture); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Unmarshal_SegmentioJson(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		result := LargePayload{}
		if err := segmentiojson.Unmarshal(LargeFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Unmarshal_JsonIter(b *testing.B) {
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		result := LargePayload{}
		if err := jsoniter.Unmarshal(LargeFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Unmarshal_GoJay(b *testing.B) {
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		result := LargePayload{}
		if err := gojay.UnmarshalJSONObject(LargeFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Unmarshal_GoJayUnsafe(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		result := LargePayload{}
		if err := gojay.Unsafe.UnmarshalJSONObject(LargeFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Unmarshal_GoJson(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		result := LargePayload{}
		if err := gojson.Unmarshal(LargeFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Unmarshal_GoJsonNoEscape(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		result := LargePayload{}
		if err := gojson.UnmarshalNoEscape(LargeFixture, &result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Unmarshal_GoJsonFirstWinMode(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		result := LargePayload{}
		if err := gojson.UnmarshalWithOption(
			LargeFixture,
			&result,
			gojson.DecodeFieldPriorityFirstWin(),
		); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Unmarshal_GoJsonNoEscapeFirstWinMode(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		result := LargePayload{}
		if err := gojson.UnmarshalNoEscape(
			LargeFixture,
			&result,
			gojson.DecodeFieldPriorityFirstWin(),
		); err != nil {
			b.Fatal(err)
		}
	}
}
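The FirstWin variants above pass gojson.DecodeFieldPriorityFirstWin(), a go-json decode option that lets the decoder keep the first value it sees for a field instead of the last one, which can save work on large payloads with duplicate or overlapping keys. A hedged standalone usage sketch; only the option and functions already used in these benchmarks are assumed, and the printed result reflects my understanding of first-win priority rather than something stated in this file:

package main

import (
	"fmt"

	gojson "github.com/goccy/go-json"
)

type pair struct {
	A int `json:"a"`
	B int `json:"b"`
}

func main() {
	// The key "a" appears twice; with DecodeFieldPriorityFirstWin the
	// decoder may keep the first occurrence and skip the rest.
	data := []byte(`{"a":1,"a":2,"b":3}`)

	var v pair
	if err := gojson.UnmarshalWithOption(data, &v, gojson.DecodeFieldPriorityFirstWin()); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", v) // assumption: {A:1 B:3} under first-win priority
}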

func Benchmark_Decode_LargeStruct_Stream_EncodingJson(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(LargeFixture)
	for i := 0; i < b.N; i++ {
		result := LargePayload{}
		reader.Reset(LargeFixture)
		if err := json.NewDecoder(reader).Decode(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Stream_SegmentioJson(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(LargeFixture)
	for i := 0; i < b.N; i++ {
		result := LargePayload{}
		reader.Reset(LargeFixture)
		if err := segmentiojson.NewDecoder(reader).Decode(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Stream_JsonIter(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(LargeFixture)
	for i := 0; i < b.N; i++ {
		result := LargePayload{}
		reader.Reset(LargeFixture)
		if err := jsoniter.NewDecoder(reader).Decode(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Stream_GoJay(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(LargeFixture)
	for n := 0; n < b.N; n++ {
		reader.Reset(LargeFixture)
		result := LargePayload{}
		if err := gojay.NewDecoder(reader).DecodeObject(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Stream_GoJson(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(LargeFixture)
	for i := 0; i < b.N; i++ {
		result := LargePayload{}
		reader.Reset(LargeFixture)
		if err := gojson.NewDecoder(reader).Decode(&result); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeStruct_Stream_GoJsonFirstWinMode(b *testing.B) {
	b.ReportAllocs()
	reader := bytes.NewReader(LargeFixture)
	for i := 0; i < b.N; i++ {
		result := LargePayload{}
		reader.Reset(LargeFixture)
		if err := gojson.NewDecoder(reader).DecodeWithOption(
			&result,
			gojson.DecodeFieldPriorityFirstWin(),
		); err != nil {
			b.Fatal(err)
		}
	}
}

func Benchmark_Decode_LargeSlice_EscapedString_GoJson(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		var v []string
		if err := gojson.Unmarshal(LargeSliceEscapedString, &v); err != nil {
			b.Fatal(err)
		}
	}
}
915 benchmarks/encode_test.go Normal file
@@ -0,0 +1,915 @@
package benchmark

import (
	"bytes"
	"context"
	"encoding/json"
	"testing"

	gojay "github.com/francoispqt/gojay"
	gojson "github.com/goccy/go-json"
	jsoniter "github.com/json-iterator/go"
	"github.com/pquerna/ffjson/ffjson"
	segmentiojson "github.com/segmentio/encoding/json"
	"github.com/wI2L/jettison"
)
||||
|
||||
func Benchmark_Encode_SmallStruct_EncodingJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(NewSmallPayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStruct_FFJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := ffjson.Marshal(NewSmallPayloadFFJson()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStruct_JsonIter(b *testing.B) {
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(NewSmallPayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStruct_EasyJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := NewSmallPayloadEasyJson().MarshalJSON(); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStruct_Jettison(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := jettison.Marshal(NewSmallPayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStruct_GoJay(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojay.MarshalJSONObject(NewSmallPayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStruct_SegmentioJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := segmentiojson.Marshal(NewSmallPayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStruct_GoJsonColored(b *testing.B) {
|
||||
colorOpt := gojson.Colorize(gojson.DefaultColorScheme)
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.MarshalWithOption(NewSmallPayload(), colorOpt); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStruct_GoJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.Marshal(NewSmallPayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStruct_GoJsonNoEscape(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.MarshalNoEscape(NewSmallPayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStructCached_EncodingJson(b *testing.B) {
|
||||
cached := NewSmallPayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStructCached_FFJson(b *testing.B) {
|
||||
cached := NewSmallPayloadFFJson()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := ffjson.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStructCached_JsonIter(b *testing.B) {
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
cached := NewSmallPayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStructCached_EasyJson(b *testing.B) {
|
||||
cached := NewSmallPayloadEasyJson()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := cached.MarshalJSON(); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStructCached_Jettison(b *testing.B) {
|
||||
cached := NewSmallPayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := jettison.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStructCached_GoJay(b *testing.B) {
|
||||
cached := NewSmallPayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojay.MarshalJSONObject(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStructCached_SegmentioJson(b *testing.B) {
|
||||
cached := NewSmallPayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := segmentiojson.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStructCached_GoJsonColored(b *testing.B) {
|
||||
cached := NewSmallPayload()
|
||||
colorOpt := gojson.Colorize(gojson.DefaultColorScheme)
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.MarshalWithOption(cached, colorOpt); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStructCached_GoJson(b *testing.B) {
|
||||
cached := NewSmallPayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_SmallStructCached_GoJsonNoEscape(b *testing.B) {
|
||||
cached := NewSmallPayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.MarshalNoEscape(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStruct_EncodingJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(NewMediumPayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStruct_JsonIter(b *testing.B) {
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(NewMediumPayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStruct_EasyJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := NewMediumPayloadEasyJson().MarshalJSON(); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStruct_Jettison(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := jettison.Marshal(NewMediumPayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStruct_GoJay(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojay.MarshalJSONObject(NewMediumPayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStruct_SegmentioJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := segmentiojson.Marshal(NewMediumPayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStruct_GoJsonColored(b *testing.B) {
|
||||
colorOpt := gojson.Colorize(gojson.DefaultColorScheme)
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.MarshalWithOption(NewMediumPayload(), colorOpt); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStruct_GoJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.Marshal(NewMediumPayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStruct_GoJsonNoEscape(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.MarshalNoEscape(NewMediumPayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStructCached_EncodingJson(b *testing.B) {
|
||||
cached := NewMediumPayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStructCached_JsonIter(b *testing.B) {
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
cached := NewMediumPayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStructCached_EasyJson(b *testing.B) {
|
||||
cached := NewMediumPayloadEasyJson()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := cached.MarshalJSON(); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStructCached_Jettison(b *testing.B) {
|
||||
cached := NewMediumPayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := jettison.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStructCached_GoJay(b *testing.B) {
|
||||
cached := NewMediumPayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojay.MarshalJSONObject(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStructCached_SegmentioJson(b *testing.B) {
|
||||
cached := NewMediumPayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := segmentiojson.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStructCached_GoJsonColored(b *testing.B) {
|
||||
cached := NewMediumPayload()
|
||||
colorOpt := gojson.Colorize(gojson.DefaultColorScheme)
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.MarshalWithOption(cached, colorOpt); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStructCached_GoJson(b *testing.B) {
|
||||
cached := NewMediumPayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MediumStructCached_GoJsonNoEscape(b *testing.B) {
|
||||
cached := NewMediumPayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.MarshalNoEscape(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStruct_EncodingJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(NewLargePayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStruct_JsonIter(b *testing.B) {
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(NewLargePayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStruct_EasyJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := NewLargePayloadEasyJson().MarshalJSON(); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStruct_Jettison(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := jettison.Marshal(NewLargePayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStruct_GoJay(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojay.MarshalJSONObject(NewLargePayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStruct_SegmentioJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := segmentiojson.Marshal(NewLargePayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStruct_GoJsonColored(b *testing.B) {
|
||||
colorOpt := gojson.Colorize(gojson.DefaultColorScheme)
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.MarshalWithOption(NewLargePayload(), colorOpt); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStruct_GoJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.Marshal(NewLargePayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStruct_GoJsonNoEscape(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.MarshalNoEscape(NewLargePayload()); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStructCached_EncodingJson(b *testing.B) {
|
||||
cached := NewLargePayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStructCached_JsonIter(b *testing.B) {
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
cached := NewLargePayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStructCached_EasyJson(b *testing.B) {
|
||||
cached := NewLargePayloadEasyJson()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := cached.MarshalJSON(); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStructCached_Jettison(b *testing.B) {
|
||||
cached := NewLargePayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := jettison.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStructCached_GoJay(b *testing.B) {
|
||||
cached := NewLargePayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojay.MarshalJSONObject(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStructCached_SegmentioJson(b *testing.B) {
|
||||
cached := NewLargePayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := segmentiojson.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStructCached_GoJsonColored(b *testing.B) {
|
||||
cached := NewLargePayload()
|
||||
colorOpt := gojson.Colorize(gojson.DefaultColorScheme)
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.MarshalWithOption(cached, colorOpt); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStructCached_GoJson(b *testing.B) {
|
||||
cached := NewLargePayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.Marshal(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_LargeStructCached_GoJsonNoEscape(b *testing.B) {
|
||||
cached := NewLargePayload()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.MarshalNoEscape(cached); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func benchMapValue() map[string]interface{} {
|
||||
return map[string]interface{}{
|
||||
"a": 1,
|
||||
"b": 2.1,
|
||||
"c": "hello",
|
||||
"d": struct {
|
||||
V int
|
||||
}{
|
||||
V: 1,
|
||||
},
|
||||
"e": true,
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MapInterface_EncodingJson(b *testing.B) {
|
||||
v := benchMapValue()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MapInterface_JsonIter(b *testing.B) {
|
||||
v := benchMapValue()
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MapInterface_Jettison(b *testing.B) {
|
||||
v := benchMapValue()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := jettison.Marshal(v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MapInterface_SegmentioJson(b *testing.B) {
|
||||
v := benchMapValue()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := segmentiojson.Marshal(v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MapInterface_GoJson(b *testing.B) {
|
||||
v := benchMapValue()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.Marshal(v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_Interface_SegmentioJson(b *testing.B) {
|
||||
v := []interface{}{1}
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := segmentiojson.Marshal(v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_Interface_GoJson(b *testing.B) {
|
||||
v := []interface{}{1}
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.Marshal(v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_Bool_EncodingJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
var buf bytes.Buffer
|
||||
enc := json.NewEncoder(&buf)
|
||||
for i := 0; i < b.N; i++ {
|
||||
if err := enc.Encode(true); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_Bool_JsonIter(b *testing.B) {
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
b.ReportAllocs()
|
||||
var buf bytes.Buffer
|
||||
enc := json.NewEncoder(&buf)
|
||||
for i := 0; i < b.N; i++ {
|
||||
if err := enc.Encode(true); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_Bool_SegmentioJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
var buf bytes.Buffer
|
||||
enc := segmentiojson.NewEncoder(&buf)
|
||||
for i := 0; i < b.N; i++ {
|
||||
if err := enc.Encode(true); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_Bool_GoJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
var buf bytes.Buffer
|
||||
enc := gojson.NewEncoder(&buf)
|
||||
for i := 0; i < b.N; i++ {
|
||||
if err := enc.Encode(true); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Marshal_Bool_EncodingJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(true); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Marshal_Bool_JsonIter(b *testing.B) {
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(true); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Marshal_Bool_Jettison(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := jettison.Marshal(true); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Marshal_Bool_SegmentioJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := segmentiojson.Marshal(true); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Marshal_Bool_GoJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.Marshal(true); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_Int_EncodingJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(1); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_Int_JsonIter(b *testing.B) {
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(1); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_Int_Jettison(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := jettison.Marshal(1); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_Int_SegmentioJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := segmentiojson.Marshal(1); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_Int_GoJson(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.Marshal(1); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type marshaler struct{}
|
||||
|
||||
func (*marshaler) MarshalJSON() ([]byte, error) {
|
||||
return []byte(`"hello"`), nil
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MarshalJSON_EncodingJson(b *testing.B) {
|
||||
v := &marshaler{}
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MarshalJSON_JsonIter(b *testing.B) {
|
||||
v := &marshaler{}
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := json.Marshal(v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MarshalJSON_Jettison(b *testing.B) {
|
||||
v := &marshaler{}
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := jettison.Marshal(v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MarshalJSON_SegmentioJson(b *testing.B) {
|
||||
v := &marshaler{}
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := segmentiojson.Marshal(v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_MarshalJSON_GoJson(b *testing.B) {
|
||||
v := &marshaler{}
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.Marshal(v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type queryTestX struct {
|
||||
XA int
|
||||
XB string
|
||||
XC *queryTestY
|
||||
XD bool
|
||||
XE float32
|
||||
}
|
||||
|
||||
type queryTestY struct {
|
||||
YA int
|
||||
YB string
|
||||
YC bool
|
||||
YD float32
|
||||
}
|
||||
|
||||
func Benchmark_Encode_FilterByMap(b *testing.B) {
|
||||
v := &queryTestX{
|
||||
XA: 1,
|
||||
XB: "xb",
|
||||
XC: &queryTestY{
|
||||
YA: 2,
|
||||
YB: "yb",
|
||||
YC: true,
|
||||
YD: 4,
|
||||
},
|
||||
XD: true,
|
||||
XE: 5,
|
||||
}
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
filteredMap := map[string]interface{}{
|
||||
"XA": v.XA,
|
||||
"XB": v.XB,
|
||||
"XC": map[string]interface{}{
|
||||
"YA": v.XC.YA,
|
||||
"YB": v.XC.YB,
|
||||
},
|
||||
}
|
||||
if _, err := gojson.Marshal(filteredMap); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func Benchmark_Encode_FilterByFieldQuery(b *testing.B) {
|
||||
query, err := gojson.BuildFieldQuery(
|
||||
"XA",
|
||||
"XB",
|
||||
gojson.BuildSubFieldQuery("XC").Fields(
|
||||
"YA",
|
||||
"YB",
|
||||
),
|
||||
)
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
v := &queryTestX{
|
||||
XA: 1,
|
||||
XB: "xb",
|
||||
XC: &queryTestY{
|
||||
YA: 2,
|
||||
YB: "yb",
|
||||
YC: true,
|
||||
YD: 4,
|
||||
},
|
||||
XD: true,
|
||||
XE: 5,
|
||||
}
|
||||
ctx := gojson.SetFieldQueryToContext(context.Background(), query)
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
if _, err := gojson.MarshalContext(ctx, v); err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
}
|
||||
}
|
22 benchmarks/go.mod Normal file
@@ -0,0 +1,22 @@
module benchmark

go 1.19

require (
	github.com/francoispqt/gojay v1.2.13
	github.com/goccy/go-json v0.0.0-00010101000000-000000000000
	github.com/json-iterator/go v1.1.10
	github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe
	github.com/pquerna/ffjson v0.0.0-20190930134022-aa0246cd15f7
	github.com/segmentio/encoding v0.2.4
	github.com/valyala/fastjson v1.6.3
	github.com/wI2L/jettison v0.7.1
)

require (
	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
	github.com/modern-go/reflect2 v1.0.1 // indirect
	github.com/stretchr/testify v1.7.0 // indirect
)

replace github.com/goccy/go-json => ../
195 benchmarks/go.sum Normal file
@@ -0,0 +1,195 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.31.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.37.0/go.mod h1:TS1dMSSfndXH133OKGwekG838Om/cQT0BUHV3HcBgoo=
|
||||
dmitri.shuralyov.com/app/changes v0.0.0-20180602232624-0a106ad413e3/go.mod h1:Yl+fi1br7+Rr3LqpNJf1/uxUdtRUV+Tnj0o93V2B9MU=
|
||||
dmitri.shuralyov.com/html/belt v0.0.0-20180602232347-f7d459c86be0/go.mod h1:JLBrvjyP0v+ecvNYvCpyZgu5/xkfAUhi6wJj28eUfSU=
|
||||
dmitri.shuralyov.com/service/change v0.0.0-20181023043359-a85b471d5412/go.mod h1:a1inKt/atXimZ4Mv927x+r7UpyzRUf4emIoiiSC2TN4=
|
||||
dmitri.shuralyov.com/state v0.0.0-20180228185332-28bcc343414c/go.mod h1:0PRwlb0D6DFvNNtx+9ybjezNCa8XF0xaYcETyp6rHWU=
|
||||
git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
|
||||
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
|
||||
github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g=
|
||||
github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
|
||||
github.com/francoispqt/gojay v1.2.13 h1:d2m3sFjloqoIUQU3TsHBgj6qg/BVGlTBeHDUmyJnXKk=
|
||||
github.com/francoispqt/gojay v1.2.13/go.mod h1:ehT5mTG4ua4581f1++1WLG0vPdaA9HaiDsoyrBGkyDY=
|
||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
|
||||
github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q=
|
||||
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
|
||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
||||
github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E=
|
||||
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
|
||||
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
|
||||
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
|
||||
github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY=
|
||||
github.com/googleapis/gax-go/v2 v2.0.3/go.mod h1:LLvjysVCY1JZeum8Z6l8qUty8fiNwE08qbEPm1M08qg=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
|
||||
github.com/grpc-ecosystem/grpc-gateway v1.5.0/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
|
||||
github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU=
|
||||
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
|
||||
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
|
||||
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/pty v1.1.3/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/lunixbochs/vtclean v1.0.0/go.mod h1:pHhQNgMf3btfWnGBVipUOjRYhoOsdGqdm/+2c2E2WMI=
github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe h1:W/GaMY0y69G4cFlmsC6B9sbuo2fP8OFP1ABjt4kPz+w=
github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/microcosm-cc/bluemonday v1.0.1/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo=
github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/openzipkin/zipkin-go v0.1.1/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pquerna/ffjson v0.0.0-20190930134022-aa0246cd15f7 h1:xoIK0ctDddBMnc74udxJYBqlo9Ylnsp1waqjLsnef20=
github.com/pquerna/ffjson v0.0.0-20190930134022-aa0246cd15f7/go.mod h1:YARuvh7BUWHNhzDq2OM5tzR2RiCcN2D7sapiKyCel/M=
github.com/prometheus/client_golang v0.8.0/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/common v0.0.0-20180801064454-c7de2306084e/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/procfs v0.0.0-20180725123919-05ee40e3a273/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/segmentio/encoding v0.1.10/go.mod h1:RWhr02uzMB9gQC1x+MfYxedtmBibb9cZ6Vv9VxRSSbw=
github.com/segmentio/encoding v0.2.4 h1:TQRXhTlXj4urZe3Z5QVgxs9Ad1i7GYHg9peAtjOPe28=
github.com/segmentio/encoding v0.2.4/go.mod h1:MJjRE6bMDocliO2FyFC2Dusp+uYdBfHWh5Bw7QyExto=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/shurcooL/component v0.0.0-20170202220835-f88ec8f54cc4/go.mod h1:XhFIlyj5a1fBNx5aJTbKoIq0mNaPvOagO+HjB3EtxrY=
github.com/shurcooL/events v0.0.0-20181021180414-410e4ca65f48/go.mod h1:5u70Mqkb5O5cxEA8nxTsgrgLehJeAw6Oc4Ab1c/P1HM=
github.com/shurcooL/github_flavored_markdown v0.0.0-20181002035957-2122de532470/go.mod h1:2dOwnU2uBioM+SGy2aZoq1f/Sd1l9OkAeAUvjSyvgU0=
github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ=
github.com/shurcooL/gofontwoff v0.0.0-20180329035133-29b52fc0a18d/go.mod h1:05UtEgK5zq39gLST6uB0cf3NEHjETfB4Fgr3Gx5R9Vw=
github.com/shurcooL/gopherjslib v0.0.0-20160914041154-feb6d3990c2c/go.mod h1:8d3azKNyqcHP1GaQE/c6dDgjkgSx2BZ4IoEi4F1reUI=
github.com/shurcooL/highlight_diff v0.0.0-20170515013008-09bb4053de1b/go.mod h1:ZpfEhSmds4ytuByIcDnOLkTHGUI6KNqRNPDLHDk+mUU=
github.com/shurcooL/highlight_go v0.0.0-20181028180052-98c3abbbae20/go.mod h1:UDKB5a1T23gOMUJrI+uSuH0VRDStOiUVSjBTRDVBVag=
github.com/shurcooL/home v0.0.0-20181020052607-80b7ffcb30f9/go.mod h1:+rgNQw2P9ARFAs37qieuu7ohDNQ3gds9msbT2yn85sg=
github.com/shurcooL/htmlg v0.0.0-20170918183704-d01228ac9e50/go.mod h1:zPn1wHpTIePGnXSHpsVPWEktKXHr6+SS6x/IKRb7cpw=
github.com/shurcooL/httperror v0.0.0-20170206035902-86b7830d14cc/go.mod h1:aYMfkZ6DWSJPJ6c4Wwz3QtW22G7mf/PEgaB9k/ik5+Y=
github.com/shurcooL/httpfs v0.0.0-20171119174359-809beceb2371/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
github.com/shurcooL/httpgzip v0.0.0-20180522190206-b1c53ac65af9/go.mod h1:919LwcH0M7/W4fcZ0/jy0qGght1GIhqyS/EgWGH2j5Q=
github.com/shurcooL/issues v0.0.0-20181008053335-6292fdc1e191/go.mod h1:e2qWDig5bLteJ4fwvDAc2NHzqFEthkqn7aOZAOpj+PQ=
github.com/shurcooL/issuesapp v0.0.0-20180602232740-048589ce2241/go.mod h1:NPpHK2TI7iSaM0buivtFUc9offApnI0Alt/K8hcHy0I=
github.com/shurcooL/notifications v0.0.0-20181007000457-627ab5aea122/go.mod h1:b5uSkrEVM1jQUspwbixRBhaIjIzL2xazXp6kntxYle0=
github.com/shurcooL/octicon v0.0.0-20181028054416-fa4f57f9efb2/go.mod h1:eWdoE5JD4R5UVWDucdOPg1g2fqQRq78IQa9zlOV1vpQ=
github.com/shurcooL/reactions v0.0.0-20181006231557-f2e0b4ca5b82/go.mod h1:TCR1lToEk4d2s07G3XGfz2QrgHXg4RJBvjrOozvoWfk=
github.com/shurcooL/sanitized_anchor_name v0.0.0-20170918181015-86672fcb3f95/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/shurcooL/users v0.0.0-20180125191416-49c67e49c537/go.mod h1:QJTqeLYEDaXHZDBsXlPCDqdhQuJkuw4NOtaxYe3xii4=
github.com/shurcooL/webdavfs v0.0.0-20170829043945-18c3829fa133/go.mod h1:hKmq5kWdCj2z2KEozexVbfEZIWiTjhE0+UjmZgPqehw=
github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d/go.mod h1:UdhH50NIW0fCiwBSr0co2m7BnFLdv4fQTgdqdJTHFeE=
github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e/go.mod h1:HuIsMU8RRBOtsCgI77wP899iHVBQpCmg4ErYMZB+2IA=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA=
github.com/valyala/fastjson v1.6.3 h1:tAKFnnwmeMGPbwJ7IwxcTPCNr3uIzoIj3/Fh90ra4xc=
github.com/valyala/fastjson v1.6.3/go.mod h1:CLCAqky6SMuOcxStkYQvblddUtoRxhYMGLrsQns1aXY=
github.com/viant/assertly v0.4.8/go.mod h1:aGifi++jvCrUaklKEKT0BU95igDNaqkvz+49uaYMPRU=
github.com/viant/toolbox v0.24.0/go.mod h1:OxMCG57V0PXuIP2HNQrtJf2CjqdmbrOx5EkMILuUhzM=
github.com/wI2L/jettison v0.7.1 h1:XNq/WvSOAiJhFww9F5JZZcBZtKFL2Y/9WHHEHLDq9TE=
github.com/wI2L/jettison v0.7.1/go.mod h1:dj49nOP41M7x6Jql62BqqF/+nW+XJgBaWzJR0hd6M84=
go.opencensus.io v0.18.0/go.mod h1:vKdFvxhtzZ9onBp9VKHK8z/sRpBMnKAsufL7wlDrCOA=
go4.org v0.0.0-20180809161055-417644f6feb5/go.mod h1:MkTOUMDaeVYJUOUsaDXIhWPZYa1yOyC1qaOBpL57BhE=
golang.org/x/build v0.0.0-20190111050920-041ab4dc3f9d/go.mod h1:OWs+y06UdEOHN4y+MfF/py+xQ/tYqIWW03b70/CG9Rw=
golang.org/x/crypto v0.0.0-20181030102418-4d3f4d9ffa16/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190313024323-a1f597ede03a/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181029044818-c44066c5c816/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181106065722-10aee1819953/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190313220215-9f648a60d977/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/perf v0.0.0-20180704124530-6e6d33e29852/go.mod h1:JLpeXjPJfIyPr5TlbXLkXWLhP8nz10XfvxElABhCtcw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181029174526-d69651ed3497/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190316082340-a2f829d7f35f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181030000716-a0a13e073c7b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20181030000543-1d582fd0359e/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.1.0/go.mod h1:UGEZY7KEX120AnNLIHFMKIo4obdJhkp2tPbaPlQx13Y=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20181029155118-b69ba1387ce2/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20181202183823-bd91e49a0898/go.mod h1:7Ep/1NZk928CDR8SjdVbjWNpdIf6nzjE3BTgJDr2Atg=
google.golang.org/genproto v0.0.0-20190306203927-b5d61aea6440/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
grpc.go4.org v0.0.0-20170609214715-11d0a25b4919/go.mod h1:77eQGdRu53HpSqPFJFmuJdjuHRquDANNeA4x7B8WQ9o=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
sourcegraph.com/sourcegraph/go-diff v0.5.0/go.mod h1:kuch7UrkMzY0X+p9CRK03kfuPQ2zzQcaEFbx8wA8rck=
sourcegraph.com/sqs/pbtypes v0.0.0-20180604144634-d3ebe8f20ae4/go.mod h1:ketZ/q3QxT9HOBeFhu6RdvsftgpsbFHBF5Cas6cDKZ0=
213
benchmarks/large_payload.go
Normal file
File diff suppressed because one or more lines are too long
367
benchmarks/large_payload_easyjson.go
Normal file
@@ -0,0 +1,367 @@
// Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT.
|
||||
|
||||
package benchmark
|
||||
|
||||
import (
|
||||
json "encoding/json"
|
||||
easyjson "github.com/mailru/easyjson"
|
||||
jlexer "github.com/mailru/easyjson/jlexer"
|
||||
jwriter "github.com/mailru/easyjson/jwriter"
|
||||
)
|
||||
|
||||
// suppress unused package warning
|
||||
var (
|
||||
_ *json.RawMessage
|
||||
_ *jlexer.Lexer
|
||||
_ *jwriter.Writer
|
||||
_ easyjson.Marshaler
|
||||
)
|
||||
|
||||
func easyjsonD519278DecodeBenchmark(in *jlexer.Lexer, out *LargePayloadEasyJson) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeString()
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "Users":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.Users = nil
|
||||
} else {
|
||||
in.Delim('[')
|
||||
if out.Users == nil {
|
||||
if !in.IsDelim(']') {
|
||||
out.Users = make(DSUsers, 0, 8)
|
||||
} else {
|
||||
out.Users = DSUsers{}
|
||||
}
|
||||
} else {
|
||||
out.Users = (out.Users)[:0]
|
||||
}
|
||||
for !in.IsDelim(']') {
|
||||
var v1 *DSUser
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
v1 = nil
|
||||
} else {
|
||||
if v1 == nil {
|
||||
v1 = new(DSUser)
|
||||
}
|
||||
easyjsonD519278DecodeBenchmark1(in, v1)
|
||||
}
|
||||
out.Users = append(out.Users, v1)
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim(']')
|
||||
}
|
||||
case "Topics":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.Topics = nil
|
||||
} else {
|
||||
if out.Topics == nil {
|
||||
out.Topics = new(DSTopicsList)
|
||||
}
|
||||
easyjsonD519278DecodeBenchmark2(in, out.Topics)
|
||||
}
|
||||
default:
|
||||
in.SkipRecursive()
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
func easyjsonD519278EncodeBenchmark(out *jwriter.Writer, in LargePayloadEasyJson) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
{
|
||||
const prefix string = ",\"Users\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
if in.Users == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
out.RawByte('[')
|
||||
for v2, v3 := range in.Users {
|
||||
if v2 > 0 {
|
||||
out.RawByte(',')
|
||||
}
|
||||
if v3 == nil {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
easyjsonD519278EncodeBenchmark1(out, *v3)
|
||||
}
|
||||
}
|
||||
out.RawByte(']')
|
||||
}
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Topics\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
if in.Topics == nil {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
easyjsonD519278EncodeBenchmark2(out, *in.Topics)
|
||||
}
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
|
||||
|
||||
// MarshalJSON supports json.Marshaler interface
|
||||
func (v LargePayloadEasyJson) MarshalJSON() ([]byte, error) {
|
||||
w := jwriter.Writer{}
|
||||
easyjsonD519278EncodeBenchmark(&w, v)
|
||||
return w.Buffer.BuildBytes(), w.Error
|
||||
}
|
||||
|
||||
// MarshalEasyJSON supports easyjson.Marshaler interface
|
||||
func (v LargePayloadEasyJson) MarshalEasyJSON(w *jwriter.Writer) {
|
||||
easyjsonD519278EncodeBenchmark(w, v)
|
||||
}
|
||||
|
||||
// UnmarshalJSON supports json.Unmarshaler interface
|
||||
func (v *LargePayloadEasyJson) UnmarshalJSON(data []byte) error {
|
||||
r := jlexer.Lexer{Data: data}
|
||||
easyjsonD519278DecodeBenchmark(&r, v)
|
||||
return r.Error()
|
||||
}
|
||||
|
||||
// UnmarshalEasyJSON supports easyjson.Unmarshaler interface
|
||||
func (v *LargePayloadEasyJson) UnmarshalEasyJSON(l *jlexer.Lexer) {
|
||||
easyjsonD519278DecodeBenchmark(l, v)
|
||||
}
|
||||
func easyjsonD519278DecodeBenchmark2(in *jlexer.Lexer, out *DSTopicsList) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeString()
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "Topics":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.Topics = nil
|
||||
} else {
|
||||
in.Delim('[')
|
||||
if out.Topics == nil {
|
||||
if !in.IsDelim(']') {
|
||||
out.Topics = make(DSTopics, 0, 8)
|
||||
} else {
|
||||
out.Topics = DSTopics{}
|
||||
}
|
||||
} else {
|
||||
out.Topics = (out.Topics)[:0]
|
||||
}
|
||||
for !in.IsDelim(']') {
|
||||
var v4 *DSTopic
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
v4 = nil
|
||||
} else {
|
||||
if v4 == nil {
|
||||
v4 = new(DSTopic)
|
||||
}
|
||||
easyjsonD519278DecodeBenchmark3(in, v4)
|
||||
}
|
||||
out.Topics = append(out.Topics, v4)
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim(']')
|
||||
}
|
||||
case "MoreTopicsUrl":
|
||||
out.MoreTopicsUrl = string(in.String())
|
||||
default:
|
||||
in.SkipRecursive()
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
func easyjsonD519278EncodeBenchmark2(out *jwriter.Writer, in DSTopicsList) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
{
|
||||
const prefix string = ",\"Topics\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
if in.Topics == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
out.RawByte('[')
|
||||
for v5, v6 := range in.Topics {
|
||||
if v5 > 0 {
|
||||
out.RawByte(',')
|
||||
}
|
||||
if v6 == nil {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
easyjsonD519278EncodeBenchmark3(out, *v6)
|
||||
}
|
||||
}
|
||||
out.RawByte(']')
|
||||
}
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"MoreTopicsUrl\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.MoreTopicsUrl))
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
|
||||
func easyjsonD519278DecodeBenchmark3(in *jlexer.Lexer, out *DSTopic) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeString()
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "Id":
|
||||
out.Id = int(in.Int())
|
||||
case "Slug":
|
||||
out.Slug = string(in.String())
|
||||
default:
|
||||
in.SkipRecursive()
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
func easyjsonD519278EncodeBenchmark3(out *jwriter.Writer, in DSTopic) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
{
|
||||
const prefix string = ",\"Id\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.Id))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Slug\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.Slug))
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
|
||||
func easyjsonD519278DecodeBenchmark1(in *jlexer.Lexer, out *DSUser) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeString()
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "Username":
|
||||
out.Username = string(in.String())
|
||||
default:
|
||||
in.SkipRecursive()
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
func easyjsonD519278EncodeBenchmark1(out *jwriter.Writer, in DSUser) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
{
|
||||
const prefix string = ",\"Username\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.Username))
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
363
benchmarks/medium_payload.go
Normal file
@@ -0,0 +1,363 @@
package benchmark
|
||||
|
||||
import "github.com/francoispqt/gojay"
|
||||
|
||||
// Response from Clearbit API. Size: 2.4kb
|
||||
var MediumFixture = []byte(`{
|
||||
"person": {
|
||||
"id": "d50887ca-a6ce-4e59-b89f-14f0b5d03b03",
|
||||
"name": {
|
||||
"fullName": "Leonid Bugaev",
|
||||
"givenName": "Leonid",
|
||||
"familyName": "Bugaev"
|
||||
},
|
||||
"email": "leonsbox@gmail.com",
|
||||
"gender": "male",
|
||||
"location": "Saint Petersburg, Saint Petersburg, RU",
|
||||
"geo": {
|
||||
"city": "Saint Petersburg",
|
||||
"state": "Saint Petersburg",
|
||||
"country": "Russia",
|
||||
"lat": 59.9342802,
|
||||
"lng": 30.3350986
|
||||
},
|
||||
"bio": "Senior engineer at Granify.com",
|
||||
"site": "http://flickfaver.com",
|
||||
"avatar": "https://d1ts43dypk8bqh.cloudfront.net/v1/avatars/d50887ca-a6ce-4e59-b89f-14f0b5d03b03",
|
||||
"employment": {
|
||||
"name": "www.latera.ru",
|
||||
"title": "Software Engineer",
|
||||
"domain": "gmail.com"
|
||||
},
|
||||
"facebook": {
|
||||
"handle": "leonid.bugaev"
|
||||
},
|
||||
"github": {
|
||||
"handle": "buger",
|
||||
"id": 14009,
|
||||
"avatar": "https://avatars.githubusercontent.com/u/14009?v=3",
|
||||
"company": "Granify",
|
||||
"blog": "http://leonsbox.com",
|
||||
"followers": 95,
|
||||
"following": 10
|
||||
},
|
||||
"twitter": {
|
||||
"handle": "flickfaver",
|
||||
"id": 77004410,
|
||||
"bio": null,
|
||||
"followers": 2,
|
||||
"following": 1,
|
||||
"statuses": 5,
|
||||
"favorites": 0,
|
||||
"location": "",
|
||||
"site": "http://flickfaver.com",
|
||||
"avatar": null
|
||||
},
|
||||
"linkedin": {
|
||||
"handle": "in/leonidbugaev"
|
||||
},
|
||||
"googleplus": {
|
||||
"handle": null
|
||||
},
|
||||
"angellist": {
|
||||
"handle": "leonid-bugaev",
|
||||
"id": 61541,
|
||||
"bio": "Senior engineer at Granify.com",
|
||||
"blog": "http://buger.github.com",
|
||||
"site": "http://buger.github.com",
|
||||
"followers": 41,
|
||||
"avatar": "https://d1qb2nb5cznatu.cloudfront.net/users/61541-medium_jpg?1405474390"
|
||||
},
|
||||
"klout": {
|
||||
"handle": null,
|
||||
"score": null
|
||||
},
|
||||
"foursquare": {
|
||||
"handle": null
|
||||
},
|
||||
"aboutme": {
|
||||
"handle": "leonid.bugaev",
|
||||
"bio": null,
|
||||
"avatar": null
|
||||
},
|
||||
"gravatar": {
|
||||
"handle": "buger",
|
||||
"urls": [
|
||||
],
|
||||
"avatar": "http://1.gravatar.com/avatar/f7c8edd577d13b8930d5522f28123510",
|
||||
"avatars": [
|
||||
{
|
||||
"url": "http://1.gravatar.com/avatar/f7c8edd577d13b8930d5522f28123510",
|
||||
"type": "thumbnail"
|
||||
}
|
||||
]
|
||||
},
|
||||
"fuzzy": false
|
||||
},
|
||||
"company": null
|
||||
}`)
|
||||
|
||||
type CBAvatar struct {
|
||||
Url string
|
||||
}
|
||||
|
||||
func (m *CBAvatar) UnmarshalJSONObject(dec *gojay.Decoder, key string) error {
|
||||
switch key {
|
||||
case "avatars":
|
||||
return dec.AddString(&m.Url)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
func (m *CBAvatar) NKeys() int {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (m *CBAvatar) MarshalJSONObject(enc *gojay.Encoder) {
|
||||
enc.AddStringKey("url", m.Url)
|
||||
}
|
||||
|
||||
func (m *CBAvatar) IsNil() bool {
|
||||
return m == nil
|
||||
}
|
||||
|
||||
type Avatars []*CBAvatar
|
||||
|
||||
func (t *Avatars) UnmarshalJSONArray(dec *gojay.Decoder) error {
|
||||
avatar := CBAvatar{}
|
||||
*t = append(*t, &avatar)
|
||||
return dec.AddObject(&avatar)
|
||||
}
|
||||
|
||||
func (m *Avatars) MarshalJSONArray(enc *gojay.Encoder) {
|
||||
for _, e := range *m {
|
||||
enc.AddObject(e)
|
||||
}
|
||||
}
|
||||
func (m *Avatars) IsNil() bool {
|
||||
return m == nil
|
||||
}
|
||||
|
||||
type CBGravatar struct {
|
||||
Avatars Avatars
|
||||
}
|
||||
|
||||
func (m *CBGravatar) UnmarshalJSONObject(dec *gojay.Decoder, key string) error {
|
||||
switch key {
|
||||
case "avatars":
|
||||
return dec.AddArray(&m.Avatars)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
func (m *CBGravatar) NKeys() int {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (m *CBGravatar) MarshalJSONObject(enc *gojay.Encoder) {
|
||||
enc.AddArrayKey("avatars", &m.Avatars)
|
||||
}
|
||||
|
||||
func (m *CBGravatar) IsNil() bool {
|
||||
return m == nil
|
||||
}
|
||||
|
||||
type CBGithub struct {
|
||||
Followers int
|
||||
}
|
||||
|
||||
func (m *CBGithub) UnmarshalJSONObject(dec *gojay.Decoder, key string) error {
|
||||
switch key {
|
||||
case "followers":
|
||||
return dec.AddInt(&m.Followers)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *CBGithub) NKeys() int {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (m *CBGithub) MarshalJSONObject(enc *gojay.Encoder) {
|
||||
enc.AddIntKey("followers", m.Followers)
|
||||
}
|
||||
|
||||
func (m *CBGithub) IsNil() bool {
|
||||
return m == nil
|
||||
}
|
||||
|
||||
type CBName struct {
|
||||
FullName string `json:"fullName"`
|
||||
}
|
||||
|
||||
func (m *CBName) UnmarshalJSONObject(dec *gojay.Decoder, key string) error {
|
||||
switch key {
|
||||
case "fullName":
|
||||
return dec.AddString(&m.FullName)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *CBName) NKeys() int {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (m *CBName) MarshalJSONObject(enc *gojay.Encoder) {
|
||||
enc.AddStringKey("fullName", m.FullName)
|
||||
}
|
||||
|
||||
func (m *CBName) IsNil() bool {
|
||||
return m == nil
|
||||
}
|
||||
|
||||
type CBPerson struct {
|
||||
Name *CBName `json:"name"`
|
||||
Github *CBGithub `json:"github"`
|
||||
Gravatar *CBGravatar
|
||||
}
|
||||
|
||||
func (m *CBPerson) UnmarshalJSONObject(dec *gojay.Decoder, key string) error {
|
||||
switch key {
|
||||
case "name":
|
||||
m.Name = &CBName{}
|
||||
return dec.AddObject(m.Name)
|
||||
case "github":
|
||||
m.Github = &CBGithub{}
|
||||
return dec.AddObject(m.Github)
|
||||
case "gravatar":
|
||||
m.Gravatar = &CBGravatar{}
|
||||
return dec.AddObject(m.Gravatar)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *CBPerson) NKeys() int {
|
||||
return 3
|
||||
}
|
||||
|
||||
func (m *CBPerson) MarshalJSONObject(enc *gojay.Encoder) {
|
||||
enc.AddObjectKey("name", m.Name)
|
||||
enc.AddObjectKey("github", m.Github)
|
||||
enc.AddObjectKey("gravatar", m.Gravatar)
|
||||
}
|
||||
|
||||
func (m *CBPerson) IsNil() bool {
|
||||
return m == nil
|
||||
}
|
||||
|
||||
type MediumPayload struct {
|
||||
Person *CBPerson `json:"person"`
|
||||
Company string `json:"company"`
|
||||
}
|
||||
|
||||
//easyjson:json
|
||||
type MediumPayloadEasyJson struct {
|
||||
Person *CBPerson `json:"person"`
|
||||
Company string `json:"company"`
|
||||
}
|
||||
|
||||
func (m *MediumPayload) UnmarshalJSONObject(dec *gojay.Decoder, key string) error {
|
||||
switch key {
|
||||
case "person":
|
||||
m.Person = &CBPerson{}
|
||||
return dec.AddObject(m.Person)
|
||||
case "company":
|
||||
dec.AddString(&m.Company)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *MediumPayload) NKeys() int {
|
||||
return 2
|
||||
}
|
||||
|
||||
func (m *MediumPayload) MarshalJSONObject(enc *gojay.Encoder) {
|
||||
enc.AddObjectKey("person", m.Person)
|
||||
// enc.AddStringKey("company", m.Company)
|
||||
}
|
||||
|
||||
func (m *MediumPayload) IsNil() bool {
|
||||
return m == nil
|
||||
}
|
||||
|
||||
func NewMediumPayload() *MediumPayload {
|
||||
return &MediumPayload{
|
||||
Company: "test",
|
||||
Person: &CBPerson{
|
||||
Name: &CBName{
|
||||
FullName: "test",
|
||||
},
|
||||
Github: &CBGithub{
|
||||
Followers: 100,
|
||||
},
|
||||
Gravatar: &CBGravatar{
|
||||
Avatars: Avatars{
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func NewMediumPayloadEasyJson() *MediumPayloadEasyJson {
|
||||
return &MediumPayloadEasyJson{
|
||||
Company: "test",
|
||||
Person: &CBPerson{
|
||||
Name: &CBName{
|
||||
FullName: "test",
|
||||
},
|
||||
Github: &CBGithub{
|
||||
Followers: 100,
|
||||
},
|
||||
Gravatar: &CBGravatar{
|
||||
Avatars: Avatars{
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
&CBAvatar{
|
||||
Url: "http://test.com",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
453
benchmarks/medium_payload_easyjson.go
Normal file
@@ -0,0 +1,453 @@
// Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT.
|
||||
|
||||
package benchmark
|
||||
|
||||
import (
|
||||
json "encoding/json"
|
||||
easyjson "github.com/mailru/easyjson"
|
||||
jlexer "github.com/mailru/easyjson/jlexer"
|
||||
jwriter "github.com/mailru/easyjson/jwriter"
|
||||
)
|
||||
|
||||
// suppress unused package warning
|
||||
var (
|
||||
_ *json.RawMessage
|
||||
_ *jlexer.Lexer
|
||||
_ *jwriter.Writer
|
||||
_ easyjson.Marshaler
|
||||
)
|
||||
|
||||
func easyjson8ca7813eDecodeBenchmark(in *jlexer.Lexer, out *MediumPayloadEasyJson) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeString()
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "person":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.Person = nil
|
||||
} else {
|
||||
if out.Person == nil {
|
||||
out.Person = new(CBPerson)
|
||||
}
|
||||
easyjson8ca7813eDecodeBenchmark1(in, out.Person)
|
||||
}
|
||||
case "company":
|
||||
out.Company = string(in.String())
|
||||
default:
|
||||
in.SkipRecursive()
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
func easyjson8ca7813eEncodeBenchmark(out *jwriter.Writer, in MediumPayloadEasyJson) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
{
|
||||
const prefix string = ",\"person\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
if in.Person == nil {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
easyjson8ca7813eEncodeBenchmark1(out, *in.Person)
|
||||
}
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"company\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.Company))
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
|
||||
|
||||
// MarshalJSON supports json.Marshaler interface
|
||||
func (v MediumPayloadEasyJson) MarshalJSON() ([]byte, error) {
|
||||
w := jwriter.Writer{}
|
||||
easyjson8ca7813eEncodeBenchmark(&w, v)
|
||||
return w.Buffer.BuildBytes(), w.Error
|
||||
}
|
||||
|
||||
// MarshalEasyJSON supports easyjson.Marshaler interface
|
||||
func (v MediumPayloadEasyJson) MarshalEasyJSON(w *jwriter.Writer) {
|
||||
easyjson8ca7813eEncodeBenchmark(w, v)
|
||||
}
|
||||
|
||||
// UnmarshalJSON supports json.Unmarshaler interface
|
||||
func (v *MediumPayloadEasyJson) UnmarshalJSON(data []byte) error {
|
||||
r := jlexer.Lexer{Data: data}
|
||||
easyjson8ca7813eDecodeBenchmark(&r, v)
|
||||
return r.Error()
|
||||
}
|
||||
|
||||
// UnmarshalEasyJSON supports easyjson.Unmarshaler interface
|
||||
func (v *MediumPayloadEasyJson) UnmarshalEasyJSON(l *jlexer.Lexer) {
|
||||
easyjson8ca7813eDecodeBenchmark(l, v)
|
||||
}
|
||||
func easyjson8ca7813eDecodeBenchmark1(in *jlexer.Lexer, out *CBPerson) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeString()
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "name":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.Name = nil
|
||||
} else {
|
||||
if out.Name == nil {
|
||||
out.Name = new(CBName)
|
||||
}
|
||||
easyjson8ca7813eDecodeBenchmark2(in, out.Name)
|
||||
}
|
||||
case "github":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.Github = nil
|
||||
} else {
|
||||
if out.Github == nil {
|
||||
out.Github = new(CBGithub)
|
||||
}
|
||||
easyjson8ca7813eDecodeBenchmark3(in, out.Github)
|
||||
}
|
||||
case "Gravatar":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.Gravatar = nil
|
||||
} else {
|
||||
if out.Gravatar == nil {
|
||||
out.Gravatar = new(CBGravatar)
|
||||
}
|
||||
easyjson8ca7813eDecodeBenchmark4(in, out.Gravatar)
|
||||
}
|
||||
default:
|
||||
in.SkipRecursive()
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
func easyjson8ca7813eEncodeBenchmark1(out *jwriter.Writer, in CBPerson) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
{
|
||||
const prefix string = ",\"name\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
if in.Name == nil {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
easyjson8ca7813eEncodeBenchmark2(out, *in.Name)
|
||||
}
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"github\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
if in.Github == nil {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
easyjson8ca7813eEncodeBenchmark3(out, *in.Github)
|
||||
}
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Gravatar\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
if in.Gravatar == nil {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
easyjson8ca7813eEncodeBenchmark4(out, *in.Gravatar)
|
||||
}
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
|
||||
func easyjson8ca7813eDecodeBenchmark4(in *jlexer.Lexer, out *CBGravatar) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeString()
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "Avatars":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.Avatars = nil
|
||||
} else {
|
||||
in.Delim('[')
|
||||
if out.Avatars == nil {
|
||||
if !in.IsDelim(']') {
|
||||
out.Avatars = make(Avatars, 0, 8)
|
||||
} else {
|
||||
out.Avatars = Avatars{}
|
||||
}
|
||||
} else {
|
||||
out.Avatars = (out.Avatars)[:0]
|
||||
}
|
||||
for !in.IsDelim(']') {
|
||||
var v1 *CBAvatar
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
v1 = nil
|
||||
} else {
|
||||
if v1 == nil {
|
||||
v1 = new(CBAvatar)
|
||||
}
|
||||
easyjson8ca7813eDecodeBenchmark5(in, v1)
|
||||
}
|
||||
out.Avatars = append(out.Avatars, v1)
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim(']')
|
||||
}
|
||||
default:
|
||||
in.SkipRecursive()
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
func easyjson8ca7813eEncodeBenchmark4(out *jwriter.Writer, in CBGravatar) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
{
|
||||
const prefix string = ",\"Avatars\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
if in.Avatars == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
out.RawByte('[')
|
||||
for v2, v3 := range in.Avatars {
|
||||
if v2 > 0 {
|
||||
out.RawByte(',')
|
||||
}
|
||||
if v3 == nil {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
easyjson8ca7813eEncodeBenchmark5(out, *v3)
|
||||
}
|
||||
}
|
||||
out.RawByte(']')
|
||||
}
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
|
||||
func easyjson8ca7813eDecodeBenchmark5(in *jlexer.Lexer, out *CBAvatar) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeString()
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "Url":
|
||||
out.Url = string(in.String())
|
||||
default:
|
||||
in.SkipRecursive()
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
func easyjson8ca7813eEncodeBenchmark5(out *jwriter.Writer, in CBAvatar) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
{
|
||||
const prefix string = ",\"Url\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.Url))
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
|
||||
func easyjson8ca7813eDecodeBenchmark3(in *jlexer.Lexer, out *CBGithub) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeString()
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "Followers":
|
||||
out.Followers = int(in.Int())
|
||||
default:
|
||||
in.SkipRecursive()
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
func easyjson8ca7813eEncodeBenchmark3(out *jwriter.Writer, in CBGithub) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
{
|
||||
const prefix string = ",\"Followers\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.Followers))
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
|
||||
func easyjson8ca7813eDecodeBenchmark2(in *jlexer.Lexer, out *CBName) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeString()
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "fullName":
|
||||
out.FullName = string(in.String())
|
||||
default:
|
||||
in.SkipRecursive()
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
func easyjson8ca7813eEncodeBenchmark2(out *jwriter.Writer, in CBName) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
{
|
||||
const prefix string = ",\"fullName\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.FullName))
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
24
benchmarks/path_test.go
Normal file
@@ -0,0 +1,24 @@
package benchmark

import (
	"testing"

	gojson "github.com/goccy/go-json"
)

func Benchmark_Decode_SmallStruct_UnmarshalPath_GoJson(b *testing.B) {
	path, err := gojson.CreatePath("$.st")
	if err != nil {
		b.Fatal(err)
	}
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		var v int
		if err := path.Unmarshal(SmallFixture, &v); err != nil {
			b.Fatal(err)
		}
		if v != 1 {
			b.Fatal("failed to unmarshal path")
		}
	}
}
78
benchmarks/slow_reader_test.go
Normal file
@@ -0,0 +1,78 @@
package benchmark

import (
	"bytes"
	"encoding/json"
	"fmt"
	"reflect"
	"testing"

	gojson "github.com/goccy/go-json"
)

// Benchmark decoding from a slow io.Reader that never fills the buffer completely
func Benchmark_Decode_SlowReader_EncodingJson(b *testing.B) {
	var expected LargePayload
	if err := json.Unmarshal(LargeFixture, &expected); err != nil {
		b.Fatal(err)
	}
	for _, chunkSize := range [5]int{16384, 4096, 1024, 256, 64} {
		b.Run(fmt.Sprintf("chunksize %v", chunkSize), func(b *testing.B) {
			b.ReportAllocs()
			for i := 0; i < b.N; i++ {
				index = 0
				var got LargePayload
				if err := json.NewDecoder(slowReader{chunkSize: chunkSize}).Decode(&got); err != nil {
					b.Fatal(err)
				}
				if !reflect.DeepEqual(expected, got) {
					b.Fatalf("failed to decode. expected:[%+v] but got:[%+v]", expected, got)
				}
			}
		})
	}
}

func Benchmark_Decode_SlowReader_GoJson(b *testing.B) {
	var expected LargePayload
	if err := json.Unmarshal(LargeFixture, &expected); err != nil {
		b.Fatal(err)
	}
	for _, chunkSize := range []int{16384, 4096, 1024, 256, 64} {
		b.Run(fmt.Sprintf("chunksize %v", chunkSize), func(b *testing.B) {
			b.ReportAllocs()
			for i := 0; i < b.N; i++ {
				index = 0
				var got LargePayload
				if err := gojson.NewDecoder(slowReader{chunkSize: chunkSize}).Decode(&got); err != nil {
					b.Fatal(err)
				}
				if !reflect.DeepEqual(expected, got) {
					b.Fatalf("failed to decode. expected:[%+v] but got:[%+v]", expected, got)
				}
			}
		})
	}
}

type slowReader struct {
	chunkSize int
}

var index int

func (s slowReader) Read(p []byte) (n int, err error) {
	smallBuf := make([]byte, Min(s.chunkSize, len(p)))
	x := bytes.NewReader(LargeFixture)
	n, err = x.ReadAt(smallBuf, int64(index))
	index += n
	copy(p, smallBuf)
	return
}

func Min(x, y int) int {
	if x < y {
		return x
	}
	return y
}
129
benchmarks/small_payload.go
Normal file
@@ -0,0 +1,129 @@
package benchmark
|
||||
|
||||
import "github.com/francoispqt/gojay"
|
||||
|
||||
var SmallFixture = []byte(`{"st": 1,"sid": 486,"tt": "active","gr": 0,"uuid": "de305d54-75b4-431b-adb2-eb6b9e546014","ip": "127.0.0.1","ua": "user_agent","tz": -6,"v": 1}`)
|
||||
|
||||
// ffjson:skip
|
||||
type SmallPayload struct {
|
||||
St int
|
||||
Sid int
|
||||
Tt string
|
||||
Gr int
|
||||
Uuid string
|
||||
Ip string
|
||||
Ua string
|
||||
Tz int
|
||||
V int
|
||||
}
|
||||
|
||||
type SmallPayloadFFJson struct {
|
||||
St int
|
||||
Sid int
|
||||
Tt string
|
||||
Gr int
|
||||
Uuid string
|
||||
Ip string
|
||||
Ua string
|
||||
Tz int
|
||||
V int
|
||||
}
|
||||
|
||||
//easyjson:json
|
||||
type SmallPayloadEasyJson struct {
|
||||
St int
|
||||
Sid int
|
||||
Tt string
|
||||
Gr int
|
||||
Uuid string
|
||||
Ip string
|
||||
Ua string
|
||||
Tz int
|
||||
V int
|
||||
}
|
||||
|
||||
func (t *SmallPayload) MarshalJSONObject(enc *gojay.Encoder) {
|
||||
enc.AddIntKey("st", t.St)
|
||||
enc.AddIntKey("sid", t.Sid)
|
||||
enc.AddStringKey("tt", t.Tt)
|
||||
enc.AddIntKey("gr", t.Gr)
|
||||
enc.AddStringKey("uuid", t.Uuid)
|
||||
enc.AddStringKey("ip", t.Ip)
|
||||
enc.AddStringKey("ua", t.Ua)
|
||||
enc.AddIntKey("tz", t.Tz)
|
||||
enc.AddIntKey("v", t.V)
|
||||
}
|
||||
|
||||
func (t *SmallPayload) IsNil() bool {
|
||||
return t == nil
|
||||
}
|
||||
|
||||
func (t *SmallPayload) UnmarshalJSONObject(dec *gojay.Decoder, key string) error {
|
||||
switch key {
|
||||
case "st":
|
||||
return dec.AddInt(&t.St)
|
||||
case "sid":
|
||||
return dec.AddInt(&t.Sid)
|
||||
case "gr":
|
||||
return dec.AddInt(&t.Gr)
|
||||
case "tz":
|
||||
return dec.AddInt(&t.Tz)
|
||||
case "v":
|
||||
return dec.AddInt(&t.V)
|
||||
case "tt":
|
||||
return dec.AddString(&t.Tt)
|
||||
case "uuid":
|
||||
return dec.AddString(&t.Uuid)
|
||||
case "ip":
|
||||
return dec.AddString(&t.Ip)
|
||||
case "ua":
|
||||
return dec.AddString(&t.Ua)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (t *SmallPayload) NKeys() int {
|
||||
return 9
|
||||
}
|
||||
|
||||
func NewSmallPayload() *SmallPayload {
|
||||
return &SmallPayload{
|
||||
St: 1,
|
||||
Sid: 2,
|
||||
Tt: "TestString",
|
||||
Gr: 4,
|
||||
Uuid: "8f9a65eb-4807-4d57-b6e0-bda5d62f1429",
|
||||
Ip: "127.0.0.1",
|
||||
Ua: "Mozilla",
|
||||
Tz: 8,
|
||||
V: 6,
|
||||
}
|
||||
}
|
||||
|
||||
func NewSmallPayloadEasyJson() *SmallPayloadEasyJson {
|
||||
return &SmallPayloadEasyJson{
|
||||
St: 1,
|
||||
Sid: 2,
|
||||
Tt: "TestString",
|
||||
Gr: 4,
|
||||
Uuid: "8f9a65eb-4807-4d57-b6e0-bda5d62f1429",
|
||||
Ip: "127.0.0.1",
|
||||
Ua: "Mozilla",
|
||||
Tz: 8,
|
||||
V: 6,
|
||||
}
|
||||
}
|
||||
|
||||
func NewSmallPayloadFFJson() *SmallPayloadFFJson {
|
||||
return &SmallPayloadFFJson{
|
||||
St: 1,
|
||||
Sid: 2,
|
||||
Tt: "TestString",
|
||||
Gr: 4,
|
||||
Uuid: "8f9a65eb-4807-4d57-b6e0-bda5d62f1429",
|
||||
Ip: "127.0.0.1",
|
||||
Ua: "Mozilla",
|
||||
Tz: 8,
|
||||
V: 6,
|
||||
}
|
||||
}
186
benchmarks/small_payload_easyjson.go
Normal file
@@ -0,0 +1,186 @@
// Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT.
|
||||
|
||||
package benchmark
|
||||
|
||||
import (
|
||||
json "encoding/json"
|
||||
easyjson "github.com/mailru/easyjson"
|
||||
jlexer "github.com/mailru/easyjson/jlexer"
|
||||
jwriter "github.com/mailru/easyjson/jwriter"
|
||||
)
|
||||
|
||||
// suppress unused package warning
|
||||
var (
|
||||
_ *json.RawMessage
|
||||
_ *jlexer.Lexer
|
||||
_ *jwriter.Writer
|
||||
_ easyjson.Marshaler
|
||||
)
|
||||
|
||||
func easyjson21677a1cDecodeBenchmark(in *jlexer.Lexer, out *SmallPayloadEasyJson) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeString()
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "St":
|
||||
out.St = int(in.Int())
|
||||
case "Sid":
|
||||
out.Sid = int(in.Int())
|
||||
case "Tt":
|
||||
out.Tt = string(in.String())
|
||||
case "Gr":
|
||||
out.Gr = int(in.Int())
|
||||
case "Uuid":
|
||||
out.Uuid = string(in.String())
|
||||
case "Ip":
|
||||
out.Ip = string(in.String())
|
||||
case "Ua":
|
||||
out.Ua = string(in.String())
|
||||
case "Tz":
|
||||
out.Tz = int(in.Int())
|
||||
case "V":
|
||||
out.V = int(in.Int())
|
||||
default:
|
||||
in.SkipRecursive()
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
func easyjson21677a1cEncodeBenchmark(out *jwriter.Writer, in SmallPayloadEasyJson) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
{
|
||||
const prefix string = ",\"St\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.St))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Sid\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.Sid))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Tt\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.Tt))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Gr\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.Gr))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Uuid\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.Uuid))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Ip\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.Ip))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Ua\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.Ua))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Tz\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.Tz))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"V\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.V))
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
|
||||
|
||||
// MarshalJSON supports json.Marshaler interface
|
||||
func (v SmallPayloadEasyJson) MarshalJSON() ([]byte, error) {
|
||||
w := jwriter.Writer{}
|
||||
easyjson21677a1cEncodeBenchmark(&w, v)
|
||||
return w.Buffer.BuildBytes(), w.Error
|
||||
}
|
||||
|
||||
// MarshalEasyJSON supports easyjson.Marshaler interface
|
||||
func (v SmallPayloadEasyJson) MarshalEasyJSON(w *jwriter.Writer) {
|
||||
easyjson21677a1cEncodeBenchmark(w, v)
|
||||
}
|
||||
|
||||
// UnmarshalJSON supports json.Unmarshaler interface
|
||||
func (v *SmallPayloadEasyJson) UnmarshalJSON(data []byte) error {
|
||||
r := jlexer.Lexer{Data: data}
|
||||
easyjson21677a1cDecodeBenchmark(&r, v)
|
||||
return r.Error()
|
||||
}
|
||||
|
||||
// UnmarshalEasyJSON supports easyjson.Unmarshaler interface
|
||||
func (v *SmallPayloadEasyJson) UnmarshalEasyJSON(l *jlexer.Lexer) {
|
||||
easyjson21677a1cDecodeBenchmark(l, v)
|
||||
}
608
benchmarks/small_payload_ffjson.go
Normal file
@@ -0,0 +1,608 @@
// Code generated by ffjson <https://github.com/pquerna/ffjson>. DO NOT EDIT.
|
||||
// source: small_payload.go
|
||||
|
||||
package benchmark
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
|
||||
fflib "github.com/pquerna/ffjson/fflib/v1"
|
||||
)
|
||||
|
||||
// MarshalJSON marshal bytes to json - template
|
||||
func (j *SmallPayloadFFJson) MarshalJSON() ([]byte, error) {
|
||||
var buf fflib.Buffer
|
||||
if j == nil {
|
||||
buf.WriteString("null")
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
err := j.MarshalJSONBuf(&buf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
// MarshalJSONBuf marshal buff to json - template
|
||||
func (j *SmallPayloadFFJson) MarshalJSONBuf(buf fflib.EncodingBuffer) error {
|
||||
if j == nil {
|
||||
buf.WriteString("null")
|
||||
return nil
|
||||
}
|
||||
var err error
|
||||
var obj []byte
|
||||
_ = obj
|
||||
_ = err
|
||||
buf.WriteString(`{"St":`)
|
||||
fflib.FormatBits2(buf, uint64(j.St), 10, j.St < 0)
|
||||
buf.WriteString(`,"Sid":`)
|
||||
fflib.FormatBits2(buf, uint64(j.Sid), 10, j.Sid < 0)
|
||||
buf.WriteString(`,"Tt":`)
|
||||
fflib.WriteJsonString(buf, string(j.Tt))
|
||||
buf.WriteString(`,"Gr":`)
|
||||
fflib.FormatBits2(buf, uint64(j.Gr), 10, j.Gr < 0)
|
||||
buf.WriteString(`,"Uuid":`)
|
||||
fflib.WriteJsonString(buf, string(j.Uuid))
|
||||
buf.WriteString(`,"Ip":`)
|
||||
fflib.WriteJsonString(buf, string(j.Ip))
|
||||
buf.WriteString(`,"Ua":`)
|
||||
fflib.WriteJsonString(buf, string(j.Ua))
|
||||
buf.WriteString(`,"Tz":`)
|
||||
fflib.FormatBits2(buf, uint64(j.Tz), 10, j.Tz < 0)
|
||||
buf.WriteString(`,"V":`)
|
||||
fflib.FormatBits2(buf, uint64(j.V), 10, j.V < 0)
|
||||
buf.WriteByte('}')
|
||||
return nil
|
||||
}
|
||||
|
||||
const (
|
||||
ffjtSmallPayloadFFJsonbase = iota
|
||||
ffjtSmallPayloadFFJsonnosuchkey
|
||||
|
||||
ffjtSmallPayloadFFJsonSt
|
||||
|
||||
ffjtSmallPayloadFFJsonSid
|
||||
|
||||
ffjtSmallPayloadFFJsonTt
|
||||
|
||||
ffjtSmallPayloadFFJsonGr
|
||||
|
||||
ffjtSmallPayloadFFJsonUuid
|
||||
|
||||
ffjtSmallPayloadFFJsonIp
|
||||
|
||||
ffjtSmallPayloadFFJsonUa
|
||||
|
||||
ffjtSmallPayloadFFJsonTz
|
||||
|
||||
ffjtSmallPayloadFFJsonV
|
||||
)
|
||||
|
||||
var ffjKeySmallPayloadFFJsonSt = []byte("St")
|
||||
|
||||
var ffjKeySmallPayloadFFJsonSid = []byte("Sid")
|
||||
|
||||
var ffjKeySmallPayloadFFJsonTt = []byte("Tt")
|
||||
|
||||
var ffjKeySmallPayloadFFJsonGr = []byte("Gr")
|
||||
|
||||
var ffjKeySmallPayloadFFJsonUuid = []byte("Uuid")
|
||||
|
||||
var ffjKeySmallPayloadFFJsonIp = []byte("Ip")
|
||||
|
||||
var ffjKeySmallPayloadFFJsonUa = []byte("Ua")
|
||||
|
||||
var ffjKeySmallPayloadFFJsonTz = []byte("Tz")
|
||||
|
||||
var ffjKeySmallPayloadFFJsonV = []byte("V")
|
||||
|
||||
// UnmarshalJSON umarshall json - template of ffjson
|
||||
func (j *SmallPayloadFFJson) UnmarshalJSON(input []byte) error {
|
||||
fs := fflib.NewFFLexer(input)
|
||||
return j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)
|
||||
}
|
||||
|
||||
// UnmarshalJSONFFLexer fast json unmarshall - template ffjson
|
||||
func (j *SmallPayloadFFJson) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error {
|
||||
var err error
|
||||
currentKey := ffjtSmallPayloadFFJsonbase
|
||||
_ = currentKey
|
||||
tok := fflib.FFTok_init
|
||||
wantedTok := fflib.FFTok_init
|
||||
|
||||
mainparse:
|
||||
for {
|
||||
tok = fs.Scan()
|
||||
// println(fmt.Sprintf("debug: tok: %v state: %v", tok, state))
|
||||
if tok == fflib.FFTok_error {
|
||||
goto tokerror
|
||||
}
|
||||
|
||||
switch state {
|
||||
|
||||
case fflib.FFParse_map_start:
|
||||
if tok != fflib.FFTok_left_bracket {
|
||||
wantedTok = fflib.FFTok_left_bracket
|
||||
goto wrongtokenerror
|
||||
}
|
||||
state = fflib.FFParse_want_key
|
||||
continue
|
||||
|
||||
case fflib.FFParse_after_value:
|
||||
if tok == fflib.FFTok_comma {
|
||||
state = fflib.FFParse_want_key
|
||||
} else if tok == fflib.FFTok_right_bracket {
|
||||
goto done
|
||||
} else {
|
||||
wantedTok = fflib.FFTok_comma
|
||||
goto wrongtokenerror
|
||||
}
|
||||
|
||||
case fflib.FFParse_want_key:
|
||||
// json {} ended. goto exit. woo.
|
||||
if tok == fflib.FFTok_right_bracket {
|
||||
goto done
|
||||
}
|
||||
if tok != fflib.FFTok_string {
|
||||
wantedTok = fflib.FFTok_string
|
||||
goto wrongtokenerror
|
||||
}
|
||||
|
||||
kn := fs.Output.Bytes()
|
||||
if len(kn) <= 0 {
|
||||
// "" case. hrm.
|
||||
currentKey = ffjtSmallPayloadFFJsonnosuchkey
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
} else {
|
||||
switch kn[0] {
|
||||
|
||||
case 'G':
|
||||
|
||||
if bytes.Equal(ffjKeySmallPayloadFFJsonGr, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonGr
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
case 'I':
|
||||
|
||||
if bytes.Equal(ffjKeySmallPayloadFFJsonIp, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonIp
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
case 'S':
|
||||
|
||||
if bytes.Equal(ffjKeySmallPayloadFFJsonSt, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonSt
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
|
||||
} else if bytes.Equal(ffjKeySmallPayloadFFJsonSid, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonSid
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
case 'T':
|
||||
|
||||
if bytes.Equal(ffjKeySmallPayloadFFJsonTt, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonTt
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
|
||||
} else if bytes.Equal(ffjKeySmallPayloadFFJsonTz, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonTz
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
case 'U':
|
||||
|
||||
if bytes.Equal(ffjKeySmallPayloadFFJsonUuid, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonUuid
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
|
||||
} else if bytes.Equal(ffjKeySmallPayloadFFJsonUa, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonUa
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
case 'V':
|
||||
|
||||
if bytes.Equal(ffjKeySmallPayloadFFJsonV, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonV
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if fflib.SimpleLetterEqualFold(ffjKeySmallPayloadFFJsonV, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonV
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
if fflib.SimpleLetterEqualFold(ffjKeySmallPayloadFFJsonTz, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonTz
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
if fflib.SimpleLetterEqualFold(ffjKeySmallPayloadFFJsonUa, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonUa
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
if fflib.SimpleLetterEqualFold(ffjKeySmallPayloadFFJsonIp, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonIp
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
if fflib.SimpleLetterEqualFold(ffjKeySmallPayloadFFJsonUuid, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonUuid
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
if fflib.SimpleLetterEqualFold(ffjKeySmallPayloadFFJsonGr, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonGr
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
if fflib.SimpleLetterEqualFold(ffjKeySmallPayloadFFJsonTt, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonTt
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
if fflib.EqualFoldRight(ffjKeySmallPayloadFFJsonSid, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonSid
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
if fflib.EqualFoldRight(ffjKeySmallPayloadFFJsonSt, kn) {
|
||||
currentKey = ffjtSmallPayloadFFJsonSt
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
currentKey = ffjtSmallPayloadFFJsonnosuchkey
|
||||
state = fflib.FFParse_want_colon
|
||||
goto mainparse
|
||||
}
|
||||
|
||||
case fflib.FFParse_want_colon:
|
||||
if tok != fflib.FFTok_colon {
|
||||
wantedTok = fflib.FFTok_colon
|
||||
goto wrongtokenerror
|
||||
}
|
||||
state = fflib.FFParse_want_value
|
||||
continue
|
||||
case fflib.FFParse_want_value:
|
||||
|
||||
if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null {
|
||||
switch currentKey {
|
||||
|
||||
case ffjtSmallPayloadFFJsonSt:
|
||||
goto handle_St
|
||||
|
||||
case ffjtSmallPayloadFFJsonSid:
|
||||
goto handle_Sid
|
||||
|
||||
case ffjtSmallPayloadFFJsonTt:
|
||||
goto handle_Tt
|
||||
|
||||
case ffjtSmallPayloadFFJsonGr:
|
||||
goto handle_Gr
|
||||
|
||||
case ffjtSmallPayloadFFJsonUuid:
|
||||
goto handle_Uuid
|
||||
|
||||
case ffjtSmallPayloadFFJsonIp:
|
||||
goto handle_Ip
|
||||
|
||||
case ffjtSmallPayloadFFJsonUa:
|
||||
goto handle_Ua
|
||||
|
||||
case ffjtSmallPayloadFFJsonTz:
|
||||
goto handle_Tz
|
||||
|
||||
case ffjtSmallPayloadFFJsonV:
|
||||
goto handle_V
|
||||
|
||||
case ffjtSmallPayloadFFJsonnosuchkey:
|
||||
err = fs.SkipField(tok)
|
||||
if err != nil {
|
||||
return fs.WrapErr(err)
|
||||
}
|
||||
state = fflib.FFParse_after_value
|
||||
goto mainparse
|
||||
}
|
||||
} else {
|
||||
goto wantedvalue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
handle_St:
|
||||
|
||||
/* handler: j.St type=int kind=int quoted=false*/
|
||||
|
||||
{
|
||||
if tok != fflib.FFTok_integer && tok != fflib.FFTok_null {
|
||||
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int", tok))
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
|
||||
if tok == fflib.FFTok_null {
|
||||
|
||||
} else {
|
||||
|
||||
tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64)
|
||||
|
||||
if err != nil {
|
||||
return fs.WrapErr(err)
|
||||
}
|
||||
|
||||
j.St = int(tval)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
state = fflib.FFParse_after_value
|
||||
goto mainparse
|
||||
|
||||
handle_Sid:
|
||||
|
||||
/* handler: j.Sid type=int kind=int quoted=false*/
|
||||
|
||||
{
|
||||
if tok != fflib.FFTok_integer && tok != fflib.FFTok_null {
|
||||
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int", tok))
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
|
||||
if tok == fflib.FFTok_null {
|
||||
|
||||
} else {
|
||||
|
||||
tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64)
|
||||
|
||||
if err != nil {
|
||||
return fs.WrapErr(err)
|
||||
}
|
||||
|
||||
j.Sid = int(tval)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
state = fflib.FFParse_after_value
|
||||
goto mainparse
|
||||
|
||||
handle_Tt:
|
||||
|
||||
/* handler: j.Tt type=string kind=string quoted=false*/
|
||||
|
||||
{
|
||||
|
||||
{
|
||||
if tok != fflib.FFTok_string && tok != fflib.FFTok_null {
|
||||
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok))
|
||||
}
|
||||
}
|
||||
|
||||
if tok == fflib.FFTok_null {
|
||||
|
||||
} else {
|
||||
|
||||
outBuf := fs.Output.Bytes()
|
||||
|
||||
j.Tt = string(string(outBuf))
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
state = fflib.FFParse_after_value
|
||||
goto mainparse
|
||||
|
||||
handle_Gr:
|
||||
|
||||
/* handler: j.Gr type=int kind=int quoted=false*/
|
||||
|
||||
{
|
||||
if tok != fflib.FFTok_integer && tok != fflib.FFTok_null {
|
||||
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int", tok))
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
|
||||
if tok == fflib.FFTok_null {
|
||||
|
||||
} else {
|
||||
|
||||
tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64)
|
||||
|
||||
if err != nil {
|
||||
return fs.WrapErr(err)
|
||||
}
|
||||
|
||||
j.Gr = int(tval)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
state = fflib.FFParse_after_value
|
||||
goto mainparse
|
||||
|
||||
handle_Uuid:
|
||||
|
||||
/* handler: j.Uuid type=string kind=string quoted=false*/
|
||||
|
||||
{
|
||||
|
||||
{
|
||||
if tok != fflib.FFTok_string && tok != fflib.FFTok_null {
|
||||
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok))
|
||||
}
|
||||
}
|
||||
|
||||
if tok == fflib.FFTok_null {
|
||||
|
||||
} else {
|
||||
|
||||
outBuf := fs.Output.Bytes()
|
||||
|
||||
j.Uuid = string(string(outBuf))
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
state = fflib.FFParse_after_value
|
||||
goto mainparse
|
||||
|
||||
handle_Ip:
|
||||
|
||||
/* handler: j.Ip type=string kind=string quoted=false*/
|
||||
|
||||
{
|
||||
|
||||
{
|
||||
if tok != fflib.FFTok_string && tok != fflib.FFTok_null {
|
||||
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok))
|
||||
}
|
||||
}
|
||||
|
||||
if tok == fflib.FFTok_null {
|
||||
|
||||
} else {
|
||||
|
||||
outBuf := fs.Output.Bytes()
|
||||
|
||||
j.Ip = string(string(outBuf))
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
state = fflib.FFParse_after_value
|
||||
goto mainparse
|
||||
|
||||
handle_Ua:
|
||||
|
||||
/* handler: j.Ua type=string kind=string quoted=false*/
|
||||
|
||||
{
|
||||
|
||||
{
|
||||
if tok != fflib.FFTok_string && tok != fflib.FFTok_null {
|
||||
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok))
|
||||
}
|
||||
}
|
||||
|
||||
if tok == fflib.FFTok_null {
|
||||
|
||||
} else {
|
||||
|
||||
outBuf := fs.Output.Bytes()
|
||||
|
||||
j.Ua = string(string(outBuf))
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
state = fflib.FFParse_after_value
|
||||
goto mainparse
|
||||
|
||||
handle_Tz:
|
||||
|
||||
/* handler: j.Tz type=int kind=int quoted=false*/
|
||||
|
||||
{
|
||||
if tok != fflib.FFTok_integer && tok != fflib.FFTok_null {
|
||||
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int", tok))
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
|
||||
if tok == fflib.FFTok_null {
|
||||
|
||||
} else {
|
||||
|
||||
tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64)
|
||||
|
||||
if err != nil {
|
||||
return fs.WrapErr(err)
|
||||
}
|
||||
|
||||
j.Tz = int(tval)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
state = fflib.FFParse_after_value
|
||||
goto mainparse
|
||||
|
||||
handle_V:
|
||||
|
||||
/* handler: j.V type=int kind=int quoted=false*/
|
||||
|
||||
{
|
||||
if tok != fflib.FFTok_integer && tok != fflib.FFTok_null {
|
||||
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int", tok))
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
|
||||
if tok == fflib.FFTok_null {
|
||||
|
||||
} else {
|
||||
|
||||
tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64)
|
||||
|
||||
if err != nil {
|
||||
return fs.WrapErr(err)
|
||||
}
|
||||
|
||||
j.V = int(tval)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
state = fflib.FFParse_after_value
|
||||
goto mainparse
|
||||
|
||||
wantedvalue:
|
||||
return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
|
||||
wrongtokenerror:
|
||||
return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String()))
|
||||
tokerror:
|
||||
if fs.BigError != nil {
|
||||
return fs.WrapErr(fs.BigError)
|
||||
}
|
||||
err = fs.Error.ToError()
|
||||
if err != nil {
|
||||
return fs.WrapErr(err)
|
||||
}
|
||||
panic("ffjson-generated: unreachable, please report bug.")
|
||||
done:
|
||||
|
||||
return nil
|
||||
}
|
BIN
benchmarks/testdata/code.json.gz
vendored
Normal file
Binary file not shown.
68
color.go
Normal file
@@ -0,0 +1,68 @@
package json
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/goccy/go-json/internal/encoder"
|
||||
)
|
||||
|
||||
type (
|
||||
ColorFormat = encoder.ColorFormat
|
||||
ColorScheme = encoder.ColorScheme
|
||||
)
|
||||
|
||||
const escape = "\x1b"
|
||||
|
||||
type colorAttr int
|
||||
|
||||
//nolint:deadcode,varcheck
|
||||
const (
|
||||
fgBlackColor colorAttr = iota + 30
|
||||
fgRedColor
|
||||
fgGreenColor
|
||||
fgYellowColor
|
||||
fgBlueColor
|
||||
fgMagentaColor
|
||||
fgCyanColor
|
||||
fgWhiteColor
|
||||
)
|
||||
|
||||
//nolint:deadcode,varcheck
|
||||
const (
|
||||
fgHiBlackColor colorAttr = iota + 90
|
||||
fgHiRedColor
|
||||
fgHiGreenColor
|
||||
fgHiYellowColor
|
||||
fgHiBlueColor
|
||||
fgHiMagentaColor
|
||||
fgHiCyanColor
|
||||
fgHiWhiteColor
|
||||
)
|
||||
|
||||
func createColorFormat(attr colorAttr) ColorFormat {
|
||||
return ColorFormat{
|
||||
Header: wrapColor(attr),
|
||||
Footer: resetColor(),
|
||||
}
|
||||
}
|
||||
|
||||
func wrapColor(attr colorAttr) string {
|
||||
return fmt.Sprintf("%s[%dm", escape, attr)
|
||||
}
|
||||
|
||||
func resetColor() string {
|
||||
return wrapColor(colorAttr(0))
|
||||
}
|
||||
|
||||
var (
|
||||
DefaultColorScheme = &ColorScheme{
|
||||
Int: createColorFormat(fgHiMagentaColor),
|
||||
Uint: createColorFormat(fgHiMagentaColor),
|
||||
Float: createColorFormat(fgHiMagentaColor),
|
||||
Bool: createColorFormat(fgHiYellowColor),
|
||||
String: createColorFormat(fgHiGreenColor),
|
||||
Binary: createColorFormat(fgHiRedColor),
|
||||
ObjectKey: createColorFormat(fgHiCyanColor),
|
||||
Null: createColorFormat(fgBlueColor),
|
||||
}
|
||||
)
|
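The scheme above is consumed through the Colorize encode option, which color_test.go (next file) exercises with DefaultColorScheme. As a rough sketch that is not part of this commit, a caller can also assemble a custom ColorScheme from the exported aliases; the escape strings are plain ANSI codes equivalent to what wrapColor produces, and leaving some fields at their zero value is assumed to simply emit no color for those kinds:

package main

import (
	"fmt"

	"github.com/goccy/go-json"
)

func main() {
	// Hypothetical custom scheme; only a few kinds are overridden.
	scheme := &json.ColorScheme{
		Int:       json.ColorFormat{Header: "\x1b[33m", Footer: "\x1b[0m"}, // yellow
		String:    json.ColorFormat{Header: "\x1b[32m", Footer: "\x1b[0m"}, // green
		ObjectKey: json.ColorFormat{Header: "\x1b[36m", Footer: "\x1b[0m"}, // cyan
	}
	b, err := json.MarshalWithOption(map[string]interface{}{"id": 1, "name": "a"},
		json.Colorize(scheme))
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}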
49
color_test.go
Normal file
@@ -0,0 +1,49 @@
package json_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/goccy/go-json"
|
||||
)
|
||||
|
||||
func TestColorize(t *testing.T) {
|
||||
v := struct {
|
||||
A int
|
||||
B uint
|
||||
C float32
|
||||
D string
|
||||
E bool
|
||||
F []byte
|
||||
G []int
|
||||
H *struct{}
|
||||
I map[string]interface{}
|
||||
}{
|
||||
A: 123,
|
||||
B: 456,
|
||||
C: 3.14,
|
||||
D: "hello",
|
||||
E: true,
|
||||
F: []byte("binary"),
|
||||
G: []int{1, 2, 3, 4},
|
||||
H: nil,
|
||||
I: map[string]interface{}{
|
||||
"mapA": -10,
|
||||
"mapB": 10,
|
||||
"mapC": nil,
|
||||
},
|
||||
}
|
||||
t.Run("marshal with color", func(t *testing.T) {
|
||||
b, err := json.MarshalWithOption(v, json.Colorize(json.DefaultColorScheme))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
t.Log(string(b))
|
||||
})
|
||||
t.Run("marshal indent with color", func(t *testing.T) {
|
||||
b, err := json.MarshalIndentWithOption(v, "", "\t", json.Colorize(json.DefaultColorScheme))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
t.Log("\n" + string(b))
|
||||
})
|
||||
}
|
263
decode.go
Normal file
@@ -0,0 +1,263 @@
package json
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/decoder"
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type Decoder struct {
|
||||
s *decoder.Stream
|
||||
}
|
||||
|
||||
const (
|
||||
nul = '\000'
|
||||
)
|
||||
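// The single nul byte appended to every input buffer by the unmarshal
// helpers below acts as an end-of-input sentinel: the scanning loops can
// switch on buf[cursor] and stop when they reach nul, and validateEndBuf
// treats it as the legitimate end of the document.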
|
||||
type emptyInterface struct {
|
||||
typ *runtime.Type
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
func unmarshal(data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||
src := make([]byte, len(data)+1) // append nul byte to the end
|
||||
copy(src, data)
|
||||
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
|
||||
if err := validateType(header.typ, uintptr(header.ptr)); err != nil {
|
||||
return err
|
||||
}
|
||||
dec, err := decoder.CompileToGetDecoder(header.typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
ctx := decoder.TakeRuntimeContext()
|
||||
ctx.Buf = src
|
||||
ctx.Option.Flags = 0
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
cursor, err := dec.Decode(ctx, 0, 0, header.ptr)
|
||||
if err != nil {
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
return err
|
||||
}
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
return validateEndBuf(src, cursor)
|
||||
}
|
||||
|
||||
func unmarshalContext(ctx context.Context, data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||
src := make([]byte, len(data)+1) // append nul byte to the end
|
||||
copy(src, data)
|
||||
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
|
||||
if err := validateType(header.typ, uintptr(header.ptr)); err != nil {
|
||||
return err
|
||||
}
|
||||
dec, err := decoder.CompileToGetDecoder(header.typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rctx := decoder.TakeRuntimeContext()
|
||||
rctx.Buf = src
|
||||
rctx.Option.Flags = 0
|
||||
rctx.Option.Flags |= decoder.ContextOption
|
||||
rctx.Option.Context = ctx
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(rctx.Option)
|
||||
}
|
||||
cursor, err := dec.Decode(rctx, 0, 0, header.ptr)
|
||||
if err != nil {
|
||||
decoder.ReleaseRuntimeContext(rctx)
|
||||
return err
|
||||
}
|
||||
decoder.ReleaseRuntimeContext(rctx)
|
||||
return validateEndBuf(src, cursor)
|
||||
}
|
||||
|
||||
var (
|
||||
pathDecoder = decoder.NewPathDecoder()
|
||||
)
|
||||
|
||||
func extractFromPath(path *Path, data []byte, optFuncs ...DecodeOptionFunc) ([][]byte, error) {
|
||||
if path.path.RootSelectorOnly {
|
||||
return [][]byte{data}, nil
|
||||
}
|
||||
src := make([]byte, len(data)+1) // append nul byte to the end
|
||||
copy(src, data)
|
||||
|
||||
ctx := decoder.TakeRuntimeContext()
|
||||
ctx.Buf = src
|
||||
ctx.Option.Flags = 0
|
||||
ctx.Option.Flags |= decoder.PathOption
|
||||
ctx.Option.Path = path.path
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
paths, cursor, err := pathDecoder.DecodePath(ctx, 0, 0)
|
||||
if err != nil {
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
return nil, err
|
||||
}
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
if err := validateEndBuf(src, cursor); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return paths, nil
|
||||
}
|
||||
|
||||
func unmarshalNoEscape(data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||
src := make([]byte, len(data)+1) // append nul byte to the end
|
||||
copy(src, data)
|
||||
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
|
||||
if err := validateType(header.typ, uintptr(header.ptr)); err != nil {
|
||||
return err
|
||||
}
|
||||
dec, err := decoder.CompileToGetDecoder(header.typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ctx := decoder.TakeRuntimeContext()
|
||||
ctx.Buf = src
|
||||
ctx.Option.Flags = 0
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
cursor, err := dec.Decode(ctx, 0, 0, noescape(header.ptr))
|
||||
if err != nil {
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
return err
|
||||
}
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
return validateEndBuf(src, cursor)
|
||||
}
|
||||
|
||||
func validateEndBuf(src []byte, cursor int64) error {
|
||||
for {
|
||||
switch src[cursor] {
|
||||
case ' ', '\t', '\n', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case nul:
|
||||
return nil
|
||||
}
|
||||
return errors.ErrSyntax(
|
||||
fmt.Sprintf("invalid character '%c' after top-level value", src[cursor]),
|
||||
cursor+1,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
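// noescape hides p from escape analysis: `x ^ 0` leaves the address
// unchanged, but the compiler can no longer prove the result aliases p,
// so the pointed-to value is not forced onto the heap. The standard
// library's runtime package uses the same trick.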
//nolint:staticcheck
|
||||
//go:nosplit
|
||||
func noescape(p unsafe.Pointer) unsafe.Pointer {
|
||||
x := uintptr(p)
|
||||
return unsafe.Pointer(x ^ 0)
|
||||
}
|
||||
|
||||
func validateType(typ *runtime.Type, p uintptr) error {
|
||||
if typ == nil || typ.Kind() != reflect.Ptr || p == 0 {
|
||||
return &InvalidUnmarshalError{Type: runtime.RType2Type(typ)}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// NewDecoder returns a new decoder that reads from r.
|
||||
//
|
||||
// The decoder introduces its own buffering and may
|
||||
// read data from r beyond the JSON values requested.
|
||||
func NewDecoder(r io.Reader) *Decoder {
|
||||
s := decoder.NewStream(r)
|
||||
return &Decoder{
|
||||
s: s,
|
||||
}
|
||||
}
|
||||
|
||||
// Buffered returns a reader of the data remaining in the Decoder's
|
||||
// buffer. The reader is valid until the next call to Decode.
|
||||
func (d *Decoder) Buffered() io.Reader {
|
||||
return d.s.Buffered()
|
||||
}
|
||||
|
||||
// Decode reads the next JSON-encoded value from its
|
||||
// input and stores it in the value pointed to by v.
|
||||
//
|
||||
// See the documentation for Unmarshal for details about
|
||||
// the conversion of JSON into a Go value.
|
||||
func (d *Decoder) Decode(v interface{}) error {
|
||||
return d.DecodeWithOption(v)
|
||||
}
|
||||
|
||||
// DecodeContext reads the next JSON-encoded value from its
|
||||
// input and stores it in the value pointed to by v with context.Context.
|
||||
func (d *Decoder) DecodeContext(ctx context.Context, v interface{}) error {
|
||||
d.s.Option.Flags |= decoder.ContextOption
|
||||
d.s.Option.Context = ctx
|
||||
return d.DecodeWithOption(v)
|
||||
}
|
||||
|
||||
func (d *Decoder) DecodeWithOption(v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
typ := header.typ
|
||||
ptr := uintptr(header.ptr)
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
// noescape trick for header.typ ( reflect.*rtype )
|
||||
copiedType := *(**runtime.Type)(unsafe.Pointer(&typeptr))
|
||||
|
||||
if err := validateType(copiedType, ptr); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
dec, err := decoder.CompileToGetDecoder(typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.s.PrepareForDecode(); err != nil {
|
||||
return err
|
||||
}
|
||||
s := d.s
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(s.Option)
|
||||
}
|
||||
if err := dec.DecodeStream(s, 0, header.ptr); err != nil {
|
||||
return err
|
||||
}
|
||||
s.Reset()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Decoder) More() bool {
|
||||
return d.s.More()
|
||||
}
|
||||
|
||||
func (d *Decoder) Token() (Token, error) {
|
||||
return d.s.Token()
|
||||
}
|
||||
|
||||
// DisallowUnknownFields causes the Decoder to return an error when the destination
|
||||
// is a struct and the input contains object keys which do not match any
|
||||
// non-ignored, exported fields in the destination.
|
||||
func (d *Decoder) DisallowUnknownFields() {
|
||||
d.s.DisallowUnknownFields = true
|
||||
}
|
||||
|
||||
func (d *Decoder) InputOffset() int64 {
|
||||
return d.s.TotalOffset()
|
||||
}
|
||||
|
||||
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
|
||||
// Number instead of as a float64.
|
||||
func (d *Decoder) UseNumber() {
|
||||
d.s.UseNumber = true
|
||||
}
|
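A minimal usage sketch for the Decoder defined above (import path per go.mod); it exercises NewDecoder, DisallowUnknownFields and Decode and is illustrative only, not part of this commit:

package main

import (
	"fmt"
	"strings"

	"github.com/goccy/go-json"
)

func main() {
	dec := json.NewDecoder(strings.NewReader(`{"id": 1, "name": "a"}`))
	dec.DisallowUnknownFields() // unknown object keys become an error
	var v struct {
		ID   int    `json:"id"`
		Name string `json:"name"`
	}
	if err := dec.Decode(&v); err != nil {
		panic(err)
	}
	fmt.Println(v.ID, v.Name)
}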
4059
decode_test.go
Normal file
File diff suppressed because it is too large
13
docker-compose.yml
Normal file
@@ -0,0 +1,13 @@
version: '2'
|
||||
services:
|
||||
go-json:
|
||||
image: golang:1.18
|
||||
volumes:
|
||||
- '.:/go/src/go-json'
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 620M
|
||||
working_dir: /go/src/go-json
|
||||
command: |
|
||||
sh -c "go test -c . && ls go-json.test"
|
326
encode.go
Normal file
@@ -0,0 +1,326 @@
package json
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"os"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/encoder"
|
||||
"github.com/goccy/go-json/internal/encoder/vm"
|
||||
"github.com/goccy/go-json/internal/encoder/vm_color"
|
||||
"github.com/goccy/go-json/internal/encoder/vm_color_indent"
|
||||
"github.com/goccy/go-json/internal/encoder/vm_indent"
|
||||
)
|
||||
|
||||
// An Encoder writes JSON values to an output stream.
|
||||
type Encoder struct {
|
||||
w io.Writer
|
||||
enabledIndent bool
|
||||
enabledHTMLEscape bool
|
||||
prefix string
|
||||
indentStr string
|
||||
}
|
||||
|
||||
// NewEncoder returns a new encoder that writes to w.
|
||||
func NewEncoder(w io.Writer) *Encoder {
|
||||
return &Encoder{w: w, enabledHTMLEscape: true}
|
||||
}
|
||||
|
||||
// Encode writes the JSON encoding of v to the stream, followed by a newline character.
|
||||
//
|
||||
// See the documentation for Marshal for details about the conversion of Go values to JSON.
|
||||
func (e *Encoder) Encode(v interface{}) error {
|
||||
return e.EncodeWithOption(v)
|
||||
}
|
||||
|
||||
// EncodeWithOption calls Encode with EncodeOption.
|
||||
func (e *Encoder) EncodeWithOption(v interface{}, optFuncs ...EncodeOptionFunc) error {
|
||||
ctx := encoder.TakeRuntimeContext()
|
||||
ctx.Option.Flag = 0
|
||||
|
||||
err := e.encodeWithOption(ctx, v, optFuncs...)
|
||||
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// EncodeContext calls Encode with context.Context and EncodeOption.
|
||||
func (e *Encoder) EncodeContext(ctx context.Context, v interface{}, optFuncs ...EncodeOptionFunc) error {
|
||||
rctx := encoder.TakeRuntimeContext()
|
||||
rctx.Option.Flag = 0
|
||||
rctx.Option.Flag |= encoder.ContextOption
|
||||
rctx.Option.Context = ctx
|
||||
|
||||
err := e.encodeWithOption(rctx, v, optFuncs...) //nolint: contextcheck
|
||||
|
||||
encoder.ReleaseRuntimeContext(rctx)
|
||||
return err
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeWithOption(ctx *encoder.RuntimeContext, v interface{}, optFuncs ...EncodeOptionFunc) error {
|
||||
if e.enabledHTMLEscape {
|
||||
ctx.Option.Flag |= encoder.HTMLEscapeOption
|
||||
}
|
||||
ctx.Option.Flag |= encoder.NormalizeUTF8Option
|
||||
ctx.Option.DebugOut = os.Stdout
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
var (
|
||||
buf []byte
|
||||
err error
|
||||
)
|
||||
if e.enabledIndent {
|
||||
buf, err = encodeIndent(ctx, v, e.prefix, e.indentStr)
|
||||
} else {
|
||||
buf, err = encode(ctx, v)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if e.enabledIndent {
|
||||
buf = buf[:len(buf)-2]
|
||||
} else {
|
||||
buf = buf[:len(buf)-1]
|
||||
}
|
||||
buf = append(buf, '\n')
|
||||
if _, err := e.w.Write(buf); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// SetEscapeHTML specifies whether problematic HTML characters should be escaped inside JSON quoted strings.
|
||||
// The default behavior is to escape &, <, and > to \u0026, \u003c, and \u003e to avoid certain safety problems that can arise when embedding JSON in HTML.
|
||||
//
|
||||
// In non-HTML settings where the escaping interferes with the readability of the output, SetEscapeHTML(false) disables this behavior.
|
||||
func (e *Encoder) SetEscapeHTML(on bool) {
|
||||
e.enabledHTMLEscape = on
|
||||
}
|
||||
|
||||
// SetIndent instructs the encoder to format each subsequent encoded value as if indented by the package-level function Indent(dst, src, prefix, indent).
|
||||
// Calling SetIndent("", "") disables indentation.
|
||||
func (e *Encoder) SetIndent(prefix, indent string) {
|
||||
if prefix == "" && indent == "" {
|
||||
e.enabledIndent = false
|
||||
return
|
||||
}
|
||||
e.prefix = prefix
|
||||
e.indentStr = indent
|
||||
e.enabledIndent = true
|
||||
}
|
||||
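A minimal usage sketch of the Encoder options above (import path per go.mod), illustrative only:

package main

import (
	"os"

	"github.com/goccy/go-json"
)

func main() {
	enc := json.NewEncoder(os.Stdout)
	enc.SetEscapeHTML(false) // keep &, < and > literal in the output
	enc.SetIndent("", "  ")  // two-space indent, empty prefix
	if err := enc.Encode(map[string]string{"link": "<a>"}); err != nil {
		panic(err)
	}
}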
|
||||
func marshalContext(ctx context.Context, v interface{}, optFuncs ...EncodeOptionFunc) ([]byte, error) {
|
||||
rctx := encoder.TakeRuntimeContext()
|
||||
rctx.Option.Flag = 0
|
||||
rctx.Option.Flag = encoder.HTMLEscapeOption | encoder.NormalizeUTF8Option | encoder.ContextOption
|
||||
rctx.Option.Context = ctx
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(rctx.Option)
|
||||
}
|
||||
|
||||
buf, err := encode(rctx, v) //nolint: contextcheck
|
||||
if err != nil {
|
||||
encoder.ReleaseRuntimeContext(rctx)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// these lines exist to avoid a call to `runtime.makeslicecopy`.
// if we used `make([]byte, len(buf)-1)` and `copy(copied, buf)`,
// the dst and src buffer sizes would be different.
// in that case the compiler emits `runtime.makeslicecopy`, which is slow.
|
||||
buf = buf[:len(buf)-1]
|
||||
copied := make([]byte, len(buf))
|
||||
copy(copied, buf)
|
||||
|
||||
encoder.ReleaseRuntimeContext(rctx)
|
||||
return copied, nil
|
||||
}
|
||||
|
||||
func marshal(v interface{}, optFuncs ...EncodeOptionFunc) ([]byte, error) {
|
||||
ctx := encoder.TakeRuntimeContext()
|
||||
|
||||
ctx.Option.Flag = 0
|
||||
ctx.Option.Flag |= (encoder.HTMLEscapeOption | encoder.NormalizeUTF8Option)
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
|
||||
buf, err := encode(ctx, v)
|
||||
if err != nil {
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// these lines exist to avoid a call to `runtime.makeslicecopy`.
// if we used `make([]byte, len(buf)-1)` and `copy(copied, buf)`,
// the dst and src buffer sizes would be different.
// in that case the compiler emits `runtime.makeslicecopy`, which is slow.
|
||||
buf = buf[:len(buf)-1]
|
||||
copied := make([]byte, len(buf))
|
||||
copy(copied, buf)
|
||||
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return copied, nil
|
||||
}
|
||||
|
||||
func marshalNoEscape(v interface{}) ([]byte, error) {
|
||||
ctx := encoder.TakeRuntimeContext()
|
||||
|
||||
ctx.Option.Flag = 0
|
||||
ctx.Option.Flag |= (encoder.HTMLEscapeOption | encoder.NormalizeUTF8Option)
|
||||
|
||||
buf, err := encodeNoEscape(ctx, v)
|
||||
if err != nil {
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// these lines exist to avoid a call to `runtime.makeslicecopy`.
// if we used `make([]byte, len(buf)-1)` and `copy(copied, buf)`,
// the dst and src buffer sizes would be different.
// in that case the compiler emits `runtime.makeslicecopy`, which is slow.
|
||||
buf = buf[:len(buf)-1]
|
||||
copied := make([]byte, len(buf))
|
||||
copy(copied, buf)
|
||||
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return copied, nil
|
||||
}
|
||||
|
||||
func marshalIndent(v interface{}, prefix, indent string, optFuncs ...EncodeOptionFunc) ([]byte, error) {
|
||||
ctx := encoder.TakeRuntimeContext()
|
||||
|
||||
ctx.Option.Flag = 0
|
||||
ctx.Option.Flag |= (encoder.HTMLEscapeOption | encoder.NormalizeUTF8Option | encoder.IndentOption)
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
|
||||
buf, err := encodeIndent(ctx, v, prefix, indent)
|
||||
if err != nil {
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
buf = buf[:len(buf)-2]
|
||||
copied := make([]byte, len(buf))
|
||||
copy(copied, buf)
|
||||
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return copied, nil
|
||||
}
|
||||
|
||||
func encode(ctx *encoder.RuntimeContext, v interface{}) ([]byte, error) {
|
||||
b := ctx.Buf[:0]
|
||||
if v == nil {
|
||||
b = encoder.AppendNull(ctx, b)
|
||||
b = encoder.AppendComma(ctx, b)
|
||||
return b, nil
|
||||
}
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
typ := header.typ
|
||||
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
codeSet, err := encoder.CompileToGetCodeSet(ctx, typeptr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p := uintptr(header.ptr)
|
||||
ctx.Init(p, codeSet.CodeLength)
|
||||
ctx.KeepRefs = append(ctx.KeepRefs, header.ptr)
|
||||
|
||||
buf, err := encodeRunCode(ctx, b, codeSet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ctx.Buf = buf
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
func encodeNoEscape(ctx *encoder.RuntimeContext, v interface{}) ([]byte, error) {
|
||||
b := ctx.Buf[:0]
|
||||
if v == nil {
|
||||
b = encoder.AppendNull(ctx, b)
|
||||
b = encoder.AppendComma(ctx, b)
|
||||
return b, nil
|
||||
}
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
typ := header.typ
|
||||
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
codeSet, err := encoder.CompileToGetCodeSet(ctx, typeptr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p := uintptr(header.ptr)
|
||||
ctx.Init(p, codeSet.CodeLength)
|
||||
buf, err := encodeRunCode(ctx, b, codeSet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ctx.Buf = buf
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
func encodeIndent(ctx *encoder.RuntimeContext, v interface{}, prefix, indent string) ([]byte, error) {
|
||||
b := ctx.Buf[:0]
|
||||
if v == nil {
|
||||
b = encoder.AppendNull(ctx, b)
|
||||
b = encoder.AppendCommaIndent(ctx, b)
|
||||
return b, nil
|
||||
}
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
typ := header.typ
|
||||
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
codeSet, err := encoder.CompileToGetCodeSet(ctx, typeptr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p := uintptr(header.ptr)
|
||||
ctx.Init(p, codeSet.CodeLength)
|
||||
buf, err := encodeRunIndentCode(ctx, b, codeSet, prefix, indent)
|
||||
|
||||
ctx.KeepRefs = append(ctx.KeepRefs, header.ptr)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ctx.Buf = buf
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
func encodeRunCode(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) {
|
||||
if (ctx.Option.Flag & encoder.DebugOption) != 0 {
|
||||
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||
return vm_color.DebugRun(ctx, b, codeSet)
|
||||
}
|
||||
return vm.DebugRun(ctx, b, codeSet)
|
||||
}
|
||||
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||
return vm_color.Run(ctx, b, codeSet)
|
||||
}
|
||||
return vm.Run(ctx, b, codeSet)
|
||||
}
|
||||
|
||||
func encodeRunIndentCode(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet, prefix, indent string) ([]byte, error) {
|
||||
ctx.Prefix = []byte(prefix)
|
||||
ctx.IndentStr = []byte(indent)
|
||||
if (ctx.Option.Flag & encoder.DebugOption) != 0 {
|
||||
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||
return vm_color_indent.DebugRun(ctx, b, codeSet)
|
||||
}
|
||||
return vm_indent.DebugRun(ctx, b, codeSet)
|
||||
}
|
||||
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||
return vm_color_indent.Run(ctx, b, codeSet)
|
||||
}
|
||||
return vm_indent.Run(ctx, b, codeSet)
|
||||
}
|
2717
encode_test.go
Normal file
File diff suppressed because it is too large
41
error.go
Normal file
@@ -0,0 +1,41 @@
package json
|
||||
|
||||
import (
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
// Before Go 1.2, an InvalidUTF8Error was returned by Marshal when
|
||||
// attempting to encode a string value with invalid UTF-8 sequences.
|
||||
// As of Go 1.2, Marshal instead coerces the string to valid UTF-8 by
|
||||
// replacing invalid bytes with the Unicode replacement rune U+FFFD.
|
||||
//
|
||||
// Deprecated: No longer used; kept for compatibility.
|
||||
type InvalidUTF8Error = errors.InvalidUTF8Error
|
||||
|
||||
// An InvalidUnmarshalError describes an invalid argument passed to Unmarshal.
|
||||
// (The argument to Unmarshal must be a non-nil pointer.)
|
||||
type InvalidUnmarshalError = errors.InvalidUnmarshalError
|
||||
|
||||
// A MarshalerError represents an error from calling a MarshalJSON or MarshalText method.
|
||||
type MarshalerError = errors.MarshalerError
|
||||
|
||||
// A SyntaxError is a description of a JSON syntax error.
|
||||
type SyntaxError = errors.SyntaxError
|
||||
|
||||
// An UnmarshalFieldError describes a JSON object key that
|
||||
// led to an unexported (and therefore unwritable) struct field.
|
||||
//
|
||||
// Deprecated: No longer used; kept for compatibility.
|
||||
type UnmarshalFieldError = errors.UnmarshalFieldError
|
||||
|
||||
// An UnmarshalTypeError describes a JSON value that was
|
||||
// not appropriate for a value of a specific Go type.
|
||||
type UnmarshalTypeError = errors.UnmarshalTypeError
|
||||
|
||||
// An UnsupportedTypeError is returned by Marshal when attempting
|
||||
// to encode an unsupported value type.
|
||||
type UnsupportedTypeError = errors.UnsupportedTypeError
|
||||
|
||||
type UnsupportedValueError = errors.UnsupportedValueError
|
||||
|
||||
type PathError = errors.PathError
|
10
export_test.go
Normal file
@@ -0,0 +1,10 @@
package json
|
||||
|
||||
import (
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
var (
|
||||
NewSyntaxError = errors.ErrSyntax
|
||||
NewMarshalerError = errors.ErrMarshaler
|
||||
)
|
3
go.mod
Normal file
@@ -0,0 +1,3 @@
module github.com/goccy/go-json
|
||||
|
||||
go 1.19
|
0
go.sum
Normal file
24
helper_test.go
Normal file
@@ -0,0 +1,24 @@
package json_test
|
||||
|
||||
import "testing"
|
||||
|
||||
func assertErr(t *testing.T, err error) {
|
||||
t.Helper()
|
||||
if err != nil {
|
||||
t.Fatalf("%+v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func assertEq(t *testing.T, msg string, exp interface{}, act interface{}) {
|
||||
t.Helper()
|
||||
if exp != act {
|
||||
t.Fatalf("failed to test for %s. exp=[%v] but act=[%v]", msg, exp, act)
|
||||
}
|
||||
}
|
||||
|
||||
func assertNeq(t *testing.T, msg string, exp interface{}, act interface{}) {
|
||||
t.Helper()
|
||||
if exp == act {
|
||||
t.Fatalf("failed to test for %s. expected value is not [%v] but got same value", msg, act)
|
||||
}
|
||||
}
|
319
internal/cmd/generator/main.go
Normal file
@@ -0,0 +1,319 @@
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"go/parser"
|
||||
"go/printer"
|
||||
"go/token"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
type opType struct {
|
||||
Op string
|
||||
Code string
|
||||
}
|
||||
|
||||
func createOpType(op, code string) opType {
|
||||
return opType{
|
||||
Op: op,
|
||||
Code: code,
|
||||
}
|
||||
}
|
||||
|
||||
func _main() error {
|
||||
tmpl, err := template.New("").Parse(`// Code generated by internal/cmd/generator. DO NOT EDIT!
|
||||
package encoder
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
type CodeType int
|
||||
|
||||
const (
|
||||
{{- range $index, $type := .CodeTypes }}
|
||||
Code{{ $type }} CodeType = {{ $index }}
|
||||
{{- end }}
|
||||
)
|
||||
|
||||
var opTypeStrings = [{{ .OpLen }}]string{
|
||||
{{- range $type := .OpTypes }}
|
||||
"{{ $type.Op }}",
|
||||
{{- end }}
|
||||
}
|
||||
|
||||
type OpType uint16
|
||||
|
||||
const (
|
||||
{{- range $index, $type := .OpTypes }}
|
||||
Op{{ $type.Op }} OpType = {{ $index }}
|
||||
{{- end }}
|
||||
)
|
||||
|
||||
func (t OpType) String() string {
|
||||
if int(t) >= {{ .OpLen }} {
|
||||
return ""
|
||||
}
|
||||
return opTypeStrings[int(t)]
|
||||
}
|
||||
|
||||
func (t OpType) CodeType() CodeType {
|
||||
if strings.Contains(t.String(), "Struct") {
|
||||
if strings.Contains(t.String(), "End") {
|
||||
return CodeStructEnd
|
||||
}
|
||||
return CodeStructField
|
||||
}
|
||||
switch t {
|
||||
case OpArray, OpArrayPtr:
|
||||
return CodeArrayHead
|
||||
case OpArrayElem:
|
||||
return CodeArrayElem
|
||||
case OpSlice, OpSlicePtr:
|
||||
return CodeSliceHead
|
||||
case OpSliceElem:
|
||||
return CodeSliceElem
|
||||
case OpMap, OpMapPtr:
|
||||
return CodeMapHead
|
||||
case OpMapKey:
|
||||
return CodeMapKey
|
||||
case OpMapValue:
|
||||
return CodeMapValue
|
||||
case OpMapEnd:
|
||||
return CodeMapEnd
|
||||
}
|
||||
|
||||
return CodeOp
|
||||
}
|
||||
|
||||
func (t OpType) HeadToPtrHead() OpType {
|
||||
if strings.Index(t.String(), "PtrHead") > 0 {
|
||||
return t
|
||||
}
|
||||
|
||||
idx := strings.Index(t.String(), "Head")
|
||||
if idx == -1 {
|
||||
return t
|
||||
}
|
||||
suffix := "PtrHead"+t.String()[idx+len("Head"):]
|
||||
|
||||
const toPtrOffset = 2
|
||||
if strings.Contains(OpType(int(t) + toPtrOffset).String(), suffix) {
|
||||
return OpType(int(t) + toPtrOffset)
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
func (t OpType) HeadToOmitEmptyHead() OpType {
|
||||
const toOmitEmptyOffset = 1
|
||||
if strings.Contains(OpType(int(t) + toOmitEmptyOffset).String(), "OmitEmpty") {
|
||||
return OpType(int(t) + toOmitEmptyOffset)
|
||||
}
|
||||
|
||||
return t
|
||||
}
|
||||
|
||||
func (t OpType) PtrHeadToHead() OpType {
|
||||
idx := strings.Index(t.String(), "PtrHead")
|
||||
if idx == -1 {
|
||||
return t
|
||||
}
|
||||
suffix := t.String()[idx+len("Ptr"):]
|
||||
|
||||
const toPtrOffset = 2
|
||||
if strings.Contains(OpType(int(t) - toPtrOffset).String(), suffix) {
|
||||
return OpType(int(t) - toPtrOffset)
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
func (t OpType) FieldToEnd() OpType {
|
||||
idx := strings.Index(t.String(), "Field")
|
||||
if idx == -1 {
|
||||
return t
|
||||
}
|
||||
suffix := t.String()[idx+len("Field"):]
|
||||
if suffix == "" || suffix == "OmitEmpty" {
|
||||
return t
|
||||
}
|
||||
const toEndOffset = 2
|
||||
if strings.Contains(OpType(int(t) + toEndOffset).String(), "End"+suffix) {
|
||||
return OpType(int(t) + toEndOffset)
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
func (t OpType) FieldToOmitEmptyField() OpType {
|
||||
const toOmitEmptyOffset = 1
|
||||
if strings.Contains(OpType(int(t) + toOmitEmptyOffset).String(), "OmitEmpty") {
|
||||
return OpType(int(t) + toOmitEmptyOffset)
|
||||
}
|
||||
return t
|
||||
}
|
||||
`)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
codeTypes := []string{
|
||||
"Op",
|
||||
"ArrayHead",
|
||||
"ArrayElem",
|
||||
"SliceHead",
|
||||
"SliceElem",
|
||||
"MapHead",
|
||||
"MapKey",
|
||||
"MapValue",
|
||||
"MapEnd",
|
||||
"Recursive",
|
||||
"StructField",
|
||||
"StructEnd",
|
||||
}
|
||||
primitiveTypes := []string{
|
||||
"int", "uint", "float32", "float64", "bool", "string", "bytes", "number",
|
||||
"array", "map", "slice", "struct", "MarshalJSON", "MarshalText",
|
||||
"intString", "uintString", "float32String", "float64String", "boolString", "stringString", "numberString",
|
||||
"intPtr", "uintPtr", "float32Ptr", "float64Ptr", "boolPtr", "stringPtr", "bytesPtr", "numberPtr",
|
||||
"arrayPtr", "mapPtr", "slicePtr", "marshalJSONPtr", "marshalTextPtr", "interfacePtr",
|
||||
"intPtrString", "uintPtrString", "float32PtrString", "float64PtrString", "boolPtrString", "stringPtrString", "numberPtrString",
|
||||
}
|
||||
primitiveTypesUpper := []string{}
|
||||
for _, typ := range primitiveTypes {
|
||||
primitiveTypesUpper = append(primitiveTypesUpper, strings.ToUpper(string(typ[0]))+typ[1:])
|
||||
}
|
||||
opTypes := []opType{
|
||||
createOpType("End", "Op"),
|
||||
createOpType("Interface", "Op"),
|
||||
createOpType("Ptr", "Op"),
|
||||
createOpType("SliceElem", "SliceElem"),
|
||||
createOpType("SliceEnd", "Op"),
|
||||
createOpType("ArrayElem", "ArrayElem"),
|
||||
createOpType("ArrayEnd", "Op"),
|
||||
createOpType("MapKey", "MapKey"),
|
||||
createOpType("MapValue", "MapValue"),
|
||||
createOpType("MapEnd", "Op"),
|
||||
createOpType("Recursive", "Op"),
|
||||
createOpType("RecursivePtr", "Op"),
|
||||
createOpType("RecursiveEnd", "Op"),
|
||||
createOpType("InterfaceEnd", "Op"),
|
||||
}
|
||||
for _, typ := range primitiveTypesUpper {
|
||||
typ := typ
|
||||
opTypes = append(opTypes, createOpType(typ, "Op"))
|
||||
}
|
||||
for _, typ := range append(primitiveTypesUpper, "") {
|
||||
for _, ptrOrNot := range []string{"", "Ptr"} {
|
||||
for _, opt := range []string{"", "OmitEmpty"} {
|
||||
ptrOrNot := ptrOrNot
|
||||
opt := opt
|
||||
typ := typ
|
||||
|
||||
op := fmt.Sprintf(
|
||||
"Struct%sHead%s%s",
|
||||
ptrOrNot,
|
||||
opt,
|
||||
typ,
|
||||
)
|
||||
opTypes = append(opTypes, opType{
|
||||
Op: op,
|
||||
Code: "StructField",
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, typ := range append(primitiveTypesUpper, "") {
|
||||
for _, opt := range []string{"", "OmitEmpty"} {
|
||||
opt := opt
|
||||
typ := typ
|
||||
|
||||
op := fmt.Sprintf(
|
||||
"StructField%s%s",
|
||||
opt,
|
||||
typ,
|
||||
)
|
||||
opTypes = append(opTypes, opType{
|
||||
Op: op,
|
||||
Code: "StructField",
|
||||
})
|
||||
}
|
||||
for _, opt := range []string{"", "OmitEmpty"} {
|
||||
opt := opt
|
||||
typ := typ
|
||||
|
||||
op := fmt.Sprintf(
|
||||
"StructEnd%s%s",
|
||||
opt,
|
||||
typ,
|
||||
)
|
||||
opTypes = append(opTypes, opType{
|
||||
Op: op,
|
||||
Code: "StructEnd",
|
||||
})
|
||||
}
|
||||
}
|
||||
var b bytes.Buffer
|
||||
if err := tmpl.Execute(&b, struct {
|
||||
CodeTypes []string
|
||||
OpTypes []opType
|
||||
OpLen int
|
||||
}{
|
||||
CodeTypes: codeTypes,
|
||||
OpTypes: opTypes,
|
||||
OpLen: len(opTypes),
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
path := filepath.Join(repoRoot(), "internal", "encoder", "optype.go")
|
||||
buf, err := format.Source(b.Bytes())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return os.WriteFile(path, buf, 0644)
|
||||
}
|
||||
|
||||
func generateVM() error {
|
||||
file, err := os.ReadFile("vm.go.tmpl")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
fset := token.NewFileSet()
|
||||
f, err := parser.ParseFile(fset, "", string(file), parser.ParseComments)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, pkg := range []string{"vm", "vm_indent", "vm_color", "vm_color_indent"} {
|
||||
f.Name.Name = pkg
|
||||
var buf bytes.Buffer
|
||||
printer.Fprint(&buf, fset, f)
|
||||
path := filepath.Join(repoRoot(), "internal", "encoder", pkg, "vm.go")
|
||||
source, err := format.Source(buf.Bytes())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := os.WriteFile(path, source, 0644); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func repoRoot() string {
|
||||
_, file, _, _ := runtime.Caller(0)
|
||||
relativePathFromRepoRoot := filepath.Join("internal", "cmd", "generator")
|
||||
return strings.TrimSuffix(filepath.Dir(file), relativePathFromRepoRoot)
|
||||
}
|
||||
|
||||
//go:generate go run main.go
|
||||
func main() {
|
||||
if err := generateVM(); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
if err := _main(); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
4859
internal/cmd/generator/vm.go.tmpl
Normal file
File diff suppressed because it is too large
41
internal/decoder/anonymous_field.go
Normal file
@@ -0,0 +1,41 @@
package decoder
|
||||
|
||||
import (
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type anonymousFieldDecoder struct {
|
||||
structType *runtime.Type
|
||||
offset uintptr
|
||||
dec Decoder
|
||||
}
|
||||
|
||||
func newAnonymousFieldDecoder(structType *runtime.Type, offset uintptr, dec Decoder) *anonymousFieldDecoder {
|
||||
return &anonymousFieldDecoder{
|
||||
structType: structType,
|
||||
offset: offset,
|
||||
dec: dec,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *anonymousFieldDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
if *(*unsafe.Pointer)(p) == nil {
|
||||
*(*unsafe.Pointer)(p) = unsafe_New(d.structType)
|
||||
}
|
||||
p = *(*unsafe.Pointer)(p)
|
||||
return d.dec.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+d.offset))
|
||||
}
|
||||
|
||||
func (d *anonymousFieldDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
if *(*unsafe.Pointer)(p) == nil {
|
||||
*(*unsafe.Pointer)(p) = unsafe_New(d.structType)
|
||||
}
|
||||
p = *(*unsafe.Pointer)(p)
|
||||
return d.dec.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+d.offset))
|
||||
}
|
||||
|
||||
func (d *anonymousFieldDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
return d.dec.DecodePath(ctx, cursor, depth)
|
||||
}
|
176
internal/decoder/array.go
Normal file
@@ -0,0 +1,176 @@
package decoder
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type arrayDecoder struct {
|
||||
elemType *runtime.Type
|
||||
size uintptr
|
||||
valueDecoder Decoder
|
||||
alen int
|
||||
structName string
|
||||
fieldName string
|
||||
zeroValue unsafe.Pointer
|
||||
}
|
||||
|
||||
func newArrayDecoder(dec Decoder, elemType *runtime.Type, alen int, structName, fieldName string) *arrayDecoder {
|
||||
// workaround to avoid checkptr errors. cannot use `*(*unsafe.Pointer)(unsafe_New(elemType))` directly.
|
||||
zeroValuePtr := unsafe_New(elemType)
|
||||
zeroValue := **(**unsafe.Pointer)(unsafe.Pointer(&zeroValuePtr))
|
||||
return &arrayDecoder{
|
||||
valueDecoder: dec,
|
||||
elemType: elemType,
|
||||
size: elemType.Size(),
|
||||
alen: alen,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
zeroValue: zeroValue,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *arrayDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
case '[':
|
||||
idx := 0
|
||||
s.cursor++
|
||||
if s.skipWhiteSpace() == ']' {
|
||||
for idx < d.alen {
|
||||
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||
idx++
|
||||
}
|
||||
s.cursor++
|
||||
return nil
|
||||
}
|
||||
for {
|
||||
if idx < d.alen {
|
||||
if err := d.valueDecoder.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+uintptr(idx)*d.size)); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
if err := s.skipValue(depth); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
idx++
|
||||
switch s.skipWhiteSpace() {
|
||||
case ']':
|
||||
for idx < d.alen {
|
||||
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||
idx++
|
||||
}
|
||||
s.cursor++
|
||||
return nil
|
||||
case ',':
|
||||
s.cursor++
|
||||
continue
|
||||
case nul:
|
||||
if s.read() {
|
||||
s.cursor++
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
goto ERROR
|
||||
}
|
||||
}
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
goto ERROR
|
||||
}
|
||||
s.cursor++
|
||||
}
|
||||
ERROR:
|
||||
return errors.ErrUnexpectedEndOfJSON("array", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *arrayDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return cursor, nil
|
||||
case '[':
|
||||
idx := 0
|
||||
cursor++
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == ']' {
|
||||
for idx < d.alen {
|
||||
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||
idx++
|
||||
}
|
||||
cursor++
|
||||
return cursor, nil
|
||||
}
|
||||
for {
|
||||
if idx < d.alen {
|
||||
c, err := d.valueDecoder.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+uintptr(idx)*d.size))
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor = c
|
||||
} else {
|
||||
c, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor = c
|
||||
}
|
||||
idx++
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
switch buf[cursor] {
|
||||
case ']':
|
||||
for idx < d.alen {
|
||||
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||
idx++
|
||||
}
|
||||
cursor++
|
||||
return cursor, nil
|
||||
case ',':
|
||||
cursor++
|
||||
continue
|
||||
default:
|
||||
return 0, errors.ErrInvalidCharacter(buf[cursor], "array", cursor)
|
||||
}
|
||||
}
|
||||
default:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("array", cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (d *arrayDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
return nil, 0, fmt.Errorf("json: array decoder does not support decode path")
|
||||
}
|
438
internal/decoder/assign.go
Normal file
@@ -0,0 +1,438 @@
package decoder
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
var (
|
||||
nilValue = reflect.ValueOf(nil)
|
||||
)
|
||||
|
||||
func AssignValue(src, dst reflect.Value) error {
|
||||
if dst.Type().Kind() != reflect.Ptr {
|
||||
return fmt.Errorf("invalid dst type. required pointer type: %T", dst.Type())
|
||||
}
|
||||
casted, err := castValue(dst.Elem().Type(), src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
dst.Elem().Set(casted)
|
||||
return nil
|
||||
}
|
||||
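A hedged sketch of how AssignValue behaves; since this package is internal, the call below is a hypothetical example usable only from inside the module (for instance from a test file in this package). It relies on the string-to-int path of castValue and castInt defined next:

// Hypothetical example, not part of this commit.
func ExampleAssignValue() {
	var n int
	src := reflect.ValueOf("42") // e.g. a decoded JSON string
	dst := reflect.ValueOf(&n)   // must be a pointer; AssignValue checks this
	if err := AssignValue(src, dst); err != nil {
		panic(err)
	}
	fmt.Println(n)
	// Output: 42
}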
|
||||
func castValue(t reflect.Type, v reflect.Value) (reflect.Value, error) {
|
||||
switch t.Kind() {
|
||||
case reflect.Int:
|
||||
vv, err := castInt(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(int(vv.Int())), nil
|
||||
case reflect.Int8:
|
||||
vv, err := castInt(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(int8(vv.Int())), nil
|
||||
case reflect.Int16:
|
||||
vv, err := castInt(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(int16(vv.Int())), nil
|
||||
case reflect.Int32:
|
||||
vv, err := castInt(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(int32(vv.Int())), nil
|
||||
case reflect.Int64:
|
||||
return castInt(v)
|
||||
case reflect.Uint:
|
||||
vv, err := castUint(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(uint(vv.Uint())), nil
|
||||
case reflect.Uint8:
|
||||
vv, err := castUint(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(uint8(vv.Uint())), nil
|
||||
case reflect.Uint16:
|
||||
vv, err := castUint(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(uint16(vv.Uint())), nil
|
||||
case reflect.Uint32:
|
||||
vv, err := castUint(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(uint32(vv.Uint())), nil
|
||||
case reflect.Uint64:
|
||||
return castUint(v)
|
||||
case reflect.Uintptr:
|
||||
vv, err := castUint(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(uintptr(vv.Uint())), nil
|
||||
case reflect.String:
|
||||
return castString(v)
|
||||
case reflect.Bool:
|
||||
return castBool(v)
|
||||
case reflect.Float32:
|
||||
vv, err := castFloat(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(float32(vv.Float())), nil
|
||||
case reflect.Float64:
|
||||
return castFloat(v)
|
||||
case reflect.Array:
|
||||
return castArray(t, v)
|
||||
case reflect.Slice:
|
||||
return castSlice(t, v)
|
||||
case reflect.Map:
|
||||
return castMap(t, v)
|
||||
case reflect.Struct:
|
||||
return castStruct(t, v)
|
||||
}
|
||||
return v, nil
|
||||
}
|
||||
|
||||
func castInt(v reflect.Value) (reflect.Value, error) {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return v, nil
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return reflect.ValueOf(int64(v.Uint())), nil
|
||||
case reflect.String:
|
||||
i64, err := strconv.ParseInt(v.String(), 10, 64)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(i64), nil
|
||||
case reflect.Bool:
|
||||
if v.Bool() {
|
||||
return reflect.ValueOf(int64(1)), nil
|
||||
}
|
||||
return reflect.ValueOf(int64(0)), nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return reflect.ValueOf(int64(v.Float())), nil
|
||||
case reflect.Array:
|
||||
if v.Len() > 0 {
|
||||
return castInt(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to int64 from empty array")
|
||||
case reflect.Slice:
|
||||
if v.Len() > 0 {
|
||||
return castInt(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to int64 from empty slice")
|
||||
case reflect.Interface:
|
||||
return castInt(reflect.ValueOf(v.Interface()))
|
||||
case reflect.Map:
|
||||
return nilValue, fmt.Errorf("failed to cast to int64 from map")
|
||||
case reflect.Struct:
|
||||
return nilValue, fmt.Errorf("failed to cast to int64 from struct")
|
||||
case reflect.Ptr:
|
||||
return castInt(v.Elem())
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to int64 from %s", v.Type().Kind())
|
||||
}
|
||||
|
||||
func castUint(v reflect.Value) (reflect.Value, error) {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return reflect.ValueOf(uint64(v.Int())), nil
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return v, nil
|
||||
case reflect.String:
|
||||
u64, err := strconv.ParseUint(v.String(), 10, 64)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(u64), nil
|
||||
case reflect.Bool:
|
||||
if v.Bool() {
|
||||
return reflect.ValueOf(uint64(1)), nil
|
||||
}
|
||||
return reflect.ValueOf(uint64(0)), nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return reflect.ValueOf(uint64(v.Float())), nil
|
||||
case reflect.Array:
|
||||
if v.Len() > 0 {
|
||||
return castUint(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to uint64 from empty array")
|
||||
case reflect.Slice:
|
||||
if v.Len() > 0 {
|
||||
return castUint(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to uint64 from empty slice")
|
||||
case reflect.Interface:
|
||||
return castUint(reflect.ValueOf(v.Interface()))
|
||||
case reflect.Map:
|
||||
return nilValue, fmt.Errorf("failed to cast to uint64 from map")
|
||||
case reflect.Struct:
|
||||
return nilValue, fmt.Errorf("failed to cast to uint64 from struct")
|
||||
case reflect.Ptr:
|
||||
return castUint(v.Elem())
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to uint64 from %s", v.Type().Kind())
|
||||
}
|
||||
|
||||
func castString(v reflect.Value) (reflect.Value, error) {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return reflect.ValueOf(fmt.Sprint(v.Int())), nil
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return reflect.ValueOf(fmt.Sprint(v.Uint())), nil
|
||||
case reflect.String:
|
||||
return v, nil
|
||||
case reflect.Bool:
|
||||
if v.Bool() {
|
||||
return reflect.ValueOf("true"), nil
|
||||
}
|
||||
return reflect.ValueOf("false"), nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return reflect.ValueOf(fmt.Sprint(v.Float())), nil
|
||||
case reflect.Array:
|
||||
if v.Len() > 0 {
|
||||
return castString(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to string from empty array")
|
||||
case reflect.Slice:
|
||||
if v.Len() > 0 {
|
||||
return castString(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to string from empty slice")
|
||||
case reflect.Interface:
|
||||
return castString(reflect.ValueOf(v.Interface()))
|
||||
case reflect.Map:
|
||||
return nilValue, fmt.Errorf("failed to cast to string from map")
|
||||
case reflect.Struct:
|
||||
return nilValue, fmt.Errorf("failed to cast to string from struct")
|
||||
case reflect.Ptr:
|
||||
return castString(v.Elem())
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to string from %s", v.Type().Kind())
|
||||
}
|
||||
|
||||
func castBool(v reflect.Value) (reflect.Value, error) {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
switch v.Int() {
|
||||
case 0:
|
||||
return reflect.ValueOf(false), nil
|
||||
case 1:
|
||||
return reflect.ValueOf(true), nil
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to bool from %d", v.Int())
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
switch v.Uint() {
|
||||
case 0:
|
||||
return reflect.ValueOf(false), nil
|
||||
case 1:
|
||||
return reflect.ValueOf(true), nil
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to bool from %d", v.Uint())
|
||||
case reflect.String:
|
||||
b, err := strconv.ParseBool(v.String())
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(b), nil
|
||||
case reflect.Bool:
|
||||
return v, nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
switch v.Float() {
|
||||
case 0:
|
||||
return reflect.ValueOf(false), nil
|
||||
case 1:
|
||||
return reflect.ValueOf(true), nil
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to bool from %f", v.Float())
|
||||
case reflect.Array:
|
||||
if v.Len() > 0 {
|
||||
return castBool(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to string from empty array")
|
||||
case reflect.Slice:
|
||||
if v.Len() > 0 {
|
||||
return castBool(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to string from empty slice")
|
||||
case reflect.Interface:
|
||||
return castBool(reflect.ValueOf(v.Interface()))
|
||||
case reflect.Map:
|
||||
return nilValue, fmt.Errorf("failed to cast to string from map")
|
||||
case reflect.Struct:
|
||||
return nilValue, fmt.Errorf("failed to cast to string from struct")
|
||||
case reflect.Ptr:
|
||||
return castBool(v.Elem())
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to bool from %s", v.Type().Kind())
|
||||
}
|
||||
|
||||
func castFloat(v reflect.Value) (reflect.Value, error) {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return reflect.ValueOf(float64(v.Int())), nil
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return reflect.ValueOf(float64(v.Uint())), nil
|
||||
case reflect.String:
|
||||
f64, err := strconv.ParseFloat(v.String(), 64)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(f64), nil
|
||||
case reflect.Bool:
|
||||
if v.Bool() {
|
||||
return reflect.ValueOf(float64(1)), nil
|
||||
}
|
||||
return reflect.ValueOf(float64(0)), nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return v, nil
|
||||
case reflect.Array:
|
||||
if v.Len() > 0 {
|
||||
return castFloat(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to float64 from empty array")
|
||||
case reflect.Slice:
|
||||
if v.Len() > 0 {
|
||||
return castFloat(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to float64 from empty slice")
|
||||
case reflect.Interface:
|
||||
return castFloat(reflect.ValueOf(v.Interface()))
|
||||
case reflect.Map:
|
||||
return nilValue, fmt.Errorf("failed to cast to float64 from map")
|
||||
case reflect.Struct:
|
||||
return nilValue, fmt.Errorf("failed to cast to float64 from struct")
|
||||
case reflect.Ptr:
|
||||
return castFloat(v.Elem())
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to float64 from %s", v.Type().Kind())
|
||||
}
|
||||
|
||||
func castArray(t reflect.Type, v reflect.Value) (reflect.Value, error) {
|
||||
kind := v.Type().Kind()
|
||||
if kind == reflect.Interface {
|
||||
return castArray(t, reflect.ValueOf(v.Interface()))
|
||||
}
|
||||
if kind != reflect.Slice && kind != reflect.Array {
|
||||
return nilValue, fmt.Errorf("failed to cast to array from %s", kind)
|
||||
}
|
||||
if t.Elem() == v.Type().Elem() {
|
||||
return v, nil
|
||||
}
|
||||
if t.Len() != v.Len() {
|
||||
return nilValue, fmt.Errorf("failed to cast [%d]array from slice of %d length", t.Len(), v.Len())
|
||||
}
|
||||
ret := reflect.New(t).Elem()
|
||||
for i := 0; i < v.Len(); i++ {
|
||||
vv, err := castValue(t.Elem(), v.Index(i))
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
ret.Index(i).Set(vv)
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func castSlice(t reflect.Type, v reflect.Value) (reflect.Value, error) {
|
||||
kind := v.Type().Kind()
|
||||
if kind == reflect.Interface {
|
||||
return castSlice(t, reflect.ValueOf(v.Interface()))
|
||||
}
|
||||
if kind != reflect.Slice && kind != reflect.Array {
|
||||
return nilValue, fmt.Errorf("failed to cast to slice from %s", kind)
|
||||
}
|
||||
if t.Elem() == v.Type().Elem() {
|
||||
return v, nil
|
||||
}
|
||||
ret := reflect.MakeSlice(t, v.Len(), v.Len())
|
||||
for i := 0; i < v.Len(); i++ {
|
||||
vv, err := castValue(t.Elem(), v.Index(i))
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
ret.Index(i).Set(vv)
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func castMap(t reflect.Type, v reflect.Value) (reflect.Value, error) {
|
||||
ret := reflect.MakeMap(t)
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Map:
|
||||
iter := v.MapRange()
|
||||
for iter.Next() {
|
||||
key, err := castValue(t.Key(), iter.Key())
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
value, err := castValue(t.Elem(), iter.Value())
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
ret.SetMapIndex(key, value)
|
||||
}
|
||||
return ret, nil
|
||||
case reflect.Interface:
|
||||
return castMap(t, reflect.ValueOf(v.Interface()))
|
||||
case reflect.Slice:
|
||||
if v.Len() > 0 {
|
||||
return castMap(t, v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to map from empty slice")
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to map from %s", v.Type().Kind())
|
||||
}
|
||||
|
||||
func castStruct(t reflect.Type, v reflect.Value) (reflect.Value, error) {
|
||||
ret := reflect.New(t).Elem()
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Map:
|
||||
iter := v.MapRange()
|
||||
for iter.Next() {
|
||||
key := iter.Key()
|
||||
k, err := castString(key)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
fieldName := k.String()
|
||||
field, ok := t.FieldByName(fieldName)
|
||||
if ok {
|
||||
value, err := castValue(field.Type, iter.Value())
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
ret.FieldByName(fieldName).Set(value)
|
||||
}
|
||||
}
|
||||
return ret, nil
|
||||
case reflect.Struct:
|
||||
for i := 0; i < v.Type().NumField(); i++ {
|
||||
name := v.Type().Field(i).Name
|
||||
ret.FieldByName(name).Set(v.FieldByName(name))
|
||||
}
|
||||
return ret, nil
|
||||
case reflect.Interface:
|
||||
return castStruct(t, reflect.ValueOf(v.Interface()))
|
||||
case reflect.Slice:
|
||||
if v.Len() > 0 {
|
||||
return castStruct(t, v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to struct from empty slice")
|
||||
default:
|
||||
return nilValue, fmt.Errorf("failed to cast to struct from %s", v.Type().Kind())
|
||||
}
|
||||
}
|
83
internal/decoder/bool.go
Normal file
83
internal/decoder/bool.go
Normal file
|
@ -0,0 +1,83 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
type boolDecoder struct {
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newBoolDecoder(structName, fieldName string) *boolDecoder {
|
||||
return &boolDecoder{structName: structName, fieldName: fieldName}
|
||||
}
|
||||
|
||||
func (d *boolDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
c := s.skipWhiteSpace()
|
||||
for {
|
||||
switch c {
|
||||
case 't':
|
||||
if err := trueBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
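// p points at the destination bool; the double dereference writes the decoded value through it
|
||||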
**(**bool)(unsafe.Pointer(&p)) = true
|
||||
return nil
|
||||
case 'f':
|
||||
if err := falseBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
**(**bool)(unsafe.Pointer(&p)) = false
|
||||
return nil
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
c = s.char()
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
}
|
||||
break
|
||||
}
|
||||
ERROR:
|
||||
return errors.ErrUnexpectedEndOfJSON("bool", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *boolDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
switch buf[cursor] {
|
||||
case 't':
|
||||
if err := validateTrue(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
**(**bool)(unsafe.Pointer(&p)) = true
|
||||
return cursor, nil
|
||||
case 'f':
|
||||
if err := validateFalse(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 5
|
||||
**(**bool)(unsafe.Pointer(&p)) = false
|
||||
return cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return cursor, nil
|
||||
}
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("bool", cursor)
|
||||
}
|
||||
|
||||
func (d *boolDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
return nil, 0, fmt.Errorf("json: bool decoder does not support decode path")
|
||||
}
|
118
internal/decoder/bytes.go
Normal file
118
internal/decoder/bytes.go
Normal file
|
@ -0,0 +1,118 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type bytesDecoder struct {
|
||||
typ *runtime.Type
|
||||
sliceDecoder Decoder
|
||||
stringDecoder *stringDecoder
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func byteUnmarshalerSliceDecoder(typ *runtime.Type, structName string, fieldName string) Decoder {
|
||||
var unmarshalDecoder Decoder
|
||||
switch {
|
||||
case runtime.PtrTo(typ).Implements(unmarshalJSONType):
|
||||
unmarshalDecoder = newUnmarshalJSONDecoder(runtime.PtrTo(typ), structName, fieldName)
|
||||
case runtime.PtrTo(typ).Implements(unmarshalTextType):
|
||||
unmarshalDecoder = newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName)
|
||||
default:
|
||||
unmarshalDecoder, _ = compileUint8(typ, structName, fieldName)
|
||||
}
|
||||
return newSliceDecoder(unmarshalDecoder, typ, 1, structName, fieldName)
|
||||
}
|
||||
|
||||
func newBytesDecoder(typ *runtime.Type, structName string, fieldName string) *bytesDecoder {
|
||||
return &bytesDecoder{
|
||||
typ: typ,
|
||||
sliceDecoder: byteUnmarshalerSliceDecoder(typ, structName, fieldName),
|
||||
stringDecoder: newStringDecoder(structName, fieldName),
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *bytesDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
bytes, err := d.decodeStreamBinary(s, depth, p)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if bytes == nil {
|
||||
s.reset()
|
||||
return nil
|
||||
}
|
||||
decodedLen := base64.StdEncoding.DecodedLen(len(bytes))
|
||||
buf := make([]byte, decodedLen)
|
||||
n, err := base64.StdEncoding.Decode(buf, bytes)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*(*[]byte)(p) = buf[:n]
|
||||
s.reset()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *bytesDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
bytes, c, err := d.decodeBinary(ctx, cursor, depth, p)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if bytes == nil {
|
||||
return c, nil
|
||||
}
|
||||
cursor = c
|
||||
decodedLen := base64.StdEncoding.DecodedLen(len(bytes))
|
||||
b := make([]byte, decodedLen)
|
||||
n, err := base64.StdEncoding.Decode(b, bytes)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
*(*[]byte)(p) = b[:n]
|
||||
return cursor, nil
|
||||
}
|
||||
|
||||
func (d *bytesDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
return nil, 0, fmt.Errorf("json: []byte decoder does not support decode path")
|
||||
}
|
||||
|
||||
func (d *bytesDecoder) decodeStreamBinary(s *Stream, depth int64, p unsafe.Pointer) ([]byte, error) {
|
||||
c := s.skipWhiteSpace()
|
||||
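// a leading '[' means the value is a JSON array of byte values handled by the slice decoder; otherwise it is read as a base64-encoded string
|
||||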
if c == '[' {
|
||||
if d.sliceDecoder == nil {
|
||||
return nil, &errors.UnmarshalTypeError{
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
}
|
||||
err := d.sliceDecoder.DecodeStream(s, depth, p)
|
||||
return nil, err
|
||||
}
|
||||
return d.stringDecoder.decodeStreamByte(s)
|
||||
}
|
||||
|
||||
func (d *bytesDecoder) decodeBinary(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) ([]byte, int64, error) {
|
||||
buf := ctx.Buf
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == '[' {
|
||||
if d.sliceDecoder == nil {
|
||||
return nil, 0, &errors.UnmarshalTypeError{
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: cursor,
|
||||
}
|
||||
}
|
||||
c, err := d.sliceDecoder.Decode(ctx, cursor, depth, p)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
return nil, c, nil
|
||||
}
|
||||
return d.stringDecoder.decodeByte(buf, cursor)
|
||||
}
|
493
internal/decoder/compile.go
Normal file
493
internal/decoder/compile.go
Normal file
|
@ -0,0 +1,493 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"unicode"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
var (
|
||||
jsonNumberType = reflect.TypeOf(json.Number(""))
|
||||
typeAddr *runtime.TypeAddr
|
||||
cachedDecoderMap unsafe.Pointer // map[uintptr]Decoder
|
||||
cachedDecoder []Decoder
|
||||
initOnce sync.Once
|
||||
)
|
||||
|
||||
func initDecoder() {
|
||||
initOnce.Do(func() {
|
||||
typeAddr = runtime.AnalyzeTypeAddr()
|
||||
if typeAddr == nil {
|
||||
typeAddr = &runtime.TypeAddr{}
|
||||
}
|
||||
cachedDecoder = make([]Decoder, typeAddr.AddrRange>>typeAddr.AddrShift+1)
|
||||
})
|
||||
}
|
||||
|
||||
func loadDecoderMap() map[uintptr]Decoder {
|
||||
initDecoder()
|
||||
p := atomic.LoadPointer(&cachedDecoderMap)
|
||||
return *(*map[uintptr]Decoder)(unsafe.Pointer(&p))
|
||||
}
|
||||
|
||||
func storeDecoder(typ uintptr, dec Decoder, m map[uintptr]Decoder) {
|
||||
initDecoder()
|
||||
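// copy-on-write: build a fresh map and publish it atomically so concurrent readers never observe a partial update
|
||||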
newDecoderMap := make(map[uintptr]Decoder, len(m)+1)
|
||||
newDecoderMap[typ] = dec
|
||||
|
||||
for k, v := range m {
|
||||
newDecoderMap[k] = v
|
||||
}
|
||||
|
||||
atomic.StorePointer(&cachedDecoderMap, *(*unsafe.Pointer)(unsafe.Pointer(&newDecoderMap)))
|
||||
}
|
||||
|
||||
func compileToGetDecoderSlowPath(typeptr uintptr, typ *runtime.Type) (Decoder, error) {
|
||||
decoderMap := loadDecoderMap()
|
||||
if dec, exists := decoderMap[typeptr]; exists {
|
||||
return dec, nil
|
||||
}
|
||||
|
||||
dec, err := compileHead(typ, map[uintptr]Decoder{})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
storeDecoder(typeptr, dec, decoderMap)
|
||||
return dec, nil
|
||||
}
|
||||
|
||||
func compileHead(typ *runtime.Type, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
switch {
|
||||
case implementsUnmarshalJSONType(runtime.PtrTo(typ)):
|
||||
return newUnmarshalJSONDecoder(runtime.PtrTo(typ), "", ""), nil
|
||||
case runtime.PtrTo(typ).Implements(unmarshalTextType):
|
||||
return newUnmarshalTextDecoder(runtime.PtrTo(typ), "", ""), nil
|
||||
}
|
||||
return compile(typ.Elem(), "", "", structTypeToDecoder)
|
||||
}
|
||||
|
||||
func compile(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
switch {
|
||||
case implementsUnmarshalJSONType(runtime.PtrTo(typ)):
|
||||
return newUnmarshalJSONDecoder(runtime.PtrTo(typ), structName, fieldName), nil
|
||||
case runtime.PtrTo(typ).Implements(unmarshalTextType):
|
||||
return newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName), nil
|
||||
}
|
||||
|
||||
switch typ.Kind() {
|
||||
case reflect.Ptr:
|
||||
return compilePtr(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Struct:
|
||||
return compileStruct(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Slice:
|
||||
elem := typ.Elem()
|
||||
if elem.Kind() == reflect.Uint8 {
|
||||
return compileBytes(elem, structName, fieldName)
|
||||
}
|
||||
return compileSlice(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Array:
|
||||
return compileArray(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Map:
|
||||
return compileMap(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Interface:
|
||||
return compileInterface(typ, structName, fieldName)
|
||||
case reflect.Uintptr:
|
||||
return compileUint(typ, structName, fieldName)
|
||||
case reflect.Int:
|
||||
return compileInt(typ, structName, fieldName)
|
||||
case reflect.Int8:
|
||||
return compileInt8(typ, structName, fieldName)
|
||||
case reflect.Int16:
|
||||
return compileInt16(typ, structName, fieldName)
|
||||
case reflect.Int32:
|
||||
return compileInt32(typ, structName, fieldName)
|
||||
case reflect.Int64:
|
||||
return compileInt64(typ, structName, fieldName)
|
||||
case reflect.Uint:
|
||||
return compileUint(typ, structName, fieldName)
|
||||
case reflect.Uint8:
|
||||
return compileUint8(typ, structName, fieldName)
|
||||
case reflect.Uint16:
|
||||
return compileUint16(typ, structName, fieldName)
|
||||
case reflect.Uint32:
|
||||
return compileUint32(typ, structName, fieldName)
|
||||
case reflect.Uint64:
|
||||
return compileUint64(typ, structName, fieldName)
|
||||
case reflect.String:
|
||||
return compileString(typ, structName, fieldName)
|
||||
case reflect.Bool:
|
||||
return compileBool(structName, fieldName)
|
||||
case reflect.Float32:
|
||||
return compileFloat32(structName, fieldName)
|
||||
case reflect.Float64:
|
||||
return compileFloat64(structName, fieldName)
|
||||
case reflect.Func:
|
||||
return compileFunc(typ, structName, fieldName)
|
||||
}
|
||||
return newInvalidDecoder(typ, structName, fieldName), nil
|
||||
}
|
||||
|
||||
func isStringTagSupportedType(typ *runtime.Type) bool {
|
||||
switch {
|
||||
case implementsUnmarshalJSONType(runtime.PtrTo(typ)):
|
||||
return false
|
||||
case runtime.PtrTo(typ).Implements(unmarshalTextType):
|
||||
return false
|
||||
}
|
||||
switch typ.Kind() {
|
||||
case reflect.Map:
|
||||
return false
|
||||
case reflect.Slice:
|
||||
return false
|
||||
case reflect.Array:
|
||||
return false
|
||||
case reflect.Struct:
|
||||
return false
|
||||
case reflect.Interface:
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func compileMapKey(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
if runtime.PtrTo(typ).Implements(unmarshalTextType) {
|
||||
return newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName), nil
|
||||
}
|
||||
if typ.Kind() == reflect.String {
|
||||
return newStringDecoder(structName, fieldName), nil
|
||||
}
|
||||
dec, err := compile(typ, structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
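// unwrap pointer decoders and wrap primitive decoders so map keys can be parsed from JSON object key strings
|
||||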
for {
|
||||
switch t := dec.(type) {
|
||||
case *stringDecoder, *interfaceDecoder:
|
||||
return dec, nil
|
||||
case *boolDecoder, *intDecoder, *uintDecoder, *numberDecoder:
|
||||
return newWrappedStringDecoder(typ, dec, structName, fieldName), nil
|
||||
case *ptrDecoder:
|
||||
dec = t.dec
|
||||
default:
|
||||
return newInvalidDecoder(typ, structName, fieldName), nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func compilePtr(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
dec, err := compile(typ.Elem(), structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newPtrDecoder(dec, typ.Elem(), structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileInt(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int)(p) = int(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileInt8(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int8)(p) = int8(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileInt16(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int16)(p) = int16(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileInt32(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int32)(p) = int32(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileInt64(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int64)(p) = v
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint)(p) = uint(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint8(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint8)(p) = uint8(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint16(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint16)(p) = uint16(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint32(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint32)(p) = uint32(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint64(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint64)(p) = v
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileFloat32(structName, fieldName string) (Decoder, error) {
|
||||
return newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
|
||||
*(*float32)(p) = float32(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileFloat64(structName, fieldName string) (Decoder, error) {
|
||||
return newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
|
||||
*(*float64)(p) = v
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileString(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
if typ == runtime.Type2RType(jsonNumberType) {
|
||||
return newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) {
|
||||
*(*json.Number)(p) = v
|
||||
}), nil
|
||||
}
|
||||
return newStringDecoder(structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileBool(structName, fieldName string) (Decoder, error) {
|
||||
return newBoolDecoder(structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileBytes(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newBytesDecoder(typ, structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileSlice(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
elem := typ.Elem()
|
||||
decoder, err := compile(elem, structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newSliceDecoder(decoder, elem, elem.Size(), structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileArray(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
elem := typ.Elem()
|
||||
decoder, err := compile(elem, structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newArrayDecoder(decoder, elem, typ.Len(), structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileMap(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
keyDec, err := compileMapKey(typ.Key(), structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
valueDec, err := compile(typ.Elem(), structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newMapDecoder(typ, typ.Key(), keyDec, typ.Elem(), valueDec, structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileInterface(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newInterfaceDecoder(typ, structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileFunc(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newFuncDecoder(typ, structName, fieldName), nil
|
||||
}
|
||||
|
||||
func typeToStructTags(typ *runtime.Type) runtime.StructTags {
|
||||
tags := runtime.StructTags{}
|
||||
fieldNum := typ.NumField()
|
||||
for i := 0; i < fieldNum; i++ {
|
||||
field := typ.Field(i)
|
||||
if runtime.IsIgnoredStructField(field) {
|
||||
continue
|
||||
}
|
||||
tags = append(tags, runtime.StructTagFromField(field))
|
||||
}
|
||||
return tags
|
||||
}
|
||||
|
||||
func compileStruct(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
fieldNum := typ.NumField()
|
||||
fieldMap := map[string]*structFieldSet{}
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
if dec, exists := structTypeToDecoder[typeptr]; exists {
|
||||
return dec, nil
|
||||
}
|
||||
structDec := newStructDecoder(structName, fieldName, fieldMap)
|
||||
structTypeToDecoder[typeptr] = structDec
|
||||
structName = typ.Name()
|
||||
tags := typeToStructTags(typ)
|
||||
allFields := []*structFieldSet{}
|
||||
for i := 0; i < fieldNum; i++ {
|
||||
field := typ.Field(i)
|
||||
if runtime.IsIgnoredStructField(field) {
|
||||
continue
|
||||
}
|
||||
isUnexportedField := unicode.IsLower([]rune(field.Name)[0])
|
||||
tag := runtime.StructTagFromField(field)
|
||||
dec, err := compile(runtime.Type2RType(field.Type), structName, field.Name, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
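// untagged embedded structs are flattened: their fields are promoted unless the outer struct already declares the same key
|
||||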
if field.Anonymous && !tag.IsTaggedKey {
|
||||
if stDec, ok := dec.(*structDecoder); ok {
|
||||
if runtime.Type2RType(field.Type) == typ {
|
||||
// recursive definition
|
||||
continue
|
||||
}
|
||||
for k, v := range stDec.fieldMap {
|
||||
if tags.ExistsKey(k) {
|
||||
continue
|
||||
}
|
||||
fieldSet := &structFieldSet{
|
||||
dec: v.dec,
|
||||
offset: field.Offset + v.offset,
|
||||
isTaggedKey: v.isTaggedKey,
|
||||
key: k,
|
||||
keyLen: int64(len(k)),
|
||||
}
|
||||
allFields = append(allFields, fieldSet)
|
||||
}
|
||||
} else if pdec, ok := dec.(*ptrDecoder); ok {
|
||||
contentDec := pdec.contentDecoder()
|
||||
if pdec.typ == typ {
|
||||
// recursive definition
|
||||
continue
|
||||
}
|
||||
var fieldSetErr error
|
||||
if isUnexportedField {
|
||||
fieldSetErr = fmt.Errorf(
|
||||
"json: cannot set embedded pointer to unexported struct: %v",
|
||||
field.Type.Elem(),
|
||||
)
|
||||
}
|
||||
if dec, ok := contentDec.(*structDecoder); ok {
|
||||
for k, v := range dec.fieldMap {
|
||||
if tags.ExistsKey(k) {
|
||||
continue
|
||||
}
|
||||
fieldSet := &structFieldSet{
|
||||
dec: newAnonymousFieldDecoder(pdec.typ, v.offset, v.dec),
|
||||
offset: field.Offset,
|
||||
isTaggedKey: v.isTaggedKey,
|
||||
key: k,
|
||||
keyLen: int64(len(k)),
|
||||
err: fieldSetErr,
|
||||
}
|
||||
allFields = append(allFields, fieldSet)
|
||||
}
|
||||
} else {
|
||||
fieldSet := &structFieldSet{
|
||||
dec: pdec,
|
||||
offset: field.Offset,
|
||||
isTaggedKey: tag.IsTaggedKey,
|
||||
key: field.Name,
|
||||
keyLen: int64(len(field.Name)),
|
||||
}
|
||||
allFields = append(allFields, fieldSet)
|
||||
}
|
||||
} else {
|
||||
fieldSet := &structFieldSet{
|
||||
dec: dec,
|
||||
offset: field.Offset,
|
||||
isTaggedKey: tag.IsTaggedKey,
|
||||
key: field.Name,
|
||||
keyLen: int64(len(field.Name)),
|
||||
}
|
||||
allFields = append(allFields, fieldSet)
|
||||
}
|
||||
} else {
|
||||
if tag.IsString && isStringTagSupportedType(runtime.Type2RType(field.Type)) {
|
||||
dec = newWrappedStringDecoder(runtime.Type2RType(field.Type), dec, structName, field.Name)
|
||||
}
|
||||
var key string
|
||||
if tag.Key != "" {
|
||||
key = tag.Key
|
||||
} else {
|
||||
key = field.Name
|
||||
}
|
||||
fieldSet := &structFieldSet{
|
||||
dec: dec,
|
||||
offset: field.Offset,
|
||||
isTaggedKey: tag.IsTaggedKey,
|
||||
key: key,
|
||||
keyLen: int64(len(key)),
|
||||
}
|
||||
allFields = append(allFields, fieldSet)
|
||||
}
|
||||
}
|
||||
for _, set := range filterDuplicatedFields(allFields) {
|
||||
fieldMap[set.key] = set
|
||||
lower := strings.ToLower(set.key)
|
||||
if _, exists := fieldMap[lower]; !exists {
|
||||
// the first field registered for this lowercase key wins
|
||||
fieldMap[lower] = set
|
||||
}
|
||||
}
|
||||
delete(structTypeToDecoder, typeptr)
|
||||
structDec.tryOptimize()
|
||||
return structDec, nil
|
||||
}
|
||||
|
||||
func filterDuplicatedFields(allFields []*structFieldSet) []*structFieldSet {
|
||||
fieldMap := map[string][]*structFieldSet{}
|
||||
for _, field := range allFields {
|
||||
fieldMap[field.key] = append(fieldMap[field.key], field)
|
||||
}
|
||||
duplicatedFieldMap := map[string]struct{}{}
|
||||
for k, sets := range fieldMap {
|
||||
sets = filterFieldSets(sets)
|
||||
if len(sets) != 1 {
|
||||
duplicatedFieldMap[k] = struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
filtered := make([]*structFieldSet, 0, len(allFields))
|
||||
for _, field := range allFields {
|
||||
if _, exists := duplicatedFieldMap[field.key]; exists {
|
||||
continue
|
||||
}
|
||||
filtered = append(filtered, field)
|
||||
}
|
||||
return filtered
|
||||
}
|
||||
|
||||
func filterFieldSets(sets []*structFieldSet) []*structFieldSet {
|
||||
if len(sets) == 1 {
|
||||
return sets
|
||||
}
|
||||
filtered := make([]*structFieldSet, 0, len(sets))
|
||||
for _, set := range sets {
|
||||
if set.isTaggedKey {
|
||||
filtered = append(filtered, set)
|
||||
}
|
||||
}
|
||||
return filtered
|
||||
}
|
||||
|
||||
func implementsUnmarshalJSONType(typ *runtime.Type) bool {
|
||||
return typ.Implements(unmarshalJSONType) || typ.Implements(unmarshalJSONContextType)
|
||||
}
|
30
internal/decoder/compile_norace.go
Normal file
30
internal/decoder/compile_norace.go
Normal file
|
@ -0,0 +1,30 @@
|
|||
//go:build !race
|
||||
// +build !race
|
||||
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
func CompileToGetDecoder(typ *runtime.Type) (Decoder, error) {
|
||||
initDecoder()
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
if typeptr > typeAddr.MaxTypeAddr {
|
||||
return compileToGetDecoderSlowPath(typeptr, typ)
|
||||
}
|
||||
|
||||
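// fast path: type addresses fall within a known range, so the compiled decoder is cached in a slice indexed by the type pointer's shifted offset from the base address
|
||||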
index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift
|
||||
if dec := cachedDecoder[index]; dec != nil {
|
||||
return dec, nil
|
||||
}
|
||||
|
||||
dec, err := compileHead(typ, map[uintptr]Decoder{})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cachedDecoder[index] = dec
|
||||
return dec, nil
|
||||
}
|
38
internal/decoder/compile_race.go
Normal file
38
internal/decoder/compile_race.go
Normal file
|
@ -0,0 +1,38 @@
|
|||
//go:build race
|
||||
// +build race
|
||||
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
var decMu sync.RWMutex
|
||||
|
||||
func CompileToGetDecoder(typ *runtime.Type) (Decoder, error) {
|
||||
initDecoder()
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
if typeptr > typeAddr.MaxTypeAddr {
|
||||
return compileToGetDecoderSlowPath(typeptr, typ)
|
||||
}
|
||||
|
||||
index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift
|
||||
decMu.RLock()
|
||||
if dec := cachedDecoder[index]; dec != nil {
|
||||
decMu.RUnlock()
|
||||
return dec, nil
|
||||
}
|
||||
decMu.RUnlock()
|
||||
|
||||
dec, err := compileHead(typ, map[uintptr]Decoder{})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
decMu.Lock()
|
||||
cachedDecoder[index] = dec
|
||||
decMu.Unlock()
|
||||
return dec, nil
|
||||
}
|
254
internal/decoder/context.go
Normal file
254
internal/decoder/context.go
Normal file
|
@ -0,0 +1,254 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
type RuntimeContext struct {
|
||||
Buf []byte
|
||||
Option *Option
|
||||
}
|
||||
|
||||
var (
|
||||
runtimeContextPool = sync.Pool{
|
||||
New: func() interface{} {
|
||||
return &RuntimeContext{
|
||||
Option: &Option{},
|
||||
}
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
func TakeRuntimeContext() *RuntimeContext {
|
||||
return runtimeContextPool.Get().(*RuntimeContext)
|
||||
}
|
||||
|
||||
func ReleaseRuntimeContext(ctx *RuntimeContext) {
|
||||
runtimeContextPool.Put(ctx)
|
||||
}
|
||||
|
||||
var (
|
||||
isWhiteSpace = [256]bool{}
|
||||
)
|
||||
|
||||
func init() {
|
||||
isWhiteSpace[' '] = true
|
||||
isWhiteSpace['\n'] = true
|
||||
isWhiteSpace['\t'] = true
|
||||
isWhiteSpace['\r'] = true
|
||||
}
|
||||
|
||||
func char(ptr unsafe.Pointer, offset int64) byte {
|
||||
return *(*byte)(unsafe.Pointer(uintptr(ptr) + uintptr(offset)))
|
||||
}
|
||||
|
||||
func skipWhiteSpace(buf []byte, cursor int64) int64 {
|
||||
for isWhiteSpace[buf[cursor]] {
|
||||
cursor++
|
||||
}
|
||||
return cursor
|
||||
}
|
||||
|
||||
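// skipObject advances the cursor past an already-opened JSON object, balancing nested braces and skipping string contents (escape sequences included)
|
||||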
func skipObject(buf []byte, cursor, depth int64) (int64, error) {
|
||||
braceCount := 1
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case '{':
|
||||
braceCount++
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
case '}':
|
||||
depth--
|
||||
braceCount--
|
||||
if braceCount == 0 {
|
||||
return cursor + 1, nil
|
||||
}
|
||||
case '[':
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
case ']':
|
||||
depth--
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch buf[cursor] {
|
||||
case '\\':
|
||||
cursor++
|
||||
if buf[cursor] == nul {
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
case '"':
|
||||
goto SWITCH_OUT
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
}
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("object of object", cursor)
|
||||
}
|
||||
SWITCH_OUT:
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func skipArray(buf []byte, cursor, depth int64) (int64, error) {
|
||||
bracketCount := 1
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case '[':
|
||||
bracketCount++
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
case ']':
|
||||
bracketCount--
|
||||
depth--
|
||||
if bracketCount == 0 {
|
||||
return cursor + 1, nil
|
||||
}
|
||||
case '{':
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
case '}':
|
||||
depth--
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch buf[cursor] {
|
||||
case '\\':
|
||||
cursor++
|
||||
if buf[cursor] == nul {
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
case '"':
|
||||
goto SWITCH_OUT
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
}
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("array of object", cursor)
|
||||
}
|
||||
SWITCH_OUT:
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func skipValue(buf []byte, cursor, depth int64) (int64, error) {
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\t', '\n', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case '{':
|
||||
return skipObject(buf, cursor+1, depth+1)
|
||||
case '[':
|
||||
return skipArray(buf, cursor+1, depth+1)
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch buf[cursor] {
|
||||
case '\\':
|
||||
cursor++
|
||||
if buf[cursor] == nul {
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
case '"':
|
||||
return cursor + 1, nil
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
for {
|
||||
cursor++
|
||||
if floatTable[buf[cursor]] {
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
return cursor, nil
|
||||
case 't':
|
||||
if err := validateTrue(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return cursor, nil
|
||||
case 'f':
|
||||
if err := validateFalse(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 5
|
||||
return cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return cursor, nil
|
||||
default:
|
||||
return cursor, errors.ErrUnexpectedEndOfJSON("null", cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func validateTrue(buf []byte, cursor int64) error {
|
||||
if cursor+3 >= int64(len(buf)) {
|
||||
return errors.ErrUnexpectedEndOfJSON("true", cursor)
|
||||
}
|
||||
if buf[cursor+1] != 'r' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+1], "true", cursor)
|
||||
}
|
||||
if buf[cursor+2] != 'u' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+2], "true", cursor)
|
||||
}
|
||||
if buf[cursor+3] != 'e' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+3], "true", cursor)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateFalse(buf []byte, cursor int64) error {
|
||||
if cursor+4 >= int64(len(buf)) {
|
||||
return errors.ErrUnexpectedEndOfJSON("false", cursor)
|
||||
}
|
||||
if buf[cursor+1] != 'a' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+1], "false", cursor)
|
||||
}
|
||||
if buf[cursor+2] != 'l' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+2], "false", cursor)
|
||||
}
|
||||
if buf[cursor+3] != 's' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+3], "false", cursor)
|
||||
}
|
||||
if buf[cursor+4] != 'e' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+4], "false", cursor)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateNull(buf []byte, cursor int64) error {
|
||||
if cursor+3 >= int64(len(buf)) {
|
||||
return errors.ErrUnexpectedEndOfJSON("null", cursor)
|
||||
}
|
||||
if buf[cursor+1] != 'u' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+1], "null", cursor)
|
||||
}
|
||||
if buf[cursor+2] != 'l' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+2], "null", cursor)
|
||||
}
|
||||
if buf[cursor+3] != 'l' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+3], "null", cursor)
|
||||
}
|
||||
return nil
|
||||
}
|
170
internal/decoder/float.go
Normal file
170
internal/decoder/float.go
Normal file
|
@ -0,0 +1,170 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
type floatDecoder struct {
|
||||
op func(unsafe.Pointer, float64)
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newFloatDecoder(structName, fieldName string, op func(unsafe.Pointer, float64)) *floatDecoder {
|
||||
return &floatDecoder{op: op, structName: structName, fieldName: fieldName}
|
||||
}
|
||||
|
||||
var (
|
||||
floatTable = [256]bool{
|
||||
'0': true,
|
||||
'1': true,
|
||||
'2': true,
|
||||
'3': true,
|
||||
'4': true,
|
||||
'5': true,
|
||||
'6': true,
|
||||
'7': true,
|
||||
'8': true,
|
||||
'9': true,
|
||||
'.': true,
|
||||
'e': true,
|
||||
'E': true,
|
||||
'+': true,
|
||||
'-': true,
|
||||
}
|
||||
|
||||
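// characters that may legally follow a JSON number: end of input, whitespace, or a structural delimiter
|
||||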
validEndNumberChar = [256]bool{
|
||||
nul: true,
|
||||
' ': true,
|
||||
'\t': true,
|
||||
'\r': true,
|
||||
'\n': true,
|
||||
',': true,
|
||||
':': true,
|
||||
'}': true,
|
||||
']': true,
|
||||
}
|
||||
)
|
||||
|
||||
func floatBytes(s *Stream) []byte {
|
||||
start := s.cursor
|
||||
for {
|
||||
s.cursor++
|
||||
if floatTable[s.char()] {
|
||||
continue
|
||||
} else if s.char() == nul {
|
||||
if s.read() {
|
||||
s.cursor-- // step back so the current character is retried after the buffer refill
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
return s.buf[start:s.cursor]
|
||||
}
|
||||
|
||||
func (d *floatDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
s.cursor++
|
||||
continue
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return floatBytes(s), nil
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
goto ERROR
|
||||
}
|
||||
}
|
||||
ERROR:
|
||||
return nil, errors.ErrUnexpectedEndOfJSON("float", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *floatDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
start := cursor
|
||||
cursor++
|
||||
for floatTable[buf[cursor]] {
|
||||
cursor++
|
||||
}
|
||||
num := buf[start:cursor]
|
||||
return num, cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return nil, cursor, nil
|
||||
default:
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("float", cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (d *floatDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
bytes, err := d.decodeStreamByte(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if bytes == nil {
|
||||
return nil
|
||||
}
|
||||
str := *(*string)(unsafe.Pointer(&bytes))
|
||||
f64, err := strconv.ParseFloat(str, 64)
|
||||
if err != nil {
|
||||
return errors.ErrSyntax(err.Error(), s.totalOffset())
|
||||
}
|
||||
d.op(p, f64)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *floatDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
bytes, c, err := d.decodeByte(buf, cursor)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if bytes == nil {
|
||||
return c, nil
|
||||
}
|
||||
cursor = c
|
||||
if !validEndNumberChar[buf[cursor]] {
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("float", cursor)
|
||||
}
|
||||
s := *(*string)(unsafe.Pointer(&bytes))
|
||||
f64, err := strconv.ParseFloat(s, 64)
|
||||
if err != nil {
|
||||
return 0, errors.ErrSyntax(err.Error(), cursor)
|
||||
}
|
||||
d.op(p, f64)
|
||||
return cursor, nil
|
||||
}
|
||||
|
||||
func (d *floatDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
buf := ctx.Buf
|
||||
bytes, c, err := d.decodeByte(buf, cursor)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
if bytes == nil {
|
||||
return [][]byte{nullbytes}, c, nil
|
||||
}
|
||||
return [][]byte{bytes}, c, nil
|
||||
}
|
146
internal/decoder/func.go
Normal file
146
internal/decoder/func.go
Normal file
|
@ -0,0 +1,146 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type funcDecoder struct {
|
||||
typ *runtime.Type
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newFuncDecoder(typ *runtime.Type, structName, fieldName string) *funcDecoder {
|
||||
fnDecoder := &funcDecoder{typ, structName, fieldName}
|
||||
return fnDecoder
|
||||
}
|
||||
|
||||
func (d *funcDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
s.skipWhiteSpace()
|
||||
start := s.cursor
|
||||
if err := s.skipValue(depth); err != nil {
|
||||
return err
|
||||
}
|
||||
src := s.buf[start:s.cursor]
|
||||
if len(src) > 0 {
|
||||
switch src[0] {
|
||||
case '"':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "string",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case '[':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "array",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case '{':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "object",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "number",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return nil
|
||||
case 't':
|
||||
if err := trueBytes(s); err == nil {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "boolean",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
}
|
||||
case 'f':
|
||||
if err := falseBytes(s); err == nil {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "boolean",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return errors.ErrInvalidBeginningOfValue(s.buf[s.cursor], s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *funcDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
start := cursor
|
||||
end, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
src := buf[start:end]
|
||||
if len(src) > 0 {
|
||||
switch src[0] {
|
||||
case '"':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "string",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case '[':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "array",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case '{':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "object",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "number",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case 'n':
|
||||
if bytes.Equal(src, nullbytes) {
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return end, nil
|
||||
}
|
||||
case 't':
|
||||
if err := validateTrue(buf, start); err == nil {
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "boolean",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
}
|
||||
case 'f':
|
||||
if err := validateFalse(buf, start); err == nil {
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "boolean",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return cursor, errors.ErrInvalidBeginningOfValue(buf[cursor], cursor)
|
||||
}
|
||||
|
||||
func (d *funcDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
return nil, 0, fmt.Errorf("json: func decoder does not support decode path")
|
||||
}
|
246
internal/decoder/int.go
Normal file
246
internal/decoder/int.go
Normal file
|
@ -0,0 +1,246 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type intDecoder struct {
|
||||
typ *runtime.Type
|
||||
kind reflect.Kind
|
||||
op func(unsafe.Pointer, int64)
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newIntDecoder(typ *runtime.Type, structName, fieldName string, op func(unsafe.Pointer, int64)) *intDecoder {
|
||||
return &intDecoder{
|
||||
typ: typ,
|
||||
kind: typ.Kind(),
|
||||
op: op,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *intDecoder) typeError(buf []byte, offset int64) *errors.UnmarshalTypeError {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: fmt.Sprintf("number %s", string(buf)),
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Struct: d.structName,
|
||||
Field: d.fieldName,
|
||||
Offset: offset,
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
pow10i64 = [...]int64{
|
||||
1e00, 1e01, 1e02, 1e03, 1e04, 1e05, 1e06, 1e07, 1e08, 1e09,
|
||||
1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18,
|
||||
}
|
||||
pow10i64Len = len(pow10i64)
|
||||
)
|
||||
|
||||
func (d *intDecoder) parseInt(b []byte) (int64, error) {
|
||||
isNegative := false
|
||||
if b[0] == '-' {
|
||||
b = b[1:]
|
||||
isNegative = true
|
||||
}
|
||||
maxDigit := len(b)
|
||||
if maxDigit > pow10i64Len {
|
||||
return 0, fmt.Errorf("invalid length of number")
|
||||
}
|
||||
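// accumulate from the most significant digit: each byte is converted to its numeric value and scaled by the positional power of ten from the precomputed table
|
||||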
sum := int64(0)
|
||||
for i := 0; i < maxDigit; i++ {
|
||||
c := int64(b[i]) - 48
|
||||
digitValue := pow10i64[maxDigit-i-1]
|
||||
sum += c * digitValue
|
||||
}
|
||||
if isNegative {
|
||||
return -1 * sum, nil
|
||||
}
|
||||
return sum, nil
|
||||
}
|
||||
|
||||
var (
|
||||
numTable = [256]bool{
|
||||
'0': true,
|
||||
'1': true,
|
||||
'2': true,
|
||||
'3': true,
|
||||
'4': true,
|
||||
'5': true,
|
||||
'6': true,
|
||||
'7': true,
|
||||
'8': true,
|
||||
'9': true,
|
||||
}
|
||||
)
|
||||
|
||||
var (
|
||||
numZeroBuf = []byte{'0'}
|
||||
)
|
||||
|
||||
func (d *intDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
s.cursor++
|
||||
continue
|
||||
case '-':
|
||||
start := s.cursor
|
||||
for {
|
||||
s.cursor++
|
||||
if numTable[s.char()] {
|
||||
continue
|
||||
} else if s.char() == nul {
|
||||
if s.read() {
|
||||
s.cursor-- // step back so the current character is retried after the buffer refill
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
num := s.buf[start:s.cursor]
|
||||
if len(num) < 2 {
|
||||
goto ERROR
|
||||
}
|
||||
return num, nil
|
||||
case '0':
|
||||
s.cursor++
|
||||
return numZeroBuf, nil
|
||||
case '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
start := s.cursor
|
||||
for {
|
||||
s.cursor++
|
||||
if numTable[s.char()] {
|
||||
continue
|
||||
} else if s.char() == nul {
|
||||
if s.read() {
|
||||
s.cursor-- // step back so the current character is retried after the buffer refill
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
num := s.buf[start:s.cursor]
|
||||
return num, nil
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
return nil, d.typeError([]byte{s.char()}, s.totalOffset())
|
||||
}
|
||||
}
|
||||
ERROR:
|
||||
return nil, errors.ErrUnexpectedEndOfJSON("number(integer)", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *intDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||
b := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||
for {
|
||||
switch char(b, cursor) {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case '0':
|
||||
cursor++
|
||||
return numZeroBuf, cursor, nil
|
||||
case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
start := cursor
|
||||
cursor++
|
||||
for numTable[char(b, cursor)] {
|
||||
cursor++
|
||||
}
|
||||
num := buf[start:cursor]
|
||||
return num, cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return nil, cursor, nil
|
||||
default:
|
||||
return nil, 0, d.typeError([]byte{char(b, cursor)}, cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (d *intDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
bytes, err := d.decodeStreamByte(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if bytes == nil {
|
||||
return nil
|
||||
}
|
||||
i64, err := d.parseInt(bytes)
|
||||
if err != nil {
|
||||
return d.typeError(bytes, s.totalOffset())
|
||||
}
|
||||
switch d.kind {
|
||||
case reflect.Int8:
|
||||
if i64 < -1*(1<<7) || (1<<7) <= i64 {
|
||||
return d.typeError(bytes, s.totalOffset())
|
||||
}
|
||||
case reflect.Int16:
|
||||
if i64 < -1*(1<<15) || (1<<15) <= i64 {
|
||||
return d.typeError(bytes, s.totalOffset())
|
||||
}
|
||||
case reflect.Int32:
|
||||
if i64 < -1*(1<<31) || (1<<31) <= i64 {
|
||||
return d.typeError(bytes, s.totalOffset())
|
||||
}
|
||||
}
|
||||
d.op(p, i64)
|
||||
s.reset()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *intDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
bytes, c, err := d.decodeByte(ctx.Buf, cursor)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if bytes == nil {
|
||||
return c, nil
|
||||
}
|
||||
cursor = c
|
||||
|
||||
i64, err := d.parseInt(bytes)
|
||||
if err != nil {
|
||||
return 0, d.typeError(bytes, cursor)
|
||||
}
|
||||
switch d.kind {
|
||||
case reflect.Int8:
|
||||
if i64 < -1*(1<<7) || (1<<7) <= i64 {
|
||||
return 0, d.typeError(bytes, cursor)
|
||||
}
|
||||
case reflect.Int16:
|
||||
if i64 < -1*(1<<15) || (1<<15) <= i64 {
|
||||
return 0, d.typeError(bytes, cursor)
|
||||
}
|
||||
case reflect.Int32:
|
||||
if i64 < -1*(1<<31) || (1<<31) <= i64 {
|
||||
return 0, d.typeError(bytes, cursor)
|
||||
}
|
||||
}
|
||||
d.op(p, i64)
|
||||
return cursor, nil
|
||||
}
|
||||
|
||||
func (d *intDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
return nil, 0, fmt.Errorf("json: int decoder does not support decode path")
|
||||
}
|
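Note (illustrative, not part of the diff): before storing the parsed value, Decode and DecodeStream above reject integers that overflow the destination kind. A minimal sketch of that range check as a standalone helper; the name fitsKind is made up for illustration.

package main

import (
	"fmt"
	"reflect"
)

// fitsKind reports whether v fits the signed integer kind k,
// mirroring the bounds checks in intDecoder.Decode/DecodeStream.
func fitsKind(v int64, k reflect.Kind) bool {
	switch k {
	case reflect.Int8:
		return v >= -(1 << 7) && v < (1 << 7)
	case reflect.Int16:
		return v >= -(1 << 15) && v < (1 << 15)
	case reflect.Int32:
		return v >= -(1 << 31) && v < (1 << 31)
	default:
		return true // the decoder only adds explicit checks for Int8/Int16/Int32
	}
}

func main() {
	fmt.Println(fitsKind(300, reflect.Int8))  // false -> would become an UnmarshalTypeError
	fmt.Println(fitsKind(300, reflect.Int16)) // true
}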
528
internal/decoder/interface.go
Normal file
|
@@ -0,0 +1,528 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding"
|
||||
"encoding/json"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type interfaceDecoder struct {
|
||||
typ *runtime.Type
|
||||
structName string
|
||||
fieldName string
|
||||
sliceDecoder *sliceDecoder
|
||||
mapDecoder *mapDecoder
|
||||
floatDecoder *floatDecoder
|
||||
numberDecoder *numberDecoder
|
||||
stringDecoder *stringDecoder
|
||||
}
|
||||
|
||||
func newEmptyInterfaceDecoder(structName, fieldName string) *interfaceDecoder {
|
||||
ifaceDecoder := &interfaceDecoder{
|
||||
typ: emptyInterfaceType,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
floatDecoder: newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
|
||||
*(*interface{})(p) = v
|
||||
}),
|
||||
numberDecoder: newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) {
|
||||
*(*interface{})(p) = v
|
||||
}),
|
||||
stringDecoder: newStringDecoder(structName, fieldName),
|
||||
}
|
||||
ifaceDecoder.sliceDecoder = newSliceDecoder(
|
||||
ifaceDecoder,
|
||||
emptyInterfaceType,
|
||||
emptyInterfaceType.Size(),
|
||||
structName, fieldName,
|
||||
)
|
||||
ifaceDecoder.mapDecoder = newMapDecoder(
|
||||
interfaceMapType,
|
||||
stringType,
|
||||
ifaceDecoder.stringDecoder,
|
||||
interfaceMapType.Elem(),
|
||||
ifaceDecoder,
|
||||
structName,
|
||||
fieldName,
|
||||
)
|
||||
return ifaceDecoder
|
||||
}
|
||||
|
||||
func newInterfaceDecoder(typ *runtime.Type, structName, fieldName string) *interfaceDecoder {
|
||||
emptyIfaceDecoder := newEmptyInterfaceDecoder(structName, fieldName)
|
||||
stringDecoder := newStringDecoder(structName, fieldName)
|
||||
return &interfaceDecoder{
|
||||
typ: typ,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
sliceDecoder: newSliceDecoder(
|
||||
emptyIfaceDecoder,
|
||||
emptyInterfaceType,
|
||||
emptyInterfaceType.Size(),
|
||||
structName, fieldName,
|
||||
),
|
||||
mapDecoder: newMapDecoder(
|
||||
interfaceMapType,
|
||||
stringType,
|
||||
stringDecoder,
|
||||
interfaceMapType.Elem(),
|
||||
emptyIfaceDecoder,
|
||||
structName,
|
||||
fieldName,
|
||||
),
|
||||
floatDecoder: newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
|
||||
*(*interface{})(p) = v
|
||||
}),
|
||||
numberDecoder: newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) {
|
||||
*(*interface{})(p) = v
|
||||
}),
|
||||
stringDecoder: stringDecoder,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *interfaceDecoder) numDecoder(s *Stream) Decoder {
|
||||
if s.UseNumber {
|
||||
return d.numberDecoder
|
||||
}
|
||||
return d.floatDecoder
|
||||
}
|
||||
|
||||
var (
|
||||
emptyInterfaceType = runtime.Type2RType(reflect.TypeOf((*interface{})(nil)).Elem())
|
||||
EmptyInterfaceType = emptyInterfaceType
|
||||
interfaceMapType = runtime.Type2RType(
|
||||
reflect.TypeOf((*map[string]interface{})(nil)).Elem(),
|
||||
)
|
||||
stringType = runtime.Type2RType(
|
||||
reflect.TypeOf(""),
|
||||
)
|
||||
)
|
||||
|
||||
func decodeStreamUnmarshaler(s *Stream, depth int64, unmarshaler json.Unmarshaler) error {
|
||||
start := s.cursor
|
||||
if err := s.skipValue(depth); err != nil {
|
||||
return err
|
||||
}
|
||||
src := s.buf[start:s.cursor]
|
||||
dst := make([]byte, len(src))
|
||||
copy(dst, src)
|
||||
|
||||
if err := unmarshaler.UnmarshalJSON(dst); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func decodeStreamUnmarshalerContext(s *Stream, depth int64, unmarshaler unmarshalerContext) error {
|
||||
start := s.cursor
|
||||
if err := s.skipValue(depth); err != nil {
|
||||
return err
|
||||
}
|
||||
src := s.buf[start:s.cursor]
|
||||
dst := make([]byte, len(src))
|
||||
copy(dst, src)
|
||||
|
||||
if err := unmarshaler.UnmarshalJSON(s.Option.Context, dst); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func decodeUnmarshaler(buf []byte, cursor, depth int64, unmarshaler json.Unmarshaler) (int64, error) {
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
start := cursor
|
||||
end, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
src := buf[start:end]
|
||||
dst := make([]byte, len(src))
|
||||
copy(dst, src)
|
||||
|
||||
if err := unmarshaler.UnmarshalJSON(dst); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return end, nil
|
||||
}
|
||||
|
||||
func decodeUnmarshalerContext(ctx *RuntimeContext, buf []byte, cursor, depth int64, unmarshaler unmarshalerContext) (int64, error) {
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
start := cursor
|
||||
end, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
src := buf[start:end]
|
||||
dst := make([]byte, len(src))
|
||||
copy(dst, src)
|
||||
|
||||
if err := unmarshaler.UnmarshalJSON(ctx.Option.Context, dst); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return end, nil
|
||||
}
|
||||
|
||||
func decodeStreamTextUnmarshaler(s *Stream, depth int64, unmarshaler encoding.TextUnmarshaler, p unsafe.Pointer) error {
|
||||
start := s.cursor
|
||||
if err := s.skipValue(depth); err != nil {
|
||||
return err
|
||||
}
|
||||
src := s.buf[start:s.cursor]
|
||||
if bytes.Equal(src, nullbytes) {
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
dst := make([]byte, len(src))
|
||||
copy(dst, src)
|
||||
|
||||
if err := unmarshaler.UnmarshalText(dst); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func decodeTextUnmarshaler(buf []byte, cursor, depth int64, unmarshaler encoding.TextUnmarshaler, p unsafe.Pointer) (int64, error) {
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
start := cursor
|
||||
end, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
src := buf[start:end]
|
||||
if bytes.Equal(src, nullbytes) {
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return end, nil
|
||||
}
|
||||
if s, ok := unquoteBytes(src); ok {
|
||||
src = s
|
||||
}
|
||||
if err := unmarshaler.UnmarshalText(src); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return end, nil
|
||||
}
|
||||
|
||||
func (d *interfaceDecoder) decodeStreamEmptyInterface(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
c := s.skipWhiteSpace()
|
||||
for {
|
||||
switch c {
|
||||
case '{':
|
||||
var v map[string]interface{}
|
||||
ptr := unsafe.Pointer(&v)
|
||||
if err := d.mapDecoder.DecodeStream(s, depth, ptr); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*interface{})(p) = v
|
||||
return nil
|
||||
case '[':
|
||||
var v []interface{}
|
||||
ptr := unsafe.Pointer(&v)
|
||||
if err := d.sliceDecoder.DecodeStream(s, depth, ptr); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*interface{})(p) = v
|
||||
return nil
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return d.numDecoder(s).DecodeStream(s, depth, p)
|
||||
case '"':
|
||||
s.cursor++
|
||||
start := s.cursor
|
||||
for {
|
||||
switch s.char() {
|
||||
case '\\':
|
||||
if _, err := decodeEscapeString(s, nil); err != nil {
|
||||
return err
|
||||
}
|
||||
case '"':
|
||||
literal := s.buf[start:s.cursor]
|
||||
s.cursor++
|
||||
*(*interface{})(p) = string(literal)
|
||||
return nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
}
|
||||
s.cursor++
|
||||
}
|
||||
case 't':
|
||||
if err := trueBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
**(**interface{})(unsafe.Pointer(&p)) = true
|
||||
return nil
|
||||
case 'f':
|
||||
if err := falseBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
**(**interface{})(unsafe.Pointer(&p)) = false
|
||||
return nil
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*interface{})(p) = nil
|
||||
return nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
c = s.char()
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
return errors.ErrInvalidBeginningOfValue(c, s.totalOffset())
|
||||
}
|
||||
|
||||
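// emptyInterface mirrors the runtime layout of an interface{} value: a type
// descriptor pointer followed by a data pointer. The decoder casts through it
// to inspect the concrete type stored in the destination.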
type emptyInterface struct {
|
||||
typ *runtime.Type
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
func (d *interfaceDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
runtimeInterfaceValue := *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||
typ: d.typ,
|
||||
ptr: p,
|
||||
}))
|
||||
rv := reflect.ValueOf(runtimeInterfaceValue)
|
||||
if rv.NumMethod() > 0 && rv.CanInterface() {
|
||||
if u, ok := rv.Interface().(unmarshalerContext); ok {
|
||||
return decodeStreamUnmarshalerContext(s, depth, u)
|
||||
}
|
||||
if u, ok := rv.Interface().(json.Unmarshaler); ok {
|
||||
return decodeStreamUnmarshaler(s, depth, u)
|
||||
}
|
||||
if u, ok := rv.Interface().(encoding.TextUnmarshaler); ok {
|
||||
return decodeStreamTextUnmarshaler(s, depth, u, p)
|
||||
}
|
||||
if s.skipWhiteSpace() == 'n' {
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*interface{})(p) = nil
|
||||
return nil
|
||||
}
|
||||
return d.errUnmarshalType(rv.Type(), s.totalOffset())
|
||||
}
|
||||
iface := rv.Interface()
|
||||
ifaceHeader := (*emptyInterface)(unsafe.Pointer(&iface))
|
||||
typ := ifaceHeader.typ
|
||||
if ifaceHeader.ptr == nil || d.typ == typ || typ == nil {
|
||||
// concrete type is empty interface
|
||||
return d.decodeStreamEmptyInterface(s, depth, p)
|
||||
}
|
||||
if typ.Kind() == reflect.Ptr && typ.Elem() == d.typ || typ.Kind() != reflect.Ptr {
|
||||
return d.decodeStreamEmptyInterface(s, depth, p)
|
||||
}
|
||||
if s.skipWhiteSpace() == 'n' {
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*interface{})(p) = nil
|
||||
return nil
|
||||
}
|
||||
decoder, err := CompileToGetDecoder(typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return decoder.DecodeStream(s, depth, ifaceHeader.ptr)
|
||||
}
|
||||
|
||||
func (d *interfaceDecoder) errUnmarshalType(typ reflect.Type, offset int64) *errors.UnmarshalTypeError {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: typ.String(),
|
||||
Type: typ,
|
||||
Offset: offset,
|
||||
Struct: d.structName,
|
||||
Field: d.fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *interfaceDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
runtimeInterfaceValue := *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||
typ: d.typ,
|
||||
ptr: p,
|
||||
}))
|
||||
rv := reflect.ValueOf(runtimeInterfaceValue)
|
||||
if rv.NumMethod() > 0 && rv.CanInterface() {
|
||||
if u, ok := rv.Interface().(unmarshalerContext); ok {
|
||||
return decodeUnmarshalerContext(ctx, buf, cursor, depth, u)
|
||||
}
|
||||
if u, ok := rv.Interface().(json.Unmarshaler); ok {
|
||||
return decodeUnmarshaler(buf, cursor, depth, u)
|
||||
}
|
||||
if u, ok := rv.Interface().(encoding.TextUnmarshaler); ok {
|
||||
return decodeTextUnmarshaler(buf, cursor, depth, u, p)
|
||||
}
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == 'n' {
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
**(**interface{})(unsafe.Pointer(&p)) = nil
|
||||
return cursor, nil
|
||||
}
|
||||
return 0, d.errUnmarshalType(rv.Type(), cursor)
|
||||
}
|
||||
|
||||
iface := rv.Interface()
|
||||
ifaceHeader := (*emptyInterface)(unsafe.Pointer(&iface))
|
||||
typ := ifaceHeader.typ
|
||||
if ifaceHeader.ptr == nil || d.typ == typ || typ == nil {
|
||||
// concrete type is empty interface
|
||||
return d.decodeEmptyInterface(ctx, cursor, depth, p)
|
||||
}
|
||||
if typ.Kind() == reflect.Ptr && typ.Elem() == d.typ || typ.Kind() != reflect.Ptr {
|
||||
return d.decodeEmptyInterface(ctx, cursor, depth, p)
|
||||
}
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == 'n' {
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
**(**interface{})(unsafe.Pointer(&p)) = nil
|
||||
return cursor, nil
|
||||
}
|
||||
decoder, err := CompileToGetDecoder(typ)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return decoder.Decode(ctx, cursor, depth, ifaceHeader.ptr)
|
||||
}
|
||||
|
||||
func (d *interfaceDecoder) decodeEmptyInterface(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
switch buf[cursor] {
|
||||
case '{':
|
||||
var v map[string]interface{}
|
||||
ptr := unsafe.Pointer(&v)
|
||||
cursor, err := d.mapDecoder.Decode(ctx, cursor, depth, ptr)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
**(**interface{})(unsafe.Pointer(&p)) = v
|
||||
return cursor, nil
|
||||
case '[':
|
||||
var v []interface{}
|
||||
ptr := unsafe.Pointer(&v)
|
||||
cursor, err := d.sliceDecoder.Decode(ctx, cursor, depth, ptr)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
**(**interface{})(unsafe.Pointer(&p)) = v
|
||||
return cursor, nil
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return d.floatDecoder.Decode(ctx, cursor, depth, p)
|
||||
case '"':
|
||||
var v string
|
||||
ptr := unsafe.Pointer(&v)
|
||||
cursor, err := d.stringDecoder.Decode(ctx, cursor, depth, ptr)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
**(**interface{})(unsafe.Pointer(&p)) = v
|
||||
return cursor, nil
|
||||
case 't':
|
||||
if err := validateTrue(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
**(**interface{})(unsafe.Pointer(&p)) = true
|
||||
return cursor, nil
|
||||
case 'f':
|
||||
if err := validateFalse(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 5
|
||||
**(**interface{})(unsafe.Pointer(&p)) = false
|
||||
return cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
**(**interface{})(unsafe.Pointer(&p)) = nil
|
||||
return cursor, nil
|
||||
}
|
||||
return cursor, errors.ErrInvalidBeginningOfValue(buf[cursor], cursor)
|
||||
}
|
||||
|
||||
func NewPathDecoder() Decoder {
|
||||
ifaceDecoder := &interfaceDecoder{
|
||||
typ: emptyInterfaceType,
|
||||
structName: "",
|
||||
fieldName: "",
|
||||
floatDecoder: newFloatDecoder("", "", func(p unsafe.Pointer, v float64) {
|
||||
*(*interface{})(p) = v
|
||||
}),
|
||||
numberDecoder: newNumberDecoder("", "", func(p unsafe.Pointer, v json.Number) {
|
||||
*(*interface{})(p) = v
|
||||
}),
|
||||
stringDecoder: newStringDecoder("", ""),
|
||||
}
|
||||
ifaceDecoder.sliceDecoder = newSliceDecoder(
|
||||
ifaceDecoder,
|
||||
emptyInterfaceType,
|
||||
emptyInterfaceType.Size(),
|
||||
"", "",
|
||||
)
|
||||
ifaceDecoder.mapDecoder = newMapDecoder(
|
||||
interfaceMapType,
|
||||
stringType,
|
||||
ifaceDecoder.stringDecoder,
|
||||
interfaceMapType.Elem(),
|
||||
ifaceDecoder,
|
||||
"", "",
|
||||
)
|
||||
return ifaceDecoder
|
||||
}
|
||||
|
||||
var (
|
||||
truebytes = []byte("true")
|
||||
falsebytes = []byte("false")
|
||||
)
|
||||
|
||||
func (d *interfaceDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
buf := ctx.Buf
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
switch buf[cursor] {
|
||||
case '{':
|
||||
return d.mapDecoder.DecodePath(ctx, cursor, depth)
|
||||
case '[':
|
||||
return d.sliceDecoder.DecodePath(ctx, cursor, depth)
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return d.floatDecoder.DecodePath(ctx, cursor, depth)
|
||||
case '"':
|
||||
return d.stringDecoder.DecodePath(ctx, cursor, depth)
|
||||
case 't':
|
||||
if err := validateTrue(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return [][]byte{truebytes}, cursor, nil
|
||||
case 'f':
|
||||
if err := validateFalse(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 5
|
||||
return [][]byte{falsebytes}, cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return [][]byte{nullbytes}, cursor, nil
|
||||
}
|
||||
return nil, cursor, errors.ErrInvalidBeginningOfValue(buf[cursor], cursor)
|
||||
}
|
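Note (illustrative, not part of the diff): decoding into interface{} goes through decodeStreamEmptyInterface/decodeEmptyInterface above, so callers observe map[string]interface{}, []interface{}, float64 (or json.Number with UseNumber), string, bool and nil. A small sketch using the public github.com/goccy/go-json package:

package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	var v interface{}
	if err := json.Unmarshal([]byte(`{"a": [1, "x", true, null]}`), &v); err != nil {
		panic(err)
	}
	m := v.(map[string]interface{})
	for _, e := range m["a"].([]interface{}) {
		fmt.Printf("%T %v\n", e, e) // float64 1, string x, bool true, <nil> <nil>
	}
}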
55
internal/decoder/invalid.go
Normal file
|
@@ -0,0 +1,55 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type invalidDecoder struct {
|
||||
typ *runtime.Type
|
||||
kind reflect.Kind
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newInvalidDecoder(typ *runtime.Type, structName, fieldName string) *invalidDecoder {
|
||||
return &invalidDecoder{
|
||||
typ: typ,
|
||||
kind: typ.Kind(),
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *invalidDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "object",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
Struct: d.structName,
|
||||
Field: d.fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *invalidDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "object",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: cursor,
|
||||
Struct: d.structName,
|
||||
Field: d.fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *invalidDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
return nil, 0, &errors.UnmarshalTypeError{
|
||||
Value: "object",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: cursor,
|
||||
Struct: d.structName,
|
||||
Field: d.fieldName,
|
||||
}
|
||||
}
|
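Note (illustrative, not part of the diff): invalidDecoder is the fallback for Go types JSON cannot populate, so unmarshaling into such a type surfaces the *errors.UnmarshalTypeError built above. A hedged sketch with the public package; the exact error text may differ:

package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	var ch chan int // channels cannot be decoded from JSON
	err := json.Unmarshal([]byte(`{}`), &ch)
	fmt.Println(err) // expected: an unmarshal type error naming chan int
}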
280
internal/decoder/map.go
Normal file
|
@@ -0,0 +1,280 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type mapDecoder struct {
|
||||
mapType *runtime.Type
|
||||
keyType *runtime.Type
|
||||
valueType *runtime.Type
|
||||
canUseAssignFaststrType bool
|
||||
keyDecoder Decoder
|
||||
valueDecoder Decoder
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newMapDecoder(mapType *runtime.Type, keyType *runtime.Type, keyDec Decoder, valueType *runtime.Type, valueDec Decoder, structName, fieldName string) *mapDecoder {
|
||||
return &mapDecoder{
|
||||
mapType: mapType,
|
||||
keyDecoder: keyDec,
|
||||
keyType: keyType,
|
||||
canUseAssignFaststrType: canUseAssignFaststrType(keyType, valueType),
|
||||
valueType: valueType,
|
||||
valueDecoder: valueDec,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
const (
|
||||
mapMaxElemSize = 128
|
||||
)
|
||||
|
||||
// See detail: https://github.com/goccy/go-json/pull/283
|
||||
func canUseAssignFaststrType(key *runtime.Type, value *runtime.Type) bool {
|
||||
indirectElem := value.Size() > mapMaxElemSize
|
||||
if indirectElem {
|
||||
return false
|
||||
}
|
||||
return key.Kind() == reflect.String
|
||||
}
|
||||
|
||||
//go:linkname makemap reflect.makemap
|
||||
func makemap(*runtime.Type, int) unsafe.Pointer
|
||||
|
||||
//nolint:golint
|
||||
//go:linkname mapassign_faststr runtime.mapassign_faststr
|
||||
//go:noescape
|
||||
func mapassign_faststr(t *runtime.Type, m unsafe.Pointer, s string) unsafe.Pointer
|
||||
|
||||
//go:linkname mapassign reflect.mapassign
|
||||
//go:noescape
|
||||
func mapassign(t *runtime.Type, m unsafe.Pointer, k, v unsafe.Pointer)
|
||||
|
||||
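// mapassign stores the decoded key/value pair into the map, taking the
// runtime's mapassign_faststr fast path when the key is a string and the
// element is small enough (see canUseAssignFaststrType).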
func (d *mapDecoder) mapassign(t *runtime.Type, m, k, v unsafe.Pointer) {
|
||||
if d.canUseAssignFaststrType {
|
||||
mapV := mapassign_faststr(t, m, *(*string)(k))
|
||||
typedmemmove(d.valueType, mapV, v)
|
||||
} else {
|
||||
mapassign(t, m, k, v)
|
||||
}
|
||||
}
|
||||
|
||||
func (d *mapDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
|
||||
switch s.skipWhiteSpace() {
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
**(**unsafe.Pointer)(unsafe.Pointer(&p)) = nil
|
||||
return nil
|
||||
case '{':
|
||||
default:
|
||||
return errors.ErrExpected("{ character for map value", s.totalOffset())
|
||||
}
|
||||
mapValue := *(*unsafe.Pointer)(p)
|
||||
if mapValue == nil {
|
||||
mapValue = makemap(d.mapType, 0)
|
||||
}
|
||||
s.cursor++
|
||||
if s.skipWhiteSpace() == '}' {
|
||||
*(*unsafe.Pointer)(p) = mapValue
|
||||
s.cursor++
|
||||
return nil
|
||||
}
|
||||
for {
|
||||
k := unsafe_New(d.keyType)
|
||||
if err := d.keyDecoder.DecodeStream(s, depth, k); err != nil {
|
||||
return err
|
||||
}
|
||||
s.skipWhiteSpace()
|
||||
if !s.equalChar(':') {
|
||||
return errors.ErrExpected("colon after object key", s.totalOffset())
|
||||
}
|
||||
s.cursor++
|
||||
v := unsafe_New(d.valueType)
|
||||
if err := d.valueDecoder.DecodeStream(s, depth, v); err != nil {
|
||||
return err
|
||||
}
|
||||
d.mapassign(d.mapType, mapValue, k, v)
|
||||
s.skipWhiteSpace()
|
||||
if s.equalChar('}') {
|
||||
**(**unsafe.Pointer)(unsafe.Pointer(&p)) = mapValue
|
||||
s.cursor++
|
||||
return nil
|
||||
}
|
||||
if !s.equalChar(',') {
|
||||
return errors.ErrExpected("comma after object value", s.totalOffset())
|
||||
}
|
||||
s.cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func (d *mapDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
buflen := int64(len(buf))
|
||||
if buflen < 2 {
|
||||
return 0, errors.ErrExpected("{} for map", cursor)
|
||||
}
|
||||
switch buf[cursor] {
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
**(**unsafe.Pointer)(unsafe.Pointer(&p)) = nil
|
||||
return cursor, nil
|
||||
case '{':
|
||||
default:
|
||||
return 0, errors.ErrExpected("{ character for map value", cursor)
|
||||
}
|
||||
cursor++
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
mapValue := *(*unsafe.Pointer)(p)
|
||||
if mapValue == nil {
|
||||
mapValue = makemap(d.mapType, 0)
|
||||
}
|
||||
if buf[cursor] == '}' {
|
||||
**(**unsafe.Pointer)(unsafe.Pointer(&p)) = mapValue
|
||||
cursor++
|
||||
return cursor, nil
|
||||
}
|
||||
for {
|
||||
k := unsafe_New(d.keyType)
|
||||
keyCursor, err := d.keyDecoder.Decode(ctx, cursor, depth, k)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor = skipWhiteSpace(buf, keyCursor)
|
||||
if buf[cursor] != ':' {
|
||||
return 0, errors.ErrExpected("colon after object key", cursor)
|
||||
}
|
||||
cursor++
|
||||
v := unsafe_New(d.valueType)
|
||||
valueCursor, err := d.valueDecoder.Decode(ctx, cursor, depth, v)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
d.mapassign(d.mapType, mapValue, k, v)
|
||||
cursor = skipWhiteSpace(buf, valueCursor)
|
||||
if buf[cursor] == '}' {
|
||||
**(**unsafe.Pointer)(unsafe.Pointer(&p)) = mapValue
|
||||
cursor++
|
||||
return cursor, nil
|
||||
}
|
||||
if buf[cursor] != ',' {
|
||||
return 0, errors.ErrExpected("comma after object value", cursor)
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func (d *mapDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
buf := ctx.Buf
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return nil, 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
buflen := int64(len(buf))
|
||||
if buflen < 2 {
|
||||
return nil, 0, errors.ErrExpected("{} for map", cursor)
|
||||
}
|
||||
switch buf[cursor] {
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return [][]byte{nullbytes}, cursor, nil
|
||||
case '{':
|
||||
default:
|
||||
return nil, 0, errors.ErrExpected("{ character for map value", cursor)
|
||||
}
|
||||
cursor++
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == '}' {
|
||||
cursor++
|
||||
return nil, cursor, nil
|
||||
}
|
||||
keyDecoder, ok := d.keyDecoder.(*stringDecoder)
|
||||
if !ok {
|
||||
return nil, 0, &errors.UnmarshalTypeError{
|
||||
Value: "string",
|
||||
Type: reflect.TypeOf(""),
|
||||
Offset: cursor,
|
||||
Struct: d.structName,
|
||||
Field: d.fieldName,
|
||||
}
|
||||
}
|
||||
ret := [][]byte{}
|
||||
for {
|
||||
key, keyCursor, err := keyDecoder.decodeByte(buf, cursor)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor = skipWhiteSpace(buf, keyCursor)
|
||||
if buf[cursor] != ':' {
|
||||
return nil, 0, errors.ErrExpected("colon after object key", cursor)
|
||||
}
|
||||
cursor++
|
||||
child, found, err := ctx.Option.Path.Field(string(key))
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
if found {
|
||||
if child != nil {
|
||||
oldPath := ctx.Option.Path.node
|
||||
ctx.Option.Path.node = child
|
||||
paths, c, err := d.valueDecoder.DecodePath(ctx, cursor, depth)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
ctx.Option.Path.node = oldPath
|
||||
ret = append(ret, paths...)
|
||||
cursor = c
|
||||
} else {
|
||||
start := cursor
|
||||
end, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
ret = append(ret, buf[start:end])
|
||||
cursor = end
|
||||
}
|
||||
} else {
|
||||
c, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor = c
|
||||
}
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == '}' {
|
||||
cursor++
|
||||
return ret, cursor, nil
|
||||
}
|
||||
if buf[cursor] != ',' {
|
||||
return nil, 0, errors.ErrExpected("comma after object value", cursor)
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
}
|
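Note (illustrative, not part of the diff): a decode that exercises the map path above; with a string key and an element well under the 128-byte mapMaxElemSize threshold it takes the mapassign_faststr fast path.

package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	m := map[string]int{}
	if err := json.Unmarshal([]byte(`{"a":1,"b":2}`), &m); err != nil {
		panic(err)
	}
	fmt.Println(m["a"] + m["b"]) // 3
}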
123
internal/decoder/number.go
Normal file
|
@@ -0,0 +1,123 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strconv"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
type numberDecoder struct {
|
||||
stringDecoder *stringDecoder
|
||||
op func(unsafe.Pointer, json.Number)
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newNumberDecoder(structName, fieldName string, op func(unsafe.Pointer, json.Number)) *numberDecoder {
|
||||
return &numberDecoder{
|
||||
stringDecoder: newStringDecoder(structName, fieldName),
|
||||
op: op,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *numberDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
bytes, err := d.decodeStreamByte(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&bytes)), 64); err != nil {
|
||||
return errors.ErrSyntax(err.Error(), s.totalOffset())
|
||||
}
|
||||
d.op(p, json.Number(string(bytes)))
|
||||
s.reset()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *numberDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
bytes, c, err := d.decodeByte(ctx.Buf, cursor)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if _, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&bytes)), 64); err != nil {
|
||||
return 0, errors.ErrSyntax(err.Error(), c)
|
||||
}
|
||||
cursor = c
|
||||
s := *(*string)(unsafe.Pointer(&bytes))
|
||||
d.op(p, json.Number(s))
|
||||
return cursor, nil
|
||||
}
|
||||
|
||||
func (d *numberDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
bytes, c, err := d.decodeByte(ctx.Buf, cursor)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
if bytes == nil {
|
||||
return [][]byte{nullbytes}, c, nil
|
||||
}
|
||||
return [][]byte{bytes}, c, nil
|
||||
}
|
||||
|
||||
func (d *numberDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
|
||||
start := s.cursor
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
s.cursor++
|
||||
continue
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return floatBytes(s), nil
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, nil
|
||||
case '"':
|
||||
return d.stringDecoder.decodeStreamByte(s)
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
goto ERROR
|
||||
}
|
||||
}
|
||||
ERROR:
|
||||
if s.cursor == start {
|
||||
return nil, errors.ErrInvalidBeginningOfValue(s.char(), s.totalOffset())
|
||||
}
|
||||
return nil, errors.ErrUnexpectedEndOfJSON("json.Number", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *numberDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
start := cursor
|
||||
cursor++
|
||||
for floatTable[buf[cursor]] {
|
||||
cursor++
|
||||
}
|
||||
num := buf[start:cursor]
|
||||
return num, cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return nil, cursor, nil
|
||||
case '"':
|
||||
return d.stringDecoder.decodeByte(buf, cursor)
|
||||
default:
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("json.Number", cursor)
|
||||
}
|
||||
}
|
||||
}
|
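Note (illustrative, not part of the diff): the number decoder above keeps the raw digits and stores a json.Number, which is reachable through the public API via Decoder.UseNumber and avoids float64 rounding:

package main

import (
	"fmt"
	"strings"

	json "github.com/goccy/go-json"
)

func main() {
	dec := json.NewDecoder(strings.NewReader(`{"id": 9007199254740993}`))
	dec.UseNumber() // numbers become json.Number instead of float64
	var v map[string]interface{}
	if err := dec.Decode(&v); err != nil {
		panic(err)
	}
	fmt.Println(v["id"].(json.Number).String()) // 9007199254740993
}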
17
internal/decoder/option.go
Normal file
|
@@ -0,0 +1,17 @@
|
|||
package decoder
|
||||
|
||||
import "context"
|
||||
|
||||
type OptionFlags uint8
|
||||
|
||||
const (
|
||||
FirstWinOption OptionFlags = 1 << iota
|
||||
ContextOption
|
||||
PathOption
|
||||
)
|
||||
|
||||
type Option struct {
|
||||
Flags OptionFlags
|
||||
Context context.Context
|
||||
Path *Path
|
||||
}
|
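Note (illustrative, not part of the diff): OptionFlags is a plain bit set, so flags combine with | and are tested with &. A tiny in-package sketch; hasPathOption is a hypothetical helper name:

func hasPathOption(opt *Option) bool { // hypothetical helper, for illustration only
	return opt != nil && opt.Flags&PathOption != 0
}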
670
internal/decoder/path.go
Normal file
|
@@ -0,0 +1,670 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type PathString string
|
||||
|
||||
func (s PathString) Build() (*Path, error) {
|
||||
builder := new(PathBuilder)
|
||||
return builder.Build([]rune(s))
|
||||
}
|
||||
|
||||
type PathBuilder struct {
|
||||
root PathNode
|
||||
node PathNode
|
||||
singleQuotePathSelector bool
|
||||
doubleQuotePathSelector bool
|
||||
}
|
||||
|
||||
func (b *PathBuilder) Build(buf []rune) (*Path, error) {
|
||||
node, err := b.build(buf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &Path{
|
||||
node: node,
|
||||
RootSelectorOnly: node == nil,
|
||||
SingleQuotePathSelector: b.singleQuotePathSelector,
|
||||
DoubleQuotePathSelector: b.doubleQuotePathSelector,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (b *PathBuilder) build(buf []rune) (PathNode, error) {
|
||||
if len(buf) == 0 {
|
||||
return nil, errors.ErrEmptyPath()
|
||||
}
|
||||
if buf[0] != '$' {
|
||||
return nil, errors.ErrInvalidPath("JSON Path must start with a $ character")
|
||||
}
|
||||
if len(buf) == 1 {
|
||||
return nil, nil
|
||||
}
|
||||
buf = buf[1:]
|
||||
offset, err := b.buildNext(buf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(buf) > offset {
|
||||
return nil, errors.ErrInvalidPath("remain invalid path %q", buf[offset:])
|
||||
}
|
||||
return b.root, nil
|
||||
}
|
||||
|
||||
func (b *PathBuilder) buildNextCharIfExists(buf []rune, cursor int) (int, error) {
|
||||
if len(buf) > cursor {
|
||||
offset, err := b.buildNext(buf[cursor:])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return cursor + 1 + offset, nil
|
||||
}
|
||||
return cursor, nil
|
||||
}
|
||||
|
||||
func (b *PathBuilder) buildNext(buf []rune) (int, error) {
|
||||
switch buf[0] {
|
||||
case '.':
|
||||
if len(buf) == 1 {
|
||||
return 0, errors.ErrInvalidPath("JSON Path ends with dot character")
|
||||
}
|
||||
offset, err := b.buildSelector(buf[1:])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return offset + 1, nil
|
||||
case '[':
|
||||
if len(buf) == 1 {
|
||||
return 0, errors.ErrInvalidPath("JSON Path ends with left bracket character")
|
||||
}
|
||||
offset, err := b.buildIndex(buf[1:])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return offset + 1, nil
|
||||
default:
|
||||
return 0, errors.ErrInvalidPath("expect dot or left bracket character. but found %c character", buf[0])
|
||||
}
|
||||
}
|
||||
|
||||
func (b *PathBuilder) buildSelector(buf []rune) (int, error) {
|
||||
switch buf[0] {
|
||||
case '.':
|
||||
if len(buf) == 1 {
|
||||
return 0, errors.ErrInvalidPath("JSON Path ends with double dot character")
|
||||
}
|
||||
offset, err := b.buildPathRecursive(buf[1:])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return 1 + offset, nil
|
||||
case '[', ']', '$', '*':
|
||||
return 0, errors.ErrInvalidPath("found invalid path character %c after dot", buf[0])
|
||||
}
|
||||
for cursor := 0; cursor < len(buf); cursor++ {
|
||||
switch buf[cursor] {
|
||||
case '$', '*', ']':
|
||||
return 0, errors.ErrInvalidPath("found %c character in field selector context", buf[cursor])
|
||||
case '.':
|
||||
if cursor+1 >= len(buf) {
|
||||
return 0, errors.ErrInvalidPath("JSON Path ends with dot character")
|
||||
}
|
||||
selector := buf[:cursor]
|
||||
b.addSelectorNode(string(selector))
|
||||
offset, err := b.buildSelector(buf[cursor+1:])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return cursor + 1 + offset, nil
|
||||
case '[':
|
||||
if cursor+1 >= len(buf) {
|
||||
return 0, errors.ErrInvalidPath("JSON Path ends with left bracket character")
|
||||
}
|
||||
selector := buf[:cursor]
|
||||
b.addSelectorNode(string(selector))
|
||||
offset, err := b.buildIndex(buf[cursor+1:])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return cursor + 1 + offset, nil
|
||||
case '"':
|
||||
if cursor+1 >= len(buf) {
|
||||
return 0, errors.ErrInvalidPath("JSON Path ends with double quote character")
|
||||
}
|
||||
offset, err := b.buildQuoteSelector(buf[cursor+1:], DoubleQuotePathSelector)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return cursor + 1 + offset, nil
|
||||
}
|
||||
}
|
||||
b.addSelectorNode(string(buf))
|
||||
return len(buf), nil
|
||||
}
|
||||
|
||||
func (b *PathBuilder) buildQuoteSelector(buf []rune, sel QuotePathSelector) (int, error) {
|
||||
switch buf[0] {
|
||||
case '[', ']', '$', '.', '*', '\'', '"':
|
||||
return 0, errors.ErrInvalidPath("found invalid path character %c after quote", buf[0])
|
||||
}
|
||||
for cursor := 0; cursor < len(buf); cursor++ {
|
||||
switch buf[cursor] {
|
||||
case '\'':
|
||||
if sel != SingleQuotePathSelector {
|
||||
return 0, errors.ErrInvalidPath("found double quote character in field selector with single quote context")
|
||||
}
|
||||
if len(buf) <= cursor+1 {
|
||||
return 0, errors.ErrInvalidPath("JSON Path ends with single quote character in field selector context")
|
||||
}
|
||||
if buf[cursor+1] != ']' {
|
||||
return 0, errors.ErrInvalidPath("expect right bracket for field selector with single quote but found %c", buf[cursor+1])
|
||||
}
|
||||
selector := buf[:cursor]
|
||||
b.addSelectorNode(string(selector))
|
||||
b.singleQuotePathSelector = true
|
||||
return b.buildNextCharIfExists(buf, cursor+2)
|
||||
case '"':
|
||||
if sel != DoubleQuotePathSelector {
|
||||
return 0, errors.ErrInvalidPath("found single quote character in field selector with double quote context")
|
||||
}
|
||||
selector := buf[:cursor]
|
||||
b.addSelectorNode(string(selector))
|
||||
b.doubleQuotePathSelector = true
|
||||
return b.buildNextCharIfExists(buf, cursor+1)
|
||||
}
|
||||
}
|
||||
return 0, errors.ErrInvalidPath("couldn't find quote character in selector quote path context")
|
||||
}
|
||||
|
||||
func (b *PathBuilder) buildPathRecursive(buf []rune) (int, error) {
|
||||
switch buf[0] {
|
||||
case '.', '[', ']', '$', '*':
|
||||
return 0, errors.ErrInvalidPath("found invalid path character %c after double dot", buf[0])
|
||||
}
|
||||
for cursor := 0; cursor < len(buf); cursor++ {
|
||||
switch buf[cursor] {
|
||||
case '$', '*', ']':
|
||||
return 0, errors.ErrInvalidPath("found %c character in field selector context", buf[cursor])
|
||||
case '.':
|
||||
if cursor+1 >= len(buf) {
|
||||
return 0, errors.ErrInvalidPath("JSON Path ends with dot character")
|
||||
}
|
||||
selector := buf[:cursor]
|
||||
b.addRecursiveNode(string(selector))
|
||||
offset, err := b.buildSelector(buf[cursor+1:])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return cursor + 1 + offset, nil
|
||||
case '[':
|
||||
if cursor+1 >= len(buf) {
|
||||
return 0, errors.ErrInvalidPath("JSON Path ends with left bracket character")
|
||||
}
|
||||
selector := buf[:cursor]
|
||||
b.addRecursiveNode(string(selector))
|
||||
offset, err := b.buildIndex(buf[cursor+1:])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return cursor + 1 + offset, nil
|
||||
}
|
||||
}
|
||||
b.addRecursiveNode(string(buf))
|
||||
return len(buf), nil
|
||||
}
|
||||
|
||||
func (b *PathBuilder) buildIndex(buf []rune) (int, error) {
|
||||
switch buf[0] {
|
||||
case '.', '[', ']', '$':
|
||||
return 0, errors.ErrInvalidPath("found invalid path character %c after left bracket", buf[0])
|
||||
case '\'':
|
||||
if len(buf) == 1 {
|
||||
return 0, errors.ErrInvalidPath("JSON Path ends with single quote character")
|
||||
}
|
||||
offset, err := b.buildQuoteSelector(buf[1:], SingleQuotePathSelector)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return 1 + offset, nil
|
||||
case '*':
|
||||
if len(buf) == 1 {
|
||||
return 0, errors.ErrInvalidPath("JSON Path ends with star character")
|
||||
}
|
||||
if buf[1] != ']' {
|
||||
return 0, errors.ErrInvalidPath("expect right bracket character for index all path but found %c character", buf[1])
|
||||
}
|
||||
b.addIndexAllNode()
|
||||
offset := len("*]")
|
||||
if len(buf) > 2 {
|
||||
buildOffset, err := b.buildNext(buf[2:])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return offset + buildOffset, nil
|
||||
}
|
||||
return offset, nil
|
||||
}
|
||||
|
||||
for cursor := 0; cursor < len(buf); cursor++ {
|
||||
switch buf[cursor] {
|
||||
case ']':
|
||||
index, err := strconv.ParseInt(string(buf[:cursor]), 10, 64)
|
||||
if err != nil {
|
||||
return 0, errors.ErrInvalidPath("%q is unexpected index path", buf[:cursor])
|
||||
}
|
||||
b.addIndexNode(int(index))
|
||||
return b.buildNextCharIfExists(buf, cursor+1)
|
||||
}
|
||||
}
|
||||
return 0, errors.ErrInvalidPath("couldn't find right bracket character in index path context")
|
||||
}
|
||||
|
||||
func (b *PathBuilder) addIndexAllNode() {
|
||||
node := newPathIndexAllNode()
|
||||
if b.root == nil {
|
||||
b.root = node
|
||||
b.node = node
|
||||
} else {
|
||||
b.node = b.node.chain(node)
|
||||
}
|
||||
}
|
||||
|
||||
func (b *PathBuilder) addRecursiveNode(selector string) {
|
||||
node := newPathRecursiveNode(selector)
|
||||
if b.root == nil {
|
||||
b.root = node
|
||||
b.node = node
|
||||
} else {
|
||||
b.node = b.node.chain(node)
|
||||
}
|
||||
}
|
||||
|
||||
func (b *PathBuilder) addSelectorNode(name string) {
|
||||
node := newPathSelectorNode(name)
|
||||
if b.root == nil {
|
||||
b.root = node
|
||||
b.node = node
|
||||
} else {
|
||||
b.node = b.node.chain(node)
|
||||
}
|
||||
}
|
||||
|
||||
func (b *PathBuilder) addIndexNode(idx int) {
|
||||
node := newPathIndexNode(idx)
|
||||
if b.root == nil {
|
||||
b.root = node
|
||||
b.node = node
|
||||
} else {
|
||||
b.node = b.node.chain(node)
|
||||
}
|
||||
}
|
||||
|
||||
type QuotePathSelector int
|
||||
|
||||
const (
|
||||
SingleQuotePathSelector QuotePathSelector = 1
|
||||
DoubleQuotePathSelector QuotePathSelector = 2
|
||||
)
|
||||
|
||||
type Path struct {
|
||||
node PathNode
|
||||
RootSelectorOnly bool
|
||||
SingleQuotePathSelector bool
|
||||
DoubleQuotePathSelector bool
|
||||
}
|
||||
|
||||
func (p *Path) Field(sel string) (PathNode, bool, error) {
|
||||
if p.node == nil {
|
||||
return nil, false, nil
|
||||
}
|
||||
return p.node.Field(sel)
|
||||
}
|
||||
|
||||
func (p *Path) Get(src, dst reflect.Value) error {
|
||||
if p.node == nil {
|
||||
return nil
|
||||
}
|
||||
return p.node.Get(src, dst)
|
||||
}
|
||||
|
||||
func (p *Path) String() string {
|
||||
if p.node == nil {
|
||||
return "$"
|
||||
}
|
||||
return p.node.String()
|
||||
}
|
||||
|
||||
type PathNode interface {
|
||||
fmt.Stringer
|
||||
Index(idx int) (PathNode, bool, error)
|
||||
Field(fieldName string) (PathNode, bool, error)
|
||||
Get(src, dst reflect.Value) error
|
||||
chain(PathNode) PathNode
|
||||
target() bool
|
||||
single() bool
|
||||
}
|
||||
|
||||
type BasePathNode struct {
|
||||
child PathNode
|
||||
}
|
||||
|
||||
func (n *BasePathNode) chain(node PathNode) PathNode {
|
||||
n.child = node
|
||||
return node
|
||||
}
|
||||
|
||||
func (n *BasePathNode) target() bool {
|
||||
return n.child == nil
|
||||
}
|
||||
|
||||
func (n *BasePathNode) single() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
type PathSelectorNode struct {
|
||||
*BasePathNode
|
||||
selector string
|
||||
}
|
||||
|
||||
func newPathSelectorNode(selector string) *PathSelectorNode {
|
||||
return &PathSelectorNode{
|
||||
BasePathNode: &BasePathNode{},
|
||||
selector: selector,
|
||||
}
|
||||
}
|
||||
|
||||
func (n *PathSelectorNode) Index(idx int) (PathNode, bool, error) {
|
||||
return nil, false, &errors.PathError{}
|
||||
}
|
||||
|
||||
func (n *PathSelectorNode) Field(fieldName string) (PathNode, bool, error) {
|
||||
if n.selector == fieldName {
|
||||
return n.child, true, nil
|
||||
}
|
||||
return nil, false, nil
|
||||
}
|
||||
|
||||
func (n *PathSelectorNode) Get(src, dst reflect.Value) error {
|
||||
switch src.Type().Kind() {
|
||||
case reflect.Map:
|
||||
iter := src.MapRange()
|
||||
for iter.Next() {
|
||||
key, ok := iter.Key().Interface().(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("invalid map key type %T", src.Type().Key())
|
||||
}
|
||||
child, found, err := n.Field(key)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if found {
|
||||
if child != nil {
|
||||
return child.Get(iter.Value(), dst)
|
||||
}
|
||||
return AssignValue(iter.Value(), dst)
|
||||
}
|
||||
}
|
||||
case reflect.Struct:
|
||||
typ := src.Type()
|
||||
for i := 0; i < typ.NumField(); i++ {
|
||||
tag := runtime.StructTagFromField(typ.Field(i))
|
||||
child, found, err := n.Field(tag.Key)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if found {
|
||||
if child != nil {
|
||||
return child.Get(src.Field(i), dst)
|
||||
}
|
||||
return AssignValue(src.Field(i), dst)
|
||||
}
|
||||
}
|
||||
case reflect.Ptr:
|
||||
return n.Get(src.Elem(), dst)
|
||||
case reflect.Interface:
|
||||
return n.Get(reflect.ValueOf(src.Interface()), dst)
|
||||
case reflect.Float64, reflect.String, reflect.Bool:
|
||||
return AssignValue(src, dst)
|
||||
}
|
||||
return fmt.Errorf("failed to get %s value from %s", n.selector, src.Type())
|
||||
}
|
||||
|
||||
func (n *PathSelectorNode) String() string {
|
||||
s := fmt.Sprintf(".%s", n.selector)
|
||||
if n.child != nil {
|
||||
s += n.child.String()
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
type PathIndexNode struct {
|
||||
*BasePathNode
|
||||
selector int
|
||||
}
|
||||
|
||||
func newPathIndexNode(selector int) *PathIndexNode {
|
||||
return &PathIndexNode{
|
||||
BasePathNode: &BasePathNode{},
|
||||
selector: selector,
|
||||
}
|
||||
}
|
||||
|
||||
func (n *PathIndexNode) Index(idx int) (PathNode, bool, error) {
|
||||
if n.selector == idx {
|
||||
return n.child, true, nil
|
||||
}
|
||||
return nil, false, nil
|
||||
}
|
||||
|
||||
func (n *PathIndexNode) Field(fieldName string) (PathNode, bool, error) {
|
||||
return nil, false, &errors.PathError{}
|
||||
}
|
||||
|
||||
func (n *PathIndexNode) Get(src, dst reflect.Value) error {
|
||||
switch src.Type().Kind() {
|
||||
case reflect.Array, reflect.Slice:
|
||||
if src.Len() > n.selector {
|
||||
if n.child != nil {
|
||||
return n.child.Get(src.Index(n.selector), dst)
|
||||
}
|
||||
return AssignValue(src.Index(n.selector), dst)
|
||||
}
|
||||
case reflect.Ptr:
|
||||
return n.Get(src.Elem(), dst)
|
||||
case reflect.Interface:
|
||||
return n.Get(reflect.ValueOf(src.Interface()), dst)
|
||||
}
|
||||
return fmt.Errorf("failed to get [%d] value from %s", n.selector, src.Type())
|
||||
}
|
||||
|
||||
func (n *PathIndexNode) String() string {
|
||||
s := fmt.Sprintf("[%d]", n.selector)
|
||||
if n.child != nil {
|
||||
s += n.child.String()
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
type PathIndexAllNode struct {
|
||||
*BasePathNode
|
||||
}
|
||||
|
||||
func newPathIndexAllNode() *PathIndexAllNode {
|
||||
return &PathIndexAllNode{
|
||||
BasePathNode: &BasePathNode{},
|
||||
}
|
||||
}
|
||||
|
||||
func (n *PathIndexAllNode) Index(idx int) (PathNode, bool, error) {
|
||||
return n.child, true, nil
|
||||
}
|
||||
|
||||
func (n *PathIndexAllNode) Field(fieldName string) (PathNode, bool, error) {
|
||||
return nil, false, &errors.PathError{}
|
||||
}
|
||||
|
||||
func (n *PathIndexAllNode) Get(src, dst reflect.Value) error {
|
||||
switch src.Type().Kind() {
|
||||
case reflect.Array, reflect.Slice:
|
||||
var arr []interface{}
|
||||
for i := 0; i < src.Len(); i++ {
|
||||
var v interface{}
|
||||
rv := reflect.ValueOf(&v)
|
||||
if n.child != nil {
|
||||
if err := n.child.Get(src.Index(i), rv); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
if err := AssignValue(src.Index(i), rv); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
arr = append(arr, v)
|
||||
}
|
||||
if err := AssignValue(reflect.ValueOf(arr), dst); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
case reflect.Ptr:
|
||||
return n.Get(src.Elem(), dst)
|
||||
case reflect.Interface:
|
||||
return n.Get(reflect.ValueOf(src.Interface()), dst)
|
||||
}
|
||||
return fmt.Errorf("failed to get all value from %s", src.Type())
|
||||
}
|
||||
|
||||
func (n *PathIndexAllNode) String() string {
|
||||
s := "[*]"
|
||||
if n.child != nil {
|
||||
s += n.child.String()
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
type PathRecursiveNode struct {
|
||||
*BasePathNode
|
||||
selector string
|
||||
}
|
||||
|
||||
func newPathRecursiveNode(selector string) *PathRecursiveNode {
|
||||
node := newPathSelectorNode(selector)
|
||||
return &PathRecursiveNode{
|
||||
BasePathNode: &BasePathNode{
|
||||
child: node,
|
||||
},
|
||||
selector: selector,
|
||||
}
|
||||
}
|
||||
|
||||
func (n *PathRecursiveNode) Field(fieldName string) (PathNode, bool, error) {
|
||||
if n.selector == fieldName {
|
||||
return n.child, true, nil
|
||||
}
|
||||
return nil, false, nil
|
||||
}
|
||||
|
||||
func (n *PathRecursiveNode) Index(_ int) (PathNode, bool, error) {
|
||||
return n, true, nil
|
||||
}
|
||||
|
||||
func valueToSliceValue(v interface{}) []interface{} {
|
||||
rv := reflect.ValueOf(v)
|
||||
ret := []interface{}{}
|
||||
if rv.Type().Kind() == reflect.Slice || rv.Type().Kind() == reflect.Array {
|
||||
for i := 0; i < rv.Len(); i++ {
|
||||
ret = append(ret, rv.Index(i).Interface())
|
||||
}
|
||||
return ret
|
||||
}
|
||||
return []interface{}{v}
|
||||
}
|
||||
|
||||
func (n *PathRecursiveNode) Get(src, dst reflect.Value) error {
|
||||
if n.child == nil {
|
||||
return fmt.Errorf("failed to get by recursive path ..%s", n.selector)
|
||||
}
|
||||
var arr []interface{}
|
||||
switch src.Type().Kind() {
|
||||
case reflect.Map:
|
||||
iter := src.MapRange()
|
||||
for iter.Next() {
|
||||
key, ok := iter.Key().Interface().(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("invalid map key type %T", src.Type().Key())
|
||||
}
|
||||
child, found, err := n.Field(key)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if found {
|
||||
var v interface{}
|
||||
rv := reflect.ValueOf(&v)
|
||||
_ = child.Get(iter.Value(), rv)
|
||||
arr = append(arr, valueToSliceValue(v)...)
|
||||
} else {
|
||||
var v interface{}
|
||||
rv := reflect.ValueOf(&v)
|
||||
_ = n.Get(iter.Value(), rv)
|
||||
if v != nil {
|
||||
arr = append(arr, valueToSliceValue(v)...)
|
||||
}
|
||||
}
|
||||
}
|
||||
_ = AssignValue(reflect.ValueOf(arr), dst)
|
||||
return nil
|
||||
case reflect.Struct:
|
||||
typ := src.Type()
|
||||
for i := 0; i < typ.NumField(); i++ {
|
||||
tag := runtime.StructTagFromField(typ.Field(i))
|
||||
child, found, err := n.Field(tag.Key)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if found {
|
||||
var v interface{}
|
||||
rv := reflect.ValueOf(&v)
|
||||
_ = child.Get(src.Field(i), rv)
|
||||
arr = append(arr, valueToSliceValue(v)...)
|
||||
} else {
|
||||
var v interface{}
|
||||
rv := reflect.ValueOf(&v)
|
||||
_ = n.Get(src.Field(i), rv)
|
||||
if v != nil {
|
||||
arr = append(arr, valueToSliceValue(v)...)
|
||||
}
|
||||
}
|
||||
}
|
||||
_ = AssignValue(reflect.ValueOf(arr), dst)
|
||||
return nil
|
||||
case reflect.Array, reflect.Slice:
|
||||
for i := 0; i < src.Len(); i++ {
|
||||
var v interface{}
|
||||
rv := reflect.ValueOf(&v)
|
||||
_ = n.Get(src.Index(i), rv)
|
||||
if v != nil {
|
||||
arr = append(arr, valueToSliceValue(v)...)
|
||||
}
|
||||
}
|
||||
_ = AssignValue(reflect.ValueOf(arr), dst)
|
||||
return nil
|
||||
case reflect.Ptr:
|
||||
return n.Get(src.Elem(), dst)
|
||||
case reflect.Interface:
|
||||
return n.Get(reflect.ValueOf(src.Interface()), dst)
|
||||
}
|
||||
return fmt.Errorf("failed to get %s value from %s", n.selector, src.Type())
|
||||
}
|
||||
|
||||
func (n *PathRecursiveNode) String() string {
|
||||
s := fmt.Sprintf("..%s", n.selector)
|
||||
if n.child != nil {
|
||||
s += n.child.String()
|
||||
}
|
||||
return s
|
||||
}
|
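Note (illustrative, not part of the diff): the grammar PathBuilder accepts above covers dot selectors, quoted selectors, numeric indexes, [*] and recursive descent. A hedged in-package sketch; pathExamples is a hypothetical function name:

func pathExamples() error { // for illustration only
	for _, s := range []PathString{
		"$.store.name",  // dot selector
		"$['store']",    // single-quoted selector
		"$.items[0].id", // numeric index
		"$.items[*]",    // index-all
		"$..price",      // recursive descent
	} {
		p, err := s.Build()
		if err != nil {
			return err
		}
		_ = p.String() // renders the normalized dot form, e.g. ".store.name"
	}
	return nil
}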
97
internal/decoder/ptr.go
Normal file
|
@@ -0,0 +1,97 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type ptrDecoder struct {
|
||||
dec Decoder
|
||||
typ *runtime.Type
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newPtrDecoder(dec Decoder, typ *runtime.Type, structName, fieldName string) *ptrDecoder {
|
||||
return &ptrDecoder{
|
||||
dec: dec,
|
||||
typ: typ,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *ptrDecoder) contentDecoder() Decoder {
|
||||
dec, ok := d.dec.(*ptrDecoder)
|
||||
if !ok {
|
||||
return d.dec
|
||||
}
|
||||
return dec.contentDecoder()
|
||||
}
|
||||
|
||||
//nolint:golint
|
||||
//go:linkname unsafe_New reflect.unsafe_New
|
||||
func unsafe_New(*runtime.Type) unsafe.Pointer
|
||||
|
||||
func UnsafeNew(t *runtime.Type) unsafe.Pointer {
|
||||
return unsafe_New(t)
|
||||
}
|
||||
|
||||
func (d *ptrDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
if s.skipWhiteSpace() == nul {
|
||||
s.read()
|
||||
}
|
||||
if s.char() == 'n' {
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return nil
|
||||
}
|
||||
var newptr unsafe.Pointer
|
||||
if *(*unsafe.Pointer)(p) == nil {
|
||||
newptr = unsafe_New(d.typ)
|
||||
*(*unsafe.Pointer)(p) = newptr
|
||||
} else {
|
||||
newptr = *(*unsafe.Pointer)(p)
|
||||
}
|
||||
if err := d.dec.DecodeStream(s, depth, newptr); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *ptrDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == 'n' {
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if p != nil {
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
}
|
||||
cursor += 4
|
||||
return cursor, nil
|
||||
}
|
||||
var newptr unsafe.Pointer
|
||||
if *(*unsafe.Pointer)(p) == nil {
|
||||
newptr = unsafe_New(d.typ)
|
||||
*(*unsafe.Pointer)(p) = newptr
|
||||
} else {
|
||||
newptr = *(*unsafe.Pointer)(p)
|
||||
}
|
||||
c, err := d.dec.Decode(ctx, cursor, depth, newptr)
|
||||
if err != nil {
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return 0, err
|
||||
}
|
||||
cursor = c
|
||||
return cursor, nil
|
||||
}
|
||||
|
||||
func (d *ptrDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
return nil, 0, fmt.Errorf("json: ptr decoder does not support decode path")
|
||||
}
|
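Note (illustrative, not part of the diff): the pointer decoder above allocates the pointee on demand and resets the pointer on a JSON null, which is visible through the public API:

package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	var p *int
	if err := json.Unmarshal([]byte(`42`), &p); err != nil {
		panic(err)
	}
	fmt.Println(*p) // 42, pointee allocated by the decoder

	if err := json.Unmarshal([]byte(`null`), &p); err != nil {
		panic(err)
	}
	fmt.Println(p == nil) // true, null resets the pointer
}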
380
internal/decoder/slice.go
Normal file
|
@@ -0,0 +1,380 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
var (
|
||||
sliceType = runtime.Type2RType(
|
||||
reflect.TypeOf((*sliceHeader)(nil)).Elem(),
|
||||
)
|
||||
nilSlice = unsafe.Pointer(&sliceHeader{})
|
||||
)
|
||||
|
||||
type sliceDecoder struct {
|
||||
elemType *runtime.Type
|
||||
isElemPointerType bool
|
||||
valueDecoder Decoder
|
||||
size uintptr
|
||||
arrayPool sync.Pool
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
// If reflect.SliceHeader were used here, the data field would be a uintptr.
|
||||
// The garbage collector cannot trace a reference held only as a uintptr,
|
||||
// so data is declared as unsafe.Pointer to keep the array created by newArray() alive.
|
||||
type sliceHeader struct {
|
||||
data unsafe.Pointer
|
||||
len int
|
||||
cap int
|
||||
}
|
||||
|
||||
const (
|
||||
defaultSliceCapacity = 2
|
||||
)
|
||||
|
||||
func newSliceDecoder(dec Decoder, elemType *runtime.Type, size uintptr, structName, fieldName string) *sliceDecoder {
|
||||
return &sliceDecoder{
|
||||
valueDecoder: dec,
|
||||
elemType: elemType,
|
||||
isElemPointerType: elemType.Kind() == reflect.Ptr || elemType.Kind() == reflect.Map,
|
||||
size: size,
|
||||
arrayPool: sync.Pool{
|
||||
New: func() interface{} {
|
||||
return &sliceHeader{
|
||||
data: newArray(elemType, defaultSliceCapacity),
|
||||
len: 0,
|
||||
cap: defaultSliceCapacity,
|
||||
}
|
||||
},
|
||||
},
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
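// newSlice borrows a sliceHeader from arrayPool and, if src already holds
// elements, grows the backing array as needed and copies them over so
// previously decoded data is preserved.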
func (d *sliceDecoder) newSlice(src *sliceHeader) *sliceHeader {
|
||||
slice := d.arrayPool.Get().(*sliceHeader)
|
||||
if src.len > 0 {
|
||||
// copy original elem
|
||||
if slice.cap < src.cap {
|
||||
data := newArray(d.elemType, src.cap)
|
||||
slice = &sliceHeader{data: data, len: src.len, cap: src.cap}
|
||||
} else {
|
||||
slice.len = src.len
|
||||
}
|
||||
copySlice(d.elemType, *slice, *src)
|
||||
} else {
|
||||
slice.len = 0
|
||||
}
|
||||
return slice
|
||||
}
|
||||
|
||||
func (d *sliceDecoder) releaseSlice(p *sliceHeader) {
|
||||
d.arrayPool.Put(p)
|
||||
}
|
||||
|
||||
//go:linkname copySlice reflect.typedslicecopy
|
||||
func copySlice(elemType *runtime.Type, dst, src sliceHeader) int
|
||||
|
||||
//go:linkname newArray reflect.unsafe_NewArray
|
||||
func newArray(*runtime.Type, int) unsafe.Pointer
|
||||
|
||||
//go:linkname typedmemmove reflect.typedmemmove
|
||||
func typedmemmove(t *runtime.Type, dst, src unsafe.Pointer)
|
||||
|
||||
func (d *sliceDecoder) errNumber(offset int64) *errors.UnmarshalTypeError {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "number",
|
||||
Type: reflect.SliceOf(runtime.RType2Type(d.elemType)),
|
||||
Struct: d.structName,
|
||||
Field: d.fieldName,
|
||||
Offset: offset,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *sliceDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
s.cursor++
|
||||
continue
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
typedmemmove(sliceType, p, nilSlice)
|
||||
return nil
|
||||
case '[':
|
||||
s.cursor++
|
||||
if s.skipWhiteSpace() == ']' {
|
||||
dst := (*sliceHeader)(p)
|
||||
if dst.data == nil {
|
||||
dst.data = newArray(d.elemType, 0)
|
||||
} else {
|
||||
dst.len = 0
|
||||
}
|
||||
s.cursor++
|
||||
return nil
|
||||
}
|
||||
idx := 0
|
||||
slice := d.newSlice((*sliceHeader)(p))
|
||||
srcLen := slice.len
|
||||
capacity := slice.cap
|
||||
data := slice.data
|
||||
for {
|
||||
if capacity <= idx {
|
||||
src := sliceHeader{data: data, len: idx, cap: capacity}
|
||||
capacity *= 2
|
||||
data = newArray(d.elemType, capacity)
|
||||
dst := sliceHeader{data: data, len: idx, cap: capacity}
|
||||
copySlice(d.elemType, dst, src)
|
||||
}
|
||||
ep := unsafe.Pointer(uintptr(data) + uintptr(idx)*d.size)
|
||||
|
||||
// if srcLen is greater than idx, keep the original reference
|
||||
if srcLen <= idx {
|
||||
if d.isElemPointerType {
|
||||
**(**unsafe.Pointer)(unsafe.Pointer(&ep)) = nil // initialize elem pointer
|
||||
} else {
|
||||
// assign new element to the slice
|
||||
typedmemmove(d.elemType, ep, unsafe_New(d.elemType))
|
||||
}
|
||||
}
|
||||
|
||||
if err := d.valueDecoder.DecodeStream(s, depth, ep); err != nil {
|
||||
return err
|
||||
}
|
||||
s.skipWhiteSpace()
|
||||
RETRY:
|
||||
switch s.char() {
|
||||
case ']':
|
||||
slice.cap = capacity
|
||||
slice.len = idx + 1
|
||||
slice.data = data
|
||||
dst := (*sliceHeader)(p)
|
||||
dst.len = idx + 1
|
||||
if dst.len > dst.cap {
|
||||
dst.data = newArray(d.elemType, dst.len)
|
||||
dst.cap = dst.len
|
||||
}
|
||||
copySlice(d.elemType, *dst, *slice)
|
||||
d.releaseSlice(slice)
|
||||
s.cursor++
|
||||
return nil
|
||||
case ',':
|
||||
idx++
|
||||
case nul:
|
||||
if s.read() {
|
||||
goto RETRY
|
||||
}
|
||||
slice.cap = capacity
|
||||
slice.data = data
|
||||
d.releaseSlice(slice)
|
||||
goto ERROR
|
||||
default:
|
||||
slice.cap = capacity
|
||||
slice.data = data
|
||||
d.releaseSlice(slice)
|
||||
goto ERROR
|
||||
}
|
||||
s.cursor++
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return d.errNumber(s.totalOffset())
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
goto ERROR
|
||||
}
|
||||
}
|
||||
ERROR:
|
||||
return errors.ErrUnexpectedEndOfJSON("slice", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *sliceDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
typedmemmove(sliceType, p, nilSlice)
|
||||
return cursor, nil
|
||||
case '[':
|
||||
cursor++
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == ']' {
|
||||
dst := (*sliceHeader)(p)
|
||||
if dst.data == nil {
|
||||
dst.data = newArray(d.elemType, 0)
|
||||
} else {
|
||||
dst.len = 0
|
||||
}
|
||||
cursor++
|
||||
return cursor, nil
|
||||
}
|
||||
idx := 0
|
||||
slice := d.newSlice((*sliceHeader)(p))
|
||||
srcLen := slice.len
|
||||
capacity := slice.cap
|
||||
data := slice.data
|
||||
for {
|
||||
if capacity <= idx {
|
||||
src := sliceHeader{data: data, len: idx, cap: capacity}
|
||||
capacity *= 2
|
||||
data = newArray(d.elemType, capacity)
|
||||
dst := sliceHeader{data: data, len: idx, cap: capacity}
|
||||
copySlice(d.elemType, dst, src)
|
||||
}
|
||||
ep := unsafe.Pointer(uintptr(data) + uintptr(idx)*d.size)
|
||||
// if srcLen is greater than idx, keep the original reference
|
||||
if srcLen <= idx {
|
||||
if d.isElemPointerType {
|
||||
**(**unsafe.Pointer)(unsafe.Pointer(&ep)) = nil // initialize elem pointer
|
||||
} else {
|
||||
// assign new element to the slice
|
||||
typedmemmove(d.elemType, ep, unsafe_New(d.elemType))
|
||||
}
|
||||
}
|
||||
c, err := d.valueDecoder.Decode(ctx, cursor, depth, ep)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor = c
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
switch buf[cursor] {
|
||||
case ']':
|
||||
slice.cap = capacity
|
||||
slice.len = idx + 1
|
||||
slice.data = data
|
||||
dst := (*sliceHeader)(p)
|
||||
dst.len = idx + 1
|
||||
if dst.len > dst.cap {
|
||||
dst.data = newArray(d.elemType, dst.len)
|
||||
dst.cap = dst.len
|
||||
}
|
||||
copySlice(d.elemType, *dst, *slice)
|
||||
d.releaseSlice(slice)
|
||||
cursor++
|
||||
return cursor, nil
|
||||
case ',':
|
||||
idx++
|
||||
default:
|
||||
slice.cap = capacity
|
||||
slice.data = data
|
||||
d.releaseSlice(slice)
|
||||
return 0, errors.ErrInvalidCharacter(buf[cursor], "slice", cursor)
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return 0, d.errNumber(cursor)
|
||||
default:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("slice", cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (d *sliceDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
buf := ctx.Buf
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return nil, 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
|
||||
ret := [][]byte{}
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return [][]byte{nullbytes}, cursor, nil
|
||||
case '[':
|
||||
cursor++
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == ']' {
|
||||
cursor++
|
||||
return ret, cursor, nil
|
||||
}
|
||||
idx := 0
|
||||
for {
|
||||
child, found, err := ctx.Option.Path.node.Index(idx)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
if found {
|
||||
if child != nil {
|
||||
oldPath := ctx.Option.Path.node
|
||||
ctx.Option.Path.node = child
|
||||
paths, c, err := d.valueDecoder.DecodePath(ctx, cursor, depth)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
ctx.Option.Path.node = oldPath
|
||||
ret = append(ret, paths...)
|
||||
cursor = c
|
||||
} else {
|
||||
start := cursor
|
||||
end, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
ret = append(ret, buf[start:end])
|
||||
cursor = end
|
||||
}
|
||||
} else {
|
||||
c, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor = c
|
||||
}
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
switch buf[cursor] {
|
||||
case ']':
|
||||
cursor++
|
||||
return ret, cursor, nil
|
||||
case ',':
|
||||
idx++
|
||||
default:
|
||||
return nil, 0, errors.ErrInvalidCharacter(buf[cursor], "slice", cursor)
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return nil, 0, d.errNumber(cursor)
|
||||
default:
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("slice", cursor)
|
||||
}
|
||||
}
|
||||
}
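The grow-by-doubling loop in Decode and DecodeStream above can be re-expressed with ordinary slices. This is an illustrative sketch of the same capacity strategy (the real decoder works on raw sliceHeader memory via newArray and copySlice):

package main

import "fmt"

// grow mirrors the "capacity <= idx" branch above using ordinary slices:
// when the next index reaches the current capacity, the capacity doubles and
// the already-decoded elements are copied into a fresh backing array.
func grow(data []int, idx, capacity int) ([]int, int) {
	if capacity <= idx {
		capacity *= 2
		next := make([]int, idx, capacity)
		copy(next, data[:idx])
		data = next
	}
	return data, capacity
}

func main() {
	data, capacity := make([]int, 0, 2), 2 // defaultSliceCapacity
	for idx := 0; idx < 5; idx++ {
		data, capacity = grow(data, idx, capacity)
		data = data[:idx+1]
		data[idx] = idx
		fmt.Println(idx, capacity) // capacity grows 2, 2, 4, 4, 8
	}
}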
|
556
internal/decoder/stream.go
Normal file
556
internal/decoder/stream.go
Normal file
|
@@ -0,0 +1,556 @@
|
|||
package decoder

import (
	"bytes"
	"encoding/json"
	"io"
	"strconv"
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
)

const (
	initBufSize = 512
)

type Stream struct {
	buf                   []byte
	bufSize               int64
	length                int64
	r                     io.Reader
	offset                int64
	cursor                int64
	filledBuffer          bool
	allRead               bool
	UseNumber             bool
	DisallowUnknownFields bool
	Option                *Option
}

func NewStream(r io.Reader) *Stream {
	return &Stream{
		r:       r,
		bufSize: initBufSize,
		buf:     make([]byte, initBufSize),
		Option:  &Option{},
	}
}
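The exported UseNumber and DisallowUnknownFields fields are, presumably, what the public Decoder options toggle on this Stream. A hedged usage sketch through the drop-in API (the error text matches the unknown-field error returned by the struct decoder later in this commit):

package main

import (
	"fmt"
	"strings"

	json "github.com/goccy/go-json"
)

type User struct {
	Name string `json:"name"`
}

func main() {
	dec := json.NewDecoder(strings.NewReader(`{"name":"alice","extra":1}`))
	dec.DisallowUnknownFields() // assumed to set the Stream.DisallowUnknownFields flag above
	var u User
	fmt.Println(dec.Decode(&u)) // json: unknown field "extra"
}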
|
||||
|
||||
func (s *Stream) TotalOffset() int64 {
|
||||
return s.totalOffset()
|
||||
}
|
||||
|
||||
func (s *Stream) Buffered() io.Reader {
|
||||
buflen := int64(len(s.buf))
|
||||
for i := s.cursor; i < buflen; i++ {
|
||||
if s.buf[i] == nul {
|
||||
return bytes.NewReader(s.buf[s.cursor:i])
|
||||
}
|
||||
}
|
||||
return bytes.NewReader(s.buf[s.cursor:])
|
||||
}
|
||||
|
||||
func (s *Stream) PrepareForDecode() error {
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\t', '\r', '\n':
|
||||
s.cursor++
|
||||
continue
|
||||
case ',', ':':
|
||||
s.cursor++
|
||||
return nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
return io.EOF
|
||||
}
|
||||
break
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Stream) totalOffset() int64 {
|
||||
return s.offset + s.cursor
|
||||
}
|
||||
|
||||
func (s *Stream) char() byte {
|
||||
return s.buf[s.cursor]
|
||||
}
|
||||
|
||||
func (s *Stream) equalChar(c byte) bool {
|
||||
cur := s.buf[s.cursor]
|
||||
if cur == nul {
|
||||
s.read()
|
||||
cur = s.buf[s.cursor]
|
||||
}
|
||||
return cur == c
|
||||
}
|
||||
|
||||
func (s *Stream) stat() ([]byte, int64, unsafe.Pointer) {
|
||||
return s.buf, s.cursor, (*sliceHeader)(unsafe.Pointer(&s.buf)).data
|
||||
}
|
||||
|
||||
func (s *Stream) bufptr() unsafe.Pointer {
|
||||
return (*sliceHeader)(unsafe.Pointer(&s.buf)).data
|
||||
}
|
||||
|
||||
func (s *Stream) statForRetry() ([]byte, int64, unsafe.Pointer) {
|
||||
s.cursor-- // step back for retry (the caller advances the cursor position on each loop iteration)
|
||||
return s.buf, s.cursor, (*sliceHeader)(unsafe.Pointer(&s.buf)).data
|
||||
}
|
||||
|
||||
func (s *Stream) Reset() {
|
||||
s.reset()
|
||||
s.bufSize = int64(len(s.buf))
|
||||
}
|
||||
|
||||
func (s *Stream) More() bool {
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\r', '\t':
|
||||
s.cursor++
|
||||
continue
|
||||
case '}', ']':
|
||||
return false
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
return false
|
||||
}
|
||||
break
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (s *Stream) Token() (interface{}, error) {
|
||||
for {
|
||||
c := s.char()
|
||||
switch c {
|
||||
case ' ', '\n', '\r', '\t':
|
||||
s.cursor++
|
||||
case '{', '[', ']', '}':
|
||||
s.cursor++
|
||||
return json.Delim(c), nil
|
||||
case ',', ':':
|
||||
s.cursor++
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
bytes := floatBytes(s)
|
||||
str := *(*string)(unsafe.Pointer(&bytes))
|
||||
if s.UseNumber {
|
||||
return json.Number(str), nil
|
||||
}
|
||||
f64, err := strconv.ParseFloat(str, 64)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return f64, nil
|
||||
case '"':
|
||||
bytes, err := stringBytes(s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return string(bytes), nil
|
||||
case 't':
|
||||
if err := trueBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return true, nil
|
||||
case 'f':
|
||||
if err := falseBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return false, nil
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto END
|
||||
default:
|
||||
return nil, errors.ErrInvalidCharacter(s.char(), "token", s.totalOffset())
|
||||
}
|
||||
}
|
||||
END:
|
||||
return nil, io.EOF
|
||||
}
|
||||
|
||||
func (s *Stream) reset() {
|
||||
s.offset += s.cursor
|
||||
s.buf = s.buf[s.cursor:]
|
||||
s.length -= s.cursor
|
||||
s.cursor = 0
|
||||
}
|
||||
|
||||
func (s *Stream) readBuf() []byte {
|
||||
if s.filledBuffer {
|
||||
s.bufSize *= 2
|
||||
remainBuf := s.buf
|
||||
s.buf = make([]byte, s.bufSize)
|
||||
copy(s.buf, remainBuf)
|
||||
}
|
||||
remainLen := s.length - s.cursor
|
||||
remainNotNulCharNum := int64(0)
|
||||
for i := int64(0); i < remainLen; i++ {
|
||||
if s.buf[s.cursor+i] == nul {
|
||||
break
|
||||
}
|
||||
remainNotNulCharNum++
|
||||
}
|
||||
s.length = s.cursor + remainNotNulCharNum
|
||||
return s.buf[s.cursor+remainNotNulCharNum:]
|
||||
}
|
||||
|
||||
func (s *Stream) read() bool {
|
||||
if s.allRead {
|
||||
return false
|
||||
}
|
||||
buf := s.readBuf()
|
||||
last := len(buf) - 1
|
||||
buf[last] = nul
|
||||
n, err := s.r.Read(buf[:last])
|
||||
s.length += int64(n)
|
||||
if n == last {
|
||||
s.filledBuffer = true
|
||||
} else {
|
||||
s.filledBuffer = false
|
||||
}
|
||||
if err == io.EOF {
|
||||
s.allRead = true
|
||||
} else if err != nil {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (s *Stream) skipWhiteSpace() byte {
|
||||
p := s.bufptr()
|
||||
LOOP:
|
||||
c := char(p, s.cursor)
|
||||
switch c {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
s.cursor++
|
||||
goto LOOP
|
||||
case nul:
|
||||
if s.read() {
|
||||
p = s.bufptr()
|
||||
goto LOOP
|
||||
}
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
func (s *Stream) skipObject(depth int64) error {
|
||||
braceCount := 1
|
||||
_, cursor, p := s.stat()
|
||||
for {
|
||||
switch char(p, cursor) {
|
||||
case '{':
|
||||
braceCount++
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
case '}':
|
||||
braceCount--
|
||||
depth--
|
||||
if braceCount == 0 {
|
||||
s.cursor = cursor + 1
|
||||
return nil
|
||||
}
|
||||
case '[':
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
case ']':
|
||||
depth--
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch char(p, cursor) {
|
||||
case '\\':
|
||||
cursor++
|
||||
if char(p, cursor) == nul {
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
case '"':
|
||||
goto SWITCH_OUT
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.statForRetry()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
}
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("object of object", cursor)
|
||||
}
|
||||
SWITCH_OUT:
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Stream) skipArray(depth int64) error {
|
||||
bracketCount := 1
|
||||
_, cursor, p := s.stat()
|
||||
for {
|
||||
switch char(p, cursor) {
|
||||
case '[':
|
||||
bracketCount++
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
case ']':
|
||||
bracketCount--
|
||||
depth--
|
||||
if bracketCount == 0 {
|
||||
s.cursor = cursor + 1
|
||||
return nil
|
||||
}
|
||||
case '{':
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
case '}':
|
||||
depth--
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch char(p, cursor) {
|
||||
case '\\':
|
||||
cursor++
|
||||
if char(p, cursor) == nul {
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
case '"':
|
||||
goto SWITCH_OUT
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.statForRetry()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
}
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("array of object", cursor)
|
||||
}
|
||||
SWITCH_OUT:
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Stream) skipValue(depth int64) error {
|
||||
_, cursor, p := s.stat()
|
||||
for {
|
||||
switch char(p, cursor) {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("value of object", s.totalOffset())
|
||||
case '{':
|
||||
s.cursor = cursor + 1
|
||||
return s.skipObject(depth + 1)
|
||||
case '[':
|
||||
s.cursor = cursor + 1
|
||||
return s.skipArray(depth + 1)
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch char(p, cursor) {
|
||||
case '\\':
|
||||
cursor++
|
||||
if char(p, cursor) == nul {
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("value of string", s.totalOffset())
|
||||
}
|
||||
case '"':
|
||||
s.cursor = cursor + 1
|
||||
return nil
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.statForRetry()
|
||||
continue
|
||||
}
|
||||
return errors.ErrUnexpectedEndOfJSON("value of string", s.totalOffset())
|
||||
}
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
for {
|
||||
cursor++
|
||||
c := char(p, cursor)
|
||||
if floatTable[c] {
|
||||
continue
|
||||
} else if c == nul {
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
}
|
||||
s.cursor = cursor
|
||||
return nil
|
||||
}
|
||||
case 't':
|
||||
s.cursor = cursor
|
||||
if err := trueBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
case 'f':
|
||||
s.cursor = cursor
|
||||
if err := falseBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
case 'n':
|
||||
s.cursor = cursor
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func nullBytes(s *Stream) error {
|
||||
// current cursor's character is 'n'
|
||||
s.cursor++
|
||||
if s.char() != 'u' {
|
||||
if err := retryReadNull(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
if s.char() != 'l' {
|
||||
if err := retryReadNull(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
if s.char() != 'l' {
|
||||
if err := retryReadNull(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
return nil
|
||||
}
|
||||
|
||||
func retryReadNull(s *Stream) error {
|
||||
if s.char() == nul && s.read() {
|
||||
return nil
|
||||
}
|
||||
return errors.ErrInvalidCharacter(s.char(), "null", s.totalOffset())
|
||||
}
|
||||
|
||||
func trueBytes(s *Stream) error {
|
||||
// current cursor's character is 't'
|
||||
s.cursor++
|
||||
if s.char() != 'r' {
|
||||
if err := retryReadTrue(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
if s.char() != 'u' {
|
||||
if err := retryReadTrue(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
if s.char() != 'e' {
|
||||
if err := retryReadTrue(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
return nil
|
||||
}
|
||||
|
||||
func retryReadTrue(s *Stream) error {
|
||||
if s.char() == nul && s.read() {
|
||||
return nil
|
||||
}
|
||||
return errors.ErrInvalidCharacter(s.char(), "bool(true)", s.totalOffset())
|
||||
}
|
||||
|
||||
func falseBytes(s *Stream) error {
|
||||
// current cursor's character is 'f'
|
||||
s.cursor++
|
||||
if s.char() != 'a' {
|
||||
if err := retryReadFalse(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
if s.char() != 'l' {
|
||||
if err := retryReadFalse(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
if s.char() != 's' {
|
||||
if err := retryReadFalse(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
if s.char() != 'e' {
|
||||
if err := retryReadFalse(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
s.cursor++
|
||||
return nil
|
||||
}
|
||||
|
||||
func retryReadFalse(s *Stream) error {
|
||||
if s.char() == nul && s.read() {
|
||||
return nil
|
||||
}
|
||||
return errors.ErrInvalidCharacter(s.char(), "bool(false)", s.totalOffset())
|
||||
}
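Token, More and PrepareForDecode above drive incremental reads of the underlying io.Reader. A short usage sketch through the public streaming Decoder (assumed to wrap this Stream, mirroring encoding/json):

package main

import (
	"fmt"
	"strings"

	json "github.com/goccy/go-json"
)

func main() {
	dec := json.NewDecoder(strings.NewReader(`[{"id":1},{"id":2}]`))
	dec.UseNumber() // corresponds to Stream.UseNumber above

	if _, err := dec.Token(); err != nil { // consume the opening '['
		panic(err)
	}
	for dec.More() { // More skips whitespace and stops at ']' as in Stream.More
		var v map[string]interface{}
		if err := dec.Decode(&v); err != nil {
			panic(err)
		}
		fmt.Println(v["id"]) // 1, then 2 (as json.Number because of UseNumber)
	}
}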
|
452
internal/decoder/string.go
Normal file
452
internal/decoder/string.go
Normal file
|
@@ -0,0 +1,452 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"unicode"
|
||||
"unicode/utf16"
|
||||
"unicode/utf8"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
type stringDecoder struct {
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newStringDecoder(structName, fieldName string) *stringDecoder {
|
||||
return &stringDecoder{
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *stringDecoder) errUnmarshalType(typeName string, offset int64) *errors.UnmarshalTypeError {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: typeName,
|
||||
Type: reflect.TypeOf(""),
|
||||
Offset: offset,
|
||||
Struct: d.structName,
|
||||
Field: d.fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *stringDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
bytes, err := d.decodeStreamByte(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if bytes == nil {
|
||||
return nil
|
||||
}
|
||||
**(**string)(unsafe.Pointer(&p)) = *(*string)(unsafe.Pointer(&bytes))
|
||||
s.reset()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *stringDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
bytes, c, err := d.decodeByte(ctx.Buf, cursor)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if bytes == nil {
|
||||
return c, nil
|
||||
}
|
||||
cursor = c
|
||||
**(**string)(unsafe.Pointer(&p)) = *(*string)(unsafe.Pointer(&bytes))
|
||||
return cursor, nil
|
||||
}
|
||||
|
||||
func (d *stringDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
bytes, c, err := d.decodeByte(ctx.Buf, cursor)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
if bytes == nil {
|
||||
return [][]byte{nullbytes}, c, nil
|
||||
}
|
||||
return [][]byte{bytes}, c, nil
|
||||
}
|
||||
|
||||
var (
|
||||
hexToInt = [256]int{
|
||||
'0': 0,
|
||||
'1': 1,
|
||||
'2': 2,
|
||||
'3': 3,
|
||||
'4': 4,
|
||||
'5': 5,
|
||||
'6': 6,
|
||||
'7': 7,
|
||||
'8': 8,
|
||||
'9': 9,
|
||||
'A': 10,
|
||||
'B': 11,
|
||||
'C': 12,
|
||||
'D': 13,
|
||||
'E': 14,
|
||||
'F': 15,
|
||||
'a': 10,
|
||||
'b': 11,
|
||||
'c': 12,
|
||||
'd': 13,
|
||||
'e': 14,
|
||||
'f': 15,
|
||||
}
|
||||
)
|
||||
|
||||
func unicodeToRune(code []byte) rune {
|
||||
var r rune
|
||||
for i := 0; i < len(code); i++ {
|
||||
r = r*16 + rune(hexToInt[code[i]])
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
func readAtLeast(s *Stream, n int64, p *unsafe.Pointer) bool {
|
||||
for s.cursor+n >= s.length {
|
||||
if !s.read() {
|
||||
return false
|
||||
}
|
||||
*p = s.bufptr()
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func decodeUnicodeRune(s *Stream, p unsafe.Pointer) (rune, int64, unsafe.Pointer, error) {
|
||||
const defaultOffset = 5
|
||||
const surrogateOffset = 11
|
||||
|
||||
if !readAtLeast(s, defaultOffset, &p) {
|
||||
return rune(0), 0, nil, errors.ErrInvalidCharacter(s.char(), "escaped string", s.totalOffset())
|
||||
}
|
||||
|
||||
r := unicodeToRune(s.buf[s.cursor+1 : s.cursor+defaultOffset])
|
||||
if utf16.IsSurrogate(r) {
|
||||
if !readAtLeast(s, surrogateOffset, &p) {
|
||||
return unicode.ReplacementChar, defaultOffset, p, nil
|
||||
}
|
||||
if s.buf[s.cursor+defaultOffset] != '\\' || s.buf[s.cursor+defaultOffset+1] != 'u' {
|
||||
return unicode.ReplacementChar, defaultOffset, p, nil
|
||||
}
|
||||
r2 := unicodeToRune(s.buf[s.cursor+defaultOffset+2 : s.cursor+surrogateOffset])
|
||||
if r := utf16.DecodeRune(r, r2); r != unicode.ReplacementChar {
|
||||
return r, surrogateOffset, p, nil
|
||||
}
|
||||
}
|
||||
return r, defaultOffset, p, nil
|
||||
}
|
||||
|
||||
func decodeUnicode(s *Stream, p unsafe.Pointer) (unsafe.Pointer, error) {
|
||||
const backSlashAndULen = 2 // length of \u
|
||||
|
||||
r, offset, pp, err := decodeUnicodeRune(s, p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
unicode := []byte(string(r))
|
||||
unicodeLen := int64(len(unicode))
|
||||
s.buf = append(append(s.buf[:s.cursor-1], unicode...), s.buf[s.cursor+offset:]...)
|
||||
unicodeOrgLen := offset - 1
|
||||
s.length = s.length - (backSlashAndULen + (unicodeOrgLen - unicodeLen))
|
||||
s.cursor = s.cursor - backSlashAndULen + unicodeLen
|
||||
return pp, nil
|
||||
}
|
||||
|
||||
func decodeEscapeString(s *Stream, p unsafe.Pointer) (unsafe.Pointer, error) {
|
||||
s.cursor++
|
||||
RETRY:
|
||||
switch s.buf[s.cursor] {
|
||||
case '"':
|
||||
s.buf[s.cursor] = '"'
|
||||
case '\\':
|
||||
s.buf[s.cursor] = '\\'
|
||||
case '/':
|
||||
s.buf[s.cursor] = '/'
|
||||
case 'b':
|
||||
s.buf[s.cursor] = '\b'
|
||||
case 'f':
|
||||
s.buf[s.cursor] = '\f'
|
||||
case 'n':
|
||||
s.buf[s.cursor] = '\n'
|
||||
case 'r':
|
||||
s.buf[s.cursor] = '\r'
|
||||
case 't':
|
||||
s.buf[s.cursor] = '\t'
|
||||
case 'u':
|
||||
return decodeUnicode(s, p)
|
||||
case nul:
|
||||
if !s.read() {
|
||||
return nil, errors.ErrInvalidCharacter(s.char(), "escaped string", s.totalOffset())
|
||||
}
|
||||
p = s.bufptr()
|
||||
goto RETRY
|
||||
default:
|
||||
return nil, errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
}
|
||||
s.buf = append(s.buf[:s.cursor-1], s.buf[s.cursor:]...)
|
||||
s.length--
|
||||
s.cursor--
|
||||
p = s.bufptr()
|
||||
return p, nil
|
||||
}
|
||||
|
||||
var (
|
||||
runeErrBytes = []byte(string(utf8.RuneError))
|
||||
runeErrBytesLen = int64(len(runeErrBytes))
|
||||
)
|
||||
|
||||
func stringBytes(s *Stream) ([]byte, error) {
|
||||
_, cursor, p := s.stat()
|
||||
cursor++ // skip double quote char
|
||||
start := cursor
|
||||
for {
|
||||
switch char(p, cursor) {
|
||||
case '\\':
|
||||
s.cursor = cursor
|
||||
pp, err := decodeEscapeString(s, p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
p = pp
|
||||
cursor = s.cursor
|
||||
case '"':
|
||||
literal := s.buf[start:cursor]
|
||||
cursor++
|
||||
s.cursor = cursor
|
||||
return literal, nil
|
||||
case
|
||||
// 0x00 is nul, 0x5c is '\\', 0x22 is '"' .
|
||||
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, // 0x00-0x0F
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, // 0x10-0x1F
|
||||
0x20, 0x21 /*0x22,*/, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A, 0x2B, 0x2C, 0x2D, 0x2E, 0x2F, // 0x20-0x2F
|
||||
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A, 0x3B, 0x3C, 0x3D, 0x3E, 0x3F, // 0x30-0x3F
|
||||
0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A, 0x4B, 0x4C, 0x4D, 0x4E, 0x4F, // 0x40-0x4F
|
||||
0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x5B /*0x5C,*/, 0x5D, 0x5E, 0x5F, // 0x50-0x5F
|
||||
0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, // 0x60-0x6F
|
||||
0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A, 0x7B, 0x7C, 0x7D, 0x7E, 0x7F: // 0x70-0x7F
|
||||
// character is ASCII. skip to next char
|
||||
case
|
||||
0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x8B, 0x8C, 0x8D, 0x8E, 0x8F, // 0x80-0x8F
|
||||
0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0x9B, 0x9C, 0x9D, 0x9E, 0x9F, // 0x90-0x9F
|
||||
0xA0, 0xA1, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 0xA8, 0xA9, 0xAA, 0xAB, 0xAC, 0xAD, 0xAE, 0xAF, // 0xA0-0xAF
|
||||
0xB0, 0xB1, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA, 0xBB, 0xBC, 0xBD, 0xBE, 0xBF, // 0xB0-0xBF
|
||||
0xC0, 0xC1, // 0xC0-0xC1
|
||||
0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0xFD, 0xFE, 0xFF: // 0xF5-0xFE
|
||||
// character is invalid
|
||||
s.buf = append(append(append([]byte{}, s.buf[:cursor]...), runeErrBytes...), s.buf[cursor+1:]...)
|
||||
_, _, p = s.stat()
|
||||
cursor += runeErrBytesLen
|
||||
s.length += runeErrBytesLen
|
||||
continue
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
case 0xEF:
|
||||
// RuneError is {0xEF, 0xBF, 0xBD}
|
||||
if s.buf[cursor+1] == 0xBF && s.buf[cursor+2] == 0xBD {
|
||||
// found RuneError: skip
|
||||
cursor += 2
|
||||
break
|
||||
}
|
||||
fallthrough
|
||||
default:
|
||||
// multi-byte character
|
||||
if !utf8.FullRune(s.buf[cursor : len(s.buf)-1]) {
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
}
|
||||
r, size := utf8.DecodeRune(s.buf[cursor:])
|
||||
if r == utf8.RuneError {
|
||||
s.buf = append(append(append([]byte{}, s.buf[:cursor]...), runeErrBytes...), s.buf[cursor+1:]...)
|
||||
cursor += runeErrBytesLen
|
||||
s.length += runeErrBytesLen
|
||||
_, _, p = s.stat()
|
||||
} else {
|
||||
cursor += int64(size)
|
||||
}
|
||||
continue
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
ERROR:
|
||||
return nil, errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *stringDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
s.cursor++
|
||||
continue
|
||||
case '[':
|
||||
return nil, d.errUnmarshalType("array", s.totalOffset())
|
||||
case '{':
|
||||
return nil, d.errUnmarshalType("object", s.totalOffset())
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return nil, d.errUnmarshalType("number", s.totalOffset())
|
||||
case '"':
|
||||
return stringBytes(s)
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
return nil, errors.ErrInvalidBeginningOfValue(s.char(), s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *stringDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
case '[':
|
||||
return nil, 0, d.errUnmarshalType("array", cursor)
|
||||
case '{':
|
||||
return nil, 0, d.errUnmarshalType("object", cursor)
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return nil, 0, d.errUnmarshalType("number", cursor)
|
||||
case '"':
|
||||
cursor++
|
||||
start := cursor
|
||||
b := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||
escaped := 0
|
||||
for {
|
||||
switch char(b, cursor) {
|
||||
case '\\':
|
||||
escaped++
|
||||
cursor++
|
||||
switch char(b, cursor) {
|
||||
case '"', '\\', '/', 'b', 'f', 'n', 'r', 't':
|
||||
cursor++
|
||||
case 'u':
|
||||
buflen := int64(len(buf))
|
||||
if cursor+5 >= buflen {
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("escaped string", cursor)
|
||||
}
|
||||
for i := int64(1); i <= 4; i++ {
|
||||
c := char(b, cursor+i)
|
||||
if !(('0' <= c && c <= '9') || ('a' <= c && c <= 'f') || ('A' <= c && c <= 'F')) {
|
||||
return nil, 0, errors.ErrSyntax(fmt.Sprintf("json: invalid character %c in \\u hexadecimal character escape", c), cursor+i)
|
||||
}
|
||||
}
|
||||
cursor += 5
|
||||
default:
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("escaped string", cursor)
|
||||
}
|
||||
continue
|
||||
case '"':
|
||||
literal := buf[start:cursor]
|
||||
if escaped > 0 {
|
||||
literal = literal[:unescapeString(literal)]
|
||||
}
|
||||
cursor++
|
||||
return literal, cursor, nil
|
||||
case nul:
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return nil, cursor, nil
|
||||
default:
|
||||
return nil, 0, errors.ErrInvalidBeginningOfValue(buf[cursor], cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var unescapeMap = [256]byte{
|
||||
'"': '"',
|
||||
'\\': '\\',
|
||||
'/': '/',
|
||||
'b': '\b',
|
||||
'f': '\f',
|
||||
'n': '\n',
|
||||
'r': '\r',
|
||||
't': '\t',
|
||||
}
|
||||
|
||||
func unsafeAdd(ptr unsafe.Pointer, offset int) unsafe.Pointer {
|
||||
return unsafe.Pointer(uintptr(ptr) + uintptr(offset))
|
||||
}
|
||||
|
||||
func unescapeString(buf []byte) int {
|
||||
p := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||
end := unsafeAdd(p, len(buf))
|
||||
src := unsafeAdd(p, bytes.IndexByte(buf, '\\'))
|
||||
dst := src
|
||||
for src != end {
|
||||
c := char(src, 0)
|
||||
if c == '\\' {
|
||||
escapeChar := char(src, 1)
|
||||
if escapeChar != 'u' {
|
||||
*(*byte)(dst) = unescapeMap[escapeChar]
|
||||
src = unsafeAdd(src, 2)
|
||||
dst = unsafeAdd(dst, 1)
|
||||
} else {
|
||||
v1 := hexToInt[char(src, 2)]
|
||||
v2 := hexToInt[char(src, 3)]
|
||||
v3 := hexToInt[char(src, 4)]
|
||||
v4 := hexToInt[char(src, 5)]
|
||||
code := rune((v1 << 12) | (v2 << 8) | (v3 << 4) | v4)
|
||||
if code >= 0xd800 && code < 0xdc00 && uintptr(unsafeAdd(src, 11)) < uintptr(end) {
|
||||
if char(src, 6) == '\\' && char(src, 7) == 'u' {
|
||||
v1 := hexToInt[char(src, 8)]
|
||||
v2 := hexToInt[char(src, 9)]
|
||||
v3 := hexToInt[char(src, 10)]
|
||||
v4 := hexToInt[char(src, 11)]
|
||||
lo := rune((v1 << 12) | (v2 << 8) | (v3 << 4) | v4)
|
||||
if lo >= 0xdc00 && lo < 0xe000 {
|
||||
code = (code-0xd800)<<10 | (lo - 0xdc00) + 0x10000
|
||||
src = unsafeAdd(src, 6)
|
||||
}
|
||||
}
|
||||
}
|
||||
var b [utf8.UTFMax]byte
|
||||
n := utf8.EncodeRune(b[:], code)
|
||||
switch n {
|
||||
case 4:
|
||||
*(*byte)(unsafeAdd(dst, 3)) = b[3]
|
||||
fallthrough
|
||||
case 3:
|
||||
*(*byte)(unsafeAdd(dst, 2)) = b[2]
|
||||
fallthrough
|
||||
case 2:
|
||||
*(*byte)(unsafeAdd(dst, 1)) = b[1]
|
||||
fallthrough
|
||||
case 1:
|
||||
*(*byte)(unsafeAdd(dst, 0)) = b[0]
|
||||
}
|
||||
src = unsafeAdd(src, 6)
|
||||
dst = unsafeAdd(dst, n)
|
||||
}
|
||||
} else {
|
||||
*(*byte)(dst) = c
|
||||
src = unsafeAdd(src, 1)
|
||||
dst = unsafeAdd(dst, 1)
|
||||
}
|
||||
}
|
||||
return int(uintptr(dst) - uintptr(p))
|
||||
}
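unescapeString combines a UTF-16 surrogate pair with `(hi-0xD800)<<10 | (lo-0xDC00) + 0x10000` (in Go, `|` and `+` share precedence and associate left-to-right, so the shift-or happens before the final addition). The standard library performs the same arithmetic; a small sketch for comparison:

package main

import (
	"fmt"
	"unicode/utf16"
)

func main() {
	hi, lo := rune(0xD83D), rune(0xDE00) // the JSON escape "\uD83D\uDE00"
	manual := ((hi-0xD800)<<10 | (lo - 0xDC00)) + 0x10000
	fmt.Printf("%c %c\n", manual, utf16.DecodeRune(hi, lo)) // 😀 😀
}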
|
845
internal/decoder/struct.go
Normal file
845
internal/decoder/struct.go
Normal file
|
@@ -0,0 +1,845 @@
|
|||
package decoder
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"math/bits"
|
||||
"sort"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf16"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
type structFieldSet struct {
|
||||
dec Decoder
|
||||
offset uintptr
|
||||
isTaggedKey bool
|
||||
fieldIdx int
|
||||
key string
|
||||
keyLen int64
|
||||
err error
|
||||
}
|
||||
|
||||
type structDecoder struct {
|
||||
fieldMap map[string]*structFieldSet
|
||||
fieldUniqueNameNum int
|
||||
stringDecoder *stringDecoder
|
||||
structName string
|
||||
fieldName string
|
||||
isTriedOptimize bool
|
||||
keyBitmapUint8 [][256]uint8
|
||||
keyBitmapUint16 [][256]uint16
|
||||
sortedFieldSets []*structFieldSet
|
||||
keyDecoder func(*structDecoder, []byte, int64) (int64, *structFieldSet, error)
|
||||
keyStreamDecoder func(*structDecoder, *Stream) (*structFieldSet, string, error)
|
||||
}
|
||||
|
||||
var (
|
||||
largeToSmallTable [256]byte
|
||||
)
|
||||
|
||||
func init() {
|
||||
for i := 0; i < 256; i++ {
|
||||
c := i
|
||||
if 'A' <= c && c <= 'Z' {
|
||||
c += 'a' - 'A'
|
||||
}
|
||||
largeToSmallTable[i] = byte(c)
|
||||
}
|
||||
}
|
||||
|
||||
func toASCIILower(s string) string {
|
||||
b := []byte(s)
|
||||
for i := range b {
|
||||
b[i] = largeToSmallTable[b[i]]
|
||||
}
|
||||
return string(b)
|
||||
}
|
||||
|
||||
func newStructDecoder(structName, fieldName string, fieldMap map[string]*structFieldSet) *structDecoder {
|
||||
return &structDecoder{
|
||||
fieldMap: fieldMap,
|
||||
stringDecoder: newStringDecoder(structName, fieldName),
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
keyDecoder: decodeKey,
|
||||
keyStreamDecoder: decodeKeyStream,
|
||||
}
|
||||
}
|
||||
|
||||
const (
|
||||
allowOptimizeMaxKeyLen = 64
|
||||
allowOptimizeMaxFieldLen = 16
|
||||
)
|
||||
|
||||
func (d *structDecoder) tryOptimize() {
|
||||
fieldUniqueNameMap := map[string]int{}
|
||||
fieldIdx := -1
|
||||
for k, v := range d.fieldMap {
|
||||
lower := strings.ToLower(k)
|
||||
idx, exists := fieldUniqueNameMap[lower]
|
||||
if exists {
|
||||
v.fieldIdx = idx
|
||||
} else {
|
||||
fieldIdx++
|
||||
v.fieldIdx = fieldIdx
|
||||
}
|
||||
fieldUniqueNameMap[lower] = fieldIdx
|
||||
}
|
||||
d.fieldUniqueNameNum = len(fieldUniqueNameMap)
|
||||
|
||||
if d.isTriedOptimize {
|
||||
return
|
||||
}
|
||||
fieldMap := map[string]*structFieldSet{}
|
||||
conflicted := map[string]struct{}{}
|
||||
for k, v := range d.fieldMap {
|
||||
key := strings.ToLower(k)
|
||||
if key != k {
|
||||
if key != toASCIILower(k) {
|
||||
d.isTriedOptimize = true
|
||||
return
|
||||
}
|
||||
// a key with the same lower-case form already exists (e.g. Hello and HELLO share the same lower-case key)
|
||||
if _, exists := conflicted[key]; exists {
|
||||
d.isTriedOptimize = true
|
||||
return
|
||||
}
|
||||
conflicted[key] = struct{}{}
|
||||
}
|
||||
if field, exists := fieldMap[key]; exists {
|
||||
if field != v {
|
||||
d.isTriedOptimize = true
|
||||
return
|
||||
}
|
||||
}
|
||||
fieldMap[key] = v
|
||||
}
|
||||
|
||||
if len(fieldMap) > allowOptimizeMaxFieldLen {
|
||||
d.isTriedOptimize = true
|
||||
return
|
||||
}
|
||||
|
||||
var maxKeyLen int
|
||||
sortedKeys := []string{}
|
||||
for key := range fieldMap {
|
||||
keyLen := len(key)
|
||||
if keyLen > allowOptimizeMaxKeyLen {
|
||||
d.isTriedOptimize = true
|
||||
return
|
||||
}
|
||||
if maxKeyLen < keyLen {
|
||||
maxKeyLen = keyLen
|
||||
}
|
||||
sortedKeys = append(sortedKeys, key)
|
||||
}
|
||||
sort.Strings(sortedKeys)
|
||||
|
||||
// By allocating one more slot than `maxKeyLen`,
// the decoder avoids comparing the key index against the bitmap length on every iteration.
|
||||
bitmapLen := maxKeyLen + 1
|
||||
if len(sortedKeys) <= 8 {
|
||||
keyBitmap := make([][256]uint8, bitmapLen)
|
||||
for i, key := range sortedKeys {
|
||||
for j := 0; j < len(key); j++ {
|
||||
c := key[j]
|
||||
keyBitmap[j][c] |= (1 << uint(i))
|
||||
}
|
||||
d.sortedFieldSets = append(d.sortedFieldSets, fieldMap[key])
|
||||
}
|
||||
d.keyBitmapUint8 = keyBitmap
|
||||
d.keyDecoder = decodeKeyByBitmapUint8
|
||||
d.keyStreamDecoder = decodeKeyByBitmapUint8Stream
|
||||
} else {
|
||||
keyBitmap := make([][256]uint16, bitmapLen)
|
||||
for i, key := range sortedKeys {
|
||||
for j := 0; j < len(key); j++ {
|
||||
c := key[j]
|
||||
keyBitmap[j][c] |= (1 << uint(i))
|
||||
}
|
||||
d.sortedFieldSets = append(d.sortedFieldSets, fieldMap[key])
|
||||
}
|
||||
d.keyBitmapUint16 = keyBitmap
|
||||
d.keyDecoder = decodeKeyByBitmapUint16
|
||||
d.keyStreamDecoder = decodeKeyByBitmapUint16Stream
|
||||
}
|
||||
}
|
||||
|
||||
// decode from '\uXXXX'
|
||||
func decodeKeyCharByUnicodeRune(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||
const defaultOffset = 4
|
||||
const surrogateOffset = 6
|
||||
|
||||
if cursor+defaultOffset >= int64(len(buf)) {
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("escaped string", cursor)
|
||||
}
|
||||
|
||||
r := unicodeToRune(buf[cursor : cursor+defaultOffset])
|
||||
if utf16.IsSurrogate(r) {
|
||||
cursor += defaultOffset
|
||||
if cursor+surrogateOffset >= int64(len(buf)) || buf[cursor] != '\\' || buf[cursor+1] != 'u' {
|
||||
return []byte(string(unicode.ReplacementChar)), cursor + defaultOffset - 1, nil
|
||||
}
|
||||
cursor += 2
|
||||
r2 := unicodeToRune(buf[cursor : cursor+defaultOffset])
|
||||
if r := utf16.DecodeRune(r, r2); r != unicode.ReplacementChar {
|
||||
return []byte(string(r)), cursor + defaultOffset - 1, nil
|
||||
}
|
||||
}
|
||||
return []byte(string(r)), cursor + defaultOffset - 1, nil
|
||||
}
|
||||
|
||||
func decodeKeyCharByEscapedChar(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||
c := buf[cursor]
|
||||
cursor++
|
||||
switch c {
|
||||
case '"':
|
||||
return []byte{'"'}, cursor, nil
|
||||
case '\\':
|
||||
return []byte{'\\'}, cursor, nil
|
||||
case '/':
|
||||
return []byte{'/'}, cursor, nil
|
||||
case 'b':
|
||||
return []byte{'\b'}, cursor, nil
|
||||
case 'f':
|
||||
return []byte{'\f'}, cursor, nil
|
||||
case 'n':
|
||||
return []byte{'\n'}, cursor, nil
|
||||
case 'r':
|
||||
return []byte{'\r'}, cursor, nil
|
||||
case 't':
|
||||
return []byte{'\t'}, cursor, nil
|
||||
case 'u':
|
||||
return decodeKeyCharByUnicodeRune(buf, cursor)
|
||||
}
|
||||
return nil, cursor, nil
|
||||
}
|
||||
|
||||
func decodeKeyByBitmapUint8(d *structDecoder, buf []byte, cursor int64) (int64, *structFieldSet, error) {
|
||||
var (
|
||||
curBit uint8 = math.MaxUint8
|
||||
)
|
||||
b := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||
for {
|
||||
switch char(b, cursor) {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
case '"':
|
||||
cursor++
|
||||
c := char(b, cursor)
|
||||
switch c {
|
||||
case '"':
|
||||
cursor++
|
||||
return cursor, nil, nil
|
||||
case nul:
|
||||
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||
}
|
||||
keyIdx := 0
|
||||
bitmap := d.keyBitmapUint8
|
||||
start := cursor
|
||||
for {
|
||||
c := char(b, cursor)
|
||||
switch c {
|
||||
case '"':
|
||||
fieldSetIndex := bits.TrailingZeros8(curBit)
|
||||
field := d.sortedFieldSets[fieldSetIndex]
|
||||
keyLen := cursor - start
|
||||
cursor++
|
||||
if keyLen < field.keyLen {
|
||||
// early match
|
||||
return cursor, nil, nil
|
||||
}
|
||||
return cursor, field, nil
|
||||
case nul:
|
||||
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||
case '\\':
|
||||
cursor++
|
||||
chars, nextCursor, err := decodeKeyCharByEscapedChar(buf, cursor)
|
||||
if err != nil {
|
||||
return 0, nil, err
|
||||
}
|
||||
for _, c := range chars {
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
return decodeKeyNotFound(b, cursor)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
cursor = nextCursor
|
||||
default:
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
return decodeKeyNotFound(b, cursor)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
default:
|
||||
return cursor, nil, errors.ErrInvalidBeginningOfValue(char(b, cursor), cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func decodeKeyByBitmapUint16(d *structDecoder, buf []byte, cursor int64) (int64, *structFieldSet, error) {
|
||||
var (
|
||||
curBit uint16 = math.MaxUint16
|
||||
)
|
||||
b := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||
for {
|
||||
switch char(b, cursor) {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
case '"':
|
||||
cursor++
|
||||
c := char(b, cursor)
|
||||
switch c {
|
||||
case '"':
|
||||
cursor++
|
||||
return cursor, nil, nil
|
||||
case nul:
|
||||
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||
}
|
||||
keyIdx := 0
|
||||
bitmap := d.keyBitmapUint16
|
||||
start := cursor
|
||||
for {
|
||||
c := char(b, cursor)
|
||||
switch c {
|
||||
case '"':
|
||||
fieldSetIndex := bits.TrailingZeros16(curBit)
|
||||
field := d.sortedFieldSets[fieldSetIndex]
|
||||
keyLen := cursor - start
|
||||
cursor++
|
||||
if keyLen < field.keyLen {
|
||||
// early match
|
||||
return cursor, nil, nil
|
||||
}
|
||||
return cursor, field, nil
|
||||
case nul:
|
||||
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||
case '\\':
|
||||
cursor++
|
||||
chars, nextCursor, err := decodeKeyCharByEscapedChar(buf, cursor)
|
||||
if err != nil {
|
||||
return 0, nil, err
|
||||
}
|
||||
for _, c := range chars {
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
return decodeKeyNotFound(b, cursor)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
cursor = nextCursor
|
||||
default:
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
return decodeKeyNotFound(b, cursor)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
default:
|
||||
return cursor, nil, errors.ErrInvalidBeginningOfValue(char(b, cursor), cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func decodeKeyNotFound(b unsafe.Pointer, cursor int64) (int64, *structFieldSet, error) {
|
||||
for {
|
||||
cursor++
|
||||
switch char(b, cursor) {
|
||||
case '"':
|
||||
cursor++
|
||||
return cursor, nil, nil
|
||||
case '\\':
|
||||
cursor++
|
||||
if char(b, cursor) == nul {
|
||||
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||
}
|
||||
case nul:
|
||||
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func decodeKey(d *structDecoder, buf []byte, cursor int64) (int64, *structFieldSet, error) {
|
||||
key, c, err := d.stringDecoder.decodeByte(buf, cursor)
|
||||
if err != nil {
|
||||
return 0, nil, err
|
||||
}
|
||||
cursor = c
|
||||
k := *(*string)(unsafe.Pointer(&key))
|
||||
field, exists := d.fieldMap[k]
|
||||
if !exists {
|
||||
return cursor, nil, nil
|
||||
}
|
||||
return cursor, field, nil
|
||||
}
|
||||
|
||||
func decodeKeyByBitmapUint8Stream(d *structDecoder, s *Stream) (*structFieldSet, string, error) {
|
||||
var (
|
||||
curBit uint8 = math.MaxUint8
|
||||
)
|
||||
_, cursor, p := s.stat()
|
||||
for {
|
||||
switch char(p, cursor) {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return nil, "", errors.ErrInvalidBeginningOfValue(char(p, cursor), s.totalOffset())
|
||||
case '"':
|
||||
cursor++
|
||||
FIRST_CHAR:
|
||||
start := cursor
|
||||
switch char(p, cursor) {
|
||||
case '"':
|
||||
cursor++
|
||||
s.cursor = cursor
|
||||
return nil, "", nil
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
goto FIRST_CHAR
|
||||
}
|
||||
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
}
|
||||
keyIdx := 0
|
||||
bitmap := d.keyBitmapUint8
|
||||
for {
|
||||
c := char(p, cursor)
|
||||
switch c {
|
||||
case '"':
|
||||
fieldSetIndex := bits.TrailingZeros8(curBit)
|
||||
field := d.sortedFieldSets[fieldSetIndex]
|
||||
keyLen := cursor - start
|
||||
cursor++
|
||||
s.cursor = cursor
|
||||
if keyLen < field.keyLen {
|
||||
// early match
|
||||
return nil, field.key, nil
|
||||
}
|
||||
return field, field.key, nil
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
case '\\':
|
||||
s.cursor = cursor + 1 // skip '\' char
|
||||
chars, err := decodeKeyCharByEscapeCharStream(s)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
cursor = s.cursor
|
||||
for _, c := range chars {
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
s.cursor = cursor
|
||||
return decodeKeyNotFoundStream(s, start)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
default:
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
s.cursor = cursor
|
||||
return decodeKeyNotFoundStream(s, start)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
default:
|
||||
return nil, "", errors.ErrInvalidBeginningOfValue(char(p, cursor), s.totalOffset())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func decodeKeyByBitmapUint16Stream(d *structDecoder, s *Stream) (*structFieldSet, string, error) {
|
||||
var (
|
||||
curBit uint16 = math.MaxUint16
|
||||
)
|
||||
_, cursor, p := s.stat()
|
||||
for {
|
||||
switch char(p, cursor) {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return nil, "", errors.ErrInvalidBeginningOfValue(char(p, cursor), s.totalOffset())
|
||||
case '"':
|
||||
cursor++
|
||||
FIRST_CHAR:
|
||||
start := cursor
|
||||
switch char(p, cursor) {
|
||||
case '"':
|
||||
cursor++
|
||||
s.cursor = cursor
|
||||
return nil, "", nil
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
goto FIRST_CHAR
|
||||
}
|
||||
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
}
|
||||
keyIdx := 0
|
||||
bitmap := d.keyBitmapUint16
|
||||
for {
|
||||
c := char(p, cursor)
|
||||
switch c {
|
||||
case '"':
|
||||
fieldSetIndex := bits.TrailingZeros16(curBit)
|
||||
field := d.sortedFieldSets[fieldSetIndex]
|
||||
keyLen := cursor - start
|
||||
cursor++
|
||||
s.cursor = cursor
|
||||
if keyLen < field.keyLen {
|
||||
// early match
|
||||
return nil, field.key, nil
|
||||
}
|
||||
return field, field.key, nil
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if s.read() {
|
||||
_, cursor, p = s.stat()
|
||||
continue
|
||||
}
|
||||
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
case '\\':
|
||||
s.cursor = cursor + 1 // skip '\' char
|
||||
chars, err := decodeKeyCharByEscapeCharStream(s)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
cursor = s.cursor
|
||||
for _, c := range chars {
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
s.cursor = cursor
|
||||
return decodeKeyNotFoundStream(s, start)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
default:
|
||||
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||
if curBit == 0 {
|
||||
s.cursor = cursor
|
||||
return decodeKeyNotFoundStream(s, start)
|
||||
}
|
||||
keyIdx++
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
default:
|
||||
return nil, "", errors.ErrInvalidBeginningOfValue(char(p, cursor), s.totalOffset())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// decode from '\uXXXX'
|
||||
func decodeKeyCharByUnicodeRuneStream(s *Stream) ([]byte, error) {
|
||||
const defaultOffset = 4
|
||||
const surrogateOffset = 6
|
||||
|
||||
if s.cursor+defaultOffset >= s.length {
|
||||
if !s.read() {
|
||||
return nil, errors.ErrInvalidCharacter(s.char(), "escaped unicode char", s.totalOffset())
|
||||
}
|
||||
}
|
||||
|
||||
r := unicodeToRune(s.buf[s.cursor : s.cursor+defaultOffset])
|
||||
if utf16.IsSurrogate(r) {
|
||||
s.cursor += defaultOffset
|
||||
if s.cursor+surrogateOffset >= s.length {
|
||||
s.read()
|
||||
}
|
||||
if s.cursor+surrogateOffset >= s.length || s.buf[s.cursor] != '\\' || s.buf[s.cursor+1] != 'u' {
|
||||
s.cursor += defaultOffset - 1
|
||||
return []byte(string(unicode.ReplacementChar)), nil
|
||||
}
|
||||
r2 := unicodeToRune(s.buf[s.cursor+defaultOffset+2 : s.cursor+surrogateOffset])
|
||||
if r := utf16.DecodeRune(r, r2); r != unicode.ReplacementChar {
|
||||
s.cursor += defaultOffset - 1
|
||||
return []byte(string(r)), nil
|
||||
}
|
||||
}
|
||||
s.cursor += defaultOffset - 1
|
||||
return []byte(string(r)), nil
|
||||
}
|
||||
|
||||
func decodeKeyCharByEscapeCharStream(s *Stream) ([]byte, error) {
|
||||
c := s.buf[s.cursor]
|
||||
s.cursor++
|
||||
RETRY:
|
||||
switch c {
|
||||
case '"':
|
||||
return []byte{'"'}, nil
|
||||
case '\\':
|
||||
return []byte{'\\'}, nil
|
||||
case '/':
|
||||
return []byte{'/'}, nil
|
||||
case 'b':
|
||||
return []byte{'\b'}, nil
|
||||
case 'f':
|
||||
return []byte{'\f'}, nil
|
||||
case 'n':
|
||||
return []byte{'\n'}, nil
|
||||
case 'r':
|
||||
return []byte{'\r'}, nil
|
||||
case 't':
|
||||
return []byte{'\t'}, nil
|
||||
case 'u':
|
||||
return decodeKeyCharByUnicodeRuneStream(s)
|
||||
case nul:
|
||||
if !s.read() {
|
||||
return nil, errors.ErrInvalidCharacter(s.char(), "escaped char", s.totalOffset())
|
||||
}
|
||||
goto RETRY
|
||||
default:
|
||||
return nil, errors.ErrUnexpectedEndOfJSON("struct field", s.totalOffset())
|
||||
}
|
||||
}
|
||||
|
||||
func decodeKeyNotFoundStream(s *Stream, start int64) (*structFieldSet, string, error) {
|
||||
buf, cursor, p := s.stat()
|
||||
for {
|
||||
cursor++
|
||||
switch char(p, cursor) {
|
||||
case '"':
|
||||
b := buf[start:cursor]
|
||||
key := *(*string)(unsafe.Pointer(&b))
|
||||
cursor++
|
||||
s.cursor = cursor
|
||||
return nil, key, nil
|
||||
case '\\':
|
||||
cursor++
|
||||
if char(p, cursor) == nul {
|
||||
s.cursor = cursor
|
||||
if !s.read() {
|
||||
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
}
|
||||
buf, cursor, p = s.statForRetry()
|
||||
}
|
||||
case nul:
|
||||
s.cursor = cursor
|
||||
if !s.read() {
|
||||
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||
}
|
||||
buf, cursor, p = s.statForRetry()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func decodeKeyStream(d *structDecoder, s *Stream) (*structFieldSet, string, error) {
	key, err := d.stringDecoder.decodeStreamByte(s)
	if err != nil {
		return nil, "", err
	}
	k := *(*string)(unsafe.Pointer(&key))
	return d.fieldMap[k], k, nil
}

func (d *structDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	depth++
	if depth > maxDecodeNestingDepth {
		return errors.ErrExceededMaxDepth(s.char(), s.cursor)
	}

	c := s.skipWhiteSpace()
	switch c {
	case 'n':
		if err := nullBytes(s); err != nil {
			return err
		}
		return nil
	default:
		if s.char() != '{' {
			return errors.ErrInvalidBeginningOfValue(s.char(), s.totalOffset())
		}
	}
	s.cursor++
	if s.skipWhiteSpace() == '}' {
		s.cursor++
		return nil
	}
	var (
		seenFields   map[int]struct{}
		seenFieldNum int
	)
	firstWin := (s.Option.Flags & FirstWinOption) != 0
	if firstWin {
		seenFields = make(map[int]struct{}, d.fieldUniqueNameNum)
	}
	for {
		s.reset()
		field, key, err := d.keyStreamDecoder(d, s)
		if err != nil {
			return err
		}
		if s.skipWhiteSpace() != ':' {
			return errors.ErrExpected("colon after object key", s.totalOffset())
		}
		s.cursor++
		if field != nil {
			if field.err != nil {
				return field.err
			}
			if firstWin {
				if _, exists := seenFields[field.fieldIdx]; exists {
					if err := s.skipValue(depth); err != nil {
						return err
					}
				} else {
					if err := field.dec.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+field.offset)); err != nil {
						return err
					}
					seenFieldNum++
					if d.fieldUniqueNameNum <= seenFieldNum {
						return s.skipObject(depth)
					}
					seenFields[field.fieldIdx] = struct{}{}
				}
			} else {
				if err := field.dec.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+field.offset)); err != nil {
					return err
				}
			}
		} else if s.DisallowUnknownFields {
			return fmt.Errorf("json: unknown field %q", key)
		} else {
			if err := s.skipValue(depth); err != nil {
				return err
			}
		}
		c := s.skipWhiteSpace()
		if c == '}' {
			s.cursor++
			return nil
		}
		if c != ',' {
			return errors.ErrExpected("comma after object element", s.totalOffset())
		}
		s.cursor++
	}
}

func (d *structDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	buf := ctx.Buf
	depth++
	if depth > maxDecodeNestingDepth {
		return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
	}
	buflen := int64(len(buf))
	cursor = skipWhiteSpace(buf, cursor)
	b := (*sliceHeader)(unsafe.Pointer(&buf)).data
	switch char(b, cursor) {
	case 'n':
		if err := validateNull(buf, cursor); err != nil {
			return 0, err
		}
		cursor += 4
		return cursor, nil
	case '{':
	default:
		return 0, errors.ErrInvalidBeginningOfValue(char(b, cursor), cursor)
	}
	cursor++
	cursor = skipWhiteSpace(buf, cursor)
	if buf[cursor] == '}' {
		cursor++
		return cursor, nil
	}
	var (
		seenFields   map[int]struct{}
		seenFieldNum int
	)
	firstWin := (ctx.Option.Flags & FirstWinOption) != 0
	if firstWin {
		seenFields = make(map[int]struct{}, d.fieldUniqueNameNum)
	}
	for {
		c, field, err := d.keyDecoder(d, buf, cursor)
		if err != nil {
			return 0, err
		}
		cursor = skipWhiteSpace(buf, c)
		if char(b, cursor) != ':' {
			return 0, errors.ErrExpected("colon after object key", cursor)
		}
		cursor++
		if cursor >= buflen {
			return 0, errors.ErrExpected("object value after colon", cursor)
		}
		if field != nil {
			if field.err != nil {
				return 0, field.err
			}
			if firstWin {
				if _, exists := seenFields[field.fieldIdx]; exists {
					c, err := skipValue(buf, cursor, depth)
					if err != nil {
						return 0, err
					}
					cursor = c
				} else {
					c, err := field.dec.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+field.offset))
					if err != nil {
						return 0, err
					}
					cursor = c
					seenFieldNum++
					if d.fieldUniqueNameNum <= seenFieldNum {
						return skipObject(buf, cursor, depth)
					}
					seenFields[field.fieldIdx] = struct{}{}
				}
			} else {
				c, err := field.dec.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+field.offset))
				if err != nil {
					return 0, err
				}
				cursor = c
			}
		} else {
			c, err := skipValue(buf, cursor, depth)
			if err != nil {
				return 0, err
			}
			cursor = c
		}
		cursor = skipWhiteSpace(buf, cursor)
		if char(b, cursor) == '}' {
			cursor++
			return cursor, nil
		}
		if char(b, cursor) != ',' {
			return 0, errors.ErrExpected("comma after object element", cursor)
		}
		cursor++
	}
}

func (d *structDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
	return nil, 0, fmt.Errorf("json: struct decoder does not support decode path")
}
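
The FirstWinOption branch above changes duplicate-key handling from the default "last value wins" to "first value wins": once a field has been decoded, later occurrences are skipped with skipValue/skipObject. A minimal usage sketch; UnmarshalWithOption and DecodeFieldPriorityFirstWin are taken from the public go-json API and should be treated as assumptions of this example rather than as part of the diff above.

package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	var v struct {
		Name string `json:"name"`
	}
	src := []byte(`{"name":"first","name":"second"}`)
	// With first-win priority the decoder keeps "first" and skips the duplicate.
	if err := json.UnmarshalWithOption(src, &v, json.DecodeFieldPriorityFirstWin()); err != nil {
		panic(err)
	}
	fmt.Println(v.Name) // expected: first
}

Without the option the decoder, like encoding/json, lets the last duplicate win.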
30
internal/decoder/type.go
Normal file
@@ -0,0 +1,30 @@
package decoder

import (
	"context"
	"encoding"
	"encoding/json"
	"reflect"
	"unsafe"
)

type Decoder interface {
	Decode(*RuntimeContext, int64, int64, unsafe.Pointer) (int64, error)
	DecodePath(*RuntimeContext, int64, int64) ([][]byte, int64, error)
	DecodeStream(*Stream, int64, unsafe.Pointer) error
}

const (
	nul                   = '\000'
	maxDecodeNestingDepth = 10000
)

type unmarshalerContext interface {
	UnmarshalJSON(context.Context, []byte) error
}

var (
	unmarshalJSONType        = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()
	unmarshalJSONContextType = reflect.TypeOf((*unmarshalerContext)(nil)).Elem()
	unmarshalTextType        = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
)
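
unmarshalerContext above is what allows a type to receive a context.Context while it is being decoded. A small sketch of a type satisfying it; the UnmarshalContext entry point and the ctxKey/TenantID names are assumptions made for this example.

package main

import (
	"context"
	"fmt"

	json "github.com/goccy/go-json"
)

type ctxKey struct{}

// TenantID is a hypothetical type that prefixes the decoded value with a
// request-scoped tenant taken from the context.
type TenantID string

func (t *TenantID) UnmarshalJSON(ctx context.Context, data []byte) error {
	tenant, _ := ctx.Value(ctxKey{}).(string)
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	*t = TenantID(tenant + "/" + s)
	return nil
}

func main() {
	ctx := context.WithValue(context.Background(), ctxKey{}, "acme")
	var id TenantID
	if err := json.UnmarshalContext(ctx, []byte(`"user-1"`), &id); err != nil {
		panic(err)
	}
	fmt.Println(id) // acme/user-1
}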
194
internal/decoder/uint.go
Normal file
@@ -0,0 +1,194 @@
package decoder

import (
	"fmt"
	"reflect"
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
	"github.com/goccy/go-json/internal/runtime"
)

type uintDecoder struct {
	typ        *runtime.Type
	kind       reflect.Kind
	op         func(unsafe.Pointer, uint64)
	structName string
	fieldName  string
}

func newUintDecoder(typ *runtime.Type, structName, fieldName string, op func(unsafe.Pointer, uint64)) *uintDecoder {
	return &uintDecoder{
		typ:        typ,
		kind:       typ.Kind(),
		op:         op,
		structName: structName,
		fieldName:  fieldName,
	}
}

func (d *uintDecoder) typeError(buf []byte, offset int64) *errors.UnmarshalTypeError {
	return &errors.UnmarshalTypeError{
		Value:  fmt.Sprintf("number %s", string(buf)),
		Type:   runtime.RType2Type(d.typ),
		Offset: offset,
	}
}

var (
	pow10u64 = [...]uint64{
		1e00, 1e01, 1e02, 1e03, 1e04, 1e05, 1e06, 1e07, 1e08, 1e09,
		1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, 1e19,
	}
	pow10u64Len = len(pow10u64)
)

func (d *uintDecoder) parseUint(b []byte) (uint64, error) {
	maxDigit := len(b)
	if maxDigit > pow10u64Len {
		return 0, fmt.Errorf("invalid length of number")
	}
	sum := uint64(0)
	for i := 0; i < maxDigit; i++ {
		c := uint64(b[i]) - 48
		digitValue := pow10u64[maxDigit-i-1]
		sum += c * digitValue
	}
	return sum, nil
}

func (d *uintDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
	for {
		switch s.char() {
		case ' ', '\n', '\t', '\r':
			s.cursor++
			continue
		case '0':
			s.cursor++
			return numZeroBuf, nil
		case '1', '2', '3', '4', '5', '6', '7', '8', '9':
			start := s.cursor
			for {
				s.cursor++
				if numTable[s.char()] {
					continue
				} else if s.char() == nul {
					if s.read() {
						s.cursor-- // for retry current character
						continue
					}
				}
				break
			}
			num := s.buf[start:s.cursor]
			return num, nil
		case 'n':
			if err := nullBytes(s); err != nil {
				return nil, err
			}
			return nil, nil
		case nul:
			if s.read() {
				continue
			}
		default:
			return nil, d.typeError([]byte{s.char()}, s.totalOffset())
		}
		break
	}
	return nil, errors.ErrUnexpectedEndOfJSON("number(unsigned integer)", s.totalOffset())
}

func (d *uintDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
	for {
		switch buf[cursor] {
		case ' ', '\n', '\t', '\r':
			cursor++
			continue
		case '0':
			cursor++
			return numZeroBuf, cursor, nil
		case '1', '2', '3', '4', '5', '6', '7', '8', '9':
			start := cursor
			cursor++
			for numTable[buf[cursor]] {
				cursor++
			}
			num := buf[start:cursor]
			return num, cursor, nil
		case 'n':
			if err := validateNull(buf, cursor); err != nil {
				return nil, 0, err
			}
			cursor += 4
			return nil, cursor, nil
		default:
			return nil, 0, d.typeError([]byte{buf[cursor]}, cursor)
		}
	}
}

func (d *uintDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	bytes, err := d.decodeStreamByte(s)
	if err != nil {
		return err
	}
	if bytes == nil {
		return nil
	}
	u64, err := d.parseUint(bytes)
	if err != nil {
		return d.typeError(bytes, s.totalOffset())
	}
	switch d.kind {
	case reflect.Uint8:
		if (1 << 8) <= u64 {
			return d.typeError(bytes, s.totalOffset())
		}
	case reflect.Uint16:
		if (1 << 16) <= u64 {
			return d.typeError(bytes, s.totalOffset())
		}
	case reflect.Uint32:
		if (1 << 32) <= u64 {
			return d.typeError(bytes, s.totalOffset())
		}
	}
	d.op(p, u64)
	return nil
}

func (d *uintDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	bytes, c, err := d.decodeByte(ctx.Buf, cursor)
	if err != nil {
		return 0, err
	}
	if bytes == nil {
		return c, nil
	}
	cursor = c
	u64, err := d.parseUint(bytes)
	if err != nil {
		return 0, d.typeError(bytes, cursor)
	}
	switch d.kind {
	case reflect.Uint8:
		if (1 << 8) <= u64 {
			return 0, d.typeError(bytes, cursor)
		}
	case reflect.Uint16:
		if (1 << 16) <= u64 {
			return 0, d.typeError(bytes, cursor)
		}
	case reflect.Uint32:
		if (1 << 32) <= u64 {
			return 0, d.typeError(bytes, cursor)
		}
	}
	d.op(p, u64)
	return cursor, nil
}

func (d *uintDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
	return nil, 0, fmt.Errorf("json: uint decoder does not support decode path")
}
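
parseUint above avoids strconv by multiplying each ASCII digit with a power of ten from the pow10u64 table; inputs longer than the table (20 digits) are rejected, and a 20-digit value that exceeds the uint64 range wraps rather than erroring. A standalone sketch of the same positional arithmetic, with hypothetical names:

package main

import "fmt"

// parseDigits mirrors the table-based idea: the i-th digit (from the left)
// of an n-digit number contributes digit * 10^(n-1-i).
func parseDigits(b []byte) (uint64, error) {
	pow10 := [...]uint64{
		1e00, 1e01, 1e02, 1e03, 1e04, 1e05, 1e06, 1e07, 1e08, 1e09,
		1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, 1e19,
	}
	if len(b) > len(pow10) {
		return 0, fmt.Errorf("invalid length of number")
	}
	var sum uint64
	for i, c := range b {
		sum += uint64(c-'0') * pow10[len(b)-i-1]
	}
	return sum, nil
}

func main() {
	v, _ := parseDigits([]byte("4096"))
	fmt.Println(v) // 4*1000 + 0*100 + 9*10 + 6*1 = 4096
}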
104
internal/decoder/unmarshal_json.go
Normal file
@@ -0,0 +1,104 @@
package decoder

import (
	"context"
	"encoding/json"
	"fmt"
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
	"github.com/goccy/go-json/internal/runtime"
)

type unmarshalJSONDecoder struct {
	typ        *runtime.Type
	structName string
	fieldName  string
}

func newUnmarshalJSONDecoder(typ *runtime.Type, structName, fieldName string) *unmarshalJSONDecoder {
	return &unmarshalJSONDecoder{
		typ:        typ,
		structName: structName,
		fieldName:  fieldName,
	}
}

func (d *unmarshalJSONDecoder) annotateError(cursor int64, err error) {
	switch e := err.(type) {
	case *errors.UnmarshalTypeError:
		e.Struct = d.structName
		e.Field = d.fieldName
	case *errors.SyntaxError:
		e.Offset = cursor
	}
}

func (d *unmarshalJSONDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	s.skipWhiteSpace()
	start := s.cursor
	if err := s.skipValue(depth); err != nil {
		return err
	}
	src := s.buf[start:s.cursor]
	dst := make([]byte, len(src))
	copy(dst, src)

	v := *(*interface{})(unsafe.Pointer(&emptyInterface{
		typ: d.typ,
		ptr: p,
	}))
	switch v := v.(type) {
	case unmarshalerContext:
		var ctx context.Context
		if (s.Option.Flags & ContextOption) != 0 {
			ctx = s.Option.Context
		} else {
			ctx = context.Background()
		}
		if err := v.UnmarshalJSON(ctx, dst); err != nil {
			d.annotateError(s.cursor, err)
			return err
		}
	case json.Unmarshaler:
		if err := v.UnmarshalJSON(dst); err != nil {
			d.annotateError(s.cursor, err)
			return err
		}
	}
	return nil
}

func (d *unmarshalJSONDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	buf := ctx.Buf
	cursor = skipWhiteSpace(buf, cursor)
	start := cursor
	end, err := skipValue(buf, cursor, depth)
	if err != nil {
		return 0, err
	}
	src := buf[start:end]
	dst := make([]byte, len(src))
	copy(dst, src)

	v := *(*interface{})(unsafe.Pointer(&emptyInterface{
		typ: d.typ,
		ptr: p,
	}))
	if (ctx.Option.Flags & ContextOption) != 0 {
		if err := v.(unmarshalerContext).UnmarshalJSON(ctx.Option.Context, dst); err != nil {
			d.annotateError(cursor, err)
			return 0, err
		}
	} else {
		if err := v.(json.Unmarshaler).UnmarshalJSON(dst); err != nil {
			d.annotateError(cursor, err)
			return 0, err
		}
	}
	return end, nil
}

func (d *unmarshalJSONDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
	return nil, 0, fmt.Errorf("json: unmarshal json decoder does not support decode path")
}
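
Both paths above copy the raw value (src) into a fresh buffer (dst) before calling the user's UnmarshalJSON, so with this decoder the callee receives a slice it may keep. A sketch of a json.Unmarshaler that simply captures those bytes; RawCapture is a hypothetical type and the entry point is the package-level Unmarshal:

package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

// RawCapture keeps the raw JSON value it was handed.
type RawCapture struct {
	data []byte
}

func (r *RawCapture) UnmarshalJSON(b []byte) error {
	r.data = b // the decoder above passes a dedicated copy of the value
	return nil
}

func main() {
	var v struct {
		Payload RawCapture `json:"payload"`
	}
	if err := json.Unmarshal([]byte(`{"payload":{"a":[1,2,3]}}`), &v); err != nil {
		panic(err)
	}
	fmt.Println(string(v.Payload.data)) // {"a":[1,2,3]}
}

Portable implementations should still copy b, since the standard library does not promise a private buffer.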
285
internal/decoder/unmarshal_text.go
Normal file
@@ -0,0 +1,285 @@
package decoder
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding"
|
||||
"fmt"
|
||||
"unicode"
|
||||
"unicode/utf16"
|
||||
"unicode/utf8"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type unmarshalTextDecoder struct {
|
||||
typ *runtime.Type
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newUnmarshalTextDecoder(typ *runtime.Type, structName, fieldName string) *unmarshalTextDecoder {
|
||||
return &unmarshalTextDecoder{
|
||||
typ: typ,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *unmarshalTextDecoder) annotateError(cursor int64, err error) {
|
||||
switch e := err.(type) {
|
||||
case *errors.UnmarshalTypeError:
|
||||
e.Struct = d.structName
|
||||
e.Field = d.fieldName
|
||||
case *errors.SyntaxError:
|
||||
e.Offset = cursor
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
nullbytes = []byte(`null`)
|
||||
)
|
||||
|
||||
func (d *unmarshalTextDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
s.skipWhiteSpace()
|
||||
start := s.cursor
|
||||
if err := s.skipValue(depth); err != nil {
|
||||
return err
|
||||
}
|
||||
src := s.buf[start:s.cursor]
|
||||
if len(src) > 0 {
|
||||
switch src[0] {
|
||||
case '[':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "array",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case '{':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "object",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "number",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case 'n':
|
||||
if bytes.Equal(src, nullbytes) {
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
dst := make([]byte, len(src))
|
||||
copy(dst, src)
|
||||
|
||||
if b, ok := unquoteBytes(dst); ok {
|
||||
dst = b
|
||||
}
|
||||
v := *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||
typ: d.typ,
|
||||
ptr: p,
|
||||
}))
|
||||
if err := v.(encoding.TextUnmarshaler).UnmarshalText(dst); err != nil {
|
||||
d.annotateError(s.cursor, err)
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *unmarshalTextDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
start := cursor
|
||||
end, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
src := buf[start:end]
|
||||
if len(src) > 0 {
|
||||
switch src[0] {
|
||||
case '[':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "array",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case '{':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "object",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "number",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case 'n':
|
||||
if bytes.Equal(src, nullbytes) {
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return end, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if s, ok := unquoteBytes(src); ok {
|
||||
src = s
|
||||
}
|
||||
v := *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||
typ: d.typ,
|
||||
ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)),
|
||||
}))
|
||||
if err := v.(encoding.TextUnmarshaler).UnmarshalText(src); err != nil {
|
||||
d.annotateError(cursor, err)
|
||||
return 0, err
|
||||
}
|
||||
return end, nil
|
||||
}
|
||||
|
||||
func (d *unmarshalTextDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
return nil, 0, fmt.Errorf("json: unmarshal text decoder does not support decode path")
|
||||
}
|
||||
|
||||
func unquoteBytes(s []byte) (t []byte, ok bool) { //nolint: nonamedreturns
|
||||
length := len(s)
|
||||
if length < 2 || s[0] != '"' || s[length-1] != '"' {
|
||||
return
|
||||
}
|
||||
s = s[1 : length-1]
|
||||
length -= 2
|
||||
|
||||
// Check for unusual characters. If there are none,
|
||||
// then no unquoting is needed, so return a slice of the
|
||||
// original bytes.
|
||||
r := 0
|
||||
for r < length {
|
||||
c := s[r]
|
||||
if c == '\\' || c == '"' || c < ' ' {
|
||||
break
|
||||
}
|
||||
if c < utf8.RuneSelf {
|
||||
r++
|
||||
continue
|
||||
}
|
||||
rr, size := utf8.DecodeRune(s[r:])
|
||||
if rr == utf8.RuneError && size == 1 {
|
||||
break
|
||||
}
|
||||
r += size
|
||||
}
|
||||
if r == length {
|
||||
return s, true
|
||||
}
|
||||
|
||||
b := make([]byte, length+2*utf8.UTFMax)
|
||||
w := copy(b, s[0:r])
|
||||
for r < length {
|
||||
// Out of room? Can only happen if s is full of
|
||||
// malformed UTF-8 and we're replacing each
|
||||
// byte with RuneError.
|
||||
if w >= len(b)-2*utf8.UTFMax {
|
||||
nb := make([]byte, (len(b)+utf8.UTFMax)*2)
|
||||
copy(nb, b[0:w])
|
||||
b = nb
|
||||
}
|
||||
switch c := s[r]; {
|
||||
case c == '\\':
|
||||
r++
|
||||
if r >= length {
|
||||
return
|
||||
}
|
||||
switch s[r] {
|
||||
default:
|
||||
return
|
||||
case '"', '\\', '/', '\'':
|
||||
b[w] = s[r]
|
||||
r++
|
||||
w++
|
||||
case 'b':
|
||||
b[w] = '\b'
|
||||
r++
|
||||
w++
|
||||
case 'f':
|
||||
b[w] = '\f'
|
||||
r++
|
||||
w++
|
||||
case 'n':
|
||||
b[w] = '\n'
|
||||
r++
|
||||
w++
|
||||
case 'r':
|
||||
b[w] = '\r'
|
||||
r++
|
||||
w++
|
||||
case 't':
|
||||
b[w] = '\t'
|
||||
r++
|
||||
w++
|
||||
case 'u':
|
||||
r--
|
||||
rr := getu4(s[r:])
|
||||
if rr < 0 {
|
||||
return
|
||||
}
|
||||
r += 6
|
||||
if utf16.IsSurrogate(rr) {
|
||||
rr1 := getu4(s[r:])
|
||||
if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar {
|
||||
// A valid pair; consume.
|
||||
r += 6
|
||||
w += utf8.EncodeRune(b[w:], dec)
|
||||
break
|
||||
}
|
||||
// Invalid surrogate; fall back to replacement rune.
|
||||
rr = unicode.ReplacementChar
|
||||
}
|
||||
w += utf8.EncodeRune(b[w:], rr)
|
||||
}
|
||||
|
||||
// Quote, control characters are invalid.
|
||||
case c == '"', c < ' ':
|
||||
return
|
||||
|
||||
// ASCII
|
||||
case c < utf8.RuneSelf:
|
||||
b[w] = c
|
||||
r++
|
||||
w++
|
||||
|
||||
// Coerce to well-formed UTF-8.
|
||||
default:
|
||||
rr, size := utf8.DecodeRune(s[r:])
|
||||
r += size
|
||||
w += utf8.EncodeRune(b[w:], rr)
|
||||
}
|
||||
}
|
||||
return b[0:w], true
|
||||
}
|
||||
|
||||
func getu4(s []byte) rune {
|
||||
if len(s) < 6 || s[0] != '\\' || s[1] != 'u' {
|
||||
return -1
|
||||
}
|
||||
var r rune
|
||||
for _, c := range s[2:6] {
|
||||
switch {
|
||||
case '0' <= c && c <= '9':
|
||||
c = c - '0'
|
||||
case 'a' <= c && c <= 'f':
|
||||
c = c - 'a' + 10
|
||||
case 'A' <= c && c <= 'F':
|
||||
c = c - 'A' + 10
|
||||
default:
|
||||
return -1
|
||||
}
|
||||
r = r*16 + rune(c)
|
||||
}
|
||||
return r
|
||||
}
|
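
unmarshalTextDecoder above rejects arrays, objects and numbers, maps null to a nil pointer, unquotes the string (including \uXXXX escapes via unquoteBytes/getu4), and hands the plain text to encoding.TextUnmarshaler. A minimal sketch of a type decoded that way; Level is a hypothetical type and the entry point is the package-level Unmarshal:

package main

import (
	"fmt"
	"strings"

	json "github.com/goccy/go-json"
)

// Level is decoded from a quoted JSON string via UnmarshalText,
// e.g. "WARN" -> Level("warn").
type Level string

func (l *Level) UnmarshalText(text []byte) error {
	switch v := strings.ToLower(string(text)); v {
	case "debug", "info", "warn", "error":
		*l = Level(v)
		return nil
	default:
		return fmt.Errorf("unknown level %q", text)
	}
}

func main() {
	var cfg struct {
		Level Level `json:"level"`
	}
	if err := json.Unmarshal([]byte(`{"level":"WARN"}`), &cfg); err != nil {
		panic(err)
	}
	fmt.Println(cfg.Level) // warn
}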
73
internal/decoder/wrapped_string.go
Normal file
@@ -0,0 +1,73 @@
package decoder

import (
	"fmt"
	"reflect"
	"unsafe"

	"github.com/goccy/go-json/internal/runtime"
)

type wrappedStringDecoder struct {
	typ           *runtime.Type
	dec           Decoder
	stringDecoder *stringDecoder
	structName    string
	fieldName     string
	isPtrType     bool
}

func newWrappedStringDecoder(typ *runtime.Type, dec Decoder, structName, fieldName string) *wrappedStringDecoder {
	return &wrappedStringDecoder{
		typ:           typ,
		dec:           dec,
		stringDecoder: newStringDecoder(structName, fieldName),
		structName:    structName,
		fieldName:     fieldName,
		isPtrType:     typ.Kind() == reflect.Ptr,
	}
}

func (d *wrappedStringDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	bytes, err := d.stringDecoder.decodeStreamByte(s)
	if err != nil {
		return err
	}
	if bytes == nil {
		if d.isPtrType {
			*(*unsafe.Pointer)(p) = nil
		}
		return nil
	}
	b := make([]byte, len(bytes)+1)
	copy(b, bytes)
	if _, err := d.dec.Decode(&RuntimeContext{Buf: b}, 0, depth, p); err != nil {
		return err
	}
	return nil
}

func (d *wrappedStringDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	bytes, c, err := d.stringDecoder.decodeByte(ctx.Buf, cursor)
	if err != nil {
		return 0, err
	}
	if bytes == nil {
		if d.isPtrType {
			*(*unsafe.Pointer)(p) = nil
		}
		return c, nil
	}
	bytes = append(bytes, nul)
	oldBuf := ctx.Buf
	ctx.Buf = bytes
	if _, err := d.dec.Decode(ctx, 0, depth, p); err != nil {
		return 0, err
	}
	ctx.Buf = oldBuf
	return c, nil
}

func (d *wrappedStringDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
	return nil, 0, fmt.Errorf("json: wrapped string decoder does not support decode path")
}
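
wrappedStringDecoder above backs the `json:",string"` struct-tag option: it first decodes the quoted string, copies it into a nul-terminated buffer, and re-runs the wrapped decoder over that buffer. A usage sketch:

package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	// With the ",string" option the number arrives quoted and is unwrapped
	// before the underlying int decoder runs.
	var v struct {
		ID int `json:"id,string"`
	}
	if err := json.Unmarshal([]byte(`{"id":"1234"}`), &v); err != nil {
		panic(err)
	}
	fmt.Println(v.ID) // 1234
}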
1023
internal/encoder/code.go
Normal file
File diff suppressed because it is too large
286
internal/encoder/compact.go
Normal file
@@ -0,0 +1,286 @@
package encoder
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
var (
|
||||
isWhiteSpace = [256]bool{
|
||||
' ': true,
|
||||
'\n': true,
|
||||
'\t': true,
|
||||
'\r': true,
|
||||
}
|
||||
isHTMLEscapeChar = [256]bool{
|
||||
'<': true,
|
||||
'>': true,
|
||||
'&': true,
|
||||
}
|
||||
nul = byte('\000')
|
||||
)
|
||||
|
||||
func Compact(buf *bytes.Buffer, src []byte, escape bool) error {
|
||||
if len(src) == 0 {
|
||||
return errors.ErrUnexpectedEndOfJSON("", 0)
|
||||
}
|
||||
buf.Grow(len(src))
|
||||
dst := buf.Bytes()
|
||||
|
||||
ctx := TakeRuntimeContext()
|
||||
ctxBuf := ctx.Buf[:0]
|
||||
ctxBuf = append(append(ctxBuf, src...), nul)
|
||||
ctx.Buf = ctxBuf
|
||||
|
||||
if err := compactAndWrite(buf, dst, ctxBuf, escape); err != nil {
|
||||
ReleaseRuntimeContext(ctx)
|
||||
return err
|
||||
}
|
||||
ReleaseRuntimeContext(ctx)
|
||||
return nil
|
||||
}
|
||||
|
||||
func compactAndWrite(buf *bytes.Buffer, dst []byte, src []byte, escape bool) error {
|
||||
dst, err := compact(dst, src, escape)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := buf.Write(dst); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func compact(dst, src []byte, escape bool) ([]byte, error) {
|
||||
buf, cursor, err := compactValue(dst, src, 0, escape)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := validateEndBuf(src, cursor); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
func validateEndBuf(src []byte, cursor int64) error {
|
||||
for {
|
||||
switch src[cursor] {
|
||||
case ' ', '\t', '\n', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case nul:
|
||||
return nil
|
||||
}
|
||||
return errors.ErrSyntax(
|
||||
fmt.Sprintf("invalid character '%c' after top-level value", src[cursor]),
|
||||
cursor+1,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
func skipWhiteSpace(buf []byte, cursor int64) int64 {
|
||||
LOOP:
|
||||
if isWhiteSpace[buf[cursor]] {
|
||||
cursor++
|
||||
goto LOOP
|
||||
}
|
||||
return cursor
|
||||
}
|
||||
|
||||
func compactValue(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) {
|
||||
for {
|
||||
switch src[cursor] {
|
||||
case ' ', '\t', '\n', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case '{':
|
||||
return compactObject(dst, src, cursor, escape)
|
||||
case '}':
|
||||
return nil, 0, errors.ErrSyntax("unexpected character '}'", cursor)
|
||||
case '[':
|
||||
return compactArray(dst, src, cursor, escape)
|
||||
case ']':
|
||||
return nil, 0, errors.ErrSyntax("unexpected character ']'", cursor)
|
||||
case '"':
|
||||
return compactString(dst, src, cursor, escape)
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return compactNumber(dst, src, cursor)
|
||||
case 't':
|
||||
return compactTrue(dst, src, cursor)
|
||||
case 'f':
|
||||
return compactFalse(dst, src, cursor)
|
||||
case 'n':
|
||||
return compactNull(dst, src, cursor)
|
||||
default:
|
||||
return nil, 0, errors.ErrSyntax(fmt.Sprintf("unexpected character '%c'", src[cursor]), cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func compactObject(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) {
|
||||
if src[cursor] == '{' {
|
||||
dst = append(dst, '{')
|
||||
} else {
|
||||
return nil, 0, errors.ErrExpected("expected { character for object value", cursor)
|
||||
}
|
||||
cursor = skipWhiteSpace(src, cursor+1)
|
||||
if src[cursor] == '}' {
|
||||
dst = append(dst, '}')
|
||||
return dst, cursor + 1, nil
|
||||
}
|
||||
var err error
|
||||
for {
|
||||
cursor = skipWhiteSpace(src, cursor)
|
||||
dst, cursor, err = compactString(dst, src, cursor, escape)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor = skipWhiteSpace(src, cursor)
|
||||
if src[cursor] != ':' {
|
||||
return nil, 0, errors.ErrExpected("colon after object key", cursor)
|
||||
}
|
||||
dst = append(dst, ':')
|
||||
dst, cursor, err = compactValue(dst, src, cursor+1, escape)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor = skipWhiteSpace(src, cursor)
|
||||
switch src[cursor] {
|
||||
case '}':
|
||||
dst = append(dst, '}')
|
||||
cursor++
|
||||
return dst, cursor, nil
|
||||
case ',':
|
||||
dst = append(dst, ',')
|
||||
default:
|
||||
return nil, 0, errors.ErrExpected("comma after object value", cursor)
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func compactArray(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) {
|
||||
if src[cursor] == '[' {
|
||||
dst = append(dst, '[')
|
||||
} else {
|
||||
return nil, 0, errors.ErrExpected("expected [ character for array value", cursor)
|
||||
}
|
||||
cursor = skipWhiteSpace(src, cursor+1)
|
||||
if src[cursor] == ']' {
|
||||
dst = append(dst, ']')
|
||||
return dst, cursor + 1, nil
|
||||
}
|
||||
var err error
|
||||
for {
|
||||
dst, cursor, err = compactValue(dst, src, cursor, escape)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor = skipWhiteSpace(src, cursor)
|
||||
switch src[cursor] {
|
||||
case ']':
|
||||
dst = append(dst, ']')
|
||||
cursor++
|
||||
return dst, cursor, nil
|
||||
case ',':
|
||||
dst = append(dst, ',')
|
||||
default:
|
||||
return nil, 0, errors.ErrExpected("comma after array value", cursor)
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func compactString(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) {
|
||||
if src[cursor] != '"' {
|
||||
return nil, 0, errors.ErrInvalidCharacter(src[cursor], "string", cursor)
|
||||
}
|
||||
start := cursor
|
||||
for {
|
||||
cursor++
|
||||
c := src[cursor]
|
||||
if escape {
|
||||
if isHTMLEscapeChar[c] {
|
||||
dst = append(dst, src[start:cursor]...)
|
||||
dst = append(dst, `\u00`...)
|
||||
dst = append(dst, hex[c>>4], hex[c&0xF])
|
||||
start = cursor + 1
|
||||
} else if c == 0xE2 && cursor+2 < int64(len(src)) && src[cursor+1] == 0x80 && src[cursor+2]&^1 == 0xA8 {
|
||||
dst = append(dst, src[start:cursor]...)
|
||||
dst = append(dst, `\u202`...)
|
||||
dst = append(dst, hex[src[cursor+2]&0xF])
|
||||
start = cursor + 3
|
||||
cursor += 2
|
||||
}
|
||||
}
|
||||
switch c {
|
||||
case '\\':
|
||||
cursor++
|
||||
if src[cursor] == nul {
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("string", int64(len(src)))
|
||||
}
|
||||
case '"':
|
||||
cursor++
|
||||
return append(dst, src[start:cursor]...), cursor, nil
|
||||
case nul:
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("string", int64(len(src)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func compactNumber(dst, src []byte, cursor int64) ([]byte, int64, error) {
|
||||
start := cursor
|
||||
for {
|
||||
cursor++
|
||||
if floatTable[src[cursor]] {
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
num := src[start:cursor]
|
||||
if _, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&num)), 64); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
dst = append(dst, num...)
|
||||
return dst, cursor, nil
|
||||
}
|
||||
|
||||
func compactTrue(dst, src []byte, cursor int64) ([]byte, int64, error) {
|
||||
if cursor+3 >= int64(len(src)) {
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("true", cursor)
|
||||
}
|
||||
if !bytes.Equal(src[cursor:cursor+4], []byte(`true`)) {
|
||||
return nil, 0, errors.ErrInvalidCharacter(src[cursor], "true", cursor)
|
||||
}
|
||||
dst = append(dst, "true"...)
|
||||
cursor += 4
|
||||
return dst, cursor, nil
|
||||
}
|
||||
|
||||
func compactFalse(dst, src []byte, cursor int64) ([]byte, int64, error) {
|
||||
if cursor+4 >= int64(len(src)) {
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("false", cursor)
|
||||
}
|
||||
if !bytes.Equal(src[cursor:cursor+5], []byte(`false`)) {
|
||||
return nil, 0, errors.ErrInvalidCharacter(src[cursor], "false", cursor)
|
||||
}
|
||||
dst = append(dst, "false"...)
|
||||
cursor += 5
|
||||
return dst, cursor, nil
|
||||
}
|
||||
|
||||
func compactNull(dst, src []byte, cursor int64) ([]byte, int64, error) {
|
||||
if cursor+3 >= int64(len(src)) {
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("null", cursor)
|
||||
}
|
||||
if !bytes.Equal(src[cursor:cursor+4], []byte(`null`)) {
|
||||
return nil, 0, errors.ErrInvalidCharacter(src[cursor], "null", cursor)
|
||||
}
|
||||
dst = append(dst, "null"...)
|
||||
cursor += 4
|
||||
return dst, cursor, nil
|
||||
}
|
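
compact.go strips insignificant whitespace while validating the input, and when the escape flag is set it also escapes <, >, & and the U+2028/U+2029 separators. A sketch using the package-level Compact, which mirrors encoding/json's signature; the exported wrapper is an assumption here, since the diff above only shows the internal function:

package main

import (
	"bytes"
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	src := []byte("{\n  \"a\": [1, 2, 3],\n  \"b\": \"x\"\n}")
	var buf bytes.Buffer
	if err := json.Compact(&buf, src); err != nil {
		panic(err)
	}
	fmt.Println(buf.String()) // {"a":[1,2,3],"b":"x"}
}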
939
internal/encoder/compiler.go
Normal file
@@ -0,0 +1,939 @@
package encoder
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding"
|
||||
"encoding/json"
|
||||
"reflect"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type marshalerContext interface {
|
||||
MarshalJSON(context.Context) ([]byte, error)
|
||||
}
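
marshalerContext above is the encoder-side counterpart of the context-aware unmarshaler: a type can implement MarshalJSON(context.Context) and receive the context passed in at the top level. A hedged sketch; the MarshalContext entry point and the Event/auditKey names are assumptions of this example:

package main

import (
	"context"
	"fmt"

	json "github.com/goccy/go-json"
)

type auditKey struct{}

// Event is a hypothetical type that embeds a request-scoped audit tag
// into its JSON form via the context-aware marshaler.
type Event struct {
	Name string
}

func (e Event) MarshalJSON(ctx context.Context) ([]byte, error) {
	tag, _ := ctx.Value(auditKey{}).(string)
	return []byte(fmt.Sprintf(`{"name":%q,"audit":%q}`, e.Name, tag)), nil
}

func main() {
	ctx := context.WithValue(context.Background(), auditKey{}, "req-42")
	out, err := json.MarshalContext(ctx, Event{Name: "login"})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}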
|
||||
|
||||
var (
|
||||
marshalJSONType = reflect.TypeOf((*json.Marshaler)(nil)).Elem()
|
||||
marshalJSONContextType = reflect.TypeOf((*marshalerContext)(nil)).Elem()
|
||||
marshalTextType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
|
||||
jsonNumberType = reflect.TypeOf(json.Number(""))
|
||||
cachedOpcodeSets []*OpcodeSet
|
||||
cachedOpcodeMap unsafe.Pointer // map[uintptr]*OpcodeSet
|
||||
typeAddr *runtime.TypeAddr
|
||||
initEncoderOnce sync.Once
|
||||
)
|
||||
|
||||
func initEncoder() {
|
||||
initEncoderOnce.Do(func() {
|
||||
typeAddr = runtime.AnalyzeTypeAddr()
|
||||
if typeAddr == nil {
|
||||
typeAddr = &runtime.TypeAddr{}
|
||||
}
|
||||
cachedOpcodeSets = make([]*OpcodeSet, typeAddr.AddrRange>>typeAddr.AddrShift+1)
|
||||
})
|
||||
}
|
||||
|
||||
func loadOpcodeMap() map[uintptr]*OpcodeSet {
|
||||
p := atomic.LoadPointer(&cachedOpcodeMap)
|
||||
return *(*map[uintptr]*OpcodeSet)(unsafe.Pointer(&p))
|
||||
}
|
||||
|
||||
func storeOpcodeSet(typ uintptr, set *OpcodeSet, m map[uintptr]*OpcodeSet) {
|
||||
newOpcodeMap := make(map[uintptr]*OpcodeSet, len(m)+1)
|
||||
newOpcodeMap[typ] = set
|
||||
|
||||
for k, v := range m {
|
||||
newOpcodeMap[k] = v
|
||||
}
|
||||
|
||||
atomic.StorePointer(&cachedOpcodeMap, *(*unsafe.Pointer)(unsafe.Pointer(&newOpcodeMap)))
|
||||
}
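
loadOpcodeMap and storeOpcodeSet above form a copy-on-write cache: readers follow an atomic pointer without locking, and writers publish a brand-new map. A self-contained sketch of the same pattern using atomic.Value and ordinary types (names are hypothetical):

package main

import (
	"fmt"
	"sync/atomic"
)

// cowCache is a tiny copy-on-write map: Load never locks, and Store
// republishes a brand-new map so readers always see a consistent snapshot.
type cowCache struct {
	m atomic.Value // holds map[string]int
}

func (c *cowCache) Load(k string) (int, bool) {
	m, _ := c.m.Load().(map[string]int)
	v, ok := m[k]
	return v, ok
}

func (c *cowCache) Store(k string, v int) {
	old, _ := c.m.Load().(map[string]int)
	next := make(map[string]int, len(old)+1)
	for kk, vv := range old {
		next[kk] = vv
	}
	next[k] = v
	c.m.Store(next)
}

func main() {
	var c cowCache
	c.Store("a", 1)
	c.Store("b", 2)
	v, ok := c.Load("a")
	fmt.Println(v, ok) // 1 true
}

As in the original, two concurrent writers can drop each other's entry; that is tolerated because a missing entry is simply recompiled to the same result.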
|
||||
|
||||
func compileToGetCodeSetSlowPath(typeptr uintptr) (*OpcodeSet, error) {
|
||||
opcodeMap := loadOpcodeMap()
|
||||
if codeSet, exists := opcodeMap[typeptr]; exists {
|
||||
return codeSet, nil
|
||||
}
|
||||
codeSet, err := newCompiler().compile(typeptr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
storeOpcodeSet(typeptr, codeSet, opcodeMap)
|
||||
return codeSet, nil
|
||||
}
|
||||
|
||||
func getFilteredCodeSetIfNeeded(ctx *RuntimeContext, codeSet *OpcodeSet) (*OpcodeSet, error) {
|
||||
if (ctx.Option.Flag & ContextOption) == 0 {
|
||||
return codeSet, nil
|
||||
}
|
||||
query := FieldQueryFromContext(ctx.Option.Context)
|
||||
if query == nil {
|
||||
return codeSet, nil
|
||||
}
|
||||
ctx.Option.Flag |= FieldQueryOption
|
||||
cacheCodeSet := codeSet.getQueryCache(query.Hash())
|
||||
if cacheCodeSet != nil {
|
||||
return cacheCodeSet, nil
|
||||
}
|
||||
queryCodeSet, err := newCompiler().codeToOpcodeSet(codeSet.Type, codeSet.Code.Filter(query))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
codeSet.setQueryCache(query.Hash(), queryCodeSet)
|
||||
return queryCodeSet, nil
|
||||
}
|
||||
|
||||
type Compiler struct {
|
||||
structTypeToCode map[uintptr]*StructCode
|
||||
}
|
||||
|
||||
func newCompiler() *Compiler {
|
||||
return &Compiler{
|
||||
structTypeToCode: map[uintptr]*StructCode{},
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Compiler) compile(typeptr uintptr) (*OpcodeSet, error) {
|
||||
// noescape trick for header.typ ( reflect.*rtype )
|
||||
typ := *(**runtime.Type)(unsafe.Pointer(&typeptr))
|
||||
code, err := c.typeToCode(typ)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return c.codeToOpcodeSet(typ, code)
|
||||
}
|
||||
|
||||
func (c *Compiler) codeToOpcodeSet(typ *runtime.Type, code Code) (*OpcodeSet, error) {
|
||||
noescapeKeyCode := c.codeToOpcode(&compileContext{
|
||||
structTypeToCodes: map[uintptr]Opcodes{},
|
||||
recursiveCodes: &Opcodes{},
|
||||
}, typ, code)
|
||||
if err := noescapeKeyCode.Validate(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
escapeKeyCode := c.codeToOpcode(&compileContext{
|
||||
structTypeToCodes: map[uintptr]Opcodes{},
|
||||
recursiveCodes: &Opcodes{},
|
||||
escapeKey: true,
|
||||
}, typ, code)
|
||||
noescapeKeyCode = copyOpcode(noescapeKeyCode)
|
||||
escapeKeyCode = copyOpcode(escapeKeyCode)
|
||||
setTotalLengthToInterfaceOp(noescapeKeyCode)
|
||||
setTotalLengthToInterfaceOp(escapeKeyCode)
|
||||
interfaceNoescapeKeyCode := copyToInterfaceOpcode(noescapeKeyCode)
|
||||
interfaceEscapeKeyCode := copyToInterfaceOpcode(escapeKeyCode)
|
||||
codeLength := noescapeKeyCode.TotalLength()
|
||||
return &OpcodeSet{
|
||||
Type: typ,
|
||||
NoescapeKeyCode: noescapeKeyCode,
|
||||
EscapeKeyCode: escapeKeyCode,
|
||||
InterfaceNoescapeKeyCode: interfaceNoescapeKeyCode,
|
||||
InterfaceEscapeKeyCode: interfaceEscapeKeyCode,
|
||||
CodeLength: codeLength,
|
||||
EndCode: ToEndCode(interfaceNoescapeKeyCode),
|
||||
Code: code,
|
||||
QueryCache: map[string]*OpcodeSet{},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (c *Compiler) typeToCode(typ *runtime.Type) (Code, error) {
|
||||
switch {
|
||||
case c.implementsMarshalJSON(typ):
|
||||
return c.marshalJSONCode(typ)
|
||||
case c.implementsMarshalText(typ):
|
||||
return c.marshalTextCode(typ)
|
||||
}
|
||||
|
||||
isPtr := false
|
||||
orgType := typ
|
||||
if typ.Kind() == reflect.Ptr {
|
||||
typ = typ.Elem()
|
||||
isPtr = true
|
||||
}
|
||||
switch {
|
||||
case c.implementsMarshalJSON(typ):
|
||||
return c.marshalJSONCode(orgType)
|
||||
case c.implementsMarshalText(typ):
|
||||
return c.marshalTextCode(orgType)
|
||||
}
|
||||
switch typ.Kind() {
|
||||
case reflect.Slice:
|
||||
elem := typ.Elem()
|
||||
if elem.Kind() == reflect.Uint8 {
|
||||
p := runtime.PtrTo(elem)
|
||||
if !c.implementsMarshalJSONType(p) && !p.Implements(marshalTextType) {
|
||||
return c.bytesCode(typ, isPtr)
|
||||
}
|
||||
}
|
||||
return c.sliceCode(typ)
|
||||
case reflect.Map:
|
||||
if isPtr {
|
||||
return c.ptrCode(runtime.PtrTo(typ))
|
||||
}
|
||||
return c.mapCode(typ)
|
||||
case reflect.Struct:
|
||||
return c.structCode(typ, isPtr)
|
||||
case reflect.Int:
|
||||
return c.intCode(typ, isPtr)
|
||||
case reflect.Int8:
|
||||
return c.int8Code(typ, isPtr)
|
||||
case reflect.Int16:
|
||||
return c.int16Code(typ, isPtr)
|
||||
case reflect.Int32:
|
||||
return c.int32Code(typ, isPtr)
|
||||
case reflect.Int64:
|
||||
return c.int64Code(typ, isPtr)
|
||||
case reflect.Uint, reflect.Uintptr:
|
||||
return c.uintCode(typ, isPtr)
|
||||
case reflect.Uint8:
|
||||
return c.uint8Code(typ, isPtr)
|
||||
case reflect.Uint16:
|
||||
return c.uint16Code(typ, isPtr)
|
||||
case reflect.Uint32:
|
||||
return c.uint32Code(typ, isPtr)
|
||||
case reflect.Uint64:
|
||||
return c.uint64Code(typ, isPtr)
|
||||
case reflect.Float32:
|
||||
return c.float32Code(typ, isPtr)
|
||||
case reflect.Float64:
|
||||
return c.float64Code(typ, isPtr)
|
||||
case reflect.String:
|
||||
return c.stringCode(typ, isPtr)
|
||||
case reflect.Bool:
|
||||
return c.boolCode(typ, isPtr)
|
||||
case reflect.Interface:
|
||||
return c.interfaceCode(typ, isPtr)
|
||||
default:
|
||||
if isPtr && typ.Implements(marshalTextType) {
|
||||
typ = orgType
|
||||
}
|
||||
return c.typeToCodeWithPtr(typ, isPtr)
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Compiler) typeToCodeWithPtr(typ *runtime.Type, isPtr bool) (Code, error) {
|
||||
switch {
|
||||
case c.implementsMarshalJSON(typ):
|
||||
return c.marshalJSONCode(typ)
|
||||
case c.implementsMarshalText(typ):
|
||||
return c.marshalTextCode(typ)
|
||||
}
|
||||
switch typ.Kind() {
|
||||
case reflect.Ptr:
|
||||
return c.ptrCode(typ)
|
||||
case reflect.Slice:
|
||||
elem := typ.Elem()
|
||||
if elem.Kind() == reflect.Uint8 {
|
||||
p := runtime.PtrTo(elem)
|
||||
if !c.implementsMarshalJSONType(p) && !p.Implements(marshalTextType) {
|
||||
return c.bytesCode(typ, false)
|
||||
}
|
||||
}
|
||||
return c.sliceCode(typ)
|
||||
case reflect.Array:
|
||||
return c.arrayCode(typ)
|
||||
case reflect.Map:
|
||||
return c.mapCode(typ)
|
||||
case reflect.Struct:
|
||||
return c.structCode(typ, isPtr)
|
||||
case reflect.Interface:
|
||||
return c.interfaceCode(typ, false)
|
||||
case reflect.Int:
|
||||
return c.intCode(typ, false)
|
||||
case reflect.Int8:
|
||||
return c.int8Code(typ, false)
|
||||
case reflect.Int16:
|
||||
return c.int16Code(typ, false)
|
||||
case reflect.Int32:
|
||||
return c.int32Code(typ, false)
|
||||
case reflect.Int64:
|
||||
return c.int64Code(typ, false)
|
||||
case reflect.Uint:
|
||||
return c.uintCode(typ, false)
|
||||
case reflect.Uint8:
|
||||
return c.uint8Code(typ, false)
|
||||
case reflect.Uint16:
|
||||
return c.uint16Code(typ, false)
|
||||
case reflect.Uint32:
|
||||
return c.uint32Code(typ, false)
|
||||
case reflect.Uint64:
|
||||
return c.uint64Code(typ, false)
|
||||
case reflect.Uintptr:
|
||||
return c.uintCode(typ, false)
|
||||
case reflect.Float32:
|
||||
return c.float32Code(typ, false)
|
||||
case reflect.Float64:
|
||||
return c.float64Code(typ, false)
|
||||
case reflect.String:
|
||||
return c.stringCode(typ, false)
|
||||
case reflect.Bool:
|
||||
return c.boolCode(typ, false)
|
||||
}
|
||||
return nil, &errors.UnsupportedTypeError{Type: runtime.RType2Type(typ)}
|
||||
}
|
||||
|
||||
const intSize = 32 << (^uint(0) >> 63)
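
intSize above evaluates at compile time to the width of uint: ^uint(0) >> 63 is 1 when uint is 64 bits wide and 0 when it is 32, so the constant is 64 or 32. A quick check:

package main

import "fmt"

const intSize = 32 << (^uint(0) >> 63) // 64 on 64-bit platforms, 32 on 32-bit

func main() {
	fmt.Println(intSize) // prints 64 on a 64-bit build
}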
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) intCode(typ *runtime.Type, isPtr bool) (*IntCode, error) {
|
||||
return &IntCode{typ: typ, bitSize: intSize, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) int8Code(typ *runtime.Type, isPtr bool) (*IntCode, error) {
|
||||
return &IntCode{typ: typ, bitSize: 8, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) int16Code(typ *runtime.Type, isPtr bool) (*IntCode, error) {
|
||||
return &IntCode{typ: typ, bitSize: 16, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) int32Code(typ *runtime.Type, isPtr bool) (*IntCode, error) {
|
||||
return &IntCode{typ: typ, bitSize: 32, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) int64Code(typ *runtime.Type, isPtr bool) (*IntCode, error) {
|
||||
return &IntCode{typ: typ, bitSize: 64, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) uintCode(typ *runtime.Type, isPtr bool) (*UintCode, error) {
|
||||
return &UintCode{typ: typ, bitSize: intSize, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) uint8Code(typ *runtime.Type, isPtr bool) (*UintCode, error) {
|
||||
return &UintCode{typ: typ, bitSize: 8, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) uint16Code(typ *runtime.Type, isPtr bool) (*UintCode, error) {
|
||||
return &UintCode{typ: typ, bitSize: 16, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) uint32Code(typ *runtime.Type, isPtr bool) (*UintCode, error) {
|
||||
return &UintCode{typ: typ, bitSize: 32, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) uint64Code(typ *runtime.Type, isPtr bool) (*UintCode, error) {
|
||||
return &UintCode{typ: typ, bitSize: 64, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) float32Code(typ *runtime.Type, isPtr bool) (*FloatCode, error) {
|
||||
return &FloatCode{typ: typ, bitSize: 32, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) float64Code(typ *runtime.Type, isPtr bool) (*FloatCode, error) {
|
||||
return &FloatCode{typ: typ, bitSize: 64, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) stringCode(typ *runtime.Type, isPtr bool) (*StringCode, error) {
|
||||
return &StringCode{typ: typ, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) boolCode(typ *runtime.Type, isPtr bool) (*BoolCode, error) {
|
||||
return &BoolCode{typ: typ, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) intStringCode(typ *runtime.Type) (*IntCode, error) {
|
||||
return &IntCode{typ: typ, bitSize: intSize, isString: true}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) int8StringCode(typ *runtime.Type) (*IntCode, error) {
|
||||
return &IntCode{typ: typ, bitSize: 8, isString: true}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) int16StringCode(typ *runtime.Type) (*IntCode, error) {
|
||||
return &IntCode{typ: typ, bitSize: 16, isString: true}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) int32StringCode(typ *runtime.Type) (*IntCode, error) {
|
||||
return &IntCode{typ: typ, bitSize: 32, isString: true}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) int64StringCode(typ *runtime.Type) (*IntCode, error) {
|
||||
return &IntCode{typ: typ, bitSize: 64, isString: true}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) uintStringCode(typ *runtime.Type) (*UintCode, error) {
|
||||
return &UintCode{typ: typ, bitSize: intSize, isString: true}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) uint8StringCode(typ *runtime.Type) (*UintCode, error) {
|
||||
return &UintCode{typ: typ, bitSize: 8, isString: true}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) uint16StringCode(typ *runtime.Type) (*UintCode, error) {
|
||||
return &UintCode{typ: typ, bitSize: 16, isString: true}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) uint32StringCode(typ *runtime.Type) (*UintCode, error) {
|
||||
return &UintCode{typ: typ, bitSize: 32, isString: true}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) uint64StringCode(typ *runtime.Type) (*UintCode, error) {
|
||||
return &UintCode{typ: typ, bitSize: 64, isString: true}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) bytesCode(typ *runtime.Type, isPtr bool) (*BytesCode, error) {
|
||||
return &BytesCode{typ: typ, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) interfaceCode(typ *runtime.Type, isPtr bool) (*InterfaceCode, error) {
|
||||
return &InterfaceCode{typ: typ, isPtr: isPtr}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) marshalJSONCode(typ *runtime.Type) (*MarshalJSONCode, error) {
|
||||
return &MarshalJSONCode{
|
||||
typ: typ,
|
||||
isAddrForMarshaler: c.isPtrMarshalJSONType(typ),
|
||||
isNilableType: c.isNilableType(typ),
|
||||
isMarshalerContext: typ.Implements(marshalJSONContextType) || runtime.PtrTo(typ).Implements(marshalJSONContextType),
|
||||
}, nil
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func (c *Compiler) marshalTextCode(typ *runtime.Type) (*MarshalTextCode, error) {
|
||||
return &MarshalTextCode{
|
||||
typ: typ,
|
||||
isAddrForMarshaler: c.isPtrMarshalTextType(typ),
|
||||
isNilableType: c.isNilableType(typ),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (c *Compiler) ptrCode(typ *runtime.Type) (*PtrCode, error) {
|
||||
code, err := c.typeToCodeWithPtr(typ.Elem(), true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ptr, ok := code.(*PtrCode)
|
||||
if ok {
|
||||
return &PtrCode{typ: typ, value: ptr.value, ptrNum: ptr.ptrNum + 1}, nil
|
||||
}
|
||||
return &PtrCode{typ: typ, value: code, ptrNum: 1}, nil
|
||||
}
|
||||
|
||||
func (c *Compiler) sliceCode(typ *runtime.Type) (*SliceCode, error) {
|
||||
elem := typ.Elem()
|
||||
code, err := c.listElemCode(elem)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if code.Kind() == CodeKindStruct {
|
||||
structCode := code.(*StructCode)
|
||||
structCode.enableIndirect()
|
||||
}
|
||||
return &SliceCode{typ: typ, value: code}, nil
|
||||
}
|
||||
|
||||
func (c *Compiler) arrayCode(typ *runtime.Type) (*ArrayCode, error) {
|
||||
elem := typ.Elem()
|
||||
code, err := c.listElemCode(elem)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if code.Kind() == CodeKindStruct {
|
||||
structCode := code.(*StructCode)
|
||||
structCode.enableIndirect()
|
||||
}
|
||||
return &ArrayCode{typ: typ, value: code}, nil
|
||||
}
|
||||
|
||||
func (c *Compiler) mapCode(typ *runtime.Type) (*MapCode, error) {
|
||||
keyCode, err := c.mapKeyCode(typ.Key())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
valueCode, err := c.mapValueCode(typ.Elem())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if valueCode.Kind() == CodeKindStruct {
|
||||
structCode := valueCode.(*StructCode)
|
||||
structCode.enableIndirect()
|
||||
}
|
||||
return &MapCode{typ: typ, key: keyCode, value: valueCode}, nil
|
||||
}
|
||||
|
||||
func (c *Compiler) listElemCode(typ *runtime.Type) (Code, error) {
|
||||
switch {
|
||||
case c.implementsMarshalJSONType(typ) || c.implementsMarshalJSONType(runtime.PtrTo(typ)):
|
||||
return c.marshalJSONCode(typ)
|
||||
case !typ.Implements(marshalTextType) && runtime.PtrTo(typ).Implements(marshalTextType):
|
||||
return c.marshalTextCode(typ)
|
||||
case typ.Kind() == reflect.Map:
|
||||
return c.ptrCode(runtime.PtrTo(typ))
|
||||
default:
|
||||
// isPtr was originally used to indicate whether the type of top level is pointer.
|
||||
// However, since the slice/array element is a specification that can get the pointer address, explicitly set isPtr to true.
|
||||
// See here for related issues: https://github.com/goccy/go-json/issues/370
|
||||
code, err := c.typeToCodeWithPtr(typ, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ptr, ok := code.(*PtrCode)
|
||||
if ok {
|
||||
if ptr.value.Kind() == CodeKindMap {
|
||||
ptr.ptrNum++
|
||||
}
|
||||
}
|
||||
return code, nil
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Compiler) mapKeyCode(typ *runtime.Type) (Code, error) {
|
||||
switch {
|
||||
case c.implementsMarshalText(typ):
|
||||
return c.marshalTextCode(typ)
|
||||
}
|
||||
switch typ.Kind() {
|
||||
case reflect.Ptr:
|
||||
return c.ptrCode(typ)
|
||||
case reflect.String:
|
||||
return c.stringCode(typ, false)
|
||||
case reflect.Int:
|
||||
return c.intStringCode(typ)
|
||||
case reflect.Int8:
|
||||
return c.int8StringCode(typ)
|
||||
case reflect.Int16:
|
||||
return c.int16StringCode(typ)
|
||||
case reflect.Int32:
|
||||
return c.int32StringCode(typ)
|
||||
case reflect.Int64:
|
||||
return c.int64StringCode(typ)
|
||||
case reflect.Uint:
|
||||
return c.uintStringCode(typ)
|
||||
case reflect.Uint8:
|
||||
return c.uint8StringCode(typ)
|
||||
case reflect.Uint16:
|
||||
return c.uint16StringCode(typ)
|
||||
case reflect.Uint32:
|
||||
return c.uint32StringCode(typ)
|
||||
case reflect.Uint64:
|
||||
return c.uint64StringCode(typ)
|
||||
case reflect.Uintptr:
|
||||
return c.uintStringCode(typ)
|
||||
}
|
||||
return nil, &errors.UnsupportedTypeError{Type: runtime.RType2Type(typ)}
|
||||
}
|
||||
|
||||
func (c *Compiler) mapValueCode(typ *runtime.Type) (Code, error) {
|
||||
switch typ.Kind() {
|
||||
case reflect.Map:
|
||||
return c.ptrCode(runtime.PtrTo(typ))
|
||||
default:
|
||||
code, err := c.typeToCodeWithPtr(typ, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ptr, ok := code.(*PtrCode)
|
||||
if ok {
|
||||
if ptr.value.Kind() == CodeKindMap {
|
||||
ptr.ptrNum++
|
||||
}
|
||||
}
|
||||
return code, nil
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Compiler) structCode(typ *runtime.Type, isPtr bool) (*StructCode, error) {
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
if code, exists := c.structTypeToCode[typeptr]; exists {
|
||||
derefCode := *code
|
||||
derefCode.isRecursive = true
|
||||
return &derefCode, nil
|
||||
}
|
||||
indirect := runtime.IfaceIndir(typ)
|
||||
code := &StructCode{typ: typ, isPtr: isPtr, isIndirect: indirect}
|
||||
c.structTypeToCode[typeptr] = code
|
||||
|
||||
fieldNum := typ.NumField()
|
||||
tags := c.typeToStructTags(typ)
|
||||
fields := []*StructFieldCode{}
|
||||
for i, tag := range tags {
|
||||
isOnlyOneFirstField := i == 0 && fieldNum == 1
|
||||
field, err := c.structFieldCode(code, tag, isPtr, isOnlyOneFirstField)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if field.isAnonymous {
|
||||
structCode := field.getAnonymousStruct()
|
||||
if structCode != nil {
|
||||
structCode.removeFieldsByTags(tags)
|
||||
if c.isAssignableIndirect(field, isPtr) {
|
||||
if indirect {
|
||||
structCode.isIndirect = true
|
||||
} else {
|
||||
structCode.isIndirect = false
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
structCode := field.getStruct()
|
||||
if structCode != nil {
|
||||
if indirect {
|
||||
// if parent is indirect type, set child indirect property to true
|
||||
structCode.isIndirect = true
|
||||
} else {
|
||||
// if parent is not indirect type, set child indirect property to false.
|
||||
// but if parent's indirect is false and isPtr is true, then indirect must be true.
|
||||
// Do this only if indirectConversion is enabled at the end of compileStruct.
|
||||
structCode.isIndirect = false
|
||||
}
|
||||
}
|
||||
}
|
||||
fields = append(fields, field)
|
||||
}
|
||||
fieldMap := c.getFieldMap(fields)
|
||||
duplicatedFieldMap := c.getDuplicatedFieldMap(fieldMap)
|
||||
code.fields = c.filteredDuplicatedFields(fields, duplicatedFieldMap)
|
||||
if !code.disableIndirectConversion && !indirect && isPtr {
|
||||
code.enableIndirect()
|
||||
}
|
||||
delete(c.structTypeToCode, typeptr)
|
||||
return code, nil
|
||||
}
|
||||
|
||||
func toElemType(t *runtime.Type) *runtime.Type {
|
||||
for t.Kind() == reflect.Ptr {
|
||||
t = t.Elem()
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
func (c *Compiler) structFieldCode(structCode *StructCode, tag *runtime.StructTag, isPtr, isOnlyOneFirstField bool) (*StructFieldCode, error) {
|
||||
field := tag.Field
|
||||
fieldType := runtime.Type2RType(field.Type)
|
||||
isIndirectSpecialCase := isPtr && isOnlyOneFirstField
|
||||
fieldCode := &StructFieldCode{
|
||||
typ: fieldType,
|
||||
key: tag.Key,
|
||||
tag: tag,
|
||||
offset: field.Offset,
|
||||
isAnonymous: field.Anonymous && !tag.IsTaggedKey && toElemType(fieldType).Kind() == reflect.Struct,
|
||||
isTaggedKey: tag.IsTaggedKey,
|
||||
isNilableType: c.isNilableType(fieldType),
|
||||
isNilCheck: true,
|
||||
}
|
||||
switch {
|
||||
case c.isMovePointerPositionFromHeadToFirstMarshalJSONFieldCase(fieldType, isIndirectSpecialCase):
|
||||
code, err := c.marshalJSONCode(fieldType)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
fieldCode.value = code
|
||||
fieldCode.isAddrForMarshaler = true
|
||||
fieldCode.isNilCheck = false
|
||||
structCode.isIndirect = false
|
||||
structCode.disableIndirectConversion = true
|
||||
case c.isMovePointerPositionFromHeadToFirstMarshalTextFieldCase(fieldType, isIndirectSpecialCase):
|
||||
code, err := c.marshalTextCode(fieldType)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
fieldCode.value = code
|
||||
fieldCode.isAddrForMarshaler = true
|
||||
fieldCode.isNilCheck = false
|
||||
structCode.isIndirect = false
|
||||
structCode.disableIndirectConversion = true
|
||||
case isPtr && c.isPtrMarshalJSONType(fieldType):
|
||||
// *struct{ field T }
|
||||
// func (*T) MarshalJSON() ([]byte, error)
|
||||
code, err := c.marshalJSONCode(fieldType)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
fieldCode.value = code
|
||||
fieldCode.isAddrForMarshaler = true
|
||||
fieldCode.isNilCheck = false
|
||||
case isPtr && c.isPtrMarshalTextType(fieldType):
|
||||
// *struct{ field T }
|
||||
// func (*T) MarshalText() ([]byte, error)
|
||||
code, err := c.marshalTextCode(fieldType)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
fieldCode.value = code
|
||||
fieldCode.isAddrForMarshaler = true
|
||||
fieldCode.isNilCheck = false
|
||||
default:
|
||||
code, err := c.typeToCodeWithPtr(fieldType, isPtr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
switch code.Kind() {
|
||||
case CodeKindPtr, CodeKindInterface:
|
||||
fieldCode.isNextOpPtrType = true
|
||||
}
|
||||
fieldCode.value = code
|
||||
}
|
||||
return fieldCode, nil
|
||||
}
|
||||
|
||||
func (c *Compiler) isAssignableIndirect(fieldCode *StructFieldCode, isPtr bool) bool {
|
||||
if isPtr {
|
||||
return false
|
||||
}
|
||||
codeType := fieldCode.value.Kind()
|
||||
if codeType == CodeKindMarshalJSON {
|
||||
return false
|
||||
}
|
||||
if codeType == CodeKindMarshalText {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (c *Compiler) getFieldMap(fields []*StructFieldCode) map[string][]*StructFieldCode {
|
||||
fieldMap := map[string][]*StructFieldCode{}
|
||||
for _, field := range fields {
|
||||
if field.isAnonymous {
|
||||
for k, v := range c.getAnonymousFieldMap(field) {
|
||||
fieldMap[k] = append(fieldMap[k], v...)
|
||||
}
|
||||
continue
|
||||
}
|
||||
fieldMap[field.key] = append(fieldMap[field.key], field)
|
||||
}
|
||||
return fieldMap
|
||||
}
|
||||
|
||||
func (c *Compiler) getAnonymousFieldMap(field *StructFieldCode) map[string][]*StructFieldCode {
|
||||
fieldMap := map[string][]*StructFieldCode{}
|
||||
structCode := field.getAnonymousStruct()
|
||||
if structCode == nil || structCode.isRecursive {
|
||||
fieldMap[field.key] = append(fieldMap[field.key], field)
|
||||
return fieldMap
|
||||
}
|
||||
for k, v := range c.getFieldMapFromAnonymousParent(structCode.fields) {
|
||||
fieldMap[k] = append(fieldMap[k], v...)
|
||||
}
|
||||
return fieldMap
|
||||
}
|
||||
|
||||
func (c *Compiler) getFieldMapFromAnonymousParent(fields []*StructFieldCode) map[string][]*StructFieldCode {
|
||||
fieldMap := map[string][]*StructFieldCode{}
|
||||
for _, field := range fields {
|
||||
if field.isAnonymous {
|
||||
for k, v := range c.getAnonymousFieldMap(field) {
|
||||
// Do not handle tagged key when embedding more than once
|
||||
for _, vv := range v {
|
||||
vv.isTaggedKey = false
|
||||
}
|
||||
fieldMap[k] = append(fieldMap[k], v...)
|
||||
}
|
||||
continue
|
||||
}
|
||||
fieldMap[field.key] = append(fieldMap[field.key], field)
|
||||
}
|
||||
return fieldMap
|
||||
}
|
||||
|
||||
func (c *Compiler) getDuplicatedFieldMap(fieldMap map[string][]*StructFieldCode) map[*StructFieldCode]struct{} {
|
||||
duplicatedFieldMap := map[*StructFieldCode]struct{}{}
|
||||
for _, fields := range fieldMap {
|
||||
if len(fields) == 1 {
|
||||
continue
|
||||
}
|
||||
if c.isTaggedKeyOnly(fields) {
|
||||
for _, field := range fields {
|
||||
if field.isTaggedKey {
|
||||
continue
|
||||
}
|
||||
duplicatedFieldMap[field] = struct{}{}
|
||||
}
|
||||
} else {
|
||||
for _, field := range fields {
|
||||
duplicatedFieldMap[field] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
return duplicatedFieldMap
|
||||
}
|
||||
|
||||
func (c *Compiler) filteredDuplicatedFields(fields []*StructFieldCode, duplicatedFieldMap map[*StructFieldCode]struct{}) []*StructFieldCode {
|
||||
filteredFields := make([]*StructFieldCode, 0, len(fields))
|
||||
for _, field := range fields {
|
||||
if field.isAnonymous {
|
||||
structCode := field.getAnonymousStruct()
|
||||
if structCode != nil && !structCode.isRecursive {
|
||||
structCode.fields = c.filteredDuplicatedFields(structCode.fields, duplicatedFieldMap)
|
||||
if len(structCode.fields) > 0 {
|
||||
filteredFields = append(filteredFields, field)
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
if _, exists := duplicatedFieldMap[field]; exists {
|
||||
continue
|
||||
}
|
||||
filteredFields = append(filteredFields, field)
|
||||
}
|
||||
return filteredFields
|
||||
}
|
||||
|
||||
func (c *Compiler) isTaggedKeyOnly(fields []*StructFieldCode) bool {
|
||||
var taggedKeyFieldCount int
|
||||
for _, field := range fields {
|
||||
if field.isTaggedKey {
|
||||
taggedKeyFieldCount++
|
||||
}
|
||||
}
|
||||
return taggedKeyFieldCount == 1
|
||||
}
|
||||
|
||||
func (c *Compiler) typeToStructTags(typ *runtime.Type) runtime.StructTags {
|
||||
tags := runtime.StructTags{}
|
||||
fieldNum := typ.NumField()
|
||||
for i := 0; i < fieldNum; i++ {
|
||||
field := typ.Field(i)
|
||||
if runtime.IsIgnoredStructField(field) {
|
||||
continue
|
||||
}
|
||||
tags = append(tags, runtime.StructTagFromField(field))
|
||||
}
|
||||
return tags
|
||||
}
|
||||
|
||||
// *struct{ field T } => struct { field *T }
|
||||
// func (*T) MarshalJSON() ([]byte, error)
|
||||
func (c *Compiler) isMovePointerPositionFromHeadToFirstMarshalJSONFieldCase(typ *runtime.Type, isIndirectSpecialCase bool) bool {
|
||||
return isIndirectSpecialCase && !c.isNilableType(typ) && c.isPtrMarshalJSONType(typ)
|
||||
}
|
||||
|
||||
// *struct{ field T } => struct { field *T }
|
||||
// func (*T) MarshalText() ([]byte, error)
|
||||
func (c *Compiler) isMovePointerPositionFromHeadToFirstMarshalTextFieldCase(typ *runtime.Type, isIndirectSpecialCase bool) bool {
|
||||
return isIndirectSpecialCase && !c.isNilableType(typ) && c.isPtrMarshalTextType(typ)
|
||||
}
|
||||
|
||||
func (c *Compiler) implementsMarshalJSON(typ *runtime.Type) bool {
|
||||
if !c.implementsMarshalJSONType(typ) {
|
||||
return false
|
||||
}
|
||||
if typ.Kind() != reflect.Ptr {
|
||||
return true
|
||||
}
|
||||
// type kind is reflect.Ptr
|
||||
if !c.implementsMarshalJSONType(typ.Elem()) {
|
||||
return true
|
||||
}
|
||||
// needs to dereference
|
||||
return false
|
||||
}
|
||||
|
||||
func (c *Compiler) implementsMarshalText(typ *runtime.Type) bool {
|
||||
if !typ.Implements(marshalTextType) {
|
||||
return false
|
||||
}
|
||||
if typ.Kind() != reflect.Ptr {
|
||||
return true
|
||||
}
|
||||
// type kind is reflect.Ptr
|
||||
if !typ.Elem().Implements(marshalTextType) {
|
||||
return true
|
||||
}
|
||||
// needs to dereference
|
||||
return false
|
||||
}
|
||||
|
||||
func (c *Compiler) isNilableType(typ *runtime.Type) bool {
|
||||
if !runtime.IfaceIndir(typ) {
|
||||
return true
|
||||
}
|
||||
switch typ.Kind() {
|
||||
case reflect.Ptr:
|
||||
return true
|
||||
case reflect.Map:
|
||||
return true
|
||||
case reflect.Func:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Compiler) implementsMarshalJSONType(typ *runtime.Type) bool {
|
||||
return typ.Implements(marshalJSONType) || typ.Implements(marshalJSONContextType)
|
||||
}
|
||||
|
||||
func (c *Compiler) isPtrMarshalJSONType(typ *runtime.Type) bool {
|
||||
return !c.implementsMarshalJSONType(typ) && c.implementsMarshalJSONType(runtime.PtrTo(typ))
|
||||
}
|
||||
|
||||
func (c *Compiler) isPtrMarshalTextType(typ *runtime.Type) bool {
|
||||
return !typ.Implements(marshalTextType) && runtime.PtrTo(typ).Implements(marshalTextType)
|
||||
}
|
||||
|
||||
func (c *Compiler) codeToOpcode(ctx *compileContext, typ *runtime.Type, code Code) *Opcode {
|
||||
codes := code.ToOpcode(ctx)
|
||||
codes.Last().Next = newEndOp(ctx, typ)
|
||||
c.linkRecursiveCode(ctx)
|
||||
return codes.First()
|
||||
}
|
||||
|
||||
func (c *Compiler) linkRecursiveCode(ctx *compileContext) {
|
||||
recursiveCodes := map[uintptr]*CompiledCode{}
|
||||
for _, recursive := range *ctx.recursiveCodes {
|
||||
typeptr := uintptr(unsafe.Pointer(recursive.Type))
|
||||
codes := ctx.structTypeToCodes[typeptr]
|
||||
if recursiveCode, ok := recursiveCodes[typeptr]; ok {
|
||||
*recursive.Jmp = *recursiveCode
|
||||
continue
|
||||
}
|
||||
|
||||
code := copyOpcode(codes.First())
|
||||
code.Op = code.Op.PtrHeadToHead()
|
||||
lastCode := newEndOp(&compileContext{}, recursive.Type)
|
||||
lastCode.Op = OpRecursiveEnd
|
||||
|
||||
// OpRecursiveEnd must be set before calling TotalLength
|
||||
code.End.Next = lastCode
|
||||
|
||||
totalLength := code.TotalLength()
|
||||
|
||||
// Idx, ElemIdx, Length must be set after calling TotalLength
|
||||
lastCode.Idx = uint32((totalLength + 1) * uintptrSize)
|
||||
lastCode.ElemIdx = lastCode.Idx + uintptrSize
|
||||
lastCode.Length = lastCode.Idx + 2*uintptrSize
|
||||
|
||||
// extend the length to allocate slots for elemIdx + length
|
||||
curTotalLength := uintptr(recursive.TotalLength()) + 3
|
||||
nextTotalLength := uintptr(totalLength) + 3
|
||||
|
||||
compiled := recursive.Jmp
|
||||
compiled.Code = code
|
||||
compiled.CurLen = curTotalLength
|
||||
compiled.NextLen = nextTotalLength
|
||||
compiled.Linked = true
|
||||
|
||||
recursiveCodes[typeptr] = compiled
|
||||
}
|
||||
}
|
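linkRecursiveCode patches the recorded jump targets so that a self-referential type re-enters its own compiled opcodes instead of being expanded without bound. A minimal example of the kind of type that takes this path, shown with encoding/json (whose output go-json aims to match):

package main

import (
    "encoding/json"
    "fmt"
)

// Node refers to itself, so its encoder has to be compiled once and
// re-entered at every level of nesting.
type Node struct {
    Value int   `json:"value"`
    Next  *Node `json:"next,omitempty"`
}

func main() {
    n := &Node{Value: 1, Next: &Node{Value: 2}}
    b, _ := json.Marshal(n)
    fmt.Println(string(b)) // {"value":1,"next":{"value":2}}
}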
33
internal/encoder/compiler_norace.go
Normal file
33
internal/encoder/compiler_norace.go
Normal file
|
@@ -0,0 +1,33 @@
|
|||
//go:build !race
|
||||
// +build !race
|
||||
|
||||
package encoder
|
||||
|
||||
func CompileToGetCodeSet(ctx *RuntimeContext, typeptr uintptr) (*OpcodeSet, error) {
|
||||
initEncoder()
|
||||
if typeptr > typeAddr.MaxTypeAddr || typeptr < typeAddr.BaseTypeAddr {
|
||||
codeSet, err := compileToGetCodeSetSlowPath(typeptr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return getFilteredCodeSetIfNeeded(ctx, codeSet)
|
||||
}
|
||||
index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift
|
||||
if codeSet := cachedOpcodeSets[index]; codeSet != nil {
|
||||
filtered, err := getFilteredCodeSetIfNeeded(ctx, codeSet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return filtered, nil
|
||||
}
|
||||
codeSet, err := newCompiler().compile(typeptr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
filtered, err := getFilteredCodeSetIfNeeded(ctx, codeSet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cachedOpcodeSets[index] = codeSet
|
||||
return filtered, nil
|
||||
}
|
46
internal/encoder/compiler_race.go
Normal file
46
internal/encoder/compiler_race.go
Normal file
|
@@ -0,0 +1,46 @@
|
|||
//go:build race
|
||||
// +build race
|
||||
|
||||
package encoder
|
||||
|
||||
import (
|
||||
"sync"
|
||||
)
|
||||
|
||||
var setsMu sync.RWMutex
|
||||
|
||||
func CompileToGetCodeSet(ctx *RuntimeContext, typeptr uintptr) (*OpcodeSet, error) {
|
||||
initEncoder()
|
||||
if typeptr > typeAddr.MaxTypeAddr || typeptr < typeAddr.BaseTypeAddr {
|
||||
codeSet, err := compileToGetCodeSetSlowPath(typeptr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return getFilteredCodeSetIfNeeded(ctx, codeSet)
|
||||
}
|
||||
index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift
|
||||
setsMu.RLock()
|
||||
if codeSet := cachedOpcodeSets[index]; codeSet != nil {
|
||||
filtered, err := getFilteredCodeSetIfNeeded(ctx, codeSet)
|
||||
if err != nil {
|
||||
setsMu.RUnlock()
|
||||
return nil, err
|
||||
}
|
||||
setsMu.RUnlock()
|
||||
return filtered, nil
|
||||
}
|
||||
setsMu.RUnlock()
|
||||
|
||||
codeSet, err := newCompiler().compile(typeptr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
filtered, err := getFilteredCodeSetIfNeeded(ctx, codeSet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
setsMu.Lock()
|
||||
cachedOpcodeSets[index] = codeSet
|
||||
setsMu.Unlock()
|
||||
return filtered, nil
|
||||
}
|
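compiler_race.go differs from compiler_norace.go only in wrapping the cachedOpcodeSets lookups with a sync.RWMutex so the race detector stays quiet. The sketch below shows the same read-mostly, check-then-store pattern in isolation; codeCache and its methods are hypothetical and not part of go-json:

package main

import (
    "fmt"
    "sync"
)

// codeCache caches a compiled artifact per key behind an RWMutex:
// read-lock on the fast path, write-lock only when storing a new entry.
type codeCache struct {
    mu    sync.RWMutex
    codes map[uintptr]string
}

func (c *codeCache) get(key uintptr) (string, bool) {
    c.mu.RLock()
    v, ok := c.codes[key]
    c.mu.RUnlock()
    return v, ok
}

func (c *codeCache) getOrCompile(key uintptr, compile func() string) string {
    if v, ok := c.get(key); ok {
        return v
    }
    v := compile()
    c.mu.Lock()
    c.codes[key] = v
    c.mu.Unlock()
    return v
}

func main() {
    cache := &codeCache{codes: map[uintptr]string{}}
    fmt.Println(cache.getOrCompile(0x1234, func() string { return "compiled opcode set" }))
    fmt.Println(cache.get(0x1234)) // compiled opcode set true
}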
105
internal/encoder/context.go
Normal file
105
internal/encoder/context.go
Normal file
|
@@ -0,0 +1,105 @@
|
|||
package encoder
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type compileContext struct {
|
||||
opcodeIndex uint32
|
||||
ptrIndex int
|
||||
indent uint32
|
||||
escapeKey bool
|
||||
structTypeToCodes map[uintptr]Opcodes
|
||||
recursiveCodes *Opcodes
|
||||
}
|
||||
|
||||
func (c *compileContext) incIndent() {
|
||||
c.indent++
|
||||
}
|
||||
|
||||
func (c *compileContext) decIndent() {
|
||||
c.indent--
|
||||
}
|
||||
|
||||
func (c *compileContext) incIndex() {
|
||||
c.incOpcodeIndex()
|
||||
c.incPtrIndex()
|
||||
}
|
||||
|
||||
func (c *compileContext) decIndex() {
|
||||
c.decOpcodeIndex()
|
||||
c.decPtrIndex()
|
||||
}
|
||||
|
||||
func (c *compileContext) incOpcodeIndex() {
|
||||
c.opcodeIndex++
|
||||
}
|
||||
|
||||
func (c *compileContext) decOpcodeIndex() {
|
||||
c.opcodeIndex--
|
||||
}
|
||||
|
||||
func (c *compileContext) incPtrIndex() {
|
||||
c.ptrIndex++
|
||||
}
|
||||
|
||||
func (c *compileContext) decPtrIndex() {
|
||||
c.ptrIndex--
|
||||
}
|
||||
|
||||
const (
|
||||
bufSize = 1024
|
||||
)
|
||||
|
||||
var (
|
||||
runtimeContextPool = sync.Pool{
|
||||
New: func() interface{} {
|
||||
return &RuntimeContext{
|
||||
Buf: make([]byte, 0, bufSize),
|
||||
Ptrs: make([]uintptr, 128),
|
||||
KeepRefs: make([]unsafe.Pointer, 0, 8),
|
||||
Option: &Option{},
|
||||
}
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
type RuntimeContext struct {
|
||||
Context context.Context
|
||||
Buf []byte
|
||||
MarshalBuf []byte
|
||||
Ptrs []uintptr
|
||||
KeepRefs []unsafe.Pointer
|
||||
SeenPtr []uintptr
|
||||
BaseIndent uint32
|
||||
Prefix []byte
|
||||
IndentStr []byte
|
||||
Option *Option
|
||||
}
|
||||
|
||||
func (c *RuntimeContext) Init(p uintptr, codelen int) {
|
||||
if len(c.Ptrs) < codelen {
|
||||
c.Ptrs = make([]uintptr, codelen)
|
||||
}
|
||||
c.Ptrs[0] = p
|
||||
c.KeepRefs = c.KeepRefs[:0]
|
||||
c.SeenPtr = c.SeenPtr[:0]
|
||||
c.BaseIndent = 0
|
||||
}
|
||||
|
||||
func (c *RuntimeContext) Ptr() uintptr {
|
||||
header := (*runtime.SliceHeader)(unsafe.Pointer(&c.Ptrs))
|
||||
return uintptr(header.Data)
|
||||
}
|
||||
|
||||
func TakeRuntimeContext() *RuntimeContext {
|
||||
return runtimeContextPool.Get().(*RuntimeContext)
|
||||
}
|
||||
|
||||
func ReleaseRuntimeContext(ctx *RuntimeContext) {
|
||||
runtimeContextPool.Put(ctx)
|
||||
}
|
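TakeRuntimeContext and ReleaseRuntimeContext recycle RuntimeContext values, and in particular their byte buffers, through a sync.Pool so repeated encodes do not reallocate scratch space. A minimal sketch of that pattern with a hypothetical bufferCtx type:

package main

import (
    "fmt"
    "sync"
)

type bufferCtx struct {
    Buf []byte
}

var ctxPool = sync.Pool{
    New: func() interface{} { return &bufferCtx{Buf: make([]byte, 0, 1024)} },
}

// encodeWithPool borrows a context, reuses its buffer, and returns it to the pool.
func encodeWithPool(s string) string {
    ctx := ctxPool.Get().(*bufferCtx)
    defer ctxPool.Put(ctx)
    ctx.Buf = append(ctx.Buf[:0], '"')
    ctx.Buf = append(ctx.Buf, s...)
    ctx.Buf = append(ctx.Buf, '"')
    return string(ctx.Buf)
}

func main() {
    fmt.Println(encodeWithPool("hello")) // "hello"
}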
126
internal/encoder/decode_rune.go
Normal file
126
internal/encoder/decode_rune.go
Normal file
|
@@ -0,0 +1,126 @@
|
|||
package encoder
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
const (
|
||||
// The default lowest and highest continuation byte.
|
||||
locb = 128 //0b10000000
|
||||
hicb = 191 //0b10111111
|
||||
|
||||
// The names of these constants are chosen to give nice alignment in the
|
||||
// table below. The first nibble is an index into acceptRanges or F for
|
||||
// special one-byte cases. The second nibble is the Rune length or the
|
||||
// Status for the special one-byte case.
|
||||
xx = 0xF1 // invalid: size 1
|
||||
as = 0xF0 // ASCII: size 1
|
||||
s1 = 0x02 // accept 0, size 2
|
||||
s2 = 0x13 // accept 1, size 3
|
||||
s3 = 0x03 // accept 0, size 3
|
||||
s4 = 0x23 // accept 2, size 3
|
||||
s5 = 0x34 // accept 3, size 4
|
||||
s6 = 0x04 // accept 0, size 4
|
||||
s7 = 0x44 // accept 4, size 4
|
||||
)
|
||||
|
||||
// first is information about the first byte in a UTF-8 sequence.
|
||||
var first = [256]uint8{
|
||||
// 1 2 3 4 5 6 7 8 9 A B C D E F
|
||||
as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x00-0x0F
|
||||
as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x10-0x1F
|
||||
as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x20-0x2F
|
||||
as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x30-0x3F
|
||||
as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x40-0x4F
|
||||
as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x50-0x5F
|
||||
as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x60-0x6F
|
||||
as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x70-0x7F
|
||||
// 1 2 3 4 5 6 7 8 9 A B C D E F
|
||||
xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0x80-0x8F
|
||||
xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0x90-0x9F
|
||||
xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xA0-0xAF
|
||||
xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xB0-0xBF
|
||||
xx, xx, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, // 0xC0-0xCF
|
||||
s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, // 0xD0-0xDF
|
||||
s2, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s4, s3, s3, // 0xE0-0xEF
|
||||
s5, s6, s6, s6, s7, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xF0-0xFF
|
||||
}
|
||||
|
||||
const (
|
||||
lineSep = byte(168) //'\u2028'
|
||||
paragraphSep = byte(169) //'\u2029'
|
||||
)
|
||||
|
||||
type decodeRuneState int
|
||||
|
||||
const (
|
||||
validUTF8State decodeRuneState = iota
|
||||
runeErrorState
|
||||
lineSepState
|
||||
paragraphSepState
|
||||
)
|
||||
|
||||
func decodeRuneInString(s string) (decodeRuneState, int) {
|
||||
n := len(s)
|
||||
s0 := s[0]
|
||||
x := first[s0]
|
||||
if x >= as {
|
||||
// The following code simulates an additional check for x == xx and
|
||||
// handling the ASCII and invalid cases accordingly. This mask-and-or
|
||||
// approach prevents an additional branch.
|
||||
mask := rune(x) << 31 >> 31 // Create 0x0000 or 0xFFFF.
|
||||
if rune(s[0])&^mask|utf8.RuneError&mask == utf8.RuneError {
|
||||
return runeErrorState, 1
|
||||
}
|
||||
return validUTF8State, 1
|
||||
}
|
||||
sz := int(x & 7)
|
||||
if n < sz {
|
||||
return runeErrorState, 1
|
||||
}
|
||||
s1 := s[1]
|
||||
switch x >> 4 {
|
||||
case 0:
|
||||
if s1 < locb || hicb < s1 {
|
||||
return runeErrorState, 1
|
||||
}
|
||||
case 1:
|
||||
if s1 < 0xA0 || hicb < s1 {
|
||||
return runeErrorState, 1
|
||||
}
|
||||
case 2:
|
||||
if s1 < locb || 0x9F < s1 {
|
||||
return runeErrorState, 1
|
||||
}
|
||||
case 3:
|
||||
if s1 < 0x90 || hicb < s1 {
|
||||
return runeErrorState, 1
|
||||
}
|
||||
case 4:
|
||||
if s1 < locb || 0x8F < s1 {
|
||||
return runeErrorState, 1
|
||||
}
|
||||
}
|
||||
if sz <= 2 {
|
||||
return validUTF8State, 2
|
||||
}
|
||||
s2 := s[2]
|
||||
if s2 < locb || hicb < s2 {
|
||||
return runeErrorState, 1
|
||||
}
|
||||
if sz <= 3 {
|
||||
// separator character prefixes: [2]byte{226, 128}
|
||||
if s0 == 226 && s1 == 128 {
|
||||
switch s2 {
|
||||
case lineSep:
|
||||
return lineSepState, 3
|
||||
case paragraphSep:
|
||||
return paragraphSepState, 3
|
||||
}
|
||||
}
|
||||
return validUTF8State, 3
|
||||
}
|
||||
s3 := s[3]
|
||||
if s3 < locb || hicb < s3 {
|
||||
return runeErrorState, 1
|
||||
}
|
||||
return validUTF8State, 4
|
||||
}
|
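decodeRuneInString is a trimmed copy of the unicode/utf8 decoder that additionally reports U+2028 and U+2029, the two separators that JSON encoders conventionally escape for HTML safety. The sketch below reproduces the same four-way classification with the standard library only; it is illustrative and not the internal API:

package main

import (
    "fmt"
    "unicode/utf8"
)

// classify mirrors the states the internal decoder distinguishes:
// valid UTF-8, invalid UTF-8, U+2028, and U+2029.
func classify(s string) string {
    r, size := utf8.DecodeRuneInString(s)
    switch {
    case r == utf8.RuneError && size <= 1:
        return "invalid UTF-8"
    case r == '\u2028':
        return "line separator U+2028"
    case r == '\u2029':
        return "paragraph separator U+2029"
    default:
        return fmt.Sprintf("valid rune %q (%d bytes)", r, size)
    }
}

func main() {
    for _, s := range []string{"a", "é", "\u2028", "\u2029", "\xff"} {
        fmt.Println(classify(s))
    }
}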
20
internal/encoder/encode_opcode_test.go
Normal file
20
internal/encoder/encode_opcode_test.go
Normal file
|
@@ -0,0 +1,20 @@
|
|||
package encoder
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
func TestDumpOpcode(t *testing.T) {
|
||||
ctx := TakeRuntimeContext()
|
||||
defer ReleaseRuntimeContext(ctx)
|
||||
var v interface{} = 1
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
typ := header.typ
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
codeSet, err := CompileToGetCodeSet(ctx, typeptr)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
codeSet.EscapeKeyCode.Dump()
|
||||
}
|
601
internal/encoder/encoder.go
Normal file
601
internal/encoder/encoder.go
Normal file
|
@@ -0,0 +1,601 @@
|
|||
package encoder
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
func (t OpType) IsMultipleOpHead() bool {
|
||||
switch t {
|
||||
case OpStructHead:
|
||||
return true
|
||||
case OpStructHeadSlice:
|
||||
return true
|
||||
case OpStructHeadArray:
|
||||
return true
|
||||
case OpStructHeadMap:
|
||||
return true
|
||||
case OpStructHeadStruct:
|
||||
return true
|
||||
case OpStructHeadOmitEmpty:
|
||||
return true
|
||||
case OpStructHeadOmitEmptySlice:
|
||||
return true
|
||||
case OpStructHeadOmitEmptyArray:
|
||||
return true
|
||||
case OpStructHeadOmitEmptyMap:
|
||||
return true
|
||||
case OpStructHeadOmitEmptyStruct:
|
||||
return true
|
||||
case OpStructHeadSlicePtr:
|
||||
return true
|
||||
case OpStructHeadOmitEmptySlicePtr:
|
||||
return true
|
||||
case OpStructHeadArrayPtr:
|
||||
return true
|
||||
case OpStructHeadOmitEmptyArrayPtr:
|
||||
return true
|
||||
case OpStructHeadMapPtr:
|
||||
return true
|
||||
case OpStructHeadOmitEmptyMapPtr:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (t OpType) IsMultipleOpField() bool {
|
||||
switch t {
|
||||
case OpStructField:
|
||||
return true
|
||||
case OpStructFieldSlice:
|
||||
return true
|
||||
case OpStructFieldArray:
|
||||
return true
|
||||
case OpStructFieldMap:
|
||||
return true
|
||||
case OpStructFieldStruct:
|
||||
return true
|
||||
case OpStructFieldOmitEmpty:
|
||||
return true
|
||||
case OpStructFieldOmitEmptySlice:
|
||||
return true
|
||||
case OpStructFieldOmitEmptyArray:
|
||||
return true
|
||||
case OpStructFieldOmitEmptyMap:
|
||||
return true
|
||||
case OpStructFieldOmitEmptyStruct:
|
||||
return true
|
||||
case OpStructFieldSlicePtr:
|
||||
return true
|
||||
case OpStructFieldOmitEmptySlicePtr:
|
||||
return true
|
||||
case OpStructFieldArrayPtr:
|
||||
return true
|
||||
case OpStructFieldOmitEmptyArrayPtr:
|
||||
return true
|
||||
case OpStructFieldMapPtr:
|
||||
return true
|
||||
case OpStructFieldOmitEmptyMapPtr:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type OpcodeSet struct {
|
||||
Type *runtime.Type
|
||||
NoescapeKeyCode *Opcode
|
||||
EscapeKeyCode *Opcode
|
||||
InterfaceNoescapeKeyCode *Opcode
|
||||
InterfaceEscapeKeyCode *Opcode
|
||||
CodeLength int
|
||||
EndCode *Opcode
|
||||
Code Code
|
||||
QueryCache map[string]*OpcodeSet
|
||||
cacheMu sync.RWMutex
|
||||
}
|
||||
|
||||
func (s *OpcodeSet) getQueryCache(hash string) *OpcodeSet {
|
||||
s.cacheMu.RLock()
|
||||
codeSet := s.QueryCache[hash]
|
||||
s.cacheMu.RUnlock()
|
||||
return codeSet
|
||||
}
|
||||
|
||||
func (s *OpcodeSet) setQueryCache(hash string, codeSet *OpcodeSet) {
|
||||
s.cacheMu.Lock()
|
||||
s.QueryCache[hash] = codeSet
|
||||
s.cacheMu.Unlock()
|
||||
}
|
||||
|
||||
type CompiledCode struct {
|
||||
Code *Opcode
|
||||
Linked bool // whether the recursive code has already been linked
|
||||
CurLen uintptr
|
||||
NextLen uintptr
|
||||
}
|
||||
|
||||
const StartDetectingCyclesAfter = 1000
|
||||
|
||||
func Load(base uintptr, idx uintptr) uintptr {
|
||||
addr := base + idx
|
||||
return **(**uintptr)(unsafe.Pointer(&addr))
|
||||
}
|
||||
|
||||
func Store(base uintptr, idx uintptr, p uintptr) {
|
||||
addr := base + idx
|
||||
**(**uintptr)(unsafe.Pointer(&addr)) = p
|
||||
}
|
||||
|
||||
func LoadNPtr(base uintptr, idx uintptr, ptrNum int) uintptr {
|
||||
addr := base + idx
|
||||
p := **(**uintptr)(unsafe.Pointer(&addr))
|
||||
if p == 0 {
|
||||
return 0
|
||||
}
|
||||
return PtrToPtr(p)
|
||||
/*
|
||||
for i := 0; i < ptrNum; i++ {
|
||||
if p == 0 {
|
||||
return p
|
||||
}
|
||||
p = PtrToPtr(p)
|
||||
}
|
||||
return p
|
||||
*/
|
||||
}
|
||||
|
||||
func PtrToUint64(p uintptr) uint64 { return **(**uint64)(unsafe.Pointer(&p)) }
|
||||
func PtrToFloat32(p uintptr) float32 { return **(**float32)(unsafe.Pointer(&p)) }
|
||||
func PtrToFloat64(p uintptr) float64 { return **(**float64)(unsafe.Pointer(&p)) }
|
||||
func PtrToBool(p uintptr) bool { return **(**bool)(unsafe.Pointer(&p)) }
|
||||
func PtrToBytes(p uintptr) []byte { return **(**[]byte)(unsafe.Pointer(&p)) }
|
||||
func PtrToNumber(p uintptr) json.Number { return **(**json.Number)(unsafe.Pointer(&p)) }
|
||||
func PtrToString(p uintptr) string { return **(**string)(unsafe.Pointer(&p)) }
|
||||
func PtrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) }
|
||||
func PtrToPtr(p uintptr) uintptr {
|
||||
return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p)))
|
||||
}
|
||||
func PtrToNPtr(p uintptr, ptrNum int) uintptr {
|
||||
for i := 0; i < ptrNum; i++ {
|
||||
if p == 0 {
|
||||
return 0
|
||||
}
|
||||
p = PtrToPtr(p)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func PtrToUnsafePtr(p uintptr) unsafe.Pointer {
|
||||
return *(*unsafe.Pointer)(unsafe.Pointer(&p))
|
||||
}
|
||||
func PtrToInterface(code *Opcode, p uintptr) interface{} {
|
||||
return *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||
typ: code.Type,
|
||||
ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)),
|
||||
}))
|
||||
}
|
||||
|
||||
func ErrUnsupportedValue(code *Opcode, ptr uintptr) *errors.UnsupportedValueError {
|
||||
v := *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||
typ: code.Type,
|
||||
ptr: *(*unsafe.Pointer)(unsafe.Pointer(&ptr)),
|
||||
}))
|
||||
return &errors.UnsupportedValueError{
|
||||
Value: reflect.ValueOf(v),
|
||||
Str: fmt.Sprintf("encountered a cycle via %s", code.Type),
|
||||
}
|
||||
}
|
||||
|
||||
func ErrUnsupportedFloat(v float64) *errors.UnsupportedValueError {
|
||||
return &errors.UnsupportedValueError{
|
||||
Value: reflect.ValueOf(v),
|
||||
Str: strconv.FormatFloat(v, 'g', -1, 64),
|
||||
}
|
||||
}
|
||||
|
||||
func ErrMarshalerWithCode(code *Opcode, err error) *errors.MarshalerError {
|
||||
return &errors.MarshalerError{
|
||||
Type: runtime.RType2Type(code.Type),
|
||||
Err: err,
|
||||
}
|
||||
}
|
||||
|
||||
type emptyInterface struct {
|
||||
typ *runtime.Type
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
type MapItem struct {
|
||||
Key []byte
|
||||
Value []byte
|
||||
}
|
||||
|
||||
type Mapslice struct {
|
||||
Items []MapItem
|
||||
}
|
||||
|
||||
func (m *Mapslice) Len() int {
|
||||
return len(m.Items)
|
||||
}
|
||||
|
||||
func (m *Mapslice) Less(i, j int) bool {
|
||||
return bytes.Compare(m.Items[i].Key, m.Items[j].Key) < 0
|
||||
}
|
||||
|
||||
func (m *Mapslice) Swap(i, j int) {
|
||||
m.Items[i], m.Items[j] = m.Items[j], m.Items[i]
|
||||
}
|
||||
|
||||
//nolint:structcheck,unused
|
||||
type mapIter struct {
|
||||
key unsafe.Pointer
|
||||
elem unsafe.Pointer
|
||||
t unsafe.Pointer
|
||||
h unsafe.Pointer
|
||||
buckets unsafe.Pointer
|
||||
bptr unsafe.Pointer
|
||||
overflow unsafe.Pointer
|
||||
oldoverflow unsafe.Pointer
|
||||
startBucket uintptr
|
||||
offset uint8
|
||||
wrapped bool
|
||||
B uint8
|
||||
i uint8
|
||||
bucket uintptr
|
||||
checkBucket uintptr
|
||||
}
|
||||
|
||||
type MapContext struct {
|
||||
Start int
|
||||
First int
|
||||
Idx int
|
||||
Slice *Mapslice
|
||||
Buf []byte
|
||||
Len int
|
||||
Iter mapIter
|
||||
}
|
||||
|
||||
var mapContextPool = sync.Pool{
|
||||
New: func() interface{} {
|
||||
return &MapContext{
|
||||
Slice: &Mapslice{},
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
func NewMapContext(mapLen int, unorderedMap bool) *MapContext {
|
||||
ctx := mapContextPool.Get().(*MapContext)
|
||||
if !unorderedMap {
|
||||
if len(ctx.Slice.Items) < mapLen {
|
||||
ctx.Slice.Items = make([]MapItem, mapLen)
|
||||
} else {
|
||||
ctx.Slice.Items = ctx.Slice.Items[:mapLen]
|
||||
}
|
||||
}
|
||||
ctx.Buf = ctx.Buf[:0]
|
||||
ctx.Iter = mapIter{}
|
||||
ctx.Idx = 0
|
||||
ctx.Len = mapLen
|
||||
return ctx
|
||||
}
|
||||
|
||||
func ReleaseMapContext(c *MapContext) {
|
||||
mapContextPool.Put(c)
|
||||
}
|
||||
|
||||
//go:linkname MapIterInit runtime.mapiterinit
|
||||
//go:noescape
|
||||
func MapIterInit(mapType *runtime.Type, m unsafe.Pointer, it *mapIter)
|
||||
|
||||
//go:linkname MapIterKey reflect.mapiterkey
|
||||
//go:noescape
|
||||
func MapIterKey(it *mapIter) unsafe.Pointer
|
||||
|
||||
//go:linkname MapIterNext reflect.mapiternext
|
||||
//go:noescape
|
||||
func MapIterNext(it *mapIter)
|
||||
|
||||
//go:linkname MapLen reflect.maplen
|
||||
//go:noescape
|
||||
func MapLen(m unsafe.Pointer) int
|
||||
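MapItem, Mapslice and the mapiter linknames above let the encoder walk a map without going through reflect.Value and, unless unordered output is requested via NewMapContext, sort the entries by key. The default observable behavior matches encoding/json:

package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    m := map[string]int{"b": 2, "a": 1, "c": 3}
    out, _ := json.Marshal(m)
    fmt.Println(string(out)) // {"a":1,"b":2,"c":3} -- keys come out sorted
}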
|
||||
func AppendByteSlice(_ *RuntimeContext, b []byte, src []byte) []byte {
|
||||
if src == nil {
|
||||
return append(b, `null`...)
|
||||
}
|
||||
encodedLen := base64.StdEncoding.EncodedLen(len(src))
|
||||
b = append(b, '"')
|
||||
pos := len(b)
|
||||
remainLen := cap(b[pos:])
|
||||
var buf []byte
|
||||
if remainLen > encodedLen {
|
||||
buf = b[pos : pos+encodedLen]
|
||||
} else {
|
||||
buf = make([]byte, encodedLen)
|
||||
}
|
||||
base64.StdEncoding.Encode(buf, src)
|
||||
return append(append(b, buf...), '"')
|
||||
}
|
||||
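AppendByteSlice emits a []byte value as a base64-encoded JSON string, reusing spare buffer capacity when it can, and writes null for a nil slice. For reference, the same observable behavior with the standard library:

package main

import (
    "encoding/json"
    "fmt"
)

type Blob struct {
    Data []byte `json:"data"`
}

func main() {
    b, _ := json.Marshal(Blob{Data: []byte("hi")})
    fmt.Println(string(b)) // {"data":"aGk="}

    b, _ = json.Marshal(Blob{})
    fmt.Println(string(b)) // {"data":null}
}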
|
||||
func AppendFloat32(_ *RuntimeContext, b []byte, v float32) []byte {
|
||||
f64 := float64(v)
|
||||
abs := math.Abs(f64)
|
||||
fmt := byte('f')
|
||||
// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
|
||||
if abs != 0 {
|
||||
f32 := float32(abs)
|
||||
if f32 < 1e-6 || f32 >= 1e21 {
|
||||
fmt = 'e'
|
||||
}
|
||||
}
|
||||
return strconv.AppendFloat(b, f64, fmt, -1, 32)
|
||||
}
|
||||
|
||||
func AppendFloat64(_ *RuntimeContext, b []byte, v float64) []byte {
|
||||
abs := math.Abs(v)
|
||||
fmt := byte('f')
|
||||
// Note: these cutoffs match encoding/json's float64 formatting (exponent form below 1e-6 or at/above 1e21).
|
||||
if abs != 0 {
|
||||
if abs < 1e-6 || abs >= 1e21 {
|
||||
fmt = 'e'
|
||||
}
|
||||
}
|
||||
return strconv.AppendFloat(b, v, fmt, -1, 64)
|
||||
}
|
||||
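AppendFloat32 and AppendFloat64 pick between plain and exponent notation at the same magnitude cutoffs encoding/json uses: exponent form below 1e-6 or at and above 1e21. A condensed sketch of the float64 case (encoding/json additionally rewrites exponents like e-07 to e-7, which this sketch does not):

package main

import (
    "fmt"
    "math"
    "strconv"
)

func appendFloat64(b []byte, v float64) []byte {
    abs := math.Abs(v)
    format := byte('f')
    if abs != 0 && (abs < 1e-6 || abs >= 1e21) {
        format = 'e'
    }
    return strconv.AppendFloat(b, v, format, -1, 64)
}

func main() {
    fmt.Println(string(appendFloat64(nil, 0.25))) // 0.25
    fmt.Println(string(appendFloat64(nil, 1e-7))) // 1e-07
    fmt.Println(string(appendFloat64(nil, 3e21))) // 3e+21
}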
|
||||
func AppendBool(_ *RuntimeContext, b []byte, v bool) []byte {
|
||||
if v {
|
||||
return append(b, "true"...)
|
||||
}
|
||||
return append(b, "false"...)
|
||||
}
|
||||
|
||||
var (
|
||||
floatTable = [256]bool{
|
||||
'0': true,
|
||||
'1': true,
|
||||
'2': true,
|
||||
'3': true,
|
||||
'4': true,
|
||||
'5': true,
|
||||
'6': true,
|
||||
'7': true,
|
||||
'8': true,
|
||||
'9': true,
|
||||
'.': true,
|
||||
'e': true,
|
||||
'E': true,
|
||||
'+': true,
|
||||
'-': true,
|
||||
}
|
||||
)
|
||||
|
||||
func AppendNumber(_ *RuntimeContext, b []byte, n json.Number) ([]byte, error) {
|
||||
if len(n) == 0 {
|
||||
return append(b, '0'), nil
|
||||
}
|
||||
for i := 0; i < len(n); i++ {
|
||||
if !floatTable[n[i]] {
|
||||
return nil, fmt.Errorf("json: invalid number literal %q", n)
|
||||
}
|
||||
}
|
||||
b = append(b, n...)
|
||||
return b, nil
|
||||
}
|
||||
|
||||
func AppendMarshalJSON(ctx *RuntimeContext, code *Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||
rv := reflect.ValueOf(v) // convert by dynamic interface type
|
||||
if (code.Flags & AddrForMarshalerFlags) != 0 {
|
||||
if rv.CanAddr() {
|
||||
rv = rv.Addr()
|
||||
} else {
|
||||
newV := reflect.New(rv.Type())
|
||||
newV.Elem().Set(rv)
|
||||
rv = newV
|
||||
}
|
||||
}
|
||||
|
||||
if rv.Kind() == reflect.Ptr && rv.IsNil() {
|
||||
return AppendNull(ctx, b), nil
|
||||
}
|
||||
|
||||
v = rv.Interface()
|
||||
var bb []byte
|
||||
if (code.Flags & MarshalerContextFlags) != 0 {
|
||||
marshaler, ok := v.(marshalerContext)
|
||||
if !ok {
|
||||
return AppendNull(ctx, b), nil
|
||||
}
|
||||
stdctx := ctx.Option.Context
|
||||
if ctx.Option.Flag&FieldQueryOption != 0 {
|
||||
stdctx = SetFieldQueryToContext(stdctx, code.FieldQuery)
|
||||
}
|
||||
b, err := marshaler.MarshalJSON(stdctx)
|
||||
if err != nil {
|
||||
return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
|
||||
}
|
||||
bb = b
|
||||
} else {
|
||||
marshaler, ok := v.(json.Marshaler)
|
||||
if !ok {
|
||||
return AppendNull(ctx, b), nil
|
||||
}
|
||||
b, err := marshaler.MarshalJSON()
|
||||
if err != nil {
|
||||
return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
|
||||
}
|
||||
bb = b
|
||||
}
|
||||
marshalBuf := ctx.MarshalBuf[:0]
|
||||
marshalBuf = append(append(marshalBuf, bb...), nul)
|
||||
compactedBuf, err := compact(b, marshalBuf, (ctx.Option.Flag&HTMLEscapeOption) != 0)
|
||||
if err != nil {
|
||||
return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
|
||||
}
|
||||
ctx.MarshalBuf = marshalBuf
|
||||
return compactedBuf, nil
|
||||
}
|
||||
|
||||
func AppendMarshalJSONIndent(ctx *RuntimeContext, code *Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||
rv := reflect.ValueOf(v) // convert by dynamic interface type
|
||||
if (code.Flags & AddrForMarshalerFlags) != 0 {
|
||||
if rv.CanAddr() {
|
||||
rv = rv.Addr()
|
||||
} else {
|
||||
newV := reflect.New(rv.Type())
|
||||
newV.Elem().Set(rv)
|
||||
rv = newV
|
||||
}
|
||||
}
|
||||
v = rv.Interface()
|
||||
var bb []byte
|
||||
if (code.Flags & MarshalerContextFlags) != 0 {
|
||||
marshaler, ok := v.(marshalerContext)
|
||||
if !ok {
|
||||
return AppendNull(ctx, b), nil
|
||||
}
|
||||
b, err := marshaler.MarshalJSON(ctx.Option.Context)
|
||||
if err != nil {
|
||||
return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
|
||||
}
|
||||
bb = b
|
||||
} else {
|
||||
marshaler, ok := v.(json.Marshaler)
|
||||
if !ok {
|
||||
return AppendNull(ctx, b), nil
|
||||
}
|
||||
b, err := marshaler.MarshalJSON()
|
||||
if err != nil {
|
||||
return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
|
||||
}
|
||||
bb = b
|
||||
}
|
||||
marshalBuf := ctx.MarshalBuf[:0]
|
||||
marshalBuf = append(append(marshalBuf, bb...), nul)
|
||||
indentedBuf, err := doIndent(
|
||||
b,
|
||||
marshalBuf,
|
||||
string(ctx.Prefix)+strings.Repeat(string(ctx.IndentStr), int(ctx.BaseIndent+code.Indent)),
|
||||
string(ctx.IndentStr),
|
||||
(ctx.Option.Flag&HTMLEscapeOption) != 0,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
|
||||
}
|
||||
ctx.MarshalBuf = marshalBuf
|
||||
return indentedBuf, nil
|
||||
}
|
||||
|
||||
func AppendMarshalText(ctx *RuntimeContext, code *Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||
rv := reflect.ValueOf(v) // convert by dynamic interface type
|
||||
if (code.Flags & AddrForMarshalerFlags) != 0 {
|
||||
if rv.CanAddr() {
|
||||
rv = rv.Addr()
|
||||
} else {
|
||||
newV := reflect.New(rv.Type())
|
||||
newV.Elem().Set(rv)
|
||||
rv = newV
|
||||
}
|
||||
}
|
||||
v = rv.Interface()
|
||||
marshaler, ok := v.(encoding.TextMarshaler)
|
||||
if !ok {
|
||||
return AppendNull(ctx, b), nil
|
||||
}
|
||||
bytes, err := marshaler.MarshalText()
|
||||
if err != nil {
|
||||
return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
|
||||
}
|
||||
return AppendString(ctx, b, *(*string)(unsafe.Pointer(&bytes))), nil
|
||||
}
|
||||
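AppendMarshalText covers values implementing encoding.TextMarshaler: the marshaled text is written as a JSON string. The same behavior observed through the standard library, with a hypothetical Celsius type:

package main

import (
    "encoding/json"
    "fmt"
)

type Celsius float64

// MarshalText makes Celsius encode as a JSON string rather than a number.
func (c Celsius) MarshalText() ([]byte, error) {
    return []byte(fmt.Sprintf("%.1f°C", float64(c))), nil
}

func main() {
    b, _ := json.Marshal(map[string]Celsius{"temp": 21.5})
    fmt.Println(string(b)) // {"temp":"21.5°C"}
}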
|
||||
func AppendMarshalTextIndent(ctx *RuntimeContext, code *Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||
rv := reflect.ValueOf(v) // convert by dynamic interface type
|
||||
if (code.Flags & AddrForMarshalerFlags) != 0 {
|
||||
if rv.CanAddr() {
|
||||
rv = rv.Addr()
|
||||
} else {
|
||||
newV := reflect.New(rv.Type())
|
||||
newV.Elem().Set(rv)
|
||||
rv = newV
|
||||
}
|
||||
}
|
||||
v = rv.Interface()
|
||||
marshaler, ok := v.(encoding.TextMarshaler)
|
||||
if !ok {
|
||||
return AppendNull(ctx, b), nil
|
||||
}
|
||||
bytes, err := marshaler.MarshalText()
|
||||
if err != nil {
|
||||
return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
|
||||
}
|
||||
return AppendString(ctx, b, *(*string)(unsafe.Pointer(&bytes))), nil
|
||||
}
|
||||
|
||||
func AppendNull(_ *RuntimeContext, b []byte) []byte {
|
||||
return append(b, "null"...)
|
||||
}
|
||||
|
||||
func AppendComma(_ *RuntimeContext, b []byte) []byte {
|
||||
return append(b, ',')
|
||||
}
|
||||
|
||||
func AppendCommaIndent(_ *RuntimeContext, b []byte) []byte {
|
||||
return append(b, ',', '\n')
|
||||
}
|
||||
|
||||
func AppendStructEnd(_ *RuntimeContext, b []byte) []byte {
|
||||
return append(b, '}', ',')
|
||||
}
|
||||
|
||||
func AppendStructEndIndent(ctx *RuntimeContext, code *Opcode, b []byte) []byte {
|
||||
b = append(b, '\n')
|
||||
b = append(b, ctx.Prefix...)
|
||||
indentNum := ctx.BaseIndent + code.Indent - 1
|
||||
for i := uint32(0); i < indentNum; i++ {
|
||||
b = append(b, ctx.IndentStr...)
|
||||
}
|
||||
return append(b, '}', ',', '\n')
|
||||
}
|
||||
|
||||
func AppendIndent(ctx *RuntimeContext, b []byte, indent uint32) []byte {
|
||||
b = append(b, ctx.Prefix...)
|
||||
indentNum := ctx.BaseIndent + indent
|
||||
for i := uint32(0); i < indentNum; i++ {
|
||||
b = append(b, ctx.IndentStr...)
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
func IsNilForMarshaler(v interface{}) bool {
|
||||
rv := reflect.ValueOf(v)
|
||||
switch rv.Kind() {
|
||||
case reflect.Bool:
|
||||
return !rv.Bool()
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return rv.Int() == 0
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return rv.Uint() == 0
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return math.Float64bits(rv.Float()) == 0
|
||||
case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Func:
|
||||
return rv.IsNil()
|
||||
case reflect.Slice:
|
||||
return rv.IsNil() || rv.Len() == 0
|
||||
case reflect.String:
|
||||
return rv.Len() == 0
|
||||
}
|
||||
return false
|
||||
}
|
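IsNilForMarshaler appears to be the emptiness test used for omitempty fields handled through a marshaler; for the basic kinds it mirrors the usual omitempty rules (zero numbers, empty strings, nil or empty slices and maps, nil pointers). For reference, ordinary omitempty behavior with the standard library:

package main

import (
    "encoding/json"
    "fmt"
)

type Opts struct {
    Name  string            `json:"name,omitempty"`
    Count int               `json:"count,omitempty"`
    Tags  []string          `json:"tags,omitempty"`
    Meta  map[string]string `json:"meta,omitempty"`
}

func main() {
    b, _ := json.Marshal(Opts{Name: "x"})
    fmt.Println(string(b)) // {"name":"x"} -- the zero-valued fields are skipped
}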
211
internal/encoder/indent.go
Normal file
211
internal/encoder/indent.go
Normal file
|
@@ -0,0 +1,211 @@
|
|||
package encoder
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
func takeIndentSrcRuntimeContext(src []byte) (*RuntimeContext, []byte) {
|
||||
ctx := TakeRuntimeContext()
|
||||
buf := ctx.Buf[:0]
|
||||
buf = append(append(buf, src...), nul)
|
||||
ctx.Buf = buf
|
||||
return ctx, buf
|
||||
}
|
||||
|
||||
func Indent(buf *bytes.Buffer, src []byte, prefix, indentStr string) error {
|
||||
if len(src) == 0 {
|
||||
return errors.ErrUnexpectedEndOfJSON("", 0)
|
||||
}
|
||||
|
||||
srcCtx, srcBuf := takeIndentSrcRuntimeContext(src)
|
||||
dstCtx := TakeRuntimeContext()
|
||||
dst := dstCtx.Buf[:0]
|
||||
|
||||
dst, err := indentAndWrite(buf, dst, srcBuf, prefix, indentStr)
|
||||
if err != nil {
|
||||
ReleaseRuntimeContext(srcCtx)
|
||||
ReleaseRuntimeContext(dstCtx)
|
||||
return err
|
||||
}
|
||||
dstCtx.Buf = dst
|
||||
ReleaseRuntimeContext(srcCtx)
|
||||
ReleaseRuntimeContext(dstCtx)
|
||||
return nil
|
||||
}
|
||||
|
||||
func indentAndWrite(buf *bytes.Buffer, dst []byte, src []byte, prefix, indentStr string) ([]byte, error) {
|
||||
dst, err := doIndent(dst, src, prefix, indentStr, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if _, err := buf.Write(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return dst, nil
|
||||
}
|
||||
|
||||
func doIndent(dst, src []byte, prefix, indentStr string, escape bool) ([]byte, error) {
|
||||
buf, cursor, err := indentValue(dst, src, 0, 0, []byte(prefix), []byte(indentStr), escape)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := validateEndBuf(src, cursor); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
func indentValue(
|
||||
dst []byte,
|
||||
src []byte,
|
||||
indentNum int,
|
||||
cursor int64,
|
||||
prefix []byte,
|
||||
indentBytes []byte,
|
||||
escape bool) ([]byte, int64, error) {
|
||||
for {
|
||||
switch src[cursor] {
|
||||
case ' ', '\t', '\n', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case '{':
|
||||
return indentObject(dst, src, indentNum, cursor, prefix, indentBytes, escape)
|
||||
case '}':
|
||||
return nil, 0, errors.ErrSyntax("unexpected character '}'", cursor)
|
||||
case '[':
|
||||
return indentArray(dst, src, indentNum, cursor, prefix, indentBytes, escape)
|
||||
case ']':
|
||||
return nil, 0, errors.ErrSyntax("unexpected character ']'", cursor)
|
||||
case '"':
|
||||
return compactString(dst, src, cursor, escape)
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return compactNumber(dst, src, cursor)
|
||||
case 't':
|
||||
return compactTrue(dst, src, cursor)
|
||||
case 'f':
|
||||
return compactFalse(dst, src, cursor)
|
||||
case 'n':
|
||||
return compactNull(dst, src, cursor)
|
||||
default:
|
||||
return nil, 0, errors.ErrSyntax(fmt.Sprintf("unexpected character '%c'", src[cursor]), cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func indentObject(
|
||||
dst []byte,
|
||||
src []byte,
|
||||
indentNum int,
|
||||
cursor int64,
|
||||
prefix []byte,
|
||||
indentBytes []byte,
|
||||
escape bool) ([]byte, int64, error) {
|
||||
if src[cursor] == '{' {
|
||||
dst = append(dst, '{')
|
||||
} else {
|
||||
return nil, 0, errors.ErrExpected("expected { character for object value", cursor)
|
||||
}
|
||||
cursor = skipWhiteSpace(src, cursor+1)
|
||||
if src[cursor] == '}' {
|
||||
dst = append(dst, '}')
|
||||
return dst, cursor + 1, nil
|
||||
}
|
||||
indentNum++
|
||||
var err error
|
||||
for {
|
||||
dst = append(append(dst, '\n'), prefix...)
|
||||
for i := 0; i < indentNum; i++ {
|
||||
dst = append(dst, indentBytes...)
|
||||
}
|
||||
cursor = skipWhiteSpace(src, cursor)
|
||||
dst, cursor, err = compactString(dst, src, cursor, escape)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor = skipWhiteSpace(src, cursor)
|
||||
if src[cursor] != ':' {
|
||||
return nil, 0, errors.ErrSyntax(
|
||||
fmt.Sprintf("invalid character '%c' after object key", src[cursor]),
|
||||
cursor+1,
|
||||
)
|
||||
}
|
||||
dst = append(dst, ':', ' ')
|
||||
dst, cursor, err = indentValue(dst, src, indentNum, cursor+1, prefix, indentBytes, escape)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor = skipWhiteSpace(src, cursor)
|
||||
switch src[cursor] {
|
||||
case '}':
|
||||
dst = append(append(dst, '\n'), prefix...)
|
||||
for i := 0; i < indentNum-1; i++ {
|
||||
dst = append(dst, indentBytes...)
|
||||
}
|
||||
dst = append(dst, '}')
|
||||
cursor++
|
||||
return dst, cursor, nil
|
||||
case ',':
|
||||
dst = append(dst, ',')
|
||||
default:
|
||||
return nil, 0, errors.ErrSyntax(
|
||||
fmt.Sprintf("invalid character '%c' after object key:value pair", src[cursor]),
|
||||
cursor+1,
|
||||
)
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func indentArray(
|
||||
dst []byte,
|
||||
src []byte,
|
||||
indentNum int,
|
||||
cursor int64,
|
||||
prefix []byte,
|
||||
indentBytes []byte,
|
||||
escape bool) ([]byte, int64, error) {
|
||||
if src[cursor] == '[' {
|
||||
dst = append(dst, '[')
|
||||
} else {
|
||||
return nil, 0, errors.ErrExpected("expected [ character for array value", cursor)
|
||||
}
|
||||
cursor = skipWhiteSpace(src, cursor+1)
|
||||
if src[cursor] == ']' {
|
||||
dst = append(dst, ']')
|
||||
return dst, cursor + 1, nil
|
||||
}
|
||||
indentNum++
|
||||
var err error
|
||||
for {
|
||||
dst = append(append(dst, '\n'), prefix...)
|
||||
for i := 0; i < indentNum; i++ {
|
||||
dst = append(dst, indentBytes...)
|
||||
}
|
||||
dst, cursor, err = indentValue(dst, src, indentNum, cursor, prefix, indentBytes, escape)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor = skipWhiteSpace(src, cursor)
|
||||
switch src[cursor] {
|
||||
case ']':
|
||||
dst = append(append(dst, '\n'), prefix...)
|
||||
for i := 0; i < indentNum-1; i++ {
|
||||
dst = append(dst, indentBytes...)
|
||||
}
|
||||
dst = append(dst, ']')
|
||||
cursor++
|
||||
return dst, cursor, nil
|
||||
case ',':
|
||||
dst = append(dst, ',')
|
||||
default:
|
||||
return nil, 0, errors.ErrSyntax(
|
||||
fmt.Sprintf("invalid character '%c' after array value", src[cursor]),
|
||||
cursor+1,
|
||||
)
|
||||
}
|
||||
cursor++
|
||||
}
|
||||
}
|
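Indent and doIndent re-walk an already encoded document and re-emit it with a prefix and a per-level indent string, validating the syntax as they go. The intended behavior corresponds to encoding/json.Indent:

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
)

func main() {
    src := []byte(`{"a":[1,2],"b":{"c":true}}`)
    var out bytes.Buffer
    if err := json.Indent(&out, src, "", "  "); err != nil {
        panic(err)
    }
    fmt.Println(out.String()) // the document re-printed with two-space indentation
}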
176
internal/encoder/int.go
Normal file
176
internal/encoder/int.go
Normal file
|
@@ -0,0 +1,176 @@
|
|||
// This file's processing code is inspired by https://github.com/segmentio/encoding.
|
||||
// The license notice is as follows.
|
||||
//
|
||||
// # MIT License
|
||||
//
|
||||
// Copyright (c) 2019 Segment.io, Inc.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
package encoder
|
||||
|
||||
import (
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
var endianness int
|
||||
|
||||
func init() {
|
||||
var b [2]byte
|
||||
*(*uint16)(unsafe.Pointer(&b)) = uint16(0xABCD)
|
||||
|
||||
switch b[0] {
|
||||
case 0xCD:
|
||||
endianness = 0 // LE
|
||||
case 0xAB:
|
||||
endianness = 1 // BE
|
||||
default:
|
||||
panic("could not determine endianness")
|
||||
}
|
||||
}
|
||||
|
||||
// "00010203...96979899" cast to []uint16
|
||||
var intLELookup = [100]uint16{
|
||||
0x3030, 0x3130, 0x3230, 0x3330, 0x3430, 0x3530, 0x3630, 0x3730, 0x3830, 0x3930,
|
||||
0x3031, 0x3131, 0x3231, 0x3331, 0x3431, 0x3531, 0x3631, 0x3731, 0x3831, 0x3931,
|
||||
0x3032, 0x3132, 0x3232, 0x3332, 0x3432, 0x3532, 0x3632, 0x3732, 0x3832, 0x3932,
|
||||
0x3033, 0x3133, 0x3233, 0x3333, 0x3433, 0x3533, 0x3633, 0x3733, 0x3833, 0x3933,
|
||||
0x3034, 0x3134, 0x3234, 0x3334, 0x3434, 0x3534, 0x3634, 0x3734, 0x3834, 0x3934,
|
||||
0x3035, 0x3135, 0x3235, 0x3335, 0x3435, 0x3535, 0x3635, 0x3735, 0x3835, 0x3935,
|
||||
0x3036, 0x3136, 0x3236, 0x3336, 0x3436, 0x3536, 0x3636, 0x3736, 0x3836, 0x3936,
|
||||
0x3037, 0x3137, 0x3237, 0x3337, 0x3437, 0x3537, 0x3637, 0x3737, 0x3837, 0x3937,
|
||||
0x3038, 0x3138, 0x3238, 0x3338, 0x3438, 0x3538, 0x3638, 0x3738, 0x3838, 0x3938,
|
||||
0x3039, 0x3139, 0x3239, 0x3339, 0x3439, 0x3539, 0x3639, 0x3739, 0x3839, 0x3939,
|
||||
}
|
||||
|
||||
var intBELookup = [100]uint16{
|
||||
0x3030, 0x3031, 0x3032, 0x3033, 0x3034, 0x3035, 0x3036, 0x3037, 0x3038, 0x3039,
|
||||
0x3130, 0x3131, 0x3132, 0x3133, 0x3134, 0x3135, 0x3136, 0x3137, 0x3138, 0x3139,
|
||||
0x3230, 0x3231, 0x3232, 0x3233, 0x3234, 0x3235, 0x3236, 0x3237, 0x3238, 0x3239,
|
||||
0x3330, 0x3331, 0x3332, 0x3333, 0x3334, 0x3335, 0x3336, 0x3337, 0x3338, 0x3339,
|
||||
0x3430, 0x3431, 0x3432, 0x3433, 0x3434, 0x3435, 0x3436, 0x3437, 0x3438, 0x3439,
|
||||
0x3530, 0x3531, 0x3532, 0x3533, 0x3534, 0x3535, 0x3536, 0x3537, 0x3538, 0x3539,
|
||||
0x3630, 0x3631, 0x3632, 0x3633, 0x3634, 0x3635, 0x3636, 0x3637, 0x3638, 0x3639,
|
||||
0x3730, 0x3731, 0x3732, 0x3733, 0x3734, 0x3735, 0x3736, 0x3737, 0x3738, 0x3739,
|
||||
0x3830, 0x3831, 0x3832, 0x3833, 0x3834, 0x3835, 0x3836, 0x3837, 0x3838, 0x3839,
|
||||
0x3930, 0x3931, 0x3932, 0x3933, 0x3934, 0x3935, 0x3936, 0x3937, 0x3938, 0x3939,
|
||||
}
|
||||
|
||||
var intLookup = [2]*[100]uint16{&intLELookup, &intBELookup}
|
||||
|
||||
func numMask(numBitSize uint8) uint64 {
|
||||
return 1<<numBitSize - 1
|
||||
}
|
||||
|
||||
func AppendInt(_ *RuntimeContext, out []byte, p uintptr, code *Opcode) []byte {
|
||||
var u64 uint64
|
||||
switch code.NumBitSize {
|
||||
case 8:
|
||||
u64 = (uint64)(**(**uint8)(unsafe.Pointer(&p)))
|
||||
case 16:
|
||||
u64 = (uint64)(**(**uint16)(unsafe.Pointer(&p)))
|
||||
case 32:
|
||||
u64 = (uint64)(**(**uint32)(unsafe.Pointer(&p)))
|
||||
case 64:
|
||||
u64 = **(**uint64)(unsafe.Pointer(&p))
|
||||
}
|
||||
mask := numMask(code.NumBitSize)
|
||||
n := u64 & mask
|
||||
negative := (u64>>(code.NumBitSize-1))&1 == 1
|
||||
if !negative {
|
||||
if n < 10 {
|
||||
return append(out, byte(n+'0'))
|
||||
} else if n < 100 {
|
||||
u := intLELookup[n]
|
||||
return append(out, byte(u), byte(u>>8))
|
||||
}
|
||||
} else {
|
||||
n = -n & mask
|
||||
}
|
||||
|
||||
lookup := intLookup[endianness]
|
||||
|
||||
var b [22]byte
|
||||
u := (*[11]uint16)(unsafe.Pointer(&b))
|
||||
i := 11
|
||||
|
||||
for n >= 100 {
|
||||
j := n % 100
|
||||
n /= 100
|
||||
i--
|
||||
u[i] = lookup[j]
|
||||
}
|
||||
|
||||
i--
|
||||
u[i] = lookup[n]
|
||||
|
||||
i *= 2 // convert to byte index
|
||||
if n < 10 {
|
||||
i++ // remove leading zero
|
||||
}
|
||||
if negative {
|
||||
i--
|
||||
b[i] = '-'
|
||||
}
|
||||
|
||||
return append(out, b[i:]...)
|
||||
}
|
||||
|
||||
func AppendUint(_ *RuntimeContext, out []byte, p uintptr, code *Opcode) []byte {
|
||||
var u64 uint64
|
||||
switch code.NumBitSize {
|
||||
case 8:
|
||||
u64 = (uint64)(**(**uint8)(unsafe.Pointer(&p)))
|
||||
case 16:
|
||||
u64 = (uint64)(**(**uint16)(unsafe.Pointer(&p)))
|
||||
case 32:
|
||||
u64 = (uint64)(**(**uint32)(unsafe.Pointer(&p)))
|
||||
case 64:
|
||||
u64 = **(**uint64)(unsafe.Pointer(&p))
|
||||
}
|
||||
mask := numMask(code.NumBitSize)
|
||||
n := u64 & mask
|
||||
if n < 10 {
|
||||
return append(out, byte(n+'0'))
|
||||
} else if n < 100 {
|
||||
u := intLELookup[n]
|
||||
return append(out, byte(u), byte(u>>8))
|
||||
}
|
||||
|
||||
lookup := intLookup[endianness]
|
||||
|
||||
var b [22]byte
|
||||
u := (*[11]uint16)(unsafe.Pointer(&b))
|
||||
i := 11
|
||||
|
||||
for n >= 100 {
|
||||
j := n % 100
|
||||
n /= 100
|
||||
i--
|
||||
u[i] = lookup[j]
|
||||
}
|
||||
|
||||
i--
|
||||
u[i] = lookup[n]
|
||||
|
||||
i *= 2 // convert to byte index
|
||||
if n < 10 {
|
||||
i++ // remove leading zero
|
||||
}
|
||||
return append(out, b[i:]...)
|
||||
}
|
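AppendInt and AppendUint format integers two decimal digits per step through a precomputed uint16 table, picking the little- or big-endian variant at init time so both bytes land in the right order. The simplified sketch below shows the same pairwise-digit idea with a plain string table and no unsafe or endianness handling:

package main

import "fmt"

// digitPairs is the "00".."99" table; each pair of bytes is one
// quotient-remainder step of the conversion.
const digitPairs = "00010203040506070809" +
    "10111213141516171819" +
    "20212223242526272829" +
    "30313233343536373839" +
    "40414243444546474849" +
    "50515253545556575859" +
    "60616263646566676869" +
    "70717273747576777879" +
    "80818283848586878889" +
    "90919293949596979899"

func appendUint(dst []byte, n uint64) []byte {
    var buf [20]byte // enough for the 20 digits of a max uint64
    i := len(buf)
    for n >= 100 {
        j := n % 100
        n /= 100
        i -= 2
        copy(buf[i:], digitPairs[j*2:j*2+2])
    }
    if n < 10 {
        i--
        buf[i] = byte('0' + n)
    } else {
        i -= 2
        copy(buf[i:], digitPairs[n*2:n*2+2])
    }
    return append(dst, buf[i:]...)
}

func main() {
    fmt.Println(string(appendUint(nil, 0)))       // 0
    fmt.Println(string(appendUint(nil, 12345)))   // 12345
    fmt.Println(string(appendUint(nil, 1000000))) // 1000000
}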
9
internal/encoder/map112.go
Normal file
9
internal/encoder/map112.go
Normal file
|
@@ -0,0 +1,9 @@
|
|||
//go:build !go1.13
|
||||
// +build !go1.13
|
||||
|
||||
package encoder
|
||||
|
||||
import "unsafe"
|
||||
|
||||
//go:linkname MapIterValue reflect.mapitervalue
|
||||
func MapIterValue(it *mapIter) unsafe.Pointer
|
9
internal/encoder/map113.go
Normal file
9
internal/encoder/map113.go
Normal file
|
@@ -0,0 +1,9 @@
|
|||
//go:build go1.13
|
||||
// +build go1.13
|
||||
|
||||
package encoder
|
||||
|
||||
import "unsafe"
|
||||
|
||||
//go:linkname MapIterValue reflect.mapiterelem
|
||||
func MapIterValue(it *mapIter) unsafe.Pointer
|
752
internal/encoder/opcode.go
Normal file
752
internal/encoder/opcode.go
Normal file
|
@@ -0,0 +1,752 @@
|
|||
package encoder
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
const uintptrSize = 4 << (^uintptr(0) >> 63)
|
||||
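The uintptrSize constant above works out the word size without build tags: ^uintptr(0)>>63 is 1 on 64-bit targets and 0 on 32-bit ones, so the shift yields 8 or 4 bytes. A one-line check:

package main

import "fmt"

func main() {
    const uintptrSize = 4 << (^uintptr(0) >> 63) // 8 on 64-bit, 4 on 32-bit
    fmt.Println(uintptrSize)
}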
|
||||
type OpFlags uint16
|
||||
|
||||
const (
|
||||
AnonymousHeadFlags OpFlags = 1 << 0
|
||||
AnonymousKeyFlags OpFlags = 1 << 1
|
||||
IndirectFlags OpFlags = 1 << 2
|
||||
IsTaggedKeyFlags OpFlags = 1 << 3
|
||||
NilCheckFlags OpFlags = 1 << 4
|
||||
AddrForMarshalerFlags OpFlags = 1 << 5
|
||||
IsNextOpPtrTypeFlags OpFlags = 1 << 6
|
||||
IsNilableTypeFlags OpFlags = 1 << 7
|
||||
MarshalerContextFlags OpFlags = 1 << 8
|
||||
NonEmptyInterfaceFlags OpFlags = 1 << 9
|
||||
)
|
||||
|
||||
type Opcode struct {
|
||||
Op OpType // operation type
|
||||
Idx uint32 // offset to access ptr
|
||||
Next *Opcode // next opcode
|
||||
End *Opcode // array/slice/struct/map end
|
||||
NextField *Opcode // next struct field
|
||||
Key string // struct field key
|
||||
Offset uint32 // offset size from struct header
|
||||
PtrNum uint8 // pointer number: e.g. double pointer is 2.
|
||||
NumBitSize uint8
|
||||
Flags OpFlags
|
||||
|
||||
Type *runtime.Type // go type
|
||||
Jmp *CompiledCode // for recursive call
|
||||
FieldQuery *FieldQuery // field query for Interface / MarshalJSON / MarshalText
|
||||
ElemIdx uint32 // offset to access array/slice elem
|
||||
Length uint32 // offset to access slice length or array length
|
||||
Indent uint32 // indent number
|
||||
Size uint32 // array/slice elem size
|
||||
DisplayIdx uint32 // opcode index
|
||||
DisplayKey string // key text to display
|
||||
}
|
||||
|
||||
func (c *Opcode) Validate() error {
|
||||
var prevIdx uint32
|
||||
for code := c; !code.IsEnd(); {
|
||||
if prevIdx != 0 {
|
||||
if code.DisplayIdx != prevIdx+1 {
|
||||
return fmt.Errorf(
|
||||
"invalid index. previous display index is %d but next is %d. dump = %s",
|
||||
prevIdx, code.DisplayIdx, c.Dump(),
|
||||
)
|
||||
}
|
||||
}
|
||||
prevIdx = code.DisplayIdx
|
||||
code = code.IterNext()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Opcode) IterNext() *Opcode {
|
||||
if c == nil {
|
||||
return nil
|
||||
}
|
||||
switch c.Op.CodeType() {
|
||||
case CodeArrayElem, CodeSliceElem, CodeMapKey:
|
||||
return c.End
|
||||
default:
|
||||
return c.Next
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Opcode) IsEnd() bool {
|
||||
if c == nil {
|
||||
return true
|
||||
}
|
||||
return c.Op == OpEnd || c.Op == OpInterfaceEnd || c.Op == OpRecursiveEnd
|
||||
}
|
||||
|
||||
func (c *Opcode) MaxIdx() uint32 {
|
||||
max := uint32(0)
|
||||
for _, value := range []uint32{
|
||||
c.Idx,
|
||||
c.ElemIdx,
|
||||
c.Length,
|
||||
c.Size,
|
||||
} {
|
||||
if max < value {
|
||||
max = value
|
||||
}
|
||||
}
|
||||
return max
|
||||
}
|
||||
|
||||
func (c *Opcode) ToHeaderType(isString bool) OpType {
|
||||
switch c.Op {
|
||||
case OpInt:
|
||||
if isString {
|
||||
return OpStructHeadIntString
|
||||
}
|
||||
return OpStructHeadInt
|
||||
case OpIntPtr:
|
||||
if isString {
|
||||
return OpStructHeadIntPtrString
|
||||
}
|
||||
return OpStructHeadIntPtr
|
||||
case OpUint:
|
||||
if isString {
|
||||
return OpStructHeadUintString
|
||||
}
|
||||
return OpStructHeadUint
|
||||
case OpUintPtr:
|
||||
if isString {
|
||||
return OpStructHeadUintPtrString
|
||||
}
|
||||
return OpStructHeadUintPtr
|
||||
case OpFloat32:
|
||||
if isString {
|
||||
return OpStructHeadFloat32String
|
||||
}
|
||||
return OpStructHeadFloat32
|
||||
case OpFloat32Ptr:
|
||||
if isString {
|
||||
return OpStructHeadFloat32PtrString
|
||||
}
|
||||
return OpStructHeadFloat32Ptr
|
||||
case OpFloat64:
|
||||
if isString {
|
||||
return OpStructHeadFloat64String
|
||||
}
|
||||
return OpStructHeadFloat64
|
||||
case OpFloat64Ptr:
|
||||
if isString {
|
||||
return OpStructHeadFloat64PtrString
|
||||
}
|
||||
return OpStructHeadFloat64Ptr
|
||||
case OpString:
|
||||
if isString {
|
||||
return OpStructHeadStringString
|
||||
}
|
||||
return OpStructHeadString
|
||||
case OpStringPtr:
|
||||
if isString {
|
||||
return OpStructHeadStringPtrString
|
||||
}
|
||||
return OpStructHeadStringPtr
|
||||
case OpNumber:
|
||||
if isString {
|
||||
return OpStructHeadNumberString
|
||||
}
|
||||
return OpStructHeadNumber
|
||||
case OpNumberPtr:
|
||||
if isString {
|
||||
return OpStructHeadNumberPtrString
|
||||
}
|
||||
return OpStructHeadNumberPtr
|
||||
case OpBool:
|
||||
if isString {
|
||||
return OpStructHeadBoolString
|
||||
}
|
||||
return OpStructHeadBool
|
||||
case OpBoolPtr:
|
||||
if isString {
|
||||
return OpStructHeadBoolPtrString
|
||||
}
|
||||
return OpStructHeadBoolPtr
|
||||
case OpBytes:
|
||||
return OpStructHeadBytes
|
||||
case OpBytesPtr:
|
||||
return OpStructHeadBytesPtr
|
||||
case OpMap:
|
||||
return OpStructHeadMap
|
||||
case OpMapPtr:
|
||||
c.Op = OpMap
|
||||
return OpStructHeadMapPtr
|
||||
case OpArray:
|
||||
return OpStructHeadArray
|
||||
case OpArrayPtr:
|
||||
c.Op = OpArray
|
||||
return OpStructHeadArrayPtr
|
||||
case OpSlice:
|
||||
return OpStructHeadSlice
|
||||
case OpSlicePtr:
|
||||
c.Op = OpSlice
|
||||
return OpStructHeadSlicePtr
|
||||
case OpMarshalJSON:
|
||||
return OpStructHeadMarshalJSON
|
||||
case OpMarshalJSONPtr:
|
||||
return OpStructHeadMarshalJSONPtr
|
||||
case OpMarshalText:
|
||||
return OpStructHeadMarshalText
|
||||
case OpMarshalTextPtr:
|
||||
return OpStructHeadMarshalTextPtr
|
||||
}
|
||||
return OpStructHead
|
||||
}
|
||||
|
||||
func (c *Opcode) ToFieldType(isString bool) OpType {
|
||||
switch c.Op {
|
||||
case OpInt:
|
||||
if isString {
|
||||
return OpStructFieldIntString
|
||||
}
|
||||
return OpStructFieldInt
|
||||
case OpIntPtr:
|
||||
if isString {
|
||||
return OpStructFieldIntPtrString
|
||||
}
|
||||
return OpStructFieldIntPtr
|
||||
case OpUint:
|
||||
if isString {
|
||||
return OpStructFieldUintString
|
||||
}
|
||||
return OpStructFieldUint
|
||||
case OpUintPtr:
|
||||
if isString {
|
||||
return OpStructFieldUintPtrString
|
||||
}
|
||||
return OpStructFieldUintPtr
|
||||
case OpFloat32:
|
||||
if isString {
|
||||
return OpStructFieldFloat32String
|
||||
}
|
||||
return OpStructFieldFloat32
|
||||
case OpFloat32Ptr:
|
||||
if isString {
|
||||
return OpStructFieldFloat32PtrString
|
||||
}
|
||||
return OpStructFieldFloat32Ptr
|
||||
case OpFloat64:
|
||||
if isString {
|
||||
return OpStructFieldFloat64String
|
||||
}
|
||||
return OpStructFieldFloat64
|
||||
case OpFloat64Ptr:
|
||||
if isString {
|
||||
return OpStructFieldFloat64PtrString
|
||||
}
|
||||
return OpStructFieldFloat64Ptr
|
||||
case OpString:
|
||||
if isString {
|
||||
return OpStructFieldStringString
|
||||
}
|
||||
return OpStructFieldString
|
||||
case OpStringPtr:
|
||||
if isString {
|
||||
return OpStructFieldStringPtrString
|
||||
}
|
||||
return OpStructFieldStringPtr
|
||||
case OpNumber:
|
||||
if isString {
|
||||
return OpStructFieldNumberString
|
||||
}
|
||||
return OpStructFieldNumber
|
||||
case OpNumberPtr:
|
||||
if isString {
|
||||
return OpStructFieldNumberPtrString
|
||||
}
|
||||
return OpStructFieldNumberPtr
|
||||
case OpBool:
|
||||
if isString {
|
||||
return OpStructFieldBoolString
|
||||
}
|
||||
return OpStructFieldBool
|
||||
case OpBoolPtr:
|
||||
if isString {
|
||||
return OpStructFieldBoolPtrString
|
||||
}
|
||||
return OpStructFieldBoolPtr
|
||||
case OpBytes:
|
||||
return OpStructFieldBytes
|
||||
case OpBytesPtr:
|
||||
return OpStructFieldBytesPtr
|
||||
case OpMap:
|
||||
return OpStructFieldMap
|
||||
case OpMapPtr:
|
||||
c.Op = OpMap
|
||||
return OpStructFieldMapPtr
|
||||
case OpArray:
|
||||
return OpStructFieldArray
|
||||
case OpArrayPtr:
|
||||
c.Op = OpArray
|
||||
return OpStructFieldArrayPtr
|
||||
case OpSlice:
|
||||
return OpStructFieldSlice
|
||||
case OpSlicePtr:
|
||||
c.Op = OpSlice
|
||||
return OpStructFieldSlicePtr
|
||||
case OpMarshalJSON:
|
||||
return OpStructFieldMarshalJSON
|
||||
case OpMarshalJSONPtr:
|
||||
return OpStructFieldMarshalJSONPtr
|
||||
case OpMarshalText:
|
||||
return OpStructFieldMarshalText
|
||||
case OpMarshalTextPtr:
|
||||
return OpStructFieldMarshalTextPtr
|
||||
}
|
||||
return OpStructField
|
||||
}
|
||||
|
||||
func newOpCode(ctx *compileContext, typ *runtime.Type, op OpType) *Opcode {
|
||||
return newOpCodeWithNext(ctx, typ, op, newEndOp(ctx, typ))
|
||||
}
|
||||
|
||||
func opcodeOffset(idx int) uint32 {
|
||||
return uint32(idx) * uintptrSize
|
||||
}
|
||||
|
||||
func getCodeAddrByIdx(head *Opcode, idx uint32) *Opcode {
|
||||
addr := uintptr(unsafe.Pointer(head)) + uintptr(idx)*unsafe.Sizeof(Opcode{})
|
||||
return *(**Opcode)(unsafe.Pointer(&addr))
|
||||
}
|
||||
|
||||
func copyOpcode(code *Opcode) *Opcode {
|
||||
codeNum := ToEndCode(code).DisplayIdx + 1
|
||||
codeSlice := make([]Opcode, codeNum)
|
||||
head := (*Opcode)((*runtime.SliceHeader)(unsafe.Pointer(&codeSlice)).Data)
|
||||
ptr := head
|
||||
c := code
|
||||
for {
|
||||
*ptr = Opcode{
|
||||
Op: c.Op,
|
||||
Key: c.Key,
|
||||
PtrNum: c.PtrNum,
|
||||
NumBitSize: c.NumBitSize,
|
||||
Flags: c.Flags,
|
||||
Idx: c.Idx,
|
||||
Offset: c.Offset,
|
||||
Type: c.Type,
|
||||
FieldQuery: c.FieldQuery,
|
||||
DisplayIdx: c.DisplayIdx,
|
||||
DisplayKey: c.DisplayKey,
|
||||
ElemIdx: c.ElemIdx,
|
||||
Length: c.Length,
|
||||
Size: c.Size,
|
||||
Indent: c.Indent,
|
||||
Jmp: c.Jmp,
|
||||
}
|
||||
if c.End != nil {
|
||||
ptr.End = getCodeAddrByIdx(head, c.End.DisplayIdx)
|
||||
}
|
||||
if c.NextField != nil {
|
||||
ptr.NextField = getCodeAddrByIdx(head, c.NextField.DisplayIdx)
|
||||
}
|
||||
if c.Next != nil {
|
||||
ptr.Next = getCodeAddrByIdx(head, c.Next.DisplayIdx)
|
||||
}
|
||||
if c.IsEnd() {
|
||||
break
|
||||
}
|
||||
ptr = getCodeAddrByIdx(head, c.DisplayIdx+1)
|
||||
c = c.IterNext()
|
||||
}
|
||||
return head
|
||||
}
|
||||
|
||||
func setTotalLengthToInterfaceOp(code *Opcode) {
|
||||
for c := code; !c.IsEnd(); {
|
||||
if c.Op == OpInterface || c.Op == OpInterfacePtr {
|
||||
c.Length = uint32(code.TotalLength())
|
||||
}
|
||||
c = c.IterNext()
|
||||
}
|
||||
}
|
||||
|
||||
func ToEndCode(code *Opcode) *Opcode {
|
||||
c := code
|
||||
for !c.IsEnd() {
|
||||
c = c.IterNext()
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
func copyToInterfaceOpcode(code *Opcode) *Opcode {
|
||||
copied := copyOpcode(code)
|
||||
c := copied
|
||||
c = ToEndCode(c)
|
||||
c.Idx += uintptrSize
|
||||
c.ElemIdx = c.Idx + uintptrSize
|
||||
c.Length = c.Idx + 2*uintptrSize
|
||||
c.Op = OpInterfaceEnd
|
||||
return copied
|
||||
}
|
||||
|
||||
func newOpCodeWithNext(ctx *compileContext, typ *runtime.Type, op OpType, next *Opcode) *Opcode {
|
||||
return &Opcode{
|
||||
Op: op,
|
||||
Idx: opcodeOffset(ctx.ptrIndex),
|
||||
Next: next,
|
||||
Type: typ,
|
||||
DisplayIdx: ctx.opcodeIndex,
|
||||
Indent: ctx.indent,
|
||||
}
|
||||
}
|
||||
|
||||
func newEndOp(ctx *compileContext, typ *runtime.Type) *Opcode {
|
||||
return newOpCodeWithNext(ctx, typ, OpEnd, nil)
|
||||
}
|
||||
|
||||
func (c *Opcode) TotalLength() int {
|
||||
var idx int
|
||||
code := c
|
||||
for !code.IsEnd() {
|
||||
maxIdx := int(code.MaxIdx() / uintptrSize)
|
||||
if idx < maxIdx {
|
||||
idx = maxIdx
|
||||
}
|
||||
if code.Op == OpRecursiveEnd {
|
||||
break
|
||||
}
|
||||
code = code.IterNext()
|
||||
}
|
||||
maxIdx := int(code.MaxIdx() / uintptrSize)
|
||||
if idx < maxIdx {
|
||||
idx = maxIdx
|
||||
}
|
||||
return idx + 1
|
||||
}
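// Note: Idx/MaxIdx values are byte offsets that are always multiples of
// uintptrSize, so TotalLength converts the largest offset back into a slot
// index and adds one to get the number of pointer-sized slots this program
// needs. Rough sketch with made-up numbers: if the largest MaxIdx in the
// program is 24 and uintptrSize is 8, TotalLength reports 24/8 + 1 = 4 slots.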
|
||||
|
||||
func (c *Opcode) dumpHead(code *Opcode) string {
|
||||
var length uint32
|
||||
if code.Op.CodeType() == CodeArrayHead {
|
||||
length = code.Length
|
||||
} else {
|
||||
length = code.Length / uintptrSize
|
||||
}
|
||||
return fmt.Sprintf(
|
||||
`[%03d]%s%s ([idx:%d][elemIdx:%d][length:%d])`,
|
||||
code.DisplayIdx,
|
||||
strings.Repeat("-", int(code.Indent)),
|
||||
code.Op,
|
||||
code.Idx/uintptrSize,
|
||||
code.ElemIdx/uintptrSize,
|
||||
length,
|
||||
)
|
||||
}
|
||||
|
||||
func (c *Opcode) dumpMapHead(code *Opcode) string {
|
||||
return fmt.Sprintf(
|
||||
`[%03d]%s%s ([idx:%d])`,
|
||||
code.DisplayIdx,
|
||||
strings.Repeat("-", int(code.Indent)),
|
||||
code.Op,
|
||||
code.Idx/uintptrSize,
|
||||
)
|
||||
}
|
||||
|
||||
func (c *Opcode) dumpMapEnd(code *Opcode) string {
|
||||
return fmt.Sprintf(
|
||||
`[%03d]%s%s ([idx:%d])`,
|
||||
code.DisplayIdx,
|
||||
strings.Repeat("-", int(code.Indent)),
|
||||
code.Op,
|
||||
code.Idx/uintptrSize,
|
||||
)
|
||||
}
|
||||
|
||||
func (c *Opcode) dumpElem(code *Opcode) string {
|
||||
var length uint32
|
||||
if code.Op.CodeType() == CodeArrayElem {
|
||||
length = code.Length
|
||||
} else {
|
||||
length = code.Length / uintptrSize
|
||||
}
|
||||
return fmt.Sprintf(
|
||||
`[%03d]%s%s ([idx:%d][elemIdx:%d][length:%d][size:%d])`,
|
||||
code.DisplayIdx,
|
||||
strings.Repeat("-", int(code.Indent)),
|
||||
code.Op,
|
||||
code.Idx/uintptrSize,
|
||||
code.ElemIdx/uintptrSize,
|
||||
length,
|
||||
code.Size,
|
||||
)
|
||||
}
|
||||
|
||||
func (c *Opcode) dumpField(code *Opcode) string {
|
||||
return fmt.Sprintf(
|
||||
`[%03d]%s%s ([idx:%d][key:%s][offset:%d])`,
|
||||
code.DisplayIdx,
|
||||
strings.Repeat("-", int(code.Indent)),
|
||||
code.Op,
|
||||
code.Idx/uintptrSize,
|
||||
code.DisplayKey,
|
||||
code.Offset,
|
||||
)
|
||||
}
|
||||
|
||||
func (c *Opcode) dumpKey(code *Opcode) string {
|
||||
return fmt.Sprintf(
|
||||
`[%03d]%s%s ([idx:%d])`,
|
||||
code.DisplayIdx,
|
||||
strings.Repeat("-", int(code.Indent)),
|
||||
code.Op,
|
||||
code.Idx/uintptrSize,
|
||||
)
|
||||
}
|
||||
|
||||
func (c *Opcode) dumpValue(code *Opcode) string {
|
||||
return fmt.Sprintf(
|
||||
`[%03d]%s%s ([idx:%d])`,
|
||||
code.DisplayIdx,
|
||||
strings.Repeat("-", int(code.Indent)),
|
||||
code.Op,
|
||||
code.Idx/uintptrSize,
|
||||
)
|
||||
}
|
||||
|
||||
func (c *Opcode) Dump() string {
|
||||
codes := []string{}
|
||||
for code := c; !code.IsEnd(); {
|
||||
switch code.Op.CodeType() {
|
||||
case CodeSliceHead:
|
||||
codes = append(codes, c.dumpHead(code))
|
||||
code = code.Next
|
||||
case CodeMapHead:
|
||||
codes = append(codes, c.dumpMapHead(code))
|
||||
code = code.Next
|
||||
case CodeArrayElem, CodeSliceElem:
|
||||
codes = append(codes, c.dumpElem(code))
|
||||
code = code.End
|
||||
case CodeMapKey:
|
||||
codes = append(codes, c.dumpKey(code))
|
||||
code = code.End
|
||||
case CodeMapValue:
|
||||
codes = append(codes, c.dumpValue(code))
|
||||
code = code.Next
|
||||
case CodeMapEnd:
|
||||
codes = append(codes, c.dumpMapEnd(code))
|
||||
code = code.Next
|
||||
case CodeStructField:
|
||||
codes = append(codes, c.dumpField(code))
|
||||
code = code.Next
|
||||
case CodeStructEnd:
|
||||
codes = append(codes, c.dumpField(code))
|
||||
code = code.Next
|
||||
default:
|
||||
codes = append(codes, fmt.Sprintf(
|
||||
"[%03d]%s%s ([idx:%d])",
|
||||
code.DisplayIdx,
|
||||
strings.Repeat("-", int(code.Indent)),
|
||||
code.Op,
|
||||
code.Idx/uintptrSize,
|
||||
))
|
||||
code = code.Next
|
||||
}
|
||||
}
|
||||
return strings.Join(codes, "\n")
|
||||
}
|
||||
|
||||
func (c *Opcode) DumpDOT() string {
|
||||
type edge struct {
|
||||
from, to *Opcode
|
||||
label string
|
||||
weight int
|
||||
}
|
||||
var edges []edge
|
||||
|
||||
b := &bytes.Buffer{}
|
||||
fmt.Fprintf(b, "digraph \"%p\" {\n", c.Type)
|
||||
fmt.Fprintln(b, "mclimit=1.5;\nrankdir=TD;\nordering=out;\nnode[shape=box];")
|
||||
for code := c; !code.IsEnd(); {
|
||||
label := code.Op.String()
|
||||
fmt.Fprintf(b, "\"%p\" [label=%q];\n", code, label)
|
||||
if p := code.Next; p != nil {
|
||||
edges = append(edges, edge{
|
||||
from: code,
|
||||
to: p,
|
||||
label: "Next",
|
||||
weight: 10,
|
||||
})
|
||||
}
|
||||
if p := code.NextField; p != nil {
|
||||
edges = append(edges, edge{
|
||||
from: code,
|
||||
to: p,
|
||||
label: "NextField",
|
||||
weight: 2,
|
||||
})
|
||||
}
|
||||
if p := code.End; p != nil {
|
||||
edges = append(edges, edge{
|
||||
from: code,
|
||||
to: p,
|
||||
label: "End",
|
||||
weight: 1,
|
||||
})
|
||||
}
|
||||
if p := code.Jmp; p != nil {
|
||||
edges = append(edges, edge{
|
||||
from: code,
|
||||
to: p.Code,
|
||||
label: "Jmp",
|
||||
weight: 1,
|
||||
})
|
||||
}
|
||||
|
||||
switch code.Op.CodeType() {
|
||||
case CodeSliceHead:
|
||||
code = code.Next
|
||||
case CodeMapHead:
|
||||
code = code.Next
|
||||
case CodeArrayElem, CodeSliceElem:
|
||||
code = code.End
|
||||
case CodeMapKey:
|
||||
code = code.End
|
||||
case CodeMapValue:
|
||||
code = code.Next
|
||||
case CodeMapEnd:
|
||||
code = code.Next
|
||||
case CodeStructField:
|
||||
code = code.Next
|
||||
case CodeStructEnd:
|
||||
code = code.Next
|
||||
default:
|
||||
code = code.Next
|
||||
}
|
||||
if code.IsEnd() {
|
||||
fmt.Fprintf(b, "\"%p\" [label=%q];\n", code, code.Op.String())
|
||||
}
|
||||
}
|
||||
sort.Slice(edges, func(i, j int) bool {
|
||||
return edges[i].to.DisplayIdx < edges[j].to.DisplayIdx
|
||||
})
|
||||
for _, e := range edges {
|
||||
fmt.Fprintf(b, "\"%p\" -> \"%p\" [label=%q][weight=%d];\n", e.from, e.to, e.label, e.weight)
|
||||
}
|
||||
fmt.Fprint(b, "}")
|
||||
return b.String()
|
||||
}
|
||||
|
||||
func newSliceHeaderCode(ctx *compileContext, typ *runtime.Type) *Opcode {
|
||||
idx := opcodeOffset(ctx.ptrIndex)
|
||||
ctx.incPtrIndex()
|
||||
elemIdx := opcodeOffset(ctx.ptrIndex)
|
||||
ctx.incPtrIndex()
|
||||
length := opcodeOffset(ctx.ptrIndex)
|
||||
return &Opcode{
|
||||
Op: OpSlice,
|
||||
Type: typ,
|
||||
Idx: idx,
|
||||
DisplayIdx: ctx.opcodeIndex,
|
||||
ElemIdx: elemIdx,
|
||||
Length: length,
|
||||
Indent: ctx.indent,
|
||||
}
|
||||
}
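// Note: a slice header op records three consecutive pointer-sized slot
// offsets (Idx, ElemIdx, Length) taken from ctx.ptrIndex, which is why
// opcodeOffset is evaluated three times above with incPtrIndex in between;
// newSliceElemCode below copies the same offsets from the head so the element
// op can address the same slots at encode time.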
|
||||
|
||||
func newSliceElemCode(ctx *compileContext, typ *runtime.Type, head *Opcode, size uintptr) *Opcode {
|
||||
return &Opcode{
|
||||
Op: OpSliceElem,
|
||||
Type: typ,
|
||||
Idx: head.Idx,
|
||||
DisplayIdx: ctx.opcodeIndex,
|
||||
ElemIdx: head.ElemIdx,
|
||||
Length: head.Length,
|
||||
Indent: ctx.indent,
|
||||
Size: uint32(size),
|
||||
}
|
||||
}
|
||||
|
||||
func newArrayHeaderCode(ctx *compileContext, typ *runtime.Type, alen int) *Opcode {
|
||||
idx := opcodeOffset(ctx.ptrIndex)
|
||||
ctx.incPtrIndex()
|
||||
elemIdx := opcodeOffset(ctx.ptrIndex)
|
||||
return &Opcode{
|
||||
Op: OpArray,
|
||||
Type: typ,
|
||||
Idx: idx,
|
||||
DisplayIdx: ctx.opcodeIndex,
|
||||
ElemIdx: elemIdx,
|
||||
Indent: ctx.indent,
|
||||
Length: uint32(alen),
|
||||
}
|
||||
}
|
||||
|
||||
func newArrayElemCode(ctx *compileContext, typ *runtime.Type, head *Opcode, length int, size uintptr) *Opcode {
|
||||
return &Opcode{
|
||||
Op: OpArrayElem,
|
||||
Type: typ,
|
||||
Idx: head.Idx,
|
||||
DisplayIdx: ctx.opcodeIndex,
|
||||
ElemIdx: head.ElemIdx,
|
||||
Length: uint32(length),
|
||||
Indent: ctx.indent,
|
||||
Size: uint32(size),
|
||||
}
|
||||
}
|
||||
|
||||
func newMapHeaderCode(ctx *compileContext, typ *runtime.Type) *Opcode {
|
||||
idx := opcodeOffset(ctx.ptrIndex)
|
||||
ctx.incPtrIndex()
|
||||
return &Opcode{
|
||||
Op: OpMap,
|
||||
Type: typ,
|
||||
Idx: idx,
|
||||
DisplayIdx: ctx.opcodeIndex,
|
||||
Indent: ctx.indent,
|
||||
}
|
||||
}
|
||||
|
||||
func newMapKeyCode(ctx *compileContext, typ *runtime.Type, head *Opcode) *Opcode {
|
||||
return &Opcode{
|
||||
Op: OpMapKey,
|
||||
Type: typ,
|
||||
Idx: head.Idx,
|
||||
DisplayIdx: ctx.opcodeIndex,
|
||||
Indent: ctx.indent,
|
||||
}
|
||||
}
|
||||
|
||||
func newMapValueCode(ctx *compileContext, typ *runtime.Type, head *Opcode) *Opcode {
|
||||
return &Opcode{
|
||||
Op: OpMapValue,
|
||||
Type: typ,
|
||||
Idx: head.Idx,
|
||||
DisplayIdx: ctx.opcodeIndex,
|
||||
Indent: ctx.indent,
|
||||
}
|
||||
}
|
||||
|
||||
func newMapEndCode(ctx *compileContext, typ *runtime.Type, head *Opcode) *Opcode {
|
||||
return &Opcode{
|
||||
Op: OpMapEnd,
|
||||
Type: typ,
|
||||
Idx: head.Idx,
|
||||
DisplayIdx: ctx.opcodeIndex,
|
||||
Indent: ctx.indent,
|
||||
Next: newEndOp(ctx, typ),
|
||||
}
|
||||
}
|
||||
|
||||
func newRecursiveCode(ctx *compileContext, typ *runtime.Type, jmp *CompiledCode) *Opcode {
|
||||
return &Opcode{
|
||||
Op: OpRecursive,
|
||||
Type: typ,
|
||||
Idx: opcodeOffset(ctx.ptrIndex),
|
||||
Next: newEndOp(ctx, typ),
|
||||
DisplayIdx: ctx.opcodeIndex,
|
||||
Indent: ctx.indent,
|
||||
Jmp: jmp,
|
||||
}
|
||||
}
|
48
internal/encoder/option.go
Normal file
@ -0,0 +1,48 @@
package encoder
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
)
|
||||
|
||||
type OptionFlag uint8
|
||||
|
||||
const (
|
||||
HTMLEscapeOption OptionFlag = 1 << iota
|
||||
IndentOption
|
||||
UnorderedMapOption
|
||||
DebugOption
|
||||
ColorizeOption
|
||||
ContextOption
|
||||
NormalizeUTF8Option
|
||||
FieldQueryOption
|
||||
)
|
||||
|
||||
type Option struct {
|
||||
Flag OptionFlag
|
||||
ColorScheme *ColorScheme
|
||||
Context context.Context
|
||||
DebugOut io.Writer
|
||||
DebugDOTOut io.WriteCloser
|
||||
}
|
||||
|
||||
type EncodeFormat struct {
|
||||
Header string
|
||||
Footer string
|
||||
}
|
||||
|
||||
type EncodeFormatScheme struct {
|
||||
Int EncodeFormat
|
||||
Uint EncodeFormat
|
||||
Float EncodeFormat
|
||||
Bool EncodeFormat
|
||||
String EncodeFormat
|
||||
Binary EncodeFormat
|
||||
ObjectKey EncodeFormat
|
||||
Null EncodeFormat
|
||||
}
|
||||
|
||||
type (
|
||||
ColorScheme = EncodeFormatScheme
|
||||
ColorFormat = EncodeFormat
|
||||
)
|
932
internal/encoder/optype.go
Normal file
@ -0,0 +1,932 @@
// Code generated by internal/cmd/generator. DO NOT EDIT!
|
||||
package encoder
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
type CodeType int
|
||||
|
||||
const (
|
||||
CodeOp CodeType = 0
|
||||
CodeArrayHead CodeType = 1
|
||||
CodeArrayElem CodeType = 2
|
||||
CodeSliceHead CodeType = 3
|
||||
CodeSliceElem CodeType = 4
|
||||
CodeMapHead CodeType = 5
|
||||
CodeMapKey CodeType = 6
|
||||
CodeMapValue CodeType = 7
|
||||
CodeMapEnd CodeType = 8
|
||||
CodeRecursive CodeType = 9
|
||||
CodeStructField CodeType = 10
|
||||
CodeStructEnd CodeType = 11
|
||||
)
|
||||
|
||||
var opTypeStrings = [400]string{
|
||||
"End",
|
||||
"Interface",
|
||||
"Ptr",
|
||||
"SliceElem",
|
||||
"SliceEnd",
|
||||
"ArrayElem",
|
||||
"ArrayEnd",
|
||||
"MapKey",
|
||||
"MapValue",
|
||||
"MapEnd",
|
||||
"Recursive",
|
||||
"RecursivePtr",
|
||||
"RecursiveEnd",
|
||||
"InterfaceEnd",
|
||||
"Int",
|
||||
"Uint",
|
||||
"Float32",
|
||||
"Float64",
|
||||
"Bool",
|
||||
"String",
|
||||
"Bytes",
|
||||
"Number",
|
||||
"Array",
|
||||
"Map",
|
||||
"Slice",
|
||||
"Struct",
|
||||
"MarshalJSON",
|
||||
"MarshalText",
|
||||
"IntString",
|
||||
"UintString",
|
||||
"Float32String",
|
||||
"Float64String",
|
||||
"BoolString",
|
||||
"StringString",
|
||||
"NumberString",
|
||||
"IntPtr",
|
||||
"UintPtr",
|
||||
"Float32Ptr",
|
||||
"Float64Ptr",
|
||||
"BoolPtr",
|
||||
"StringPtr",
|
||||
"BytesPtr",
|
||||
"NumberPtr",
|
||||
"ArrayPtr",
|
||||
"MapPtr",
|
||||
"SlicePtr",
|
||||
"MarshalJSONPtr",
|
||||
"MarshalTextPtr",
|
||||
"InterfacePtr",
|
||||
"IntPtrString",
|
||||
"UintPtrString",
|
||||
"Float32PtrString",
|
||||
"Float64PtrString",
|
||||
"BoolPtrString",
|
||||
"StringPtrString",
|
||||
"NumberPtrString",
|
||||
"StructHeadInt",
|
||||
"StructHeadOmitEmptyInt",
|
||||
"StructPtrHeadInt",
|
||||
"StructPtrHeadOmitEmptyInt",
|
||||
"StructHeadUint",
|
||||
"StructHeadOmitEmptyUint",
|
||||
"StructPtrHeadUint",
|
||||
"StructPtrHeadOmitEmptyUint",
|
||||
"StructHeadFloat32",
|
||||
"StructHeadOmitEmptyFloat32",
|
||||
"StructPtrHeadFloat32",
|
||||
"StructPtrHeadOmitEmptyFloat32",
|
||||
"StructHeadFloat64",
|
||||
"StructHeadOmitEmptyFloat64",
|
||||
"StructPtrHeadFloat64",
|
||||
"StructPtrHeadOmitEmptyFloat64",
|
||||
"StructHeadBool",
|
||||
"StructHeadOmitEmptyBool",
|
||||
"StructPtrHeadBool",
|
||||
"StructPtrHeadOmitEmptyBool",
|
||||
"StructHeadString",
|
||||
"StructHeadOmitEmptyString",
|
||||
"StructPtrHeadString",
|
||||
"StructPtrHeadOmitEmptyString",
|
||||
"StructHeadBytes",
|
||||
"StructHeadOmitEmptyBytes",
|
||||
"StructPtrHeadBytes",
|
||||
"StructPtrHeadOmitEmptyBytes",
|
||||
"StructHeadNumber",
|
||||
"StructHeadOmitEmptyNumber",
|
||||
"StructPtrHeadNumber",
|
||||
"StructPtrHeadOmitEmptyNumber",
|
||||
"StructHeadArray",
|
||||
"StructHeadOmitEmptyArray",
|
||||
"StructPtrHeadArray",
|
||||
"StructPtrHeadOmitEmptyArray",
|
||||
"StructHeadMap",
|
||||
"StructHeadOmitEmptyMap",
|
||||
"StructPtrHeadMap",
|
||||
"StructPtrHeadOmitEmptyMap",
|
||||
"StructHeadSlice",
|
||||
"StructHeadOmitEmptySlice",
|
||||
"StructPtrHeadSlice",
|
||||
"StructPtrHeadOmitEmptySlice",
|
||||
"StructHeadStruct",
|
||||
"StructHeadOmitEmptyStruct",
|
||||
"StructPtrHeadStruct",
|
||||
"StructPtrHeadOmitEmptyStruct",
|
||||
"StructHeadMarshalJSON",
|
||||
"StructHeadOmitEmptyMarshalJSON",
|
||||
"StructPtrHeadMarshalJSON",
|
||||
"StructPtrHeadOmitEmptyMarshalJSON",
|
||||
"StructHeadMarshalText",
|
||||
"StructHeadOmitEmptyMarshalText",
|
||||
"StructPtrHeadMarshalText",
|
||||
"StructPtrHeadOmitEmptyMarshalText",
|
||||
"StructHeadIntString",
|
||||
"StructHeadOmitEmptyIntString",
|
||||
"StructPtrHeadIntString",
|
||||
"StructPtrHeadOmitEmptyIntString",
|
||||
"StructHeadUintString",
|
||||
"StructHeadOmitEmptyUintString",
|
||||
"StructPtrHeadUintString",
|
||||
"StructPtrHeadOmitEmptyUintString",
|
||||
"StructHeadFloat32String",
|
||||
"StructHeadOmitEmptyFloat32String",
|
||||
"StructPtrHeadFloat32String",
|
||||
"StructPtrHeadOmitEmptyFloat32String",
|
||||
"StructHeadFloat64String",
|
||||
"StructHeadOmitEmptyFloat64String",
|
||||
"StructPtrHeadFloat64String",
|
||||
"StructPtrHeadOmitEmptyFloat64String",
|
||||
"StructHeadBoolString",
|
||||
"StructHeadOmitEmptyBoolString",
|
||||
"StructPtrHeadBoolString",
|
||||
"StructPtrHeadOmitEmptyBoolString",
|
||||
"StructHeadStringString",
|
||||
"StructHeadOmitEmptyStringString",
|
||||
"StructPtrHeadStringString",
|
||||
"StructPtrHeadOmitEmptyStringString",
|
||||
"StructHeadNumberString",
|
||||
"StructHeadOmitEmptyNumberString",
|
||||
"StructPtrHeadNumberString",
|
||||
"StructPtrHeadOmitEmptyNumberString",
|
||||
"StructHeadIntPtr",
|
||||
"StructHeadOmitEmptyIntPtr",
|
||||
"StructPtrHeadIntPtr",
|
||||
"StructPtrHeadOmitEmptyIntPtr",
|
||||
"StructHeadUintPtr",
|
||||
"StructHeadOmitEmptyUintPtr",
|
||||
"StructPtrHeadUintPtr",
|
||||
"StructPtrHeadOmitEmptyUintPtr",
|
||||
"StructHeadFloat32Ptr",
|
||||
"StructHeadOmitEmptyFloat32Ptr",
|
||||
"StructPtrHeadFloat32Ptr",
|
||||
"StructPtrHeadOmitEmptyFloat32Ptr",
|
||||
"StructHeadFloat64Ptr",
|
||||
"StructHeadOmitEmptyFloat64Ptr",
|
||||
"StructPtrHeadFloat64Ptr",
|
||||
"StructPtrHeadOmitEmptyFloat64Ptr",
|
||||
"StructHeadBoolPtr",
|
||||
"StructHeadOmitEmptyBoolPtr",
|
||||
"StructPtrHeadBoolPtr",
|
||||
"StructPtrHeadOmitEmptyBoolPtr",
|
||||
"StructHeadStringPtr",
|
||||
"StructHeadOmitEmptyStringPtr",
|
||||
"StructPtrHeadStringPtr",
|
||||
"StructPtrHeadOmitEmptyStringPtr",
|
||||
"StructHeadBytesPtr",
|
||||
"StructHeadOmitEmptyBytesPtr",
|
||||
"StructPtrHeadBytesPtr",
|
||||
"StructPtrHeadOmitEmptyBytesPtr",
|
||||
"StructHeadNumberPtr",
|
||||
"StructHeadOmitEmptyNumberPtr",
|
||||
"StructPtrHeadNumberPtr",
|
||||
"StructPtrHeadOmitEmptyNumberPtr",
|
||||
"StructHeadArrayPtr",
|
||||
"StructHeadOmitEmptyArrayPtr",
|
||||
"StructPtrHeadArrayPtr",
|
||||
"StructPtrHeadOmitEmptyArrayPtr",
|
||||
"StructHeadMapPtr",
|
||||
"StructHeadOmitEmptyMapPtr",
|
||||
"StructPtrHeadMapPtr",
|
||||
"StructPtrHeadOmitEmptyMapPtr",
|
||||
"StructHeadSlicePtr",
|
||||
"StructHeadOmitEmptySlicePtr",
|
||||
"StructPtrHeadSlicePtr",
|
||||
"StructPtrHeadOmitEmptySlicePtr",
|
||||
"StructHeadMarshalJSONPtr",
|
||||
"StructHeadOmitEmptyMarshalJSONPtr",
|
||||
"StructPtrHeadMarshalJSONPtr",
|
||||
"StructPtrHeadOmitEmptyMarshalJSONPtr",
|
||||
"StructHeadMarshalTextPtr",
|
||||
"StructHeadOmitEmptyMarshalTextPtr",
|
||||
"StructPtrHeadMarshalTextPtr",
|
||||
"StructPtrHeadOmitEmptyMarshalTextPtr",
|
||||
"StructHeadInterfacePtr",
|
||||
"StructHeadOmitEmptyInterfacePtr",
|
||||
"StructPtrHeadInterfacePtr",
|
||||
"StructPtrHeadOmitEmptyInterfacePtr",
|
||||
"StructHeadIntPtrString",
|
||||
"StructHeadOmitEmptyIntPtrString",
|
||||
"StructPtrHeadIntPtrString",
|
||||
"StructPtrHeadOmitEmptyIntPtrString",
|
||||
"StructHeadUintPtrString",
|
||||
"StructHeadOmitEmptyUintPtrString",
|
||||
"StructPtrHeadUintPtrString",
|
||||
"StructPtrHeadOmitEmptyUintPtrString",
|
||||
"StructHeadFloat32PtrString",
|
||||
"StructHeadOmitEmptyFloat32PtrString",
|
||||
"StructPtrHeadFloat32PtrString",
|
||||
"StructPtrHeadOmitEmptyFloat32PtrString",
|
||||
"StructHeadFloat64PtrString",
|
||||
"StructHeadOmitEmptyFloat64PtrString",
|
||||
"StructPtrHeadFloat64PtrString",
|
||||
"StructPtrHeadOmitEmptyFloat64PtrString",
|
||||
"StructHeadBoolPtrString",
|
||||
"StructHeadOmitEmptyBoolPtrString",
|
||||
"StructPtrHeadBoolPtrString",
|
||||
"StructPtrHeadOmitEmptyBoolPtrString",
|
||||
"StructHeadStringPtrString",
|
||||
"StructHeadOmitEmptyStringPtrString",
|
||||
"StructPtrHeadStringPtrString",
|
||||
"StructPtrHeadOmitEmptyStringPtrString",
|
||||
"StructHeadNumberPtrString",
|
||||
"StructHeadOmitEmptyNumberPtrString",
|
||||
"StructPtrHeadNumberPtrString",
|
||||
"StructPtrHeadOmitEmptyNumberPtrString",
|
||||
"StructHead",
|
||||
"StructHeadOmitEmpty",
|
||||
"StructPtrHead",
|
||||
"StructPtrHeadOmitEmpty",
|
||||
"StructFieldInt",
|
||||
"StructFieldOmitEmptyInt",
|
||||
"StructEndInt",
|
||||
"StructEndOmitEmptyInt",
|
||||
"StructFieldUint",
|
||||
"StructFieldOmitEmptyUint",
|
||||
"StructEndUint",
|
||||
"StructEndOmitEmptyUint",
|
||||
"StructFieldFloat32",
|
||||
"StructFieldOmitEmptyFloat32",
|
||||
"StructEndFloat32",
|
||||
"StructEndOmitEmptyFloat32",
|
||||
"StructFieldFloat64",
|
||||
"StructFieldOmitEmptyFloat64",
|
||||
"StructEndFloat64",
|
||||
"StructEndOmitEmptyFloat64",
|
||||
"StructFieldBool",
|
||||
"StructFieldOmitEmptyBool",
|
||||
"StructEndBool",
|
||||
"StructEndOmitEmptyBool",
|
||||
"StructFieldString",
|
||||
"StructFieldOmitEmptyString",
|
||||
"StructEndString",
|
||||
"StructEndOmitEmptyString",
|
||||
"StructFieldBytes",
|
||||
"StructFieldOmitEmptyBytes",
|
||||
"StructEndBytes",
|
||||
"StructEndOmitEmptyBytes",
|
||||
"StructFieldNumber",
|
||||
"StructFieldOmitEmptyNumber",
|
||||
"StructEndNumber",
|
||||
"StructEndOmitEmptyNumber",
|
||||
"StructFieldArray",
|
||||
"StructFieldOmitEmptyArray",
|
||||
"StructEndArray",
|
||||
"StructEndOmitEmptyArray",
|
||||
"StructFieldMap",
|
||||
"StructFieldOmitEmptyMap",
|
||||
"StructEndMap",
|
||||
"StructEndOmitEmptyMap",
|
||||
"StructFieldSlice",
|
||||
"StructFieldOmitEmptySlice",
|
||||
"StructEndSlice",
|
||||
"StructEndOmitEmptySlice",
|
||||
"StructFieldStruct",
|
||||
"StructFieldOmitEmptyStruct",
|
||||
"StructEndStruct",
|
||||
"StructEndOmitEmptyStruct",
|
||||
"StructFieldMarshalJSON",
|
||||
"StructFieldOmitEmptyMarshalJSON",
|
||||
"StructEndMarshalJSON",
|
||||
"StructEndOmitEmptyMarshalJSON",
|
||||
"StructFieldMarshalText",
|
||||
"StructFieldOmitEmptyMarshalText",
|
||||
"StructEndMarshalText",
|
||||
"StructEndOmitEmptyMarshalText",
|
||||
"StructFieldIntString",
|
||||
"StructFieldOmitEmptyIntString",
|
||||
"StructEndIntString",
|
||||
"StructEndOmitEmptyIntString",
|
||||
"StructFieldUintString",
|
||||
"StructFieldOmitEmptyUintString",
|
||||
"StructEndUintString",
|
||||
"StructEndOmitEmptyUintString",
|
||||
"StructFieldFloat32String",
|
||||
"StructFieldOmitEmptyFloat32String",
|
||||
"StructEndFloat32String",
|
||||
"StructEndOmitEmptyFloat32String",
|
||||
"StructFieldFloat64String",
|
||||
"StructFieldOmitEmptyFloat64String",
|
||||
"StructEndFloat64String",
|
||||
"StructEndOmitEmptyFloat64String",
|
||||
"StructFieldBoolString",
|
||||
"StructFieldOmitEmptyBoolString",
|
||||
"StructEndBoolString",
|
||||
"StructEndOmitEmptyBoolString",
|
||||
"StructFieldStringString",
|
||||
"StructFieldOmitEmptyStringString",
|
||||
"StructEndStringString",
|
||||
"StructEndOmitEmptyStringString",
|
||||
"StructFieldNumberString",
|
||||
"StructFieldOmitEmptyNumberString",
|
||||
"StructEndNumberString",
|
||||
"StructEndOmitEmptyNumberString",
|
||||
"StructFieldIntPtr",
|
||||
"StructFieldOmitEmptyIntPtr",
|
||||
"StructEndIntPtr",
|
||||
"StructEndOmitEmptyIntPtr",
|
||||
"StructFieldUintPtr",
|
||||
"StructFieldOmitEmptyUintPtr",
|
||||
"StructEndUintPtr",
|
||||
"StructEndOmitEmptyUintPtr",
|
||||
"StructFieldFloat32Ptr",
|
||||
"StructFieldOmitEmptyFloat32Ptr",
|
||||
"StructEndFloat32Ptr",
|
||||
"StructEndOmitEmptyFloat32Ptr",
|
||||
"StructFieldFloat64Ptr",
|
||||
"StructFieldOmitEmptyFloat64Ptr",
|
||||
"StructEndFloat64Ptr",
|
||||
"StructEndOmitEmptyFloat64Ptr",
|
||||
"StructFieldBoolPtr",
|
||||
"StructFieldOmitEmptyBoolPtr",
|
||||
"StructEndBoolPtr",
|
||||
"StructEndOmitEmptyBoolPtr",
|
||||
"StructFieldStringPtr",
|
||||
"StructFieldOmitEmptyStringPtr",
|
||||
"StructEndStringPtr",
|
||||
"StructEndOmitEmptyStringPtr",
|
||||
"StructFieldBytesPtr",
|
||||
"StructFieldOmitEmptyBytesPtr",
|
||||
"StructEndBytesPtr",
|
||||
"StructEndOmitEmptyBytesPtr",
|
||||
"StructFieldNumberPtr",
|
||||
"StructFieldOmitEmptyNumberPtr",
|
||||
"StructEndNumberPtr",
|
||||
"StructEndOmitEmptyNumberPtr",
|
||||
"StructFieldArrayPtr",
|
||||
"StructFieldOmitEmptyArrayPtr",
|
||||
"StructEndArrayPtr",
|
||||
"StructEndOmitEmptyArrayPtr",
|
||||
"StructFieldMapPtr",
|
||||
"StructFieldOmitEmptyMapPtr",
|
||||
"StructEndMapPtr",
|
||||
"StructEndOmitEmptyMapPtr",
|
||||
"StructFieldSlicePtr",
|
||||
"StructFieldOmitEmptySlicePtr",
|
||||
"StructEndSlicePtr",
|
||||
"StructEndOmitEmptySlicePtr",
|
||||
"StructFieldMarshalJSONPtr",
|
||||
"StructFieldOmitEmptyMarshalJSONPtr",
|
||||
"StructEndMarshalJSONPtr",
|
||||
"StructEndOmitEmptyMarshalJSONPtr",
|
||||
"StructFieldMarshalTextPtr",
|
||||
"StructFieldOmitEmptyMarshalTextPtr",
|
||||
"StructEndMarshalTextPtr",
|
||||
"StructEndOmitEmptyMarshalTextPtr",
|
||||
"StructFieldInterfacePtr",
|
||||
"StructFieldOmitEmptyInterfacePtr",
|
||||
"StructEndInterfacePtr",
|
||||
"StructEndOmitEmptyInterfacePtr",
|
||||
"StructFieldIntPtrString",
|
||||
"StructFieldOmitEmptyIntPtrString",
|
||||
"StructEndIntPtrString",
|
||||
"StructEndOmitEmptyIntPtrString",
|
||||
"StructFieldUintPtrString",
|
||||
"StructFieldOmitEmptyUintPtrString",
|
||||
"StructEndUintPtrString",
|
||||
"StructEndOmitEmptyUintPtrString",
|
||||
"StructFieldFloat32PtrString",
|
||||
"StructFieldOmitEmptyFloat32PtrString",
|
||||
"StructEndFloat32PtrString",
|
||||
"StructEndOmitEmptyFloat32PtrString",
|
||||
"StructFieldFloat64PtrString",
|
||||
"StructFieldOmitEmptyFloat64PtrString",
|
||||
"StructEndFloat64PtrString",
|
||||
"StructEndOmitEmptyFloat64PtrString",
|
||||
"StructFieldBoolPtrString",
|
||||
"StructFieldOmitEmptyBoolPtrString",
|
||||
"StructEndBoolPtrString",
|
||||
"StructEndOmitEmptyBoolPtrString",
|
||||
"StructFieldStringPtrString",
|
||||
"StructFieldOmitEmptyStringPtrString",
|
||||
"StructEndStringPtrString",
|
||||
"StructEndOmitEmptyStringPtrString",
|
||||
"StructFieldNumberPtrString",
|
||||
"StructFieldOmitEmptyNumberPtrString",
|
||||
"StructEndNumberPtrString",
|
||||
"StructEndOmitEmptyNumberPtrString",
|
||||
"StructField",
|
||||
"StructFieldOmitEmpty",
|
||||
"StructEnd",
|
||||
"StructEndOmitEmpty",
|
||||
}
|
||||
|
||||
type OpType uint16
|
||||
|
||||
const (
|
||||
OpEnd OpType = 0
|
||||
OpInterface OpType = 1
|
||||
OpPtr OpType = 2
|
||||
OpSliceElem OpType = 3
|
||||
OpSliceEnd OpType = 4
|
||||
OpArrayElem OpType = 5
|
||||
OpArrayEnd OpType = 6
|
||||
OpMapKey OpType = 7
|
||||
OpMapValue OpType = 8
|
||||
OpMapEnd OpType = 9
|
||||
OpRecursive OpType = 10
|
||||
OpRecursivePtr OpType = 11
|
||||
OpRecursiveEnd OpType = 12
|
||||
OpInterfaceEnd OpType = 13
|
||||
OpInt OpType = 14
|
||||
OpUint OpType = 15
|
||||
OpFloat32 OpType = 16
|
||||
OpFloat64 OpType = 17
|
||||
OpBool OpType = 18
|
||||
OpString OpType = 19
|
||||
OpBytes OpType = 20
|
||||
OpNumber OpType = 21
|
||||
OpArray OpType = 22
|
||||
OpMap OpType = 23
|
||||
OpSlice OpType = 24
|
||||
OpStruct OpType = 25
|
||||
OpMarshalJSON OpType = 26
|
||||
OpMarshalText OpType = 27
|
||||
OpIntString OpType = 28
|
||||
OpUintString OpType = 29
|
||||
OpFloat32String OpType = 30
|
||||
OpFloat64String OpType = 31
|
||||
OpBoolString OpType = 32
|
||||
OpStringString OpType = 33
|
||||
OpNumberString OpType = 34
|
||||
OpIntPtr OpType = 35
|
||||
OpUintPtr OpType = 36
|
||||
OpFloat32Ptr OpType = 37
|
||||
OpFloat64Ptr OpType = 38
|
||||
OpBoolPtr OpType = 39
|
||||
OpStringPtr OpType = 40
|
||||
OpBytesPtr OpType = 41
|
||||
OpNumberPtr OpType = 42
|
||||
OpArrayPtr OpType = 43
|
||||
OpMapPtr OpType = 44
|
||||
OpSlicePtr OpType = 45
|
||||
OpMarshalJSONPtr OpType = 46
|
||||
OpMarshalTextPtr OpType = 47
|
||||
OpInterfacePtr OpType = 48
|
||||
OpIntPtrString OpType = 49
|
||||
OpUintPtrString OpType = 50
|
||||
OpFloat32PtrString OpType = 51
|
||||
OpFloat64PtrString OpType = 52
|
||||
OpBoolPtrString OpType = 53
|
||||
OpStringPtrString OpType = 54
|
||||
OpNumberPtrString OpType = 55
|
||||
OpStructHeadInt OpType = 56
|
||||
OpStructHeadOmitEmptyInt OpType = 57
|
||||
OpStructPtrHeadInt OpType = 58
|
||||
OpStructPtrHeadOmitEmptyInt OpType = 59
|
||||
OpStructHeadUint OpType = 60
|
||||
OpStructHeadOmitEmptyUint OpType = 61
|
||||
OpStructPtrHeadUint OpType = 62
|
||||
OpStructPtrHeadOmitEmptyUint OpType = 63
|
||||
OpStructHeadFloat32 OpType = 64
|
||||
OpStructHeadOmitEmptyFloat32 OpType = 65
|
||||
OpStructPtrHeadFloat32 OpType = 66
|
||||
OpStructPtrHeadOmitEmptyFloat32 OpType = 67
|
||||
OpStructHeadFloat64 OpType = 68
|
||||
OpStructHeadOmitEmptyFloat64 OpType = 69
|
||||
OpStructPtrHeadFloat64 OpType = 70
|
||||
OpStructPtrHeadOmitEmptyFloat64 OpType = 71
|
||||
OpStructHeadBool OpType = 72
|
||||
OpStructHeadOmitEmptyBool OpType = 73
|
||||
OpStructPtrHeadBool OpType = 74
|
||||
OpStructPtrHeadOmitEmptyBool OpType = 75
|
||||
OpStructHeadString OpType = 76
|
||||
OpStructHeadOmitEmptyString OpType = 77
|
||||
OpStructPtrHeadString OpType = 78
|
||||
OpStructPtrHeadOmitEmptyString OpType = 79
|
||||
OpStructHeadBytes OpType = 80
|
||||
OpStructHeadOmitEmptyBytes OpType = 81
|
||||
OpStructPtrHeadBytes OpType = 82
|
||||
OpStructPtrHeadOmitEmptyBytes OpType = 83
|
||||
OpStructHeadNumber OpType = 84
|
||||
OpStructHeadOmitEmptyNumber OpType = 85
|
||||
OpStructPtrHeadNumber OpType = 86
|
||||
OpStructPtrHeadOmitEmptyNumber OpType = 87
|
||||
OpStructHeadArray OpType = 88
|
||||
OpStructHeadOmitEmptyArray OpType = 89
|
||||
OpStructPtrHeadArray OpType = 90
|
||||
OpStructPtrHeadOmitEmptyArray OpType = 91
|
||||
OpStructHeadMap OpType = 92
|
||||
OpStructHeadOmitEmptyMap OpType = 93
|
||||
OpStructPtrHeadMap OpType = 94
|
||||
OpStructPtrHeadOmitEmptyMap OpType = 95
|
||||
OpStructHeadSlice OpType = 96
|
||||
OpStructHeadOmitEmptySlice OpType = 97
|
||||
OpStructPtrHeadSlice OpType = 98
|
||||
OpStructPtrHeadOmitEmptySlice OpType = 99
|
||||
OpStructHeadStruct OpType = 100
|
||||
OpStructHeadOmitEmptyStruct OpType = 101
|
||||
OpStructPtrHeadStruct OpType = 102
|
||||
OpStructPtrHeadOmitEmptyStruct OpType = 103
|
||||
OpStructHeadMarshalJSON OpType = 104
|
||||
OpStructHeadOmitEmptyMarshalJSON OpType = 105
|
||||
OpStructPtrHeadMarshalJSON OpType = 106
|
||||
OpStructPtrHeadOmitEmptyMarshalJSON OpType = 107
|
||||
OpStructHeadMarshalText OpType = 108
|
||||
OpStructHeadOmitEmptyMarshalText OpType = 109
|
||||
OpStructPtrHeadMarshalText OpType = 110
|
||||
OpStructPtrHeadOmitEmptyMarshalText OpType = 111
|
||||
OpStructHeadIntString OpType = 112
|
||||
OpStructHeadOmitEmptyIntString OpType = 113
|
||||
OpStructPtrHeadIntString OpType = 114
|
||||
OpStructPtrHeadOmitEmptyIntString OpType = 115
|
||||
OpStructHeadUintString OpType = 116
|
||||
OpStructHeadOmitEmptyUintString OpType = 117
|
||||
OpStructPtrHeadUintString OpType = 118
|
||||
OpStructPtrHeadOmitEmptyUintString OpType = 119
|
||||
OpStructHeadFloat32String OpType = 120
|
||||
OpStructHeadOmitEmptyFloat32String OpType = 121
|
||||
OpStructPtrHeadFloat32String OpType = 122
|
||||
OpStructPtrHeadOmitEmptyFloat32String OpType = 123
|
||||
OpStructHeadFloat64String OpType = 124
|
||||
OpStructHeadOmitEmptyFloat64String OpType = 125
|
||||
OpStructPtrHeadFloat64String OpType = 126
|
||||
OpStructPtrHeadOmitEmptyFloat64String OpType = 127
|
||||
OpStructHeadBoolString OpType = 128
|
||||
OpStructHeadOmitEmptyBoolString OpType = 129
|
||||
OpStructPtrHeadBoolString OpType = 130
|
||||
OpStructPtrHeadOmitEmptyBoolString OpType = 131
|
||||
OpStructHeadStringString OpType = 132
|
||||
OpStructHeadOmitEmptyStringString OpType = 133
|
||||
OpStructPtrHeadStringString OpType = 134
|
||||
OpStructPtrHeadOmitEmptyStringString OpType = 135
|
||||
OpStructHeadNumberString OpType = 136
|
||||
OpStructHeadOmitEmptyNumberString OpType = 137
|
||||
OpStructPtrHeadNumberString OpType = 138
|
||||
OpStructPtrHeadOmitEmptyNumberString OpType = 139
|
||||
OpStructHeadIntPtr OpType = 140
|
||||
OpStructHeadOmitEmptyIntPtr OpType = 141
|
||||
OpStructPtrHeadIntPtr OpType = 142
|
||||
OpStructPtrHeadOmitEmptyIntPtr OpType = 143
|
||||
OpStructHeadUintPtr OpType = 144
|
||||
OpStructHeadOmitEmptyUintPtr OpType = 145
|
||||
OpStructPtrHeadUintPtr OpType = 146
|
||||
OpStructPtrHeadOmitEmptyUintPtr OpType = 147
|
||||
OpStructHeadFloat32Ptr OpType = 148
|
||||
OpStructHeadOmitEmptyFloat32Ptr OpType = 149
|
||||
OpStructPtrHeadFloat32Ptr OpType = 150
|
||||
OpStructPtrHeadOmitEmptyFloat32Ptr OpType = 151
|
||||
OpStructHeadFloat64Ptr OpType = 152
|
||||
OpStructHeadOmitEmptyFloat64Ptr OpType = 153
|
||||
OpStructPtrHeadFloat64Ptr OpType = 154
|
||||
OpStructPtrHeadOmitEmptyFloat64Ptr OpType = 155
|
||||
OpStructHeadBoolPtr OpType = 156
|
||||
OpStructHeadOmitEmptyBoolPtr OpType = 157
|
||||
OpStructPtrHeadBoolPtr OpType = 158
|
||||
OpStructPtrHeadOmitEmptyBoolPtr OpType = 159
|
||||
OpStructHeadStringPtr OpType = 160
|
||||
OpStructHeadOmitEmptyStringPtr OpType = 161
|
||||
OpStructPtrHeadStringPtr OpType = 162
|
||||
OpStructPtrHeadOmitEmptyStringPtr OpType = 163
|
||||
OpStructHeadBytesPtr OpType = 164
|
||||
OpStructHeadOmitEmptyBytesPtr OpType = 165
|
||||
OpStructPtrHeadBytesPtr OpType = 166
|
||||
OpStructPtrHeadOmitEmptyBytesPtr OpType = 167
|
||||
OpStructHeadNumberPtr OpType = 168
|
||||
OpStructHeadOmitEmptyNumberPtr OpType = 169
|
||||
OpStructPtrHeadNumberPtr OpType = 170
|
||||
OpStructPtrHeadOmitEmptyNumberPtr OpType = 171
|
||||
OpStructHeadArrayPtr OpType = 172
|
||||
OpStructHeadOmitEmptyArrayPtr OpType = 173
|
||||
OpStructPtrHeadArrayPtr OpType = 174
|
||||
OpStructPtrHeadOmitEmptyArrayPtr OpType = 175
|
||||
OpStructHeadMapPtr OpType = 176
|
||||
OpStructHeadOmitEmptyMapPtr OpType = 177
|
||||
OpStructPtrHeadMapPtr OpType = 178
|
||||
OpStructPtrHeadOmitEmptyMapPtr OpType = 179
|
||||
OpStructHeadSlicePtr OpType = 180
|
||||
OpStructHeadOmitEmptySlicePtr OpType = 181
|
||||
OpStructPtrHeadSlicePtr OpType = 182
|
||||
OpStructPtrHeadOmitEmptySlicePtr OpType = 183
|
||||
OpStructHeadMarshalJSONPtr OpType = 184
|
||||
OpStructHeadOmitEmptyMarshalJSONPtr OpType = 185
|
||||
OpStructPtrHeadMarshalJSONPtr OpType = 186
|
||||
OpStructPtrHeadOmitEmptyMarshalJSONPtr OpType = 187
|
||||
OpStructHeadMarshalTextPtr OpType = 188
|
||||
OpStructHeadOmitEmptyMarshalTextPtr OpType = 189
|
||||
OpStructPtrHeadMarshalTextPtr OpType = 190
|
||||
OpStructPtrHeadOmitEmptyMarshalTextPtr OpType = 191
|
||||
OpStructHeadInterfacePtr OpType = 192
|
||||
OpStructHeadOmitEmptyInterfacePtr OpType = 193
|
||||
OpStructPtrHeadInterfacePtr OpType = 194
|
||||
OpStructPtrHeadOmitEmptyInterfacePtr OpType = 195
|
||||
OpStructHeadIntPtrString OpType = 196
|
||||
OpStructHeadOmitEmptyIntPtrString OpType = 197
|
||||
OpStructPtrHeadIntPtrString OpType = 198
|
||||
OpStructPtrHeadOmitEmptyIntPtrString OpType = 199
|
||||
OpStructHeadUintPtrString OpType = 200
|
||||
OpStructHeadOmitEmptyUintPtrString OpType = 201
|
||||
OpStructPtrHeadUintPtrString OpType = 202
|
||||
OpStructPtrHeadOmitEmptyUintPtrString OpType = 203
|
||||
OpStructHeadFloat32PtrString OpType = 204
|
||||
OpStructHeadOmitEmptyFloat32PtrString OpType = 205
|
||||
OpStructPtrHeadFloat32PtrString OpType = 206
|
||||
OpStructPtrHeadOmitEmptyFloat32PtrString OpType = 207
|
||||
OpStructHeadFloat64PtrString OpType = 208
|
||||
OpStructHeadOmitEmptyFloat64PtrString OpType = 209
|
||||
OpStructPtrHeadFloat64PtrString OpType = 210
|
||||
OpStructPtrHeadOmitEmptyFloat64PtrString OpType = 211
|
||||
OpStructHeadBoolPtrString OpType = 212
|
||||
OpStructHeadOmitEmptyBoolPtrString OpType = 213
|
||||
OpStructPtrHeadBoolPtrString OpType = 214
|
||||
OpStructPtrHeadOmitEmptyBoolPtrString OpType = 215
|
||||
OpStructHeadStringPtrString OpType = 216
|
||||
OpStructHeadOmitEmptyStringPtrString OpType = 217
|
||||
OpStructPtrHeadStringPtrString OpType = 218
|
||||
OpStructPtrHeadOmitEmptyStringPtrString OpType = 219
|
||||
OpStructHeadNumberPtrString OpType = 220
|
||||
OpStructHeadOmitEmptyNumberPtrString OpType = 221
|
||||
OpStructPtrHeadNumberPtrString OpType = 222
|
||||
OpStructPtrHeadOmitEmptyNumberPtrString OpType = 223
|
||||
OpStructHead OpType = 224
|
||||
OpStructHeadOmitEmpty OpType = 225
|
||||
OpStructPtrHead OpType = 226
|
||||
OpStructPtrHeadOmitEmpty OpType = 227
|
||||
OpStructFieldInt OpType = 228
|
||||
OpStructFieldOmitEmptyInt OpType = 229
|
||||
OpStructEndInt OpType = 230
|
||||
OpStructEndOmitEmptyInt OpType = 231
|
||||
OpStructFieldUint OpType = 232
|
||||
OpStructFieldOmitEmptyUint OpType = 233
|
||||
OpStructEndUint OpType = 234
|
||||
OpStructEndOmitEmptyUint OpType = 235
|
||||
OpStructFieldFloat32 OpType = 236
|
||||
OpStructFieldOmitEmptyFloat32 OpType = 237
|
||||
OpStructEndFloat32 OpType = 238
|
||||
OpStructEndOmitEmptyFloat32 OpType = 239
|
||||
OpStructFieldFloat64 OpType = 240
|
||||
OpStructFieldOmitEmptyFloat64 OpType = 241
|
||||
OpStructEndFloat64 OpType = 242
|
||||
OpStructEndOmitEmptyFloat64 OpType = 243
|
||||
OpStructFieldBool OpType = 244
|
||||
OpStructFieldOmitEmptyBool OpType = 245
|
||||
OpStructEndBool OpType = 246
|
||||
OpStructEndOmitEmptyBool OpType = 247
|
||||
OpStructFieldString OpType = 248
|
||||
OpStructFieldOmitEmptyString OpType = 249
|
||||
OpStructEndString OpType = 250
|
||||
OpStructEndOmitEmptyString OpType = 251
|
||||
OpStructFieldBytes OpType = 252
|
||||
OpStructFieldOmitEmptyBytes OpType = 253
|
||||
OpStructEndBytes OpType = 254
|
||||
OpStructEndOmitEmptyBytes OpType = 255
|
||||
OpStructFieldNumber OpType = 256
|
||||
OpStructFieldOmitEmptyNumber OpType = 257
|
||||
OpStructEndNumber OpType = 258
|
||||
OpStructEndOmitEmptyNumber OpType = 259
|
||||
OpStructFieldArray OpType = 260
|
||||
OpStructFieldOmitEmptyArray OpType = 261
|
||||
OpStructEndArray OpType = 262
|
||||
OpStructEndOmitEmptyArray OpType = 263
|
||||
OpStructFieldMap OpType = 264
|
||||
OpStructFieldOmitEmptyMap OpType = 265
|
||||
OpStructEndMap OpType = 266
|
||||
OpStructEndOmitEmptyMap OpType = 267
|
||||
OpStructFieldSlice OpType = 268
|
||||
OpStructFieldOmitEmptySlice OpType = 269
|
||||
OpStructEndSlice OpType = 270
|
||||
OpStructEndOmitEmptySlice OpType = 271
|
||||
OpStructFieldStruct OpType = 272
|
||||
OpStructFieldOmitEmptyStruct OpType = 273
|
||||
OpStructEndStruct OpType = 274
|
||||
OpStructEndOmitEmptyStruct OpType = 275
|
||||
OpStructFieldMarshalJSON OpType = 276
|
||||
OpStructFieldOmitEmptyMarshalJSON OpType = 277
|
||||
OpStructEndMarshalJSON OpType = 278
|
||||
OpStructEndOmitEmptyMarshalJSON OpType = 279
|
||||
OpStructFieldMarshalText OpType = 280
|
||||
OpStructFieldOmitEmptyMarshalText OpType = 281
|
||||
OpStructEndMarshalText OpType = 282
|
||||
OpStructEndOmitEmptyMarshalText OpType = 283
|
||||
OpStructFieldIntString OpType = 284
|
||||
OpStructFieldOmitEmptyIntString OpType = 285
|
||||
OpStructEndIntString OpType = 286
|
||||
OpStructEndOmitEmptyIntString OpType = 287
|
||||
OpStructFieldUintString OpType = 288
|
||||
OpStructFieldOmitEmptyUintString OpType = 289
|
||||
OpStructEndUintString OpType = 290
|
||||
OpStructEndOmitEmptyUintString OpType = 291
|
||||
OpStructFieldFloat32String OpType = 292
|
||||
OpStructFieldOmitEmptyFloat32String OpType = 293
|
||||
OpStructEndFloat32String OpType = 294
|
||||
OpStructEndOmitEmptyFloat32String OpType = 295
|
||||
OpStructFieldFloat64String OpType = 296
|
||||
OpStructFieldOmitEmptyFloat64String OpType = 297
|
||||
OpStructEndFloat64String OpType = 298
|
||||
OpStructEndOmitEmptyFloat64String OpType = 299
|
||||
OpStructFieldBoolString OpType = 300
|
||||
OpStructFieldOmitEmptyBoolString OpType = 301
|
||||
OpStructEndBoolString OpType = 302
|
||||
OpStructEndOmitEmptyBoolString OpType = 303
|
||||
OpStructFieldStringString OpType = 304
|
||||
OpStructFieldOmitEmptyStringString OpType = 305
|
||||
OpStructEndStringString OpType = 306
|
||||
OpStructEndOmitEmptyStringString OpType = 307
|
||||
OpStructFieldNumberString OpType = 308
|
||||
OpStructFieldOmitEmptyNumberString OpType = 309
|
||||
OpStructEndNumberString OpType = 310
|
||||
OpStructEndOmitEmptyNumberString OpType = 311
|
||||
OpStructFieldIntPtr OpType = 312
|
||||
OpStructFieldOmitEmptyIntPtr OpType = 313
|
||||
OpStructEndIntPtr OpType = 314
|
||||
OpStructEndOmitEmptyIntPtr OpType = 315
|
||||
OpStructFieldUintPtr OpType = 316
|
||||
OpStructFieldOmitEmptyUintPtr OpType = 317
|
||||
OpStructEndUintPtr OpType = 318
|
||||
OpStructEndOmitEmptyUintPtr OpType = 319
|
||||
OpStructFieldFloat32Ptr OpType = 320
|
||||
OpStructFieldOmitEmptyFloat32Ptr OpType = 321
|
||||
OpStructEndFloat32Ptr OpType = 322
|
||||
OpStructEndOmitEmptyFloat32Ptr OpType = 323
|
||||
OpStructFieldFloat64Ptr OpType = 324
|
||||
OpStructFieldOmitEmptyFloat64Ptr OpType = 325
|
||||
OpStructEndFloat64Ptr OpType = 326
|
||||
OpStructEndOmitEmptyFloat64Ptr OpType = 327
|
||||
OpStructFieldBoolPtr OpType = 328
|
||||
OpStructFieldOmitEmptyBoolPtr OpType = 329
|
||||
OpStructEndBoolPtr OpType = 330
|
||||
OpStructEndOmitEmptyBoolPtr OpType = 331
|
||||
OpStructFieldStringPtr OpType = 332
|
||||
OpStructFieldOmitEmptyStringPtr OpType = 333
|
||||
OpStructEndStringPtr OpType = 334
|
||||
OpStructEndOmitEmptyStringPtr OpType = 335
|
||||
OpStructFieldBytesPtr OpType = 336
|
||||
OpStructFieldOmitEmptyBytesPtr OpType = 337
|
||||
OpStructEndBytesPtr OpType = 338
|
||||
OpStructEndOmitEmptyBytesPtr OpType = 339
|
||||
OpStructFieldNumberPtr OpType = 340
|
||||
OpStructFieldOmitEmptyNumberPtr OpType = 341
|
||||
OpStructEndNumberPtr OpType = 342
|
||||
OpStructEndOmitEmptyNumberPtr OpType = 343
|
||||
OpStructFieldArrayPtr OpType = 344
|
||||
OpStructFieldOmitEmptyArrayPtr OpType = 345
|
||||
OpStructEndArrayPtr OpType = 346
|
||||
OpStructEndOmitEmptyArrayPtr OpType = 347
|
||||
OpStructFieldMapPtr OpType = 348
|
||||
OpStructFieldOmitEmptyMapPtr OpType = 349
|
||||
OpStructEndMapPtr OpType = 350
|
||||
OpStructEndOmitEmptyMapPtr OpType = 351
|
||||
OpStructFieldSlicePtr OpType = 352
|
||||
OpStructFieldOmitEmptySlicePtr OpType = 353
|
||||
OpStructEndSlicePtr OpType = 354
|
||||
OpStructEndOmitEmptySlicePtr OpType = 355
|
||||
OpStructFieldMarshalJSONPtr OpType = 356
|
||||
OpStructFieldOmitEmptyMarshalJSONPtr OpType = 357
|
||||
OpStructEndMarshalJSONPtr OpType = 358
|
||||
OpStructEndOmitEmptyMarshalJSONPtr OpType = 359
|
||||
OpStructFieldMarshalTextPtr OpType = 360
|
||||
OpStructFieldOmitEmptyMarshalTextPtr OpType = 361
|
||||
OpStructEndMarshalTextPtr OpType = 362
|
||||
OpStructEndOmitEmptyMarshalTextPtr OpType = 363
|
||||
OpStructFieldInterfacePtr OpType = 364
|
||||
OpStructFieldOmitEmptyInterfacePtr OpType = 365
|
||||
OpStructEndInterfacePtr OpType = 366
|
||||
OpStructEndOmitEmptyInterfacePtr OpType = 367
|
||||
OpStructFieldIntPtrString OpType = 368
|
||||
OpStructFieldOmitEmptyIntPtrString OpType = 369
|
||||
OpStructEndIntPtrString OpType = 370
|
||||
OpStructEndOmitEmptyIntPtrString OpType = 371
|
||||
OpStructFieldUintPtrString OpType = 372
|
||||
OpStructFieldOmitEmptyUintPtrString OpType = 373
|
||||
OpStructEndUintPtrString OpType = 374
|
||||
OpStructEndOmitEmptyUintPtrString OpType = 375
|
||||
OpStructFieldFloat32PtrString OpType = 376
|
||||
OpStructFieldOmitEmptyFloat32PtrString OpType = 377
|
||||
OpStructEndFloat32PtrString OpType = 378
|
||||
OpStructEndOmitEmptyFloat32PtrString OpType = 379
|
||||
OpStructFieldFloat64PtrString OpType = 380
|
||||
OpStructFieldOmitEmptyFloat64PtrString OpType = 381
|
||||
OpStructEndFloat64PtrString OpType = 382
|
||||
OpStructEndOmitEmptyFloat64PtrString OpType = 383
|
||||
OpStructFieldBoolPtrString OpType = 384
|
||||
OpStructFieldOmitEmptyBoolPtrString OpType = 385
|
||||
OpStructEndBoolPtrString OpType = 386
|
||||
OpStructEndOmitEmptyBoolPtrString OpType = 387
|
||||
OpStructFieldStringPtrString OpType = 388
|
||||
OpStructFieldOmitEmptyStringPtrString OpType = 389
|
||||
OpStructEndStringPtrString OpType = 390
|
||||
OpStructEndOmitEmptyStringPtrString OpType = 391
|
||||
OpStructFieldNumberPtrString OpType = 392
|
||||
OpStructFieldOmitEmptyNumberPtrString OpType = 393
|
||||
OpStructEndNumberPtrString OpType = 394
|
||||
OpStructEndOmitEmptyNumberPtrString OpType = 395
|
||||
OpStructField OpType = 396
|
||||
OpStructFieldOmitEmpty OpType = 397
|
||||
OpStructEnd OpType = 398
|
||||
OpStructEndOmitEmpty OpType = 399
|
||||
)
|
||||
|
||||
func (t OpType) String() string {
|
||||
if int(t) >= 400 {
|
||||
return ""
|
||||
}
|
||||
return opTypeStrings[int(t)]
|
||||
}
|
||||
|
||||
func (t OpType) CodeType() CodeType {
|
||||
if strings.Contains(t.String(), "Struct") {
|
||||
if strings.Contains(t.String(), "End") {
|
||||
return CodeStructEnd
|
||||
}
|
||||
return CodeStructField
|
||||
}
|
||||
switch t {
|
||||
case OpArray, OpArrayPtr:
|
||||
return CodeArrayHead
|
||||
case OpArrayElem:
|
||||
return CodeArrayElem
|
||||
case OpSlice, OpSlicePtr:
|
||||
return CodeSliceHead
|
||||
case OpSliceElem:
|
||||
return CodeSliceElem
|
||||
case OpMap, OpMapPtr:
|
||||
return CodeMapHead
|
||||
case OpMapKey:
|
||||
return CodeMapKey
|
||||
case OpMapValue:
|
||||
return CodeMapValue
|
||||
case OpMapEnd:
|
||||
return CodeMapEnd
|
||||
}
|
||||
|
||||
return CodeOp
|
||||
}
|
||||
|
||||
func (t OpType) HeadToPtrHead() OpType {
|
||||
if strings.Index(t.String(), "PtrHead") > 0 {
|
||||
return t
|
||||
}
|
||||
|
||||
idx := strings.Index(t.String(), "Head")
|
||||
if idx == -1 {
|
||||
return t
|
||||
}
|
||||
suffix := "PtrHead" + t.String()[idx+len("Head"):]
|
||||
|
||||
const toPtrOffset = 2
|
||||
if strings.Contains(OpType(int(t)+toPtrOffset).String(), suffix) {
|
||||
return OpType(int(t) + toPtrOffset)
|
||||
}
|
||||
return t
|
||||
}
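// Note: the +1/+2/-2 offset tricks in these conversion helpers rely on the
// fixed ordering of the generated OpType constants above, where each field
// kind appears as four consecutive values (Head, HeadOmitEmpty, PtrHead,
// PtrHeadOmitEmpty; e.g. OpStructHeadInt=56 through
// OpStructPtrHeadOmitEmptyInt=59). The strings.Contains check on the
// neighbouring constant's name acts as a guard in case a kind does not
// follow that layout.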
|
||||
|
||||
func (t OpType) HeadToOmitEmptyHead() OpType {
|
||||
const toOmitEmptyOffset = 1
|
||||
if strings.Contains(OpType(int(t)+toOmitEmptyOffset).String(), "OmitEmpty") {
|
||||
return OpType(int(t) + toOmitEmptyOffset)
|
||||
}
|
||||
|
||||
return t
|
||||
}
|
||||
|
||||
func (t OpType) PtrHeadToHead() OpType {
|
||||
idx := strings.Index(t.String(), "PtrHead")
|
||||
if idx == -1 {
|
||||
return t
|
||||
}
|
||||
suffix := t.String()[idx+len("Ptr"):]
|
||||
|
||||
const toPtrOffset = 2
|
||||
if strings.Contains(OpType(int(t)-toPtrOffset).String(), suffix) {
|
||||
return OpType(int(t) - toPtrOffset)
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
func (t OpType) FieldToEnd() OpType {
|
||||
idx := strings.Index(t.String(), "Field")
|
||||
if idx == -1 {
|
||||
return t
|
||||
}
|
||||
suffix := t.String()[idx+len("Field"):]
|
||||
if suffix == "" || suffix == "OmitEmpty" {
|
||||
return t
|
||||
}
|
||||
const toEndOffset = 2
|
||||
if strings.Contains(OpType(int(t)+toEndOffset).String(), "End"+suffix) {
|
||||
return OpType(int(t) + toEndOffset)
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
func (t OpType) FieldToOmitEmptyField() OpType {
|
||||
const toOmitEmptyOffset = 1
|
||||
if strings.Contains(OpType(int(t)+toOmitEmptyOffset).String(), "OmitEmpty") {
|
||||
return OpType(int(t) + toOmitEmptyOffset)
|
||||
}
|
||||
return t
|
||||
}
|
135
internal/encoder/query.go
Normal file
@ -0,0 +1,135 @@
package encoder
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
var (
|
||||
Marshal func(interface{}) ([]byte, error)
|
||||
Unmarshal func([]byte, interface{}) error
|
||||
)
|
||||
|
||||
type FieldQuery struct {
|
||||
Name string
|
||||
Fields []*FieldQuery
|
||||
hash string
|
||||
}
|
||||
|
||||
func (q *FieldQuery) Hash() string {
|
||||
if q.hash != "" {
|
||||
return q.hash
|
||||
}
|
||||
b, _ := Marshal(q)
|
||||
q.hash = string(b)
|
||||
return q.hash
|
||||
}
|
||||
|
||||
func (q *FieldQuery) MarshalJSON() ([]byte, error) {
|
||||
if q.Name != "" {
|
||||
if len(q.Fields) > 0 {
|
||||
return Marshal(map[string][]*FieldQuery{q.Name: q.Fields})
|
||||
}
|
||||
return Marshal(q.Name)
|
||||
}
|
||||
return Marshal(q.Fields)
|
||||
}
|
||||
|
||||
func (q *FieldQuery) QueryString() (FieldQueryString, error) {
|
||||
b, err := Marshal(q)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return FieldQueryString(b), nil
|
||||
}
|
||||
|
||||
type FieldQueryString string
|
||||
|
||||
func (s FieldQueryString) Build() (*FieldQuery, error) {
|
||||
var query interface{}
|
||||
if err := Unmarshal([]byte(s), &query); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return s.build(reflect.ValueOf(query))
|
||||
}
|
||||
|
||||
func (s FieldQueryString) build(v reflect.Value) (*FieldQuery, error) {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.String:
|
||||
return s.buildString(v)
|
||||
case reflect.Map:
|
||||
return s.buildMap(v)
|
||||
case reflect.Slice:
|
||||
return s.buildSlice(v)
|
||||
case reflect.Interface:
|
||||
return s.build(reflect.ValueOf(v.Interface()))
|
||||
}
|
||||
return nil, fmt.Errorf("failed to build field query")
|
||||
}
|
||||
|
||||
func (s FieldQueryString) buildString(v reflect.Value) (*FieldQuery, error) {
|
||||
b := []byte(v.String())
|
||||
switch b[0] {
|
||||
case '[', '{':
|
||||
var query interface{}
|
||||
if err := Unmarshal(b, &query); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if str, ok := query.(string); ok {
|
||||
return &FieldQuery{Name: str}, nil
|
||||
}
|
||||
return s.build(reflect.ValueOf(query))
|
||||
}
|
||||
return &FieldQuery{Name: string(b)}, nil
|
||||
}
|
||||
|
||||
func (s FieldQueryString) buildSlice(v reflect.Value) (*FieldQuery, error) {
|
||||
fields := make([]*FieldQuery, 0, v.Len())
|
||||
for i := 0; i < v.Len(); i++ {
|
||||
def, err := s.build(v.Index(i))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
fields = append(fields, def)
|
||||
}
|
||||
return &FieldQuery{Fields: fields}, nil
|
||||
}
|
||||
|
||||
func (s FieldQueryString) buildMap(v reflect.Value) (*FieldQuery, error) {
|
||||
keys := v.MapKeys()
|
||||
if len(keys) != 1 {
|
||||
return nil, fmt.Errorf("failed to build field query object")
|
||||
}
|
||||
key := keys[0]
|
||||
if key.Type().Kind() != reflect.String {
|
||||
return nil, fmt.Errorf("failed to build field query. invalid object key type")
|
||||
}
|
||||
name := key.String()
|
||||
def, err := s.build(v.MapIndex(key))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &FieldQuery{
|
||||
Name: name,
|
||||
Fields: def.Fields,
|
||||
}, nil
|
||||
}
|
||||
|
||||
type queryKey struct{}
|
||||
|
||||
func FieldQueryFromContext(ctx context.Context) *FieldQuery {
|
||||
query := ctx.Value(queryKey{})
|
||||
if query == nil {
|
||||
return nil
|
||||
}
|
||||
q, ok := query.(*FieldQuery)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return q
|
||||
}
|
||||
|
||||
func SetFieldQueryToContext(ctx context.Context, query *FieldQuery) context.Context {
|
||||
return context.WithValue(ctx, queryKey{}, query)
|
||||
}
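// Usage sketch (illustrative only; it assumes the package-level
// Marshal/Unmarshal hooks above have been wired up by the root json package,
// as they are in normal use):
//
//	q, err := FieldQueryString(`["id",{"user":["name"]}]`).Build()
//	if err != nil {
//		// handle err
//	}
//	ctx := SetFieldQueryToContext(context.Background(), q)
//	// q now describes the fields {id, user.name}; encoders that honor
//	// FieldQueryFromContext(ctx) can restrict output to those fields.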
|
483
internal/encoder/string.go
Normal file
@ -0,0 +1,483 @@
// This file's string processing code is inspired by https://github.com/segmentio/encoding.
|
||||
// The license notice is as follows.
|
||||
//
|
||||
// # MIT License
|
||||
//
|
||||
// Copyright (c) 2019 Segment.io, Inc.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
package encoder
|
||||
|
||||
import (
|
||||
"math/bits"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
const (
|
||||
lsb = 0x0101010101010101
|
||||
msb = 0x8080808080808080
|
||||
)
|
||||
|
||||
var hex = "0123456789abcdef"
|
||||
|
||||
//nolint:govet
|
||||
func stringToUint64Slice(s string) []uint64 {
|
||||
return *(*[]uint64)(unsafe.Pointer(&reflect.SliceHeader{
|
||||
Data: ((*reflect.StringHeader)(unsafe.Pointer(&s))).Data,
|
||||
Len: len(s) / 8,
|
||||
Cap: len(s) / 8,
|
||||
}))
|
||||
}
|
||||
|
||||
func AppendString(ctx *RuntimeContext, buf []byte, s string) []byte {
|
||||
if ctx.Option.Flag&HTMLEscapeOption != 0 {
|
||||
if ctx.Option.Flag&NormalizeUTF8Option != 0 {
|
||||
return appendNormalizedHTMLString(buf, s)
|
||||
}
|
||||
return appendHTMLString(buf, s)
|
||||
}
|
||||
if ctx.Option.Flag&NormalizeUTF8Option != 0 {
|
||||
return appendNormalizedString(buf, s)
|
||||
}
|
||||
return appendString(buf, s)
|
||||
}
|
||||
|
||||
func appendNormalizedHTMLString(buf []byte, s string) []byte {
|
||||
valLen := len(s)
|
||||
if valLen == 0 {
|
||||
return append(buf, `""`...)
|
||||
}
|
||||
buf = append(buf, '"')
|
||||
var (
|
||||
i, j int
|
||||
)
|
||||
if valLen >= 8 {
|
||||
chunks := stringToUint64Slice(s)
|
||||
for _, n := range chunks {
|
||||
// combine masks before checking for the MSB of each byte. We include
|
||||
// `n` in the mask to check whether any of the *input* byte MSBs were
|
||||
// set (i.e. the byte was outside the ASCII range).
|
||||
mask := n | (n - (lsb * 0x20)) |
|
||||
((n ^ (lsb * '"')) - lsb) |
|
||||
((n ^ (lsb * '\\')) - lsb) |
|
||||
((n ^ (lsb * '<')) - lsb) |
|
||||
((n ^ (lsb * '>')) - lsb) |
|
||||
((n ^ (lsb * '&')) - lsb)
|
||||
if (mask & msb) != 0 {
|
||||
j = bits.TrailingZeros64(mask&msb) / 8
|
||||
goto ESCAPE_END
|
||||
}
|
||||
}
|
||||
for i := len(chunks) * 8; i < valLen; i++ {
|
||||
if needEscapeHTMLNormalizeUTF8[s[i]] {
|
||||
j = i
|
||||
goto ESCAPE_END
|
||||
}
|
||||
}
|
||||
// no escape characters found.
|
||||
return append(append(buf, s...), '"')
|
||||
}
|
||||
ESCAPE_END:
|
||||
for j < valLen {
|
||||
c := s[j]
|
||||
|
||||
if !needEscapeHTMLNormalizeUTF8[c] {
|
||||
// fast path: most of the time, printable ascii characters are used
|
||||
j++
|
||||
continue
|
||||
}
|
||||
|
||||
switch c {
|
||||
case '\\', '"':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', c)
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case '\n':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', 'n')
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case '\r':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', 'r')
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case '\t':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', 't')
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case '<', '>', '&':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, `\u00`...)
|
||||
buf = append(buf, hex[c>>4], hex[c&0xF])
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x0B, 0x0C, 0x0E, 0x0F, // 0x00-0x0F
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F: // 0x10-0x1F
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, `\u00`...)
|
||||
buf = append(buf, hex[c>>4], hex[c&0xF])
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
}
|
||||
state, size := decodeRuneInString(s[j:])
|
||||
switch state {
|
||||
case runeErrorState:
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, `\ufffd`...)
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
// U+2028 is LINE SEPARATOR.
|
||||
// U+2029 is PARAGRAPH SEPARATOR.
|
||||
// They are both technically valid characters in JSON strings,
|
||||
// but don't work in JSONP, which has to be evaluated as JavaScript,
|
||||
// and can lead to security holes there. It is valid JSON to
|
||||
// escape them, so we do so unconditionally.
|
||||
// See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
|
||||
case lineSepState:
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, `\u2028`...)
|
||||
i = j + 3
|
||||
j = j + 3
|
||||
continue
|
||||
case paragraphSepState:
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, `\u2029`...)
|
||||
i = j + 3
|
||||
j = j + 3
|
||||
continue
|
||||
}
|
||||
j += size
|
||||
}
|
||||
|
||||
return append(append(buf, s[i:]...), '"')
|
||||
}
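// Note on the 8-byte mask used in the fast path above (and in the two
// sibling functions below): it is a SWAR-style check that sets a byte's MSB
// in `mask` when that input byte is < 0x20, equals '"', '\\', '<', '>' or
// '&', or already has its high bit set (non-ASCII). For example, if one byte
// of n is '"' (0x22), then (n ^ (lsb*'"')) zeroes that byte and subtracting
// lsb underflows it to 0xff, so `mask & msb` becomes non-zero and the slow
// escape path is taken. The check is allowed to be conservative: a false
// positive only means the byte-by-byte loop re-examines those bytes.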
|
||||
|
||||
func appendHTMLString(buf []byte, s string) []byte {
|
||||
valLen := len(s)
|
||||
if valLen == 0 {
|
||||
return append(buf, `""`...)
|
||||
}
|
||||
buf = append(buf, '"')
|
||||
var (
|
||||
i, j int
|
||||
)
|
||||
if valLen >= 8 {
|
||||
chunks := stringToUint64Slice(s)
|
||||
for _, n := range chunks {
|
||||
// combine masks before checking for the MSB of each byte. We include
|
||||
// `n` in the mask to check whether any of the *input* byte MSBs were
|
||||
// set (i.e. the byte was outside the ASCII range).
|
||||
mask := n | (n - (lsb * 0x20)) |
|
||||
((n ^ (lsb * '"')) - lsb) |
|
||||
((n ^ (lsb * '\\')) - lsb) |
|
||||
((n ^ (lsb * '<')) - lsb) |
|
||||
((n ^ (lsb * '>')) - lsb) |
|
||||
((n ^ (lsb * '&')) - lsb)
|
||||
if (mask & msb) != 0 {
|
||||
j = bits.TrailingZeros64(mask&msb) / 8
|
||||
goto ESCAPE_END
|
||||
}
|
||||
}
|
||||
for i := len(chunks) * 8; i < valLen; i++ {
|
||||
if needEscapeHTML[s[i]] {
|
||||
j = i
|
||||
goto ESCAPE_END
|
||||
}
|
||||
}
|
||||
		// no escape characters found.
|
||||
return append(append(buf, s...), '"')
|
||||
}
|
||||
ESCAPE_END:
|
||||
for j < valLen {
|
||||
c := s[j]
|
||||
|
||||
if !needEscapeHTML[c] {
|
||||
// fast path: most of the time, printable ascii characters are used
|
||||
j++
|
||||
continue
|
||||
}
|
||||
|
||||
switch c {
|
||||
case '\\', '"':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', c)
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case '\n':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', 'n')
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case '\r':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', 'r')
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case '\t':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', 't')
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case '<', '>', '&':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, `\u00`...)
|
||||
buf = append(buf, hex[c>>4], hex[c&0xF])
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x0B, 0x0C, 0x0E, 0x0F, // 0x00-0x0F
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F: // 0x10-0x1F
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, `\u00`...)
|
||||
buf = append(buf, hex[c>>4], hex[c&0xF])
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
}
|
||||
j++
|
||||
}
|
||||
|
||||
return append(append(buf, s[i:]...), '"')
|
||||
}
|
||||
|
||||
func appendNormalizedString(buf []byte, s string) []byte {
|
||||
valLen := len(s)
|
||||
if valLen == 0 {
|
||||
return append(buf, `""`...)
|
||||
}
|
||||
buf = append(buf, '"')
|
||||
var (
|
||||
i, j int
|
||||
)
|
||||
if valLen >= 8 {
|
||||
chunks := stringToUint64Slice(s)
|
||||
for _, n := range chunks {
|
||||
// combine masks before checking for the MSB of each byte. We include
|
||||
// `n` in the mask to check whether any of the *input* byte MSBs were
|
||||
// set (i.e. the byte was outside the ASCII range).
|
||||
mask := n | (n - (lsb * 0x20)) |
|
||||
((n ^ (lsb * '"')) - lsb) |
|
||||
((n ^ (lsb * '\\')) - lsb)
|
||||
if (mask & msb) != 0 {
|
||||
j = bits.TrailingZeros64(mask&msb) / 8
|
||||
goto ESCAPE_END
|
||||
}
|
||||
}
|
||||
valLen := len(s)
|
||||
for i := len(chunks) * 8; i < valLen; i++ {
|
||||
if needEscapeNormalizeUTF8[s[i]] {
|
||||
j = i
|
||||
goto ESCAPE_END
|
||||
}
|
||||
}
|
||||
return append(append(buf, s...), '"')
|
||||
}
|
||||
ESCAPE_END:
|
||||
for j < valLen {
|
||||
c := s[j]
|
||||
|
||||
if !needEscapeNormalizeUTF8[c] {
|
||||
// fast path: most of the time, printable ascii characters are used
|
||||
j++
|
||||
continue
|
||||
}
|
||||
|
||||
switch c {
|
||||
case '\\', '"':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', c)
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case '\n':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', 'n')
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case '\r':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', 'r')
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case '\t':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', 't')
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x0B, 0x0C, 0x0E, 0x0F, // 0x00-0x0F
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F: // 0x10-0x1F
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, `\u00`...)
|
||||
buf = append(buf, hex[c>>4], hex[c&0xF])
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
}
|
||||
|
||||
state, size := decodeRuneInString(s[j:])
|
||||
switch state {
|
||||
case runeErrorState:
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, `\ufffd`...)
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
// U+2028 is LINE SEPARATOR.
|
||||
// U+2029 is PARAGRAPH SEPARATOR.
|
||||
// They are both technically valid characters in JSON strings,
|
||||
// but don't work in JSONP, which has to be evaluated as JavaScript,
|
||||
// and can lead to security holes there. It is valid JSON to
|
||||
// escape them, so we do so unconditionally.
|
||||
// See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
|
||||
case lineSepState:
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, `\u2028`...)
|
||||
i = j + 3
|
||||
j = j + 3
|
||||
continue
|
||||
case paragraphSepState:
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, `\u2029`...)
|
||||
i = j + 3
|
||||
j = j + 3
|
||||
continue
|
||||
}
|
||||
j += size
|
||||
}
|
||||
|
||||
return append(append(buf, s[i:]...), '"')
|
||||
}
|
||||
|
||||
func appendString(buf []byte, s string) []byte {
|
||||
valLen := len(s)
|
||||
if valLen == 0 {
|
||||
return append(buf, `""`...)
|
||||
}
|
||||
buf = append(buf, '"')
|
||||
var (
|
||||
i, j int
|
||||
)
|
||||
if valLen >= 8 {
|
||||
chunks := stringToUint64Slice(s)
|
||||
for _, n := range chunks {
|
||||
// combine masks before checking for the MSB of each byte. We include
|
||||
// `n` in the mask to check whether any of the *input* byte MSBs were
|
||||
// set (i.e. the byte was outside the ASCII range).
|
||||
mask := n | (n - (lsb * 0x20)) |
|
||||
((n ^ (lsb * '"')) - lsb) |
|
||||
((n ^ (lsb * '\\')) - lsb)
|
||||
if (mask & msb) != 0 {
|
||||
j = bits.TrailingZeros64(mask&msb) / 8
|
||||
goto ESCAPE_END
|
||||
}
|
||||
}
|
||||
valLen := len(s)
|
||||
for i := len(chunks) * 8; i < valLen; i++ {
|
||||
if needEscape[s[i]] {
|
||||
j = i
|
||||
goto ESCAPE_END
|
||||
}
|
||||
}
|
||||
return append(append(buf, s...), '"')
|
||||
}
|
||||
ESCAPE_END:
|
||||
for j < valLen {
|
||||
c := s[j]
|
||||
|
||||
if !needEscape[c] {
|
||||
// fast path: most of the time, printable ascii characters are used
|
||||
j++
|
||||
continue
|
||||
}
|
||||
|
||||
switch c {
|
||||
case '\\', '"':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', c)
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case '\n':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', 'n')
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case '\r':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', 'r')
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case '\t':
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, '\\', 't')
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
|
||||
case 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x0B, 0x0C, 0x0E, 0x0F, // 0x00-0x0F
|
||||
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F: // 0x10-0x1F
|
||||
buf = append(buf, s[i:j]...)
|
||||
buf = append(buf, `\u00`...)
|
||||
buf = append(buf, hex[c>>4], hex[c&0xF])
|
||||
i = j + 1
|
||||
j = j + 1
|
||||
continue
|
||||
}
|
||||
j++
|
||||
}
|
||||
|
||||
return append(append(buf, s[i:]...), '"')
|
||||
}
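
All three append*String variants above share the same fast path: the string is reinterpreted as 64-bit words, and one combined SWAR mask flags any byte that is a quote, a backslash, a control byte below 0x20, or (depending on the variant) an HTML-special or non-ASCII byte. The following is a minimal, self-contained sketch of that word-at-a-time check, assuming the usual lsb = 0x0101010101010101 and msb = 0x8080808080808080 constants (defined elsewhere in this file) and a little-endian byte layout, which is what the encoder's unsafe string-to-[]uint64 reinterpretation yields in practice; it is illustrative only, not part of the patch.

package main

import (
	"encoding/binary"
	"fmt"
	"math/bits"
)

const (
	lsb = 0x0101010101010101 // 0x01 repeated in every byte
	msb = 0x8080808080808080 // 0x80 repeated in every byte
)

// firstCandidate returns the index (0-7) of the first byte in the word that
// may need escaping ('"', '\\', a control byte < 0x20, or a byte >= 0x80),
// or -1 if every byte is plain printable ASCII.
func firstCandidate(n uint64) int {
	mask := n | (n - (lsb * 0x20)) |
		((n ^ (lsb * '"')) - lsb) |
		((n ^ (lsb * '\\')) - lsb)
	if mask&msb == 0 {
		return -1
	}
	return bits.TrailingZeros64(mask&msb) / 8
}

func main() {
	word := binary.LittleEndian.Uint64([]byte(`abc"defg`))
	fmt.Println(firstCandidate(word)) // 3: the '"' sits at byte index 3
}

The check is deliberately conservative: a hit only means "switch to the byte-by-byte loop here", and that loop still consults the needEscape* tables before escaping anything, so occasional false positives cost a little speed but never correctness.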
|
415
internal/encoder/string_table.go
Normal file
|
@ -0,0 +1,415 @@
|
|||
package encoder
|
||||
|
||||
var needEscapeHTMLNormalizeUTF8 = [256]bool{
|
||||
'"': true,
|
||||
'&': true,
|
||||
'<': true,
|
||||
'>': true,
|
||||
'\\': true,
|
||||
0x00: true,
|
||||
0x01: true,
|
||||
0x02: true,
|
||||
0x03: true,
|
||||
0x04: true,
|
||||
0x05: true,
|
||||
0x06: true,
|
||||
0x07: true,
|
||||
0x08: true,
|
||||
0x09: true,
|
||||
0x0a: true,
|
||||
0x0b: true,
|
||||
0x0c: true,
|
||||
0x0d: true,
|
||||
0x0e: true,
|
||||
0x0f: true,
|
||||
0x10: true,
|
||||
0x11: true,
|
||||
0x12: true,
|
||||
0x13: true,
|
||||
0x14: true,
|
||||
0x15: true,
|
||||
0x16: true,
|
||||
0x17: true,
|
||||
0x18: true,
|
||||
0x19: true,
|
||||
0x1a: true,
|
||||
0x1b: true,
|
||||
0x1c: true,
|
||||
0x1d: true,
|
||||
0x1e: true,
|
||||
0x1f: true,
|
||||
/* 0x20 - 0x7f */
|
||||
0x80: true,
|
||||
0x81: true,
|
||||
0x82: true,
|
||||
0x83: true,
|
||||
0x84: true,
|
||||
0x85: true,
|
||||
0x86: true,
|
||||
0x87: true,
|
||||
0x88: true,
|
||||
0x89: true,
|
||||
0x8a: true,
|
||||
0x8b: true,
|
||||
0x8c: true,
|
||||
0x8d: true,
|
||||
0x8e: true,
|
||||
0x8f: true,
|
||||
0x90: true,
|
||||
0x91: true,
|
||||
0x92: true,
|
||||
0x93: true,
|
||||
0x94: true,
|
||||
0x95: true,
|
||||
0x96: true,
|
||||
0x97: true,
|
||||
0x98: true,
|
||||
0x99: true,
|
||||
0x9a: true,
|
||||
0x9b: true,
|
||||
0x9c: true,
|
||||
0x9d: true,
|
||||
0x9e: true,
|
||||
0x9f: true,
|
||||
0xa0: true,
|
||||
0xa1: true,
|
||||
0xa2: true,
|
||||
0xa3: true,
|
||||
0xa4: true,
|
||||
0xa5: true,
|
||||
0xa6: true,
|
||||
0xa7: true,
|
||||
0xa8: true,
|
||||
0xa9: true,
|
||||
0xaa: true,
|
||||
0xab: true,
|
||||
0xac: true,
|
||||
0xad: true,
|
||||
0xae: true,
|
||||
0xaf: true,
|
||||
0xb0: true,
|
||||
0xb1: true,
|
||||
0xb2: true,
|
||||
0xb3: true,
|
||||
0xb4: true,
|
||||
0xb5: true,
|
||||
0xb6: true,
|
||||
0xb7: true,
|
||||
0xb8: true,
|
||||
0xb9: true,
|
||||
0xba: true,
|
||||
0xbb: true,
|
||||
0xbc: true,
|
||||
0xbd: true,
|
||||
0xbe: true,
|
||||
0xbf: true,
|
||||
0xc0: true,
|
||||
0xc1: true,
|
||||
0xc2: true,
|
||||
0xc3: true,
|
||||
0xc4: true,
|
||||
0xc5: true,
|
||||
0xc6: true,
|
||||
0xc7: true,
|
||||
0xc8: true,
|
||||
0xc9: true,
|
||||
0xca: true,
|
||||
0xcb: true,
|
||||
0xcc: true,
|
||||
0xcd: true,
|
||||
0xce: true,
|
||||
0xcf: true,
|
||||
0xd0: true,
|
||||
0xd1: true,
|
||||
0xd2: true,
|
||||
0xd3: true,
|
||||
0xd4: true,
|
||||
0xd5: true,
|
||||
0xd6: true,
|
||||
0xd7: true,
|
||||
0xd8: true,
|
||||
0xd9: true,
|
||||
0xda: true,
|
||||
0xdb: true,
|
||||
0xdc: true,
|
||||
0xdd: true,
|
||||
0xde: true,
|
||||
0xdf: true,
|
||||
0xe0: true,
|
||||
0xe1: true,
|
||||
0xe2: true,
|
||||
0xe3: true,
|
||||
0xe4: true,
|
||||
0xe5: true,
|
||||
0xe6: true,
|
||||
0xe7: true,
|
||||
0xe8: true,
|
||||
0xe9: true,
|
||||
0xea: true,
|
||||
0xeb: true,
|
||||
0xec: true,
|
||||
0xed: true,
|
||||
0xee: true,
|
||||
0xef: true,
|
||||
0xf0: true,
|
||||
0xf1: true,
|
||||
0xf2: true,
|
||||
0xf3: true,
|
||||
0xf4: true,
|
||||
0xf5: true,
|
||||
0xf6: true,
|
||||
0xf7: true,
|
||||
0xf8: true,
|
||||
0xf9: true,
|
||||
0xfa: true,
|
||||
0xfb: true,
|
||||
0xfc: true,
|
||||
0xfd: true,
|
||||
0xfe: true,
|
||||
0xff: true,
|
||||
}
|
||||
|
||||
var needEscapeNormalizeUTF8 = [256]bool{
|
||||
'"': true,
|
||||
'\\': true,
|
||||
0x00: true,
|
||||
0x01: true,
|
||||
0x02: true,
|
||||
0x03: true,
|
||||
0x04: true,
|
||||
0x05: true,
|
||||
0x06: true,
|
||||
0x07: true,
|
||||
0x08: true,
|
||||
0x09: true,
|
||||
0x0a: true,
|
||||
0x0b: true,
|
||||
0x0c: true,
|
||||
0x0d: true,
|
||||
0x0e: true,
|
||||
0x0f: true,
|
||||
0x10: true,
|
||||
0x11: true,
|
||||
0x12: true,
|
||||
0x13: true,
|
||||
0x14: true,
|
||||
0x15: true,
|
||||
0x16: true,
|
||||
0x17: true,
|
||||
0x18: true,
|
||||
0x19: true,
|
||||
0x1a: true,
|
||||
0x1b: true,
|
||||
0x1c: true,
|
||||
0x1d: true,
|
||||
0x1e: true,
|
||||
0x1f: true,
|
||||
/* 0x20 - 0x7f */
|
||||
0x80: true,
|
||||
0x81: true,
|
||||
0x82: true,
|
||||
0x83: true,
|
||||
0x84: true,
|
||||
0x85: true,
|
||||
0x86: true,
|
||||
0x87: true,
|
||||
0x88: true,
|
||||
0x89: true,
|
||||
0x8a: true,
|
||||
0x8b: true,
|
||||
0x8c: true,
|
||||
0x8d: true,
|
||||
0x8e: true,
|
||||
0x8f: true,
|
||||
0x90: true,
|
||||
0x91: true,
|
||||
0x92: true,
|
||||
0x93: true,
|
||||
0x94: true,
|
||||
0x95: true,
|
||||
0x96: true,
|
||||
0x97: true,
|
||||
0x98: true,
|
||||
0x99: true,
|
||||
0x9a: true,
|
||||
0x9b: true,
|
||||
0x9c: true,
|
||||
0x9d: true,
|
||||
0x9e: true,
|
||||
0x9f: true,
|
||||
0xa0: true,
|
||||
0xa1: true,
|
||||
0xa2: true,
|
||||
0xa3: true,
|
||||
0xa4: true,
|
||||
0xa5: true,
|
||||
0xa6: true,
|
||||
0xa7: true,
|
||||
0xa8: true,
|
||||
0xa9: true,
|
||||
0xaa: true,
|
||||
0xab: true,
|
||||
0xac: true,
|
||||
0xad: true,
|
||||
0xae: true,
|
||||
0xaf: true,
|
||||
0xb0: true,
|
||||
0xb1: true,
|
||||
0xb2: true,
|
||||
0xb3: true,
|
||||
0xb4: true,
|
||||
0xb5: true,
|
||||
0xb6: true,
|
||||
0xb7: true,
|
||||
0xb8: true,
|
||||
0xb9: true,
|
||||
0xba: true,
|
||||
0xbb: true,
|
||||
0xbc: true,
|
||||
0xbd: true,
|
||||
0xbe: true,
|
||||
0xbf: true,
|
||||
0xc0: true,
|
||||
0xc1: true,
|
||||
0xc2: true,
|
||||
0xc3: true,
|
||||
0xc4: true,
|
||||
0xc5: true,
|
||||
0xc6: true,
|
||||
0xc7: true,
|
||||
0xc8: true,
|
||||
0xc9: true,
|
||||
0xca: true,
|
||||
0xcb: true,
|
||||
0xcc: true,
|
||||
0xcd: true,
|
||||
0xce: true,
|
||||
0xcf: true,
|
||||
0xd0: true,
|
||||
0xd1: true,
|
||||
0xd2: true,
|
||||
0xd3: true,
|
||||
0xd4: true,
|
||||
0xd5: true,
|
||||
0xd6: true,
|
||||
0xd7: true,
|
||||
0xd8: true,
|
||||
0xd9: true,
|
||||
0xda: true,
|
||||
0xdb: true,
|
||||
0xdc: true,
|
||||
0xdd: true,
|
||||
0xde: true,
|
||||
0xdf: true,
|
||||
0xe0: true,
|
||||
0xe1: true,
|
||||
0xe2: true,
|
||||
0xe3: true,
|
||||
0xe4: true,
|
||||
0xe5: true,
|
||||
0xe6: true,
|
||||
0xe7: true,
|
||||
0xe8: true,
|
||||
0xe9: true,
|
||||
0xea: true,
|
||||
0xeb: true,
|
||||
0xec: true,
|
||||
0xed: true,
|
||||
0xee: true,
|
||||
0xef: true,
|
||||
0xf0: true,
|
||||
0xf1: true,
|
||||
0xf2: true,
|
||||
0xf3: true,
|
||||
0xf4: true,
|
||||
0xf5: true,
|
||||
0xf6: true,
|
||||
0xf7: true,
|
||||
0xf8: true,
|
||||
0xf9: true,
|
||||
0xfa: true,
|
||||
0xfb: true,
|
||||
0xfc: true,
|
||||
0xfd: true,
|
||||
0xfe: true,
|
||||
0xff: true,
|
||||
}
|
||||
|
||||
var needEscapeHTML = [256]bool{
|
||||
'"': true,
|
||||
'&': true,
|
||||
'<': true,
|
||||
'>': true,
|
||||
'\\': true,
|
||||
0x00: true,
|
||||
0x01: true,
|
||||
0x02: true,
|
||||
0x03: true,
|
||||
0x04: true,
|
||||
0x05: true,
|
||||
0x06: true,
|
||||
0x07: true,
|
||||
0x08: true,
|
||||
0x09: true,
|
||||
0x0a: true,
|
||||
0x0b: true,
|
||||
0x0c: true,
|
||||
0x0d: true,
|
||||
0x0e: true,
|
||||
0x0f: true,
|
||||
0x10: true,
|
||||
0x11: true,
|
||||
0x12: true,
|
||||
0x13: true,
|
||||
0x14: true,
|
||||
0x15: true,
|
||||
0x16: true,
|
||||
0x17: true,
|
||||
0x18: true,
|
||||
0x19: true,
|
||||
0x1a: true,
|
||||
0x1b: true,
|
||||
0x1c: true,
|
||||
0x1d: true,
|
||||
0x1e: true,
|
||||
0x1f: true,
|
||||
/* 0x20 - 0xff */
|
||||
}
|
||||
|
||||
var needEscape = [256]bool{
|
||||
'"': true,
|
||||
'\\': true,
|
||||
0x00: true,
|
||||
0x01: true,
|
||||
0x02: true,
|
||||
0x03: true,
|
||||
0x04: true,
|
||||
0x05: true,
|
||||
0x06: true,
|
||||
0x07: true,
|
||||
0x08: true,
|
||||
0x09: true,
|
||||
0x0a: true,
|
||||
0x0b: true,
|
||||
0x0c: true,
|
||||
0x0d: true,
|
||||
0x0e: true,
|
||||
0x0f: true,
|
||||
0x10: true,
|
||||
0x11: true,
|
||||
0x12: true,
|
||||
0x13: true,
|
||||
0x14: true,
|
||||
0x15: true,
|
||||
0x16: true,
|
||||
0x17: true,
|
||||
0x18: true,
|
||||
0x19: true,
|
||||
0x1a: true,
|
||||
0x1b: true,
|
||||
0x1c: true,
|
||||
0x1d: true,
|
||||
0x1e: true,
|
||||
0x1f: true,
|
||||
/* 0x20 - 0xff */
|
||||
}
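
These tables drive the slow path of the string appenders in string.go: whether a byte must be escaped is decided with a single indexed load rather than a chain of comparisons. A small stand-alone illustration of the same table-driven check follows; the needEscape table here is a reduced stand-in, not the full tables from this file.

package main

import "fmt"

// needEscape is a stand-in for the 256-entry tables above: index the array
// directly with the byte value to decide whether it must be escaped.
var needEscape = [256]bool{'"': true, '\\': true}

func init() {
	for i := 0; i < 0x20; i++ {
		needEscape[i] = true // control characters always need escaping
	}
}

// firstEscapeIndex returns the index of the first byte that needs escaping,
// or -1 if the string can be copied verbatim between quotes.
func firstEscapeIndex(s string) int {
	for i := 0; i < len(s); i++ {
		if needEscape[s[i]] {
			return i
		}
	}
	return -1
}

func main() {
	fmt.Println(firstEscapeIndex(`say "hi"`)) // 4
}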
|
41
internal/encoder/vm/debug_vm.go
Normal file
|
@ -0,0 +1,41 @@
|
|||
package vm
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/goccy/go-json/internal/encoder"
|
||||
)
|
||||
|
||||
func DebugRun(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) {
|
||||
defer func() {
|
||||
var code *encoder.Opcode
|
||||
if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 {
|
||||
code = codeSet.EscapeKeyCode
|
||||
} else {
|
||||
code = codeSet.NoescapeKeyCode
|
||||
}
|
||||
if wc := ctx.Option.DebugDOTOut; wc != nil {
|
||||
_, _ = io.WriteString(wc, code.DumpDOT())
|
||||
wc.Close()
|
||||
ctx.Option.DebugDOTOut = nil
|
||||
}
|
||||
|
||||
if err := recover(); err != nil {
|
||||
w := ctx.Option.DebugOut
|
||||
fmt.Fprintln(w, "=============[DEBUG]===============")
|
||||
fmt.Fprintln(w, "* [TYPE]")
|
||||
fmt.Fprintln(w, codeSet.Type)
|
||||
fmt.Fprintf(w, "\n")
|
||||
fmt.Fprintln(w, "* [ALL OPCODE]")
|
||||
fmt.Fprintln(w, code.Dump())
|
||||
fmt.Fprintf(w, "\n")
|
||||
fmt.Fprintln(w, "* [CONTEXT]")
|
||||
fmt.Fprintf(w, "%+v\n", ctx)
|
||||
fmt.Fprintln(w, "===================================")
|
||||
panic(err)
|
||||
}
|
||||
}()
|
||||
|
||||
return Run(ctx, b, codeSet)
|
||||
}
|
9
internal/encoder/vm/hack.go
Normal file
|
@ -0,0 +1,9 @@
|
|||
package vm
|
||||
|
||||
import (
|
||||
// HACK: compile order
|
||||
	// the `vm`, `vm_indent`, `vm_color`, and `vm_color_indent` packages use a lot of memory to compile,
|
||||
	// so we force a dependency chain between them to avoid compiling them concurrently.
|
||||
// dependency order: vm => vm_indent => vm_color => vm_color_indent
|
||||
_ "github.com/goccy/go-json/internal/encoder/vm_indent"
|
||||
)
|
207
internal/encoder/vm/util.go
Normal file
|
@ -0,0 +1,207 @@
|
|||
package vm
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/encoder"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
const uintptrSize = 4 << (^uintptr(0) >> 63)
|
||||
|
||||
var (
|
||||
appendInt = encoder.AppendInt
|
||||
appendUint = encoder.AppendUint
|
||||
appendFloat32 = encoder.AppendFloat32
|
||||
appendFloat64 = encoder.AppendFloat64
|
||||
appendString = encoder.AppendString
|
||||
appendByteSlice = encoder.AppendByteSlice
|
||||
appendNumber = encoder.AppendNumber
|
||||
errUnsupportedValue = encoder.ErrUnsupportedValue
|
||||
errUnsupportedFloat = encoder.ErrUnsupportedFloat
|
||||
mapiterinit = encoder.MapIterInit
|
||||
mapiterkey = encoder.MapIterKey
|
||||
mapitervalue = encoder.MapIterValue
|
||||
mapiternext = encoder.MapIterNext
|
||||
maplen = encoder.MapLen
|
||||
)
|
||||
|
||||
type emptyInterface struct {
|
||||
typ *runtime.Type
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
type nonEmptyInterface struct {
|
||||
itab *struct {
|
||||
ityp *runtime.Type // static interface type
|
||||
typ *runtime.Type // dynamic concrete type
|
||||
// unused fields...
|
||||
}
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
func errUnimplementedOp(op encoder.OpType) error {
|
||||
return fmt.Errorf("encoder: opcode %s has not been implemented", op)
|
||||
}
|
||||
|
||||
func load(base uintptr, idx uint32) uintptr {
|
||||
addr := base + uintptr(idx)
|
||||
return **(**uintptr)(unsafe.Pointer(&addr))
|
||||
}
|
||||
|
||||
func store(base uintptr, idx uint32, p uintptr) {
|
||||
addr := base + uintptr(idx)
|
||||
**(**uintptr)(unsafe.Pointer(&addr)) = p
|
||||
}
|
||||
|
||||
func loadNPtr(base uintptr, idx uint32, ptrNum uint8) uintptr {
|
||||
addr := base + uintptr(idx)
|
||||
p := **(**uintptr)(unsafe.Pointer(&addr))
|
||||
for i := uint8(0); i < ptrNum; i++ {
|
||||
if p == 0 {
|
||||
return 0
|
||||
}
|
||||
p = ptrToPtr(p)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func ptrToUint64(p uintptr, bitSize uint8) uint64 {
|
||||
switch bitSize {
|
||||
case 8:
|
||||
return (uint64)(**(**uint8)(unsafe.Pointer(&p)))
|
||||
case 16:
|
||||
return (uint64)(**(**uint16)(unsafe.Pointer(&p)))
|
||||
case 32:
|
||||
return (uint64)(**(**uint32)(unsafe.Pointer(&p)))
|
||||
case 64:
|
||||
return **(**uint64)(unsafe.Pointer(&p))
|
||||
}
|
||||
return 0
|
||||
}
|
||||
func ptrToFloat32(p uintptr) float32 { return **(**float32)(unsafe.Pointer(&p)) }
|
||||
func ptrToFloat64(p uintptr) float64 { return **(**float64)(unsafe.Pointer(&p)) }
|
||||
func ptrToBool(p uintptr) bool { return **(**bool)(unsafe.Pointer(&p)) }
|
||||
func ptrToBytes(p uintptr) []byte { return **(**[]byte)(unsafe.Pointer(&p)) }
|
||||
func ptrToNumber(p uintptr) json.Number { return **(**json.Number)(unsafe.Pointer(&p)) }
|
||||
func ptrToString(p uintptr) string { return **(**string)(unsafe.Pointer(&p)) }
|
||||
func ptrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) }
|
||||
func ptrToPtr(p uintptr) uintptr {
|
||||
return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p)))
|
||||
}
|
||||
func ptrToNPtr(p uintptr, ptrNum uint8) uintptr {
|
||||
for i := uint8(0); i < ptrNum; i++ {
|
||||
if p == 0 {
|
||||
return 0
|
||||
}
|
||||
p = ptrToPtr(p)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func ptrToUnsafePtr(p uintptr) unsafe.Pointer {
|
||||
return *(*unsafe.Pointer)(unsafe.Pointer(&p))
|
||||
}
|
||||
func ptrToInterface(code *encoder.Opcode, p uintptr) interface{} {
|
||||
return *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||
typ: code.Type,
|
||||
ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)),
|
||||
}))
|
||||
}
|
||||
|
||||
func appendBool(_ *encoder.RuntimeContext, b []byte, v bool) []byte {
|
||||
if v {
|
||||
return append(b, "true"...)
|
||||
}
|
||||
return append(b, "false"...)
|
||||
}
|
||||
|
||||
func appendNull(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, "null"...)
|
||||
}
|
||||
|
||||
func appendComma(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, ',')
|
||||
}
|
||||
|
||||
func appendNullComma(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, "null,"...)
|
||||
}
|
||||
|
||||
func appendColon(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
last := len(b) - 1
|
||||
b[last] = ':'
|
||||
return b
|
||||
}
|
||||
|
||||
func appendMapKeyValue(_ *encoder.RuntimeContext, _ *encoder.Opcode, b, key, value []byte) []byte {
|
||||
b = append(b, key...)
|
||||
b[len(b)-1] = ':'
|
||||
return append(b, value...)
|
||||
}
|
||||
|
||||
func appendMapEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||
b[len(b)-1] = '}'
|
||||
b = append(b, ',')
|
||||
return b
|
||||
}
|
||||
|
||||
func appendMarshalJSON(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||
return encoder.AppendMarshalJSON(ctx, code, b, v)
|
||||
}
|
||||
|
||||
func appendMarshalText(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||
return encoder.AppendMarshalText(ctx, code, b, v)
|
||||
}
|
||||
|
||||
func appendArrayHead(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||
return append(b, '[')
|
||||
}
|
||||
|
||||
func appendArrayEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||
last := len(b) - 1
|
||||
b[last] = ']'
|
||||
return append(b, ',')
|
||||
}
|
||||
|
||||
func appendEmptyArray(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, '[', ']', ',')
|
||||
}
|
||||
|
||||
func appendEmptyObject(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, '{', '}', ',')
|
||||
}
|
||||
|
||||
func appendObjectEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||
last := len(b) - 1
|
||||
b[last] = '}'
|
||||
return append(b, ',')
|
||||
}
|
||||
|
||||
func appendStructHead(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, '{')
|
||||
}
|
||||
|
||||
func appendStructKey(_ *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
return append(b, code.Key...)
|
||||
}
|
||||
|
||||
func appendStructEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||
return append(b, '}', ',')
|
||||
}
|
||||
|
||||
func appendStructEndSkipLast(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
last := len(b) - 1
|
||||
if b[last] == ',' {
|
||||
b[last] = '}'
|
||||
return appendComma(ctx, b)
|
||||
}
|
||||
return appendStructEnd(ctx, code, b)
|
||||
}
|
||||
|
||||
func restoreIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, _ uintptr) {}
|
||||
func storeIndent(_ uintptr, _ *encoder.Opcode, _ uintptr) {}
|
||||
func appendMapKeyIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { return b }
|
||||
func appendArrayElemIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { return b }
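
Most helpers above rely on the same `**(**T)(unsafe.Pointer(&p))` idiom: `p` holds the address of a value (kept as a uintptr in the VM's register slots), so taking `&p` and dereferencing twice reads the value stored at that address. A hedged, stand-alone sketch of the idiom follows; in the real encoder the pointed-to value is kept alive through the RuntimeContext, since a bare uintptr is invisible to the garbage collector.

package main

import (
	"fmt"
	"unsafe"
)

// ptrToFloat64 mirrors the helper above: p holds the address of a float64,
// and the double dereference reads the value stored at that address.
func ptrToFloat64(p uintptr) float64 {
	return **(**float64)(unsafe.Pointer(&p))
}

func main() {
	v := 3.14
	p := uintptr(unsafe.Pointer(&v)) // illustrative only; not GC-safe in general
	fmt.Println(ptrToFloat64(p))     // 3.14
}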
|
4859
internal/encoder/vm/vm.go
Normal file
File diff suppressed because it is too large
35
internal/encoder/vm_color/debug_vm.go
Normal file
|
@ -0,0 +1,35 @@
|
|||
package vm_color
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/goccy/go-json/internal/encoder"
|
||||
)
|
||||
|
||||
func DebugRun(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) {
|
||||
var code *encoder.Opcode
|
||||
if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 {
|
||||
code = codeSet.EscapeKeyCode
|
||||
} else {
|
||||
code = codeSet.NoescapeKeyCode
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err := recover(); err != nil {
|
||||
w := ctx.Option.DebugOut
|
||||
fmt.Fprintln(w, "=============[DEBUG]===============")
|
||||
fmt.Fprintln(w, "* [TYPE]")
|
||||
fmt.Fprintln(w, codeSet.Type)
|
||||
fmt.Fprintf(w, "\n")
|
||||
fmt.Fprintln(w, "* [ALL OPCODE]")
|
||||
fmt.Fprintln(w, code.Dump())
|
||||
fmt.Fprintf(w, "\n")
|
||||
fmt.Fprintln(w, "* [CONTEXT]")
|
||||
fmt.Fprintf(w, "%+v\n", ctx)
|
||||
fmt.Fprintln(w, "===================================")
|
||||
panic(err)
|
||||
}
|
||||
}()
|
||||
|
||||
return Run(ctx, b, codeSet)
|
||||
}
|
9
internal/encoder/vm_color/hack.go
Normal file
|
@ -0,0 +1,9 @@
|
|||
package vm_color
|
||||
|
||||
import (
|
||||
// HACK: compile order
|
||||
	// the `vm`, `vm_indent`, `vm_color`, and `vm_color_indent` packages use a lot of memory to compile,
|
||||
	// so we force a dependency chain between them to avoid compiling them concurrently.
|
||||
// dependency order: vm => vm_indent => vm_color => vm_color_indent
|
||||
_ "github.com/goccy/go-json/internal/encoder/vm_color_indent"
|
||||
)
|
274
internal/encoder/vm_color/util.go
Normal file
|
@ -0,0 +1,274 @@
|
|||
package vm_color
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/encoder"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
const uintptrSize = 4 << (^uintptr(0) >> 63)
|
||||
|
||||
var (
|
||||
errUnsupportedValue = encoder.ErrUnsupportedValue
|
||||
errUnsupportedFloat = encoder.ErrUnsupportedFloat
|
||||
mapiterinit = encoder.MapIterInit
|
||||
mapiterkey = encoder.MapIterKey
|
||||
mapitervalue = encoder.MapIterValue
|
||||
mapiternext = encoder.MapIterNext
|
||||
maplen = encoder.MapLen
|
||||
)
|
||||
|
||||
type emptyInterface struct {
|
||||
typ *runtime.Type
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
type nonEmptyInterface struct {
|
||||
itab *struct {
|
||||
ityp *runtime.Type // static interface type
|
||||
typ *runtime.Type // dynamic concrete type
|
||||
// unused fields...
|
||||
}
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
func errUnimplementedOp(op encoder.OpType) error {
|
||||
return fmt.Errorf("encoder: opcode %s has not been implemented", op)
|
||||
}
|
||||
|
||||
func load(base uintptr, idx uint32) uintptr {
|
||||
addr := base + uintptr(idx)
|
||||
return **(**uintptr)(unsafe.Pointer(&addr))
|
||||
}
|
||||
|
||||
func store(base uintptr, idx uint32, p uintptr) {
|
||||
addr := base + uintptr(idx)
|
||||
**(**uintptr)(unsafe.Pointer(&addr)) = p
|
||||
}
|
||||
|
||||
func loadNPtr(base uintptr, idx uint32, ptrNum uint8) uintptr {
|
||||
addr := base + uintptr(idx)
|
||||
p := **(**uintptr)(unsafe.Pointer(&addr))
|
||||
for i := uint8(0); i < ptrNum; i++ {
|
||||
if p == 0 {
|
||||
return 0
|
||||
}
|
||||
p = ptrToPtr(p)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func ptrToUint64(p uintptr, bitSize uint8) uint64 {
|
||||
switch bitSize {
|
||||
case 8:
|
||||
return (uint64)(**(**uint8)(unsafe.Pointer(&p)))
|
||||
case 16:
|
||||
return (uint64)(**(**uint16)(unsafe.Pointer(&p)))
|
||||
case 32:
|
||||
return (uint64)(**(**uint32)(unsafe.Pointer(&p)))
|
||||
case 64:
|
||||
return **(**uint64)(unsafe.Pointer(&p))
|
||||
}
|
||||
return 0
|
||||
}
|
||||
func ptrToFloat32(p uintptr) float32 { return **(**float32)(unsafe.Pointer(&p)) }
|
||||
func ptrToFloat64(p uintptr) float64 { return **(**float64)(unsafe.Pointer(&p)) }
|
||||
func ptrToBool(p uintptr) bool { return **(**bool)(unsafe.Pointer(&p)) }
|
||||
func ptrToBytes(p uintptr) []byte { return **(**[]byte)(unsafe.Pointer(&p)) }
|
||||
func ptrToNumber(p uintptr) json.Number { return **(**json.Number)(unsafe.Pointer(&p)) }
|
||||
func ptrToString(p uintptr) string { return **(**string)(unsafe.Pointer(&p)) }
|
||||
func ptrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) }
|
||||
func ptrToPtr(p uintptr) uintptr {
|
||||
return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p)))
|
||||
}
|
||||
func ptrToNPtr(p uintptr, ptrNum uint8) uintptr {
|
||||
for i := uint8(0); i < ptrNum; i++ {
|
||||
if p == 0 {
|
||||
return 0
|
||||
}
|
||||
p = ptrToPtr(p)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func ptrToUnsafePtr(p uintptr) unsafe.Pointer {
|
||||
return *(*unsafe.Pointer)(unsafe.Pointer(&p))
|
||||
}
|
||||
func ptrToInterface(code *encoder.Opcode, p uintptr) interface{} {
|
||||
return *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||
typ: code.Type,
|
||||
ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)),
|
||||
}))
|
||||
}
|
||||
|
||||
func appendInt(ctx *encoder.RuntimeContext, b []byte, p uintptr, code *encoder.Opcode) []byte {
|
||||
format := ctx.Option.ColorScheme.Int
|
||||
b = append(b, format.Header...)
|
||||
b = encoder.AppendInt(ctx, b, p, code)
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendUint(ctx *encoder.RuntimeContext, b []byte, p uintptr, code *encoder.Opcode) []byte {
|
||||
format := ctx.Option.ColorScheme.Uint
|
||||
b = append(b, format.Header...)
|
||||
b = encoder.AppendUint(ctx, b, p, code)
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendFloat32(ctx *encoder.RuntimeContext, b []byte, v float32) []byte {
|
||||
format := ctx.Option.ColorScheme.Float
|
||||
b = append(b, format.Header...)
|
||||
b = encoder.AppendFloat32(ctx, b, v)
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendFloat64(ctx *encoder.RuntimeContext, b []byte, v float64) []byte {
|
||||
format := ctx.Option.ColorScheme.Float
|
||||
b = append(b, format.Header...)
|
||||
b = encoder.AppendFloat64(ctx, b, v)
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendString(ctx *encoder.RuntimeContext, b []byte, v string) []byte {
|
||||
format := ctx.Option.ColorScheme.String
|
||||
b = append(b, format.Header...)
|
||||
b = encoder.AppendString(ctx, b, v)
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendByteSlice(ctx *encoder.RuntimeContext, b []byte, src []byte) []byte {
|
||||
format := ctx.Option.ColorScheme.Binary
|
||||
b = append(b, format.Header...)
|
||||
b = encoder.AppendByteSlice(ctx, b, src)
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendNumber(ctx *encoder.RuntimeContext, b []byte, n json.Number) ([]byte, error) {
|
||||
format := ctx.Option.ColorScheme.Int
|
||||
b = append(b, format.Header...)
|
||||
bb, err := encoder.AppendNumber(ctx, b, n)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return append(bb, format.Footer...), nil
|
||||
}
|
||||
|
||||
func appendBool(ctx *encoder.RuntimeContext, b []byte, v bool) []byte {
|
||||
format := ctx.Option.ColorScheme.Bool
|
||||
b = append(b, format.Header...)
|
||||
if v {
|
||||
b = append(b, "true"...)
|
||||
} else {
|
||||
b = append(b, "false"...)
|
||||
}
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendNull(ctx *encoder.RuntimeContext, b []byte) []byte {
|
||||
format := ctx.Option.ColorScheme.Null
|
||||
b = append(b, format.Header...)
|
||||
b = append(b, "null"...)
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendComma(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, ',')
|
||||
}
|
||||
|
||||
func appendNullComma(ctx *encoder.RuntimeContext, b []byte) []byte {
|
||||
format := ctx.Option.ColorScheme.Null
|
||||
b = append(b, format.Header...)
|
||||
b = append(b, "null"...)
|
||||
return append(append(b, format.Footer...), ',')
|
||||
}
|
||||
|
||||
func appendColon(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
last := len(b) - 1
|
||||
b[last] = ':'
|
||||
return b
|
||||
}
|
||||
|
||||
func appendMapKeyValue(_ *encoder.RuntimeContext, _ *encoder.Opcode, b, key, value []byte) []byte {
|
||||
b = append(b, key[:len(key)-1]...)
|
||||
b = append(b, ':')
|
||||
return append(b, value...)
|
||||
}
|
||||
|
||||
func appendMapEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||
last := len(b) - 1
|
||||
b[last] = '}'
|
||||
b = append(b, ',')
|
||||
return b
|
||||
}
|
||||
|
||||
func appendMarshalJSON(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||
return encoder.AppendMarshalJSON(ctx, code, b, v)
|
||||
}
|
||||
|
||||
func appendMarshalText(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||
format := ctx.Option.ColorScheme.String
|
||||
b = append(b, format.Header...)
|
||||
bb, err := encoder.AppendMarshalText(ctx, code, b, v)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return append(bb, format.Footer...), nil
|
||||
}
|
||||
|
||||
func appendArrayHead(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||
return append(b, '[')
|
||||
}
|
||||
|
||||
func appendArrayEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||
last := len(b) - 1
|
||||
b[last] = ']'
|
||||
return append(b, ',')
|
||||
}
|
||||
|
||||
func appendEmptyArray(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, '[', ']', ',')
|
||||
}
|
||||
|
||||
func appendEmptyObject(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, '{', '}', ',')
|
||||
}
|
||||
|
||||
func appendObjectEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||
last := len(b) - 1
|
||||
b[last] = '}'
|
||||
return append(b, ',')
|
||||
}
|
||||
|
||||
func appendStructHead(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, '{')
|
||||
}
|
||||
|
||||
func appendStructKey(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
format := ctx.Option.ColorScheme.ObjectKey
|
||||
b = append(b, format.Header...)
|
||||
b = append(b, code.Key[:len(code.Key)-1]...)
|
||||
b = append(b, format.Footer...)
|
||||
|
||||
return append(b, ':')
|
||||
}
|
||||
|
||||
func appendStructEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||
return append(b, '}', ',')
|
||||
}
|
||||
|
||||
func appendStructEndSkipLast(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
last := len(b) - 1
|
||||
if b[last] == ',' {
|
||||
b[last] = '}'
|
||||
return appendComma(ctx, b)
|
||||
}
|
||||
return appendStructEnd(ctx, code, b)
|
||||
}
|
||||
|
||||
func restoreIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, _ uintptr) {}
|
||||
func storeIndent(_ uintptr, _ *encoder.Opcode, _ uintptr) {}
|
||||
func appendMapKeyIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { return b }
|
||||
func appendArrayElemIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { return b }
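
The color variant reuses the plain encoder's append functions and simply brackets each emitted scalar with the Header and Footer byte sequences of the configured color scheme (typically ANSI escape codes). A small sketch of that wrapping pattern follows; colorFormat is a hypothetical stand-in for the encoder's color-scheme entry, not the real type.

package main

import "fmt"

// colorFormat stands in for one entry of the encoder's color scheme:
// raw bytes written before and after the encoded value.
type colorFormat struct {
	Header string
	Footer string
}

// appendColored wraps an already-encoded JSON fragment in the scheme's
// header and footer, exactly as the append* helpers above do.
func appendColored(b []byte, f colorFormat, encoded string) []byte {
	b = append(b, f.Header...)
	b = append(b, encoded...)
	return append(b, f.Footer...)
}

func main() {
	green := colorFormat{Header: "\x1b[32m", Footer: "\x1b[0m"}
	out := appendColored(nil, green, `"hello"`)
	fmt.Printf("%q\n", out) // "\x1b[32m\"hello\"\x1b[0m"
}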
|
4859
internal/encoder/vm_color/vm.go
Normal file
File diff suppressed because it is too large
35
internal/encoder/vm_color_indent/debug_vm.go
Normal file
|
@ -0,0 +1,35 @@
|
|||
package vm_color_indent
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/goccy/go-json/internal/encoder"
|
||||
)
|
||||
|
||||
func DebugRun(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) {
|
||||
var code *encoder.Opcode
|
||||
if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 {
|
||||
code = codeSet.EscapeKeyCode
|
||||
} else {
|
||||
code = codeSet.NoescapeKeyCode
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err := recover(); err != nil {
|
||||
w := ctx.Option.DebugOut
|
||||
fmt.Fprintln(w, "=============[DEBUG]===============")
|
||||
fmt.Fprintln(w, "* [TYPE]")
|
||||
fmt.Fprintln(w, codeSet.Type)
|
||||
fmt.Fprintf(w, "\n")
|
||||
fmt.Fprintln(w, "* [ALL OPCODE]")
|
||||
fmt.Fprintln(w, code.Dump())
|
||||
fmt.Fprintf(w, "\n")
|
||||
fmt.Fprintln(w, "* [CONTEXT]")
|
||||
fmt.Fprintf(w, "%+v\n", ctx)
|
||||
fmt.Fprintln(w, "===================================")
|
||||
panic(err)
|
||||
}
|
||||
}()
|
||||
|
||||
return Run(ctx, b, codeSet)
|
||||
}
|
297
internal/encoder/vm_color_indent/util.go
Normal file
|
@ -0,0 +1,297 @@
|
|||
package vm_color_indent
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/encoder"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
const uintptrSize = 4 << (^uintptr(0) >> 63)
|
||||
|
||||
var (
|
||||
appendIndent = encoder.AppendIndent
|
||||
appendStructEnd = encoder.AppendStructEndIndent
|
||||
errUnsupportedValue = encoder.ErrUnsupportedValue
|
||||
errUnsupportedFloat = encoder.ErrUnsupportedFloat
|
||||
mapiterinit = encoder.MapIterInit
|
||||
mapiterkey = encoder.MapIterKey
|
||||
mapitervalue = encoder.MapIterValue
|
||||
mapiternext = encoder.MapIterNext
|
||||
maplen = encoder.MapLen
|
||||
)
|
||||
|
||||
type emptyInterface struct {
|
||||
typ *runtime.Type
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
type nonEmptyInterface struct {
|
||||
itab *struct {
|
||||
ityp *runtime.Type // static interface type
|
||||
typ *runtime.Type // dynamic concrete type
|
||||
// unused fields...
|
||||
}
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
func errUnimplementedOp(op encoder.OpType) error {
|
||||
return fmt.Errorf("encoder (indent): opcode %s has not been implemented", op)
|
||||
}
|
||||
|
||||
func load(base uintptr, idx uint32) uintptr {
|
||||
addr := base + uintptr(idx)
|
||||
return **(**uintptr)(unsafe.Pointer(&addr))
|
||||
}
|
||||
|
||||
func store(base uintptr, idx uint32, p uintptr) {
|
||||
addr := base + uintptr(idx)
|
||||
**(**uintptr)(unsafe.Pointer(&addr)) = p
|
||||
}
|
||||
|
||||
func loadNPtr(base uintptr, idx uint32, ptrNum uint8) uintptr {
|
||||
addr := base + uintptr(idx)
|
||||
p := **(**uintptr)(unsafe.Pointer(&addr))
|
||||
for i := uint8(0); i < ptrNum; i++ {
|
||||
if p == 0 {
|
||||
return 0
|
||||
}
|
||||
p = ptrToPtr(p)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func ptrToUint64(p uintptr, bitSize uint8) uint64 {
|
||||
switch bitSize {
|
||||
case 8:
|
||||
return (uint64)(**(**uint8)(unsafe.Pointer(&p)))
|
||||
case 16:
|
||||
return (uint64)(**(**uint16)(unsafe.Pointer(&p)))
|
||||
case 32:
|
||||
return (uint64)(**(**uint32)(unsafe.Pointer(&p)))
|
||||
case 64:
|
||||
return **(**uint64)(unsafe.Pointer(&p))
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func ptrToFloat32(p uintptr) float32 { return **(**float32)(unsafe.Pointer(&p)) }
|
||||
func ptrToFloat64(p uintptr) float64 { return **(**float64)(unsafe.Pointer(&p)) }
|
||||
func ptrToBool(p uintptr) bool { return **(**bool)(unsafe.Pointer(&p)) }
|
||||
func ptrToBytes(p uintptr) []byte { return **(**[]byte)(unsafe.Pointer(&p)) }
|
||||
func ptrToNumber(p uintptr) json.Number { return **(**json.Number)(unsafe.Pointer(&p)) }
|
||||
func ptrToString(p uintptr) string { return **(**string)(unsafe.Pointer(&p)) }
|
||||
func ptrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) }
|
||||
func ptrToPtr(p uintptr) uintptr {
|
||||
return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p)))
|
||||
}
|
||||
func ptrToNPtr(p uintptr, ptrNum uint8) uintptr {
|
||||
for i := uint8(0); i < ptrNum; i++ {
|
||||
if p == 0 {
|
||||
return 0
|
||||
}
|
||||
p = ptrToPtr(p)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func ptrToUnsafePtr(p uintptr) unsafe.Pointer {
|
||||
return *(*unsafe.Pointer)(unsafe.Pointer(&p))
|
||||
}
|
||||
func ptrToInterface(code *encoder.Opcode, p uintptr) interface{} {
|
||||
return *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||
typ: code.Type,
|
||||
ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)),
|
||||
}))
|
||||
}
|
||||
|
||||
func appendInt(ctx *encoder.RuntimeContext, b []byte, p uintptr, code *encoder.Opcode) []byte {
|
||||
format := ctx.Option.ColorScheme.Int
|
||||
b = append(b, format.Header...)
|
||||
b = encoder.AppendInt(ctx, b, p, code)
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendUint(ctx *encoder.RuntimeContext, b []byte, p uintptr, code *encoder.Opcode) []byte {
|
||||
format := ctx.Option.ColorScheme.Uint
|
||||
b = append(b, format.Header...)
|
||||
b = encoder.AppendUint(ctx, b, p, code)
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendFloat32(ctx *encoder.RuntimeContext, b []byte, v float32) []byte {
|
||||
format := ctx.Option.ColorScheme.Float
|
||||
b = append(b, format.Header...)
|
||||
b = encoder.AppendFloat32(ctx, b, v)
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendFloat64(ctx *encoder.RuntimeContext, b []byte, v float64) []byte {
|
||||
format := ctx.Option.ColorScheme.Float
|
||||
b = append(b, format.Header...)
|
||||
b = encoder.AppendFloat64(ctx, b, v)
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendString(ctx *encoder.RuntimeContext, b []byte, v string) []byte {
|
||||
format := ctx.Option.ColorScheme.String
|
||||
b = append(b, format.Header...)
|
||||
b = encoder.AppendString(ctx, b, v)
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendByteSlice(ctx *encoder.RuntimeContext, b []byte, src []byte) []byte {
|
||||
format := ctx.Option.ColorScheme.Binary
|
||||
b = append(b, format.Header...)
|
||||
b = encoder.AppendByteSlice(ctx, b, src)
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendNumber(ctx *encoder.RuntimeContext, b []byte, n json.Number) ([]byte, error) {
|
||||
format := ctx.Option.ColorScheme.Int
|
||||
b = append(b, format.Header...)
|
||||
bb, err := encoder.AppendNumber(ctx, b, n)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return append(bb, format.Footer...), nil
|
||||
}
|
||||
|
||||
func appendBool(ctx *encoder.RuntimeContext, b []byte, v bool) []byte {
|
||||
format := ctx.Option.ColorScheme.Bool
|
||||
b = append(b, format.Header...)
|
||||
if v {
|
||||
b = append(b, "true"...)
|
||||
} else {
|
||||
b = append(b, "false"...)
|
||||
}
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendNull(ctx *encoder.RuntimeContext, b []byte) []byte {
|
||||
format := ctx.Option.ColorScheme.Null
|
||||
b = append(b, format.Header...)
|
||||
b = append(b, "null"...)
|
||||
return append(b, format.Footer...)
|
||||
}
|
||||
|
||||
func appendComma(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, ',', '\n')
|
||||
}
|
||||
|
||||
func appendNullComma(ctx *encoder.RuntimeContext, b []byte) []byte {
|
||||
format := ctx.Option.ColorScheme.Null
|
||||
b = append(b, format.Header...)
|
||||
b = append(b, "null"...)
|
||||
return append(append(b, format.Footer...), ',', '\n')
|
||||
}
|
||||
|
||||
func appendColon(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b[:len(b)-2], ':', ' ')
|
||||
}
|
||||
|
||||
func appendMapKeyValue(ctx *encoder.RuntimeContext, code *encoder.Opcode, b, key, value []byte) []byte {
|
||||
b = appendIndent(ctx, b, code.Indent+1)
|
||||
b = append(b, key...)
|
||||
b[len(b)-2] = ':'
|
||||
b[len(b)-1] = ' '
|
||||
return append(b, value...)
|
||||
}
|
||||
|
||||
func appendMapEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
b = b[:len(b)-2]
|
||||
b = append(b, '\n')
|
||||
b = appendIndent(ctx, b, code.Indent)
|
||||
return append(b, '}', ',', '\n')
|
||||
}
|
||||
|
||||
func appendArrayHead(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
b = append(b, '[', '\n')
|
||||
return appendIndent(ctx, b, code.Indent+1)
|
||||
}
|
||||
|
||||
func appendArrayEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
b = b[:len(b)-2]
|
||||
b = append(b, '\n')
|
||||
b = appendIndent(ctx, b, code.Indent)
|
||||
return append(b, ']', ',', '\n')
|
||||
}
|
||||
|
||||
func appendEmptyArray(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, '[', ']', ',', '\n')
|
||||
}
|
||||
|
||||
func appendEmptyObject(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, '{', '}', ',', '\n')
|
||||
}
|
||||
|
||||
func appendObjectEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
last := len(b) - 1
|
||||
	// replace the trailing comma with a newline
|
||||
b[last-1] = '\n'
|
||||
b = appendIndent(ctx, b[:last], code.Indent)
|
||||
return append(b, '}', ',', '\n')
|
||||
}
|
||||
|
||||
func appendMarshalJSON(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||
return encoder.AppendMarshalJSONIndent(ctx, code, b, v)
|
||||
}
|
||||
|
||||
func appendMarshalText(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||
format := ctx.Option.ColorScheme.String
|
||||
b = append(b, format.Header...)
|
||||
bb, err := encoder.AppendMarshalTextIndent(ctx, code, b, v)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return append(bb, format.Footer...), nil
|
||||
}
|
||||
|
||||
func appendStructHead(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, '{', '\n')
|
||||
}
|
||||
|
||||
func appendStructKey(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
b = appendIndent(ctx, b, code.Indent)
|
||||
|
||||
format := ctx.Option.ColorScheme.ObjectKey
|
||||
b = append(b, format.Header...)
|
||||
b = append(b, code.Key[:len(code.Key)-1]...)
|
||||
b = append(b, format.Footer...)
|
||||
|
||||
return append(b, ':', ' ')
|
||||
}
|
||||
|
||||
func appendStructEndSkipLast(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
last := len(b) - 1
|
||||
if b[last-1] == '{' {
|
||||
b[last] = '}'
|
||||
} else {
|
||||
if b[last] == '\n' {
|
||||
			// remove the trailing ',' and '\n' characters
|
||||
b = b[:len(b)-2]
|
||||
}
|
||||
b = append(b, '\n')
|
||||
b = appendIndent(ctx, b, code.Indent-1)
|
||||
b = append(b, '}')
|
||||
}
|
||||
return appendComma(ctx, b)
|
||||
}
|
||||
|
||||
func restoreIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, ctxptr uintptr) {
|
||||
ctx.BaseIndent = uint32(load(ctxptr, code.Length))
|
||||
}
|
||||
|
||||
func storeIndent(ctxptr uintptr, code *encoder.Opcode, indent uintptr) {
|
||||
store(ctxptr, code.Length, indent)
|
||||
}
|
||||
|
||||
func appendArrayElemIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
return appendIndent(ctx, b, code.Indent+1)
|
||||
}
|
||||
|
||||
func appendMapKeyIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
return appendIndent(ctx, b, code.Indent)
|
||||
}
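
In the indent variants, appendComma always writes ",\n", and the structural helpers later patch those two bytes in place: appendColon rewrites them to ": " after a map key, and the *End helpers drop them before closing a container on its own line. A minimal sketch of the appendColon rewrite, as an illustration only:

package main

import "fmt"

// appendColon mirrors the indent-mode helper above: the map key was emitted
// followed by ",\n", and those two bytes are rewritten in place to ": "
// before the value is appended.
func appendColon(b []byte) []byte {
	return append(b[:len(b)-2], ':', ' ')
}

func main() {
	b := []byte("  \"name\",\n")
	b = appendColon(b)
	fmt.Printf("%q\n", b) // "  \"name\": "
}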
|
4859
internal/encoder/vm_color_indent/vm.go
Normal file
File diff suppressed because it is too large
35
internal/encoder/vm_indent/debug_vm.go
Normal file
|
@ -0,0 +1,35 @@
|
|||
package vm_indent
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/goccy/go-json/internal/encoder"
|
||||
)
|
||||
|
||||
func DebugRun(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) {
|
||||
var code *encoder.Opcode
|
||||
if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 {
|
||||
code = codeSet.EscapeKeyCode
|
||||
} else {
|
||||
code = codeSet.NoescapeKeyCode
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err := recover(); err != nil {
|
||||
w := ctx.Option.DebugOut
|
||||
fmt.Fprintln(w, "=============[DEBUG]===============")
|
||||
fmt.Fprintln(w, "* [TYPE]")
|
||||
fmt.Fprintln(w, codeSet.Type)
|
||||
fmt.Fprintf(w, "\n")
|
||||
fmt.Fprintln(w, "* [ALL OPCODE]")
|
||||
fmt.Fprintln(w, code.Dump())
|
||||
fmt.Fprintf(w, "\n")
|
||||
fmt.Fprintln(w, "* [CONTEXT]")
|
||||
fmt.Fprintf(w, "%+v\n", ctx)
|
||||
fmt.Fprintln(w, "===================================")
|
||||
panic(err)
|
||||
}
|
||||
}()
|
||||
|
||||
return Run(ctx, b, codeSet)
|
||||
}
|
9
internal/encoder/vm_indent/hack.go
Normal file
|
@ -0,0 +1,9 @@
|
|||
package vm_indent
|
||||
|
||||
import (
|
||||
// HACK: compile order
|
||||
	// the `vm`, `vm_indent`, `vm_color`, and `vm_color_indent` packages use a lot of memory to compile,
|
||||
	// so we force a dependency chain between them to avoid compiling them concurrently.
|
||||
// dependency order: vm => vm_indent => vm_color => vm_color_indent
|
||||
_ "github.com/goccy/go-json/internal/encoder/vm_color"
|
||||
)
|
230
internal/encoder/vm_indent/util.go
Normal file
|
@ -0,0 +1,230 @@
|
|||
package vm_indent
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/encoder"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
const uintptrSize = 4 << (^uintptr(0) >> 63)
|
||||
|
||||
var (
|
||||
appendInt = encoder.AppendInt
|
||||
appendUint = encoder.AppendUint
|
||||
appendFloat32 = encoder.AppendFloat32
|
||||
appendFloat64 = encoder.AppendFloat64
|
||||
appendString = encoder.AppendString
|
||||
appendByteSlice = encoder.AppendByteSlice
|
||||
appendNumber = encoder.AppendNumber
|
||||
appendStructEnd = encoder.AppendStructEndIndent
|
||||
appendIndent = encoder.AppendIndent
|
||||
errUnsupportedValue = encoder.ErrUnsupportedValue
|
||||
errUnsupportedFloat = encoder.ErrUnsupportedFloat
|
||||
mapiterinit = encoder.MapIterInit
|
||||
mapiterkey = encoder.MapIterKey
|
||||
mapitervalue = encoder.MapIterValue
|
||||
mapiternext = encoder.MapIterNext
|
||||
maplen = encoder.MapLen
|
||||
)
|
||||
|
||||
type emptyInterface struct {
|
||||
typ *runtime.Type
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
type nonEmptyInterface struct {
|
||||
itab *struct {
|
||||
ityp *runtime.Type // static interface type
|
||||
typ *runtime.Type // dynamic concrete type
|
||||
// unused fields...
|
||||
}
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
func errUnimplementedOp(op encoder.OpType) error {
|
||||
return fmt.Errorf("encoder (indent): opcode %s has not been implemented", op)
|
||||
}
|
||||
|
||||
func load(base uintptr, idx uint32) uintptr {
|
||||
addr := base + uintptr(idx)
|
||||
return **(**uintptr)(unsafe.Pointer(&addr))
|
||||
}
|
||||
|
||||
func store(base uintptr, idx uint32, p uintptr) {
|
||||
addr := base + uintptr(idx)
|
||||
**(**uintptr)(unsafe.Pointer(&addr)) = p
|
||||
}
|
||||
|
||||
func loadNPtr(base uintptr, idx uint32, ptrNum uint8) uintptr {
|
||||
addr := base + uintptr(idx)
|
||||
p := **(**uintptr)(unsafe.Pointer(&addr))
|
||||
for i := uint8(0); i < ptrNum; i++ {
|
||||
if p == 0 {
|
||||
return 0
|
||||
}
|
||||
p = ptrToPtr(p)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func ptrToUint64(p uintptr, bitSize uint8) uint64 {
|
||||
switch bitSize {
|
||||
case 8:
|
||||
return (uint64)(**(**uint8)(unsafe.Pointer(&p)))
|
||||
case 16:
|
||||
return (uint64)(**(**uint16)(unsafe.Pointer(&p)))
|
||||
case 32:
|
||||
return (uint64)(**(**uint32)(unsafe.Pointer(&p)))
|
||||
case 64:
|
||||
return **(**uint64)(unsafe.Pointer(&p))
|
||||
}
|
||||
return 0
|
||||
}
|
||||
func ptrToFloat32(p uintptr) float32 { return **(**float32)(unsafe.Pointer(&p)) }
|
||||
func ptrToFloat64(p uintptr) float64 { return **(**float64)(unsafe.Pointer(&p)) }
|
||||
func ptrToBool(p uintptr) bool { return **(**bool)(unsafe.Pointer(&p)) }
|
||||
func ptrToBytes(p uintptr) []byte { return **(**[]byte)(unsafe.Pointer(&p)) }
|
||||
func ptrToNumber(p uintptr) json.Number { return **(**json.Number)(unsafe.Pointer(&p)) }
|
||||
func ptrToString(p uintptr) string { return **(**string)(unsafe.Pointer(&p)) }
|
||||
func ptrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) }
|
||||
func ptrToPtr(p uintptr) uintptr {
|
||||
return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p)))
|
||||
}
|
||||
func ptrToNPtr(p uintptr, ptrNum uint8) uintptr {
|
||||
for i := uint8(0); i < ptrNum; i++ {
|
||||
if p == 0 {
|
||||
return 0
|
||||
}
|
||||
p = ptrToPtr(p)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func ptrToUnsafePtr(p uintptr) unsafe.Pointer {
|
||||
return *(*unsafe.Pointer)(unsafe.Pointer(&p))
|
||||
}
|
||||
func ptrToInterface(code *encoder.Opcode, p uintptr) interface{} {
|
||||
return *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||
typ: code.Type,
|
||||
ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)),
|
||||
}))
|
||||
}
|
||||
|
||||
func appendBool(_ *encoder.RuntimeContext, b []byte, v bool) []byte {
|
||||
if v {
|
||||
return append(b, "true"...)
|
||||
}
|
||||
return append(b, "false"...)
|
||||
}
|
||||
|
||||
func appendNull(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, "null"...)
|
||||
}
|
||||
|
||||
func appendComma(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, ',', '\n')
|
||||
}
|
||||
|
||||
func appendNullComma(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, "null,\n"...)
|
||||
}
|
||||
|
||||
func appendColon(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b[:len(b)-2], ':', ' ')
|
||||
}
|
||||
|
||||
func appendMapKeyValue(ctx *encoder.RuntimeContext, code *encoder.Opcode, b, key, value []byte) []byte {
|
||||
b = appendIndent(ctx, b, code.Indent+1)
|
||||
b = append(b, key...)
|
||||
b[len(b)-2] = ':'
|
||||
b[len(b)-1] = ' '
|
||||
return append(b, value...)
|
||||
}
|
||||
|
||||
func appendMapEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
b = b[:len(b)-2]
|
||||
b = append(b, '\n')
|
||||
b = appendIndent(ctx, b, code.Indent)
|
||||
return append(b, '}', ',', '\n')
|
||||
}
|
||||
|
||||
func appendArrayHead(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
b = append(b, '[', '\n')
|
||||
return appendIndent(ctx, b, code.Indent+1)
|
||||
}
|
||||
|
||||
func appendArrayEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
b = b[:len(b)-2]
|
||||
b = append(b, '\n')
|
||||
b = appendIndent(ctx, b, code.Indent)
|
||||
return append(b, ']', ',', '\n')
|
||||
}
|
||||
|
||||
func appendEmptyArray(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, '[', ']', ',', '\n')
|
||||
}
|
||||
|
||||
func appendEmptyObject(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, '{', '}', ',', '\n')
|
||||
}
|
||||
|
||||
func appendObjectEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
last := len(b) - 1
|
||||
	// replace the trailing comma with a newline
|
||||
b[last-1] = '\n'
|
||||
b = appendIndent(ctx, b[:last], code.Indent)
|
||||
return append(b, '}', ',', '\n')
|
||||
}
|
||||
|
||||
func appendMarshalJSON(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||
return encoder.AppendMarshalJSONIndent(ctx, code, b, v)
|
||||
}
|
||||
|
||||
func appendMarshalText(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||
return encoder.AppendMarshalTextIndent(ctx, code, b, v)
|
||||
}
|
||||
|
||||
func appendStructHead(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||
return append(b, '{', '\n')
|
||||
}
|
||||
|
||||
func appendStructKey(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
b = appendIndent(ctx, b, code.Indent)
|
||||
b = append(b, code.Key...)
|
||||
return append(b, ' ')
|
||||
}
|
||||
|
||||
func appendStructEndSkipLast(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
last := len(b) - 1
|
||||
if b[last-1] == '{' {
|
||||
b[last] = '}'
|
||||
} else {
|
||||
if b[last] == '\n' {
|
||||
			// remove the trailing ',' and '\n' characters
|
||||
b = b[:len(b)-2]
|
||||
}
|
||||
b = append(b, '\n')
|
||||
b = appendIndent(ctx, b, code.Indent-1)
|
||||
b = append(b, '}')
|
||||
}
|
||||
return appendComma(ctx, b)
|
||||
}
|
||||
|
||||
func restoreIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, ctxptr uintptr) {
|
||||
ctx.BaseIndent = uint32(load(ctxptr, code.Length))
|
||||
}
|
||||
|
||||
func storeIndent(ctxptr uintptr, code *encoder.Opcode, indent uintptr) {
|
||||
store(ctxptr, code.Length, indent)
|
||||
}
|
||||
|
||||
func appendArrayElemIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
return appendIndent(ctx, b, code.Indent+1)
|
||||
}
|
||||
|
||||
func appendMapKeyIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||
return appendIndent(ctx, b, code.Indent)
|
||||
}
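
appendStructEndSkipLast closes an indented object: if the last field left a trailing ",\n", those two bytes are dropped, a newline plus the parent indentation are written, and only then is the closing brace emitted, followed by the usual ",\n" separator for the enclosing container to patch. A self-contained sketch of that closing step, assuming two-space indentation (the real helper delegates to encoder.AppendIndent and the opcode's Indent value):

package main

import (
	"fmt"
	"strings"
)

// closeObject sketches appendStructEndSkipLast: strip the trailing ",\n"
// left by the last field, re-indent, close the brace, then emit the usual
// ",\n" separator so the parent container can patch it later.
func closeObject(b []byte, indent int) []byte {
	if n := len(b); n >= 2 && b[n-2] == ',' && b[n-1] == '\n' {
		b = b[:n-2]
	}
	b = append(b, '\n')
	b = append(b, strings.Repeat("  ", indent)...)
	return append(b, '}', ',', '\n')
}

func main() {
	b := []byte("{\n  \"a\": 1,\n  \"b\": 2,\n")
	fmt.Print(string(closeObject(b, 0)))
	// {
	//   "a": 1,
	//   "b": 2
	// },
}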
|
4859
internal/encoder/vm_indent/vm.go
Normal file
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff.