Diffstat (limited to 'vendor/github.com/bytedance/sonic/internal/decoder')
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/api/decoder.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/decoder.go)  136
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_amd64.go  38
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_arm64.go  38
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/api/stream.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/stream.go)  27
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go116.go  130
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/assembler_stkabi_amd64.go  1950
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/consts/option.go  36
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/errors/errors.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/errors.go)  16
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64.go  733
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64_test.s  37
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm.s  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/asm.s)  0
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go117.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go117.go)  2
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go121.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go121.go)  4
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/assembler_regabi_amd64.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/assembler_regabi_amd64.go)  103
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/compiler.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/compiler.go)  107
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/debug.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/debug.go)  2
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/decoder.go  140
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/generic_regabi_amd64.go)  4
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64_test.s  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/generic_regabi_amd64_test.s)  2
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/pools.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/pools.go)  4
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/primitives.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/primitives.go)  16
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go116.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/stubs_go116.go)  9
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go120.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/stubs_go120.go)  7
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/types.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/types.go)  2
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/jitdec/utils.go  (renamed from vendor/github.com/bytedance/sonic/internal/decoder/utils.go)  2
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/compile_struct.go  174
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/compiler.go  449
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/const.go  60
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/context.go  3
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/decoder.go  160
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/errors.go  73
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/functor.go  281
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/helper.go  110
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/interface.go  169
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/map.go  430
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/native.go  269
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/node.go  1278
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/slice.go  224
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/stringopts.go  360
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/structs.go  61
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/decoder/optdec/types.go  60
41 files changed, 4640 insertions, 3066 deletions
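
This diff restructures the decoder package: the public surface moves into api/, the JIT backend into jitdec/, a new non-JIT optimized interpreter lands in optdec/, and shared flags and error types split out into consts/ and errors/. The api package no longer decodes anything itself; it forwards through package-level function variables that the architecture-specific files below rebind at init time. A minimal runnable sketch of that dispatch pattern (useOptDec here is a stand-in for sonic's envs package, not its real API):

```go
package main

import "fmt"

// decodeFunc mirrors the shape of the new decodeImpl indirection point:
// Decode() forwards to whichever implementation init() selected.
type decodeFunc func(s *string, i *int, f uint64, val interface{}) error

func jitDecode(s *string, i *int, f uint64, val interface{}) error {
	fmt.Println("jitdec: assemble and run machine code for the target type")
	return nil
}

func optDecode(s *string, i *int, f uint64, val interface{}) error {
	fmt.Println("optdec: optimized interpreter, no JIT required")
	return nil
}

var useOptDec = false // stand-in for envs.UseOptDec

// decodeImpl defaults to the JIT backend and is rebound at startup,
// exactly the indirection the api package introduces.
var decodeImpl decodeFunc = jitDecode

func init() {
	if useOptDec {
		decodeImpl = optDecode
	}
}

func main() {
	s, pos := `{"a":1}`, 0
	var v interface{}
	_ = decodeImpl(&s, &pos, 0, &v)
}
```

Because callers only ever touch the function variable, the amd64 and arm64 builds can pick different backends without any change to the api-level code.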
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/decoder.go b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder.go
index 8453db861..0dc01998f 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/decoder.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder.go
@@ -14,51 +14,52 @@
* limitations under the License.
*/
-package decoder
+package api
import (
- `unsafe`
- `encoding/json`
`reflect`
- `runtime`
`github.com/bytedance/sonic/internal/native`
`github.com/bytedance/sonic/internal/native/types`
+ `github.com/bytedance/sonic/internal/decoder/consts`
+ `github.com/bytedance/sonic/internal/decoder/errors`
`github.com/bytedance/sonic/internal/rt`
`github.com/bytedance/sonic/option`
- `github.com/bytedance/sonic/utf8`
)
const (
- _F_use_int64 = 0
- _F_disable_urc = 2
- _F_disable_unknown = 3
- _F_copy_string = 4
-
- _F_use_number = types.B_USE_NUMBER
- _F_validate_string = types.B_VALIDATE_STRING
- _F_allow_control = types.B_ALLOW_CONTROL
+ _F_allow_control = consts.F_allow_control
+ _F_copy_string = consts.F_copy_string
+ _F_disable_unknown = consts.F_disable_unknown
+ _F_disable_urc = consts.F_disable_urc
+ _F_use_int64 = consts.F_use_int64
+ _F_use_number = consts.F_use_number
+ _F_validate_string = consts.F_validate_string
+
+ _MaxStack = consts.MaxStack
+
+ OptionUseInt64 = consts.OptionUseInt64
+ OptionUseNumber = consts.OptionUseNumber
+ OptionUseUnicodeErrors = consts.OptionUseUnicodeErrors
+ OptionDisableUnknown = consts.OptionDisableUnknown
+ OptionCopyString = consts.OptionCopyString
+ OptionValidateString = consts.OptionValidateString
+ OptionNoValidateJSON = consts.OptionNoValidateJSON
)
-type Options uint64
-
-const (
- OptionUseInt64 Options = 1 << _F_use_int64
- OptionUseNumber Options = 1 << _F_use_number
- OptionUseUnicodeErrors Options = 1 << _F_disable_urc
- OptionDisableUnknown Options = 1 << _F_disable_unknown
- OptionCopyString Options = 1 << _F_copy_string
- OptionValidateString Options = 1 << _F_validate_string
+type (
+ Options = consts.Options
+ MismatchTypeError = errors.MismatchTypeError
+ SyntaxError = errors.SyntaxError
)
func (self *Decoder) SetOptions(opts Options) {
- if (opts & OptionUseNumber != 0) && (opts & OptionUseInt64 != 0) {
+ if (opts & consts.OptionUseNumber != 0) && (opts & consts.OptionUseInt64 != 0) {
panic("can't set OptionUseInt64 and OptionUseNumber both!")
}
self.f = uint64(opts)
}
-
// Decoder is the decoder context object
type Decoder struct {
i int
@@ -109,44 +110,7 @@ func (self *Decoder) CheckTrailings() error {
// Decode parses the JSON-encoded data from current position and stores the result
// in the value pointed to by val.
func (self *Decoder) Decode(val interface{}) error {
- /* validate json if needed */
- if (self.f & (1 << _F_validate_string)) != 0 && !utf8.ValidateString(self.s){
- dbuf := utf8.CorrectWith(nil, rt.Str2Mem(self.s), "\ufffd")
- self.s = rt.Mem2Str(dbuf)
- }
-
- vv := rt.UnpackEface(val)
- vp := vv.Value
-
- /* check for nil type */
- if vv.Type == nil {
- return &json.InvalidUnmarshalError{}
- }
-
- /* must be a non-nil pointer */
- if vp == nil || vv.Type.Kind() != reflect.Ptr {
- return &json.InvalidUnmarshalError{Type: vv.Type.Pack()}
- }
-
- etp := rt.PtrElem(vv.Type)
-
- /* check the defined pointer type for issue 379 */
- if vv.Type.IsNamed() {
- newp := vp
- etp = vv.Type
- vp = unsafe.Pointer(&newp)
- }
-
- /* create a new stack, and call the decoder */
- sb := newStack()
- nb, err := decodeTypedPointer(self.s, self.i, etp, vp, sb, self.f)
- /* return the stack back */
- self.i = nb
- freeStack(sb)
-
- /* avoid GC ahead */
- runtime.KeepAlive(vv)
- return err
+ return decodeImpl(&self.s, &self.i, self.f, val)
}
// UseInt64 indicates the Decoder to unmarshal an integer into an interface{} as an
@@ -194,53 +158,7 @@ func (self *Decoder) ValidateString() {
// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
// a compile option to set the depth of recursive compile for the nested struct type.
func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
- cfg := option.DefaultCompileOptions()
- for _, opt := range opts {
- opt(&cfg)
- }
- return pretouchRec(map[reflect.Type]bool{vt:true}, cfg)
-}
-
-func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) {
- /* compile function */
- compiler := newCompiler().apply(opts)
- decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
- if pp, err := compiler.compile(_vt); err != nil {
- return nil, err
- } else {
- as := newAssembler(pp)
- as.name = _vt.String()
- return as.Load(), nil
- }
- }
-
- /* find or compile */
- vt := rt.UnpackType(_vt)
- if val := programCache.Get(vt); val != nil {
- return nil, nil
- } else if _, err := programCache.Compute(vt, decoder); err == nil {
- return compiler.rec, nil
- } else {
- return nil, err
- }
-}
-
-func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error {
- if opts.RecursiveDepth < 0 || len(vtm) == 0 {
- return nil
- }
- next := make(map[reflect.Type]bool)
- for vt := range(vtm) {
- sub, err := pretouchType(vt, opts)
- if err != nil {
- return err
- }
- for svt := range(sub) {
- next[svt] = true
- }
- }
- opts.RecursiveDepth -= 1
- return pretouchRec(next, opts)
+ return pretouchImpl(vt, opts...)
}
// Skip skips only one json value, and returns first non-blank character position and its ending position if it is valid.
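
Pretouch keeps its public signature but loses its body: the warm-up logic removed above (pretouchType/pretouchRec) now lives behind pretouchImpl. The deleted code compiled types breadth-first, descending one nesting level per RecursiveDepth step. A simplified, runnable sketch of that recursion, with compileOnce standing in for the programCache.Compute call (a hypothetical helper, not sonic's API):

```go
package main

import (
	"fmt"
	"reflect"
)

// compileOnce models compiling one type and reporting the nested
// types the compiler discovered along the way.
func compileOnce(t reflect.Type) []reflect.Type {
	var sub []reflect.Type
	if t.Kind() == reflect.Struct {
		for i := 0; i < t.NumField(); i++ {
			sub = append(sub, t.Field(i).Type)
		}
	}
	return sub
}

// pretouchRec compiles every type in the current level, gathers their
// children, and recurses until the depth budget is exhausted — the same
// stop conditions as the removed code (negative depth or empty level).
func pretouchRec(level map[reflect.Type]bool, depth int) {
	if depth < 0 || len(level) == 0 {
		return
	}
	next := make(map[reflect.Type]bool)
	for t := range level {
		fmt.Println("compiling", t)
		for _, s := range compileOnce(t) {
			next[s] = true
		}
	}
	pretouchRec(next, depth-1)
}

type Inner struct{ A int }
type Outer struct{ In Inner }

func main() {
	pretouchRec(map[reflect.Type]bool{reflect.TypeOf(Outer{}): true}, 2)
}
```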
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_amd64.go b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_amd64.go
new file mode 100644
index 000000000..4e1c3f42c
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_amd64.go
@@ -0,0 +1,38 @@
+//go:build go1.17 && !go1.24
+// +build go1.17,!go1.24
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package api
+
+import (
+ "github.com/bytedance/sonic/internal/envs"
+ "github.com/bytedance/sonic/internal/decoder/jitdec"
+ "github.com/bytedance/sonic/internal/decoder/optdec"
+)
+
+var (
+ pretouchImpl = jitdec.Pretouch
+ decodeImpl = jitdec.Decode
+)
+
+ func init() {
+ if envs.UseOptDec {
+ pretouchImpl = optdec.Pretouch
+ decodeImpl = optdec.Decode
+ }
+ }
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_arm64.go b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_arm64.go
new file mode 100644
index 000000000..65a9478b4
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_arm64.go
@@ -0,0 +1,38 @@
+// +build go1.17,!go1.24
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package api
+
+import (
+ `github.com/bytedance/sonic/internal/decoder/optdec`
+ `github.com/bytedance/sonic/internal/envs`
+)
+
+var (
+ pretouchImpl = optdec.Pretouch
+ decodeImpl = optdec.Decode
+)
+
+
+func init() {
+ // when on aarch64, we enable all optimizations
+ envs.EnableOptDec()
+ envs.EnableFastMap()
+}
+
+
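
The two new files above select the backend per architecture: amd64 defaults to jitdec and switches to optdec only when envs.UseOptDec is set, while arm64, which has no JIT backend, unconditionally enables the optimized interpreter and fast-map paths. A toy model of those toggles (the setters are illustrative stand-ins for envs.EnableOptDec/envs.EnableFastMap, not sonic's real internals):

```go
package main

import "fmt"

var (
	useOptDec  bool
	useFastMap bool
)

func EnableOptDec()  { useOptDec = true }
func EnableFastMap() { useFastMap = true }

func init() {
	// Mirrors the arm64 init above: with no JIT available, the
	// optimized interpreter and fast-map paths are always on.
	EnableOptDec()
	EnableFastMap()
}

func main() {
	fmt.Println("optdec:", useOptDec, "fastmap:", useFastMap)
}
```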
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/stream.go b/vendor/github.com/bytedance/sonic/internal/decoder/api/stream.go
index 7eb8a6951..8a8102dd5 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/stream.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/api/stream.go
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package decoder
+package api
import (
`bytes`
@@ -47,6 +47,12 @@ var bufPool = sync.Pool{
},
}
+func freeBytes(buf []byte) {
+ if rt.CanSizeResue(cap(buf)) {
+ bufPool.Put(buf[:0])
+ }
+}
+
// NewStreamDecoder adapts to encoding/json.NewDecoder API.
//
// NewStreamDecoder returns a new decoder that reads from r.
@@ -61,25 +67,16 @@ func NewStreamDecoder(r io.Reader) *StreamDecoder {
func (self *StreamDecoder) Decode(val interface{}) (err error) {
// read more data into buf
if self.More() {
- // println(string(self.buf))
var s = self.scanp
try_skip:
var e = len(self.buf)
- // println("s:", s, "e:", e, "scanned:",self.scanned, "scanp:",self.scanp, self.buf)
var src = rt.Mem2Str(self.buf[s:e])
- // if len(src) > 5 {
- // println(src[:5], src[len(src)-5:])
- // } else {
- // println(src)
- // }
// try skip
var x = 0;
if y := native.SkipOneFast(&src, &x); y < 0 {
if self.readMore() {
- // println("more")
goto try_skip
} else {
- // println("no more")
err = SyntaxError{e, self.s, types.ParsingError(-s), ""}
self.setErr(err)
return
@@ -89,7 +86,6 @@ func (self *StreamDecoder) Decode(val interface{}) (err error) {
e = x + s
}
- // println("decode: ", s, e)
// must copy string here for safety
self.Decoder.Reset(string(self.buf[s:e]))
err = self.Decoder.Decode(val)
@@ -101,13 +97,11 @@ func (self *StreamDecoder) Decode(val interface{}) (err error) {
self.scanp = e
_, empty := self.scan()
if empty {
- // println("recycle")
// no remain valid bytes, thus we just recycle buffer
mem := self.buf
self.buf = nil
- bufPool.Put(mem[:0])
+ freeBytes(mem)
} else {
- // println("keep")
// remain undecoded bytes, move them onto head
n := copy(self.buf, self.buf[self.scanp:])
self.buf = self.buf[:n]
@@ -123,7 +117,6 @@ func (self *StreamDecoder) Decode(val interface{}) (err error) {
// InputOffset returns the input stream byte offset of the current decoder position.
// The offset gives the location of the end of the most recently returned token and the beginning of the next token.
func (self *StreamDecoder) InputOffset() int64 {
- // println("input offset",self.scanned, self.scanp)
return self.scanned + int64(self.scanp)
}
@@ -178,7 +171,7 @@ func (self *StreamDecoder) setErr(err error) {
self.err = err
mem := self.buf[:0]
self.buf = nil
- bufPool.Put(mem)
+ freeBytes(mem)
}
func (self *StreamDecoder) peek() (byte, error) {
@@ -237,12 +230,10 @@ func realloc(buf *[]byte) bool {
l := uint(len(*buf))
c := uint(cap(*buf))
if c == 0 {
- // println("use pool!")
*buf = bufPool.Get().([]byte)
return true
}
if c - l <= c >> minLeftBufferShift {
- // println("realloc!")
e := l+(l>>minLeftBufferShift)
if e <= c {
e = c*2
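
Besides deleting the commented-out println debugging, the stream.go changes route all buffer recycling through the new freeBytes helper, which returns a buffer to the pool only when rt.CanSizeResue accepts its capacity — so a single oversized document cannot pin a huge allocation inside the pool indefinitely. A self-contained sketch of that size-capped recycling (maxPooledCap is an assumed threshold for illustration, not sonic's actual limit):

```go
package main

import (
	"fmt"
	"sync"
)

const maxPooledCap = 1 << 20 // assumption: don't pool buffers over 1 MiB

var bufPool = sync.Pool{
	New: func() interface{} { return make([]byte, 0, 4096) },
}

// freeBytes drops oversized buffers instead of pooling them, trading a
// reallocation later for bounded steady-state memory.
func freeBytes(buf []byte) {
	if cap(buf) <= maxPooledCap {
		bufPool.Put(buf[:0])
	}
}

func main() {
	b := bufPool.Get().([]byte)
	b = append(b, "some decoded bytes"...)
	freeBytes(b)
	g := bufPool.Get().([]byte)
	fmt.Println("got buffer with cap:", cap(g))
}
```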
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go116.go b/vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go116.go
deleted file mode 100644
index 4c4c850ac..000000000
--- a/vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go116.go
+++ /dev/null
@@ -1,130 +0,0 @@
-// +build go1.16,!go1.17
-
-// Copyright 2023 CloudWeGo Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package decoder
-
-import (
- `strconv`
- _ `unsafe`
-
- `github.com/bytedance/sonic/internal/jit`
- `github.com/bytedance/sonic/internal/rt`
- `github.com/twitchyliquid64/golang-asm/obj`
- `github.com/twitchyliquid64/golang-asm/obj/x86`
-)
-
-var _runtime_writeBarrier uintptr = rt.GcwbAddr()
-
-//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
-func gcWriteBarrierAX()
-
-var (
- _V_writeBarrier = jit.Imm(int64(_runtime_writeBarrier))
-
- _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
-)
-
-func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
- self.Emit("MOVQ", _V_writeBarrier, _R10)
- self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
- self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
- if saveDI {
- self.save(_DI)
- }
- self.Emit("LEAQ", rec, _DI)
- self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
- self.Rjmp("CALL", _R10)
- if saveDI {
- self.load(_DI)
- }
- self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
- self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
- self.Emit("MOVQ", _AX, rec)
- self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-}
-
-func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
- if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
- panic("rec contains AX!")
- }
- self.Emit("MOVQ", _V_writeBarrier, _R10)
- self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
- self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
- if saveAX {
- self.Emit("XCHGQ", ptr, _AX)
- } else {
- self.Emit("MOVQ", ptr, _AX)
- }
- if saveDI {
- self.save(_DI)
- }
- self.Emit("LEAQ", rec, _DI)
- self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
- self.Rjmp("CALL", _R10)
- if saveDI {
- self.load(_DI)
- }
- if saveAX {
- self.Emit("XCHGQ", ptr, _AX)
- }
- self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
- self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
- self.Emit("MOVQ", ptr, rec)
- self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-}
-
-
-func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
- self.Emit("MOVQ", _V_writeBarrier, _R10)
- self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
- self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
- if saveDI {
- self.save(_DI)
- }
- self.Emit("LEAQ", rec, _DI)
- self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
- self.Rjmp("CALL", _R10)
- if saveDI {
- self.load(_DI)
- }
- self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
- self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
- self.Emit("MOVQ", _AX, rec)
- self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-}
-
-func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
- if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
- panic("rec contains AX!")
- }
- self.Emit("MOVQ", _V_writeBarrier, _R10)
- self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
- self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
- self.Emit("MOVQ", ptr, _AX)
- if saveDI {
- self.save(_DI)
- }
- self.Emit("LEAQ", rec, _DI)
- self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
- self.Rjmp("CALL", _R10)
- if saveDI {
- self.load(_DI)
- }
- self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
- self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
- self.Emit("MOVQ", ptr, rec)
- self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-}
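
The deleted go1.16 stubs exist because JIT-emitted stores of heap pointers must cooperate with Go's garbage collector: each WritePtrAX/WriteRecNotAX emitted a check of the runtime's global writeBarrier flag and, when it was set, routed the store through runtime.gcWriteBarrier instead of a plain MOVQ. Compiled Go code gets this from the compiler automatically; the sketch below only models the branch the assembler generated (writeBarrierEnabled and gcWriteBarrierStore are illustrative names, not runtime APIs):

```go
package main

import "fmt"

var writeBarrierEnabled bool // models runtime.writeBarrier's enabled flag

// gcWriteBarrierStore models the slow path: the real barrier records the
// old and new pointers for the garbage collector before storing.
func gcWriteBarrierStore(slot **int, ptr *int) {
	fmt.Println("store via gcWriteBarrier")
	*slot = ptr
}

// storePointer models the emitted sequence: test the flag, take the
// barrier when GC is marking, otherwise do the plain store.
func storePointer(slot **int, ptr *int) {
	if writeBarrierEnabled {
		gcWriteBarrierStore(slot, ptr)
		return
	}
	*slot = ptr // fast path: plain MOVQ
}

func main() {
	x := 42
	var slot *int
	writeBarrierEnabled = true
	storePointer(&slot, &x)
	fmt.Println(*slot)
}
```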
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/assembler_stkabi_amd64.go b/vendor/github.com/bytedance/sonic/internal/decoder/assembler_stkabi_amd64.go
deleted file mode 100644
index 9e2acc23f..000000000
--- a/vendor/github.com/bytedance/sonic/internal/decoder/assembler_stkabi_amd64.go
+++ /dev/null
@@ -1,1950 +0,0 @@
-// +build go1.16,!go1.17
-
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package decoder
-
-import (
- `encoding/json`
- `fmt`
- `math`
- `reflect`
- `unsafe`
-
- `github.com/bytedance/sonic/internal/caching`
- `github.com/bytedance/sonic/internal/jit`
- `github.com/bytedance/sonic/internal/native`
- `github.com/bytedance/sonic/internal/native/types`
- `github.com/bytedance/sonic/internal/rt`
- `github.com/twitchyliquid64/golang-asm/obj`
-)
-
-/** Register Allocations
- *
- * State Registers:
- *
- * %rbx : stack base
- * %r12 : input pointer
- * %r13 : input length
- * %r14 : input cursor
- * %r15 : value pointer
- *
- * Error Registers:
- *
- * %r10 : error type register
- * %r11 : error pointer register
- */
-
-/** Function Prototype & Stack Map
- *
- * func (s string, ic int, vp unsafe.Pointer, sb *_Stack, fv uint64, sv string) (rc int, err error)
- *
- * s.buf : (FP)
- * s.len : 8(FP)
- * ic : 16(FP)
- * vp : 24(FP)
- * sb : 32(FP)
- * fv : 40(FP)
- * sv : 56(FP)
- * err.vt : 72(FP)
- * err.vp : 80(FP)
- */
-
-const (
- _FP_args = 96 // 96 bytes to pass arguments and return values for this function
- _FP_fargs = 80 // 80 bytes for passing arguments to other Go functions
- _FP_saves = 40 // 40 bytes for saving the registers before CALL instructions
- _FP_locals = 144 // 144 bytes for local variables
-)
-
-const (
- _FP_offs = _FP_fargs + _FP_saves + _FP_locals
- _FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer
- _FP_base = _FP_size + 8 // 8 bytes for the return address
-)
-
-const (
- _IM_null = 0x6c6c756e // 'null'
- _IM_true = 0x65757274 // 'true'
- _IM_alse = 0x65736c61 // 'alse' ('false' without the 'f')
-)
-
-const (
- _BM_space = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n')
-)
-
-const (
- _MODE_JSON = 1 << 3 // base64 mode
-)
-
-const (
- _LB_error = "_error"
- _LB_im_error = "_im_error"
- _LB_eof_error = "_eof_error"
- _LB_type_error = "_type_error"
- _LB_field_error = "_field_error"
- _LB_range_error = "_range_error"
- _LB_stack_error = "_stack_error"
- _LB_base64_error = "_base64_error"
- _LB_unquote_error = "_unquote_error"
- _LB_parsing_error = "_parsing_error"
- _LB_parsing_error_v = "_parsing_error_v"
- _LB_mismatch_error = "_mismatch_error"
-)
-
-const (
- _LB_char_0_error = "_char_0_error"
- _LB_char_1_error = "_char_1_error"
- _LB_char_2_error = "_char_2_error"
- _LB_char_3_error = "_char_3_error"
- _LB_char_4_error = "_char_4_error"
- _LB_char_m2_error = "_char_m2_error"
- _LB_char_m3_error = "_char_m3_error"
-)
-
-const (
- _LB_skip_one = "_skip_one"
- _LB_skip_key_value = "_skip_key_value"
-)
-
-var (
- _AX = jit.Reg("AX")
- _CX = jit.Reg("CX")
- _DX = jit.Reg("DX")
- _DI = jit.Reg("DI")
- _SI = jit.Reg("SI")
- _BP = jit.Reg("BP")
- _SP = jit.Reg("SP")
- _R8 = jit.Reg("R8")
- _R9 = jit.Reg("R9")
- _X0 = jit.Reg("X0")
- _X1 = jit.Reg("X1")
-)
-
-var (
- _ST = jit.Reg("BX")
- _IP = jit.Reg("R12")
- _IL = jit.Reg("R13")
- _IC = jit.Reg("R14")
- _VP = jit.Reg("R15")
-)
-
-var (
- _R10 = jit.Reg("R10") // used for gcWriteBarrier
- _DF = jit.Reg("R10") // reuse R10 in generic decoder for flags
- _ET = jit.Reg("R10")
- _EP = jit.Reg("R11")
-)
-
-var (
- _ARG_s = _ARG_sp
- _ARG_sp = jit.Ptr(_SP, _FP_base)
- _ARG_sl = jit.Ptr(_SP, _FP_base + 8)
- _ARG_ic = jit.Ptr(_SP, _FP_base + 16)
- _ARG_vp = jit.Ptr(_SP, _FP_base + 24)
- _ARG_sb = jit.Ptr(_SP, _FP_base + 32)
- _ARG_fv = jit.Ptr(_SP, _FP_base + 40)
-)
-
-var (
- _VAR_sv = _VAR_sv_p
- _VAR_sv_p = jit.Ptr(_SP, _FP_base + 48)
- _VAR_sv_n = jit.Ptr(_SP, _FP_base + 56)
- _VAR_vk = jit.Ptr(_SP, _FP_base + 64)
-)
-
-var (
- _RET_rc = jit.Ptr(_SP, _FP_base + 72)
- _RET_et = jit.Ptr(_SP, _FP_base + 80)
- _RET_ep = jit.Ptr(_SP, _FP_base + 88)
-)
-
-var (
- _VAR_st = _VAR_st_Vt
- _VAR_sr = jit.Ptr(_SP, _FP_fargs + _FP_saves)
-)
-
-
-var (
- _VAR_st_Vt = jit.Ptr(_SP, _FP_fargs + _FP_saves + 0)
- _VAR_st_Dv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
- _VAR_st_Iv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
- _VAR_st_Ep = jit.Ptr(_SP, _FP_fargs + _FP_saves + 24)
- _VAR_st_Db = jit.Ptr(_SP, _FP_fargs + _FP_saves + 32)
- _VAR_st_Dc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 40)
-)
-
-var (
- _VAR_ss_AX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 48)
- _VAR_ss_CX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 56)
- _VAR_ss_SI = jit.Ptr(_SP, _FP_fargs + _FP_saves + 64)
- _VAR_ss_R8 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 72)
- _VAR_ss_R9 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 80)
-)
-
-var (
- _VAR_bs_p = jit.Ptr(_SP, _FP_fargs + _FP_saves + 88)
- _VAR_bs_n = jit.Ptr(_SP, _FP_fargs + _FP_saves + 96)
- _VAR_bs_LR = jit.Ptr(_SP, _FP_fargs + _FP_saves + 104)
-)
-
-var _VAR_fl = jit.Ptr(_SP, _FP_fargs + _FP_saves + 112)
-
-var (
- _VAR_et = jit.Ptr(_SP, _FP_fargs + _FP_saves + 120) // save dismatched type
- _VAR_ic = jit.Ptr(_SP, _FP_fargs + _FP_saves + 128) // save dismatched position
- _VAR_pc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 136) // save skip return pc
-)
-
-type _Assembler struct {
- jit.BaseAssembler
- p _Program
- name string
-}
-
-func newAssembler(p _Program) *_Assembler {
- return new(_Assembler).Init(p)
-}
-
-/** Assembler Interface **/
-
-func (self *_Assembler) Load() _Decoder {
- return ptodec(self.BaseAssembler.Load("decode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
-}
-
-func (self *_Assembler) Init(p _Program) *_Assembler {
- self.p = p
- self.BaseAssembler.Init(self.compile)
- return self
-}
-
-func (self *_Assembler) compile() {
- self.prologue()
- self.instrs()
- self.epilogue()
- self.copy_string()
- self.escape_string()
- self.escape_string_twice()
- self.skip_one()
- self.skip_key_value()
- self.mismatch_error()
- self.type_error()
- self.field_error()
- self.range_error()
- self.stack_error()
- self.base64_error()
- self.parsing_error()
-}
-
-/** Assembler Stages **/
-
-var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
- _OP_any : (*_Assembler)._asm_OP_any,
- _OP_dyn : (*_Assembler)._asm_OP_dyn,
- _OP_str : (*_Assembler)._asm_OP_str,
- _OP_bin : (*_Assembler)._asm_OP_bin,
- _OP_bool : (*_Assembler)._asm_OP_bool,
- _OP_num : (*_Assembler)._asm_OP_num,
- _OP_i8 : (*_Assembler)._asm_OP_i8,
- _OP_i16 : (*_Assembler)._asm_OP_i16,
- _OP_i32 : (*_Assembler)._asm_OP_i32,
- _OP_i64 : (*_Assembler)._asm_OP_i64,
- _OP_u8 : (*_Assembler)._asm_OP_u8,
- _OP_u16 : (*_Assembler)._asm_OP_u16,
- _OP_u32 : (*_Assembler)._asm_OP_u32,
- _OP_u64 : (*_Assembler)._asm_OP_u64,
- _OP_f32 : (*_Assembler)._asm_OP_f32,
- _OP_f64 : (*_Assembler)._asm_OP_f64,
- _OP_unquote : (*_Assembler)._asm_OP_unquote,
- _OP_nil_1 : (*_Assembler)._asm_OP_nil_1,
- _OP_nil_2 : (*_Assembler)._asm_OP_nil_2,
- _OP_nil_3 : (*_Assembler)._asm_OP_nil_3,
- _OP_deref : (*_Assembler)._asm_OP_deref,
- _OP_index : (*_Assembler)._asm_OP_index,
- _OP_is_null : (*_Assembler)._asm_OP_is_null,
- _OP_is_null_quote : (*_Assembler)._asm_OP_is_null_quote,
- _OP_map_init : (*_Assembler)._asm_OP_map_init,
- _OP_map_key_i8 : (*_Assembler)._asm_OP_map_key_i8,
- _OP_map_key_i16 : (*_Assembler)._asm_OP_map_key_i16,
- _OP_map_key_i32 : (*_Assembler)._asm_OP_map_key_i32,
- _OP_map_key_i64 : (*_Assembler)._asm_OP_map_key_i64,
- _OP_map_key_u8 : (*_Assembler)._asm_OP_map_key_u8,
- _OP_map_key_u16 : (*_Assembler)._asm_OP_map_key_u16,
- _OP_map_key_u32 : (*_Assembler)._asm_OP_map_key_u32,
- _OP_map_key_u64 : (*_Assembler)._asm_OP_map_key_u64,
- _OP_map_key_f32 : (*_Assembler)._asm_OP_map_key_f32,
- _OP_map_key_f64 : (*_Assembler)._asm_OP_map_key_f64,
- _OP_map_key_str : (*_Assembler)._asm_OP_map_key_str,
- _OP_map_key_utext : (*_Assembler)._asm_OP_map_key_utext,
- _OP_map_key_utext_p : (*_Assembler)._asm_OP_map_key_utext_p,
- _OP_array_skip : (*_Assembler)._asm_OP_array_skip,
- _OP_array_clear : (*_Assembler)._asm_OP_array_clear,
- _OP_array_clear_p : (*_Assembler)._asm_OP_array_clear_p,
- _OP_slice_init : (*_Assembler)._asm_OP_slice_init,
- _OP_slice_append : (*_Assembler)._asm_OP_slice_append,
- _OP_object_skip : (*_Assembler)._asm_OP_object_skip,
- _OP_object_next : (*_Assembler)._asm_OP_object_next,
- _OP_struct_field : (*_Assembler)._asm_OP_struct_field,
- _OP_unmarshal : (*_Assembler)._asm_OP_unmarshal,
- _OP_unmarshal_p : (*_Assembler)._asm_OP_unmarshal_p,
- _OP_unmarshal_text : (*_Assembler)._asm_OP_unmarshal_text,
- _OP_unmarshal_text_p : (*_Assembler)._asm_OP_unmarshal_text_p,
- _OP_lspace : (*_Assembler)._asm_OP_lspace,
- _OP_match_char : (*_Assembler)._asm_OP_match_char,
- _OP_check_char : (*_Assembler)._asm_OP_check_char,
- _OP_load : (*_Assembler)._asm_OP_load,
- _OP_save : (*_Assembler)._asm_OP_save,
- _OP_drop : (*_Assembler)._asm_OP_drop,
- _OP_drop_2 : (*_Assembler)._asm_OP_drop_2,
- _OP_recurse : (*_Assembler)._asm_OP_recurse,
- _OP_goto : (*_Assembler)._asm_OP_goto,
- _OP_switch : (*_Assembler)._asm_OP_switch,
- _OP_check_char_0 : (*_Assembler)._asm_OP_check_char_0,
- _OP_dismatch_err : (*_Assembler)._asm_OP_dismatch_err,
- _OP_go_skip : (*_Assembler)._asm_OP_go_skip,
- _OP_add : (*_Assembler)._asm_OP_add,
- _OP_check_empty : (*_Assembler)._asm_OP_check_empty,
-}
-
-func (self *_Assembler) instr(v *_Instr) {
- if fn := _OpFuncTab[v.op()]; fn != nil {
- fn(self, v)
- } else {
- panic(fmt.Sprintf("invalid opcode: %d", v.op()))
- }
-}
-
-func (self *_Assembler) instrs() {
- for i, v := range self.p {
- self.Mark(i)
- self.instr(&v)
- self.debug_instr(i, &v)
- }
-}
-
-func (self *_Assembler) epilogue() {
- self.Mark(len(self.p))
- self.Emit("XORL", _EP, _EP) // XORL EP, EP
- self.Emit("MOVQ", _VAR_et, _ET) // MOVQ VAR_et, ET
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ", _LB_mismatch_error) // JNZ _LB_mismatch_error
- self.Link(_LB_error) // _error:
- self.Emit("MOVQ", _IC, _RET_rc) // MOVQ IC, rc<>+40(FP)
- self.Emit("MOVQ", _ET, _RET_et) // MOVQ ET, et<>+48(FP)
- self.Emit("MOVQ", _EP, _RET_ep) // MOVQ EP, ep<>+56(FP)
- self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP
- self.Emit("ADDQ", jit.Imm(_FP_size), _SP) // ADDQ $_FP_size, SP
- self.Emit("RET") // RET
-}
-
-func (self *_Assembler) prologue() {
- self.Emit("SUBQ", jit.Imm(_FP_size), _SP) // SUBQ $_FP_size, SP
- self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP)
- self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP
- self.Emit("MOVQ", _ARG_sp, _IP) // MOVQ s.p<>+0(FP), IP
- self.Emit("MOVQ", _ARG_sl, _IL) // MOVQ s.l<>+8(FP), IL
- self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC
- self.Emit("MOVQ", _ARG_vp, _VP) // MOVQ vp<>+24(FP), VP
- self.Emit("MOVQ", _ARG_sb, _ST) // MOVQ vp<>+32(FP), ST
- // initialize digital buffer first
- self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_st_Dc) // MOVQ $_MaxDigitNums, ss.Dcap
- self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX) // LEAQ _DbufOffset(ST), AX
- self.Emit("MOVQ", _AX, _VAR_st_Db) // MOVQ AX, ss.Dbuf
- self.Emit("XORL", _AX, _AX) // XORL AX, AX
- self.Emit("MOVQ", _AX, _VAR_et) // MOVQ AX, ss.Dp
-}
-
-/** Function Calling Helpers **/
-
-var _REG_go = []obj.Addr {
- _ST,
- _VP,
- _IP,
- _IL,
- _IC,
-}
-
-func (self *_Assembler) save(r ...obj.Addr) {
- for i, v := range r {
- if i > _FP_saves / 8 - 1 {
- panic("too many registers to save")
- } else {
- self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))
- }
- }
-}
-
-func (self *_Assembler) load(r ...obj.Addr) {
- for i, v := range r {
- if i > _FP_saves / 8 - 1 {
- panic("too many registers to load")
- } else {
- self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)
- }
- }
-}
-
-func (self *_Assembler) call(fn obj.Addr) {
- self.Emit("MOVQ", fn, _AX) // MOVQ ${fn}, AX
- self.Rjmp("CALL", _AX) // CALL AX
-}
-
-func (self *_Assembler) call_go(fn obj.Addr) {
- self.save(_REG_go...) // SAVE $REG_go
- self.call(fn) // CALL ${fn}
- self.load(_REG_go...) // LOAD $REG_go
-}
-
-func (self *_Assembler) call_sf(fn obj.Addr) {
- self.Emit("LEAQ", _ARG_s, _DI) // LEAQ s<>+0(FP), DI
- self.Emit("MOVQ", _IC, _ARG_ic) // MOVQ IC, ic<>+16(FP)
- self.Emit("LEAQ", _ARG_ic, _SI) // LEAQ ic<>+16(FP), SI
- self.Emit("LEAQ", jit.Ptr(_ST, _FsmOffset), _DX) // LEAQ _FsmOffset(ST), DX
- self.Emit("MOVQ", _ARG_fv, _CX)
- self.call(fn) // CALL ${fn}
- self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC
-}
-
-func (self *_Assembler) call_vf(fn obj.Addr) {
- self.Emit("LEAQ", _ARG_s, _DI) // LEAQ s<>+0(FP), DI
- self.Emit("MOVQ", _IC, _ARG_ic) // MOVQ IC, ic<>+16(FP)
- self.Emit("LEAQ", _ARG_ic, _SI) // LEAQ ic<>+16(FP), SI
- self.Emit("LEAQ", _VAR_st, _DX) // LEAQ st, DX
- self.call(fn) // CALL ${fn}
- self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC
-}
-
-/** Assembler Error Handlers **/
-
-var (
- _F_convT64 = jit.Func(convT64)
- _F_error_wrap = jit.Func(error_wrap)
- _F_error_type = jit.Func(error_type)
- _F_error_field = jit.Func(error_field)
- _F_error_value = jit.Func(error_value)
- _F_error_mismatch = jit.Func(error_mismatch)
-)
-
-var (
- _I_int8 , _T_int8 = rtype(reflect.TypeOf(int8(0)))
- _I_int16 , _T_int16 = rtype(reflect.TypeOf(int16(0)))
- _I_int32 , _T_int32 = rtype(reflect.TypeOf(int32(0)))
- _I_uint8 , _T_uint8 = rtype(reflect.TypeOf(uint8(0)))
- _I_uint16 , _T_uint16 = rtype(reflect.TypeOf(uint16(0)))
- _I_uint32 , _T_uint32 = rtype(reflect.TypeOf(uint32(0)))
- _I_float32 , _T_float32 = rtype(reflect.TypeOf(float32(0)))
-)
-
-var (
- _T_error = rt.UnpackType(errorType)
- _I_base64_CorruptInputError = jit.Itab(_T_error, base64CorruptInputError)
-)
-
-var (
- _V_stackOverflow = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow))))
- _I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError)))
- _I_json_MismatchTypeError = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError)))
-)
-
-func (self *_Assembler) type_error() {
- self.Link(_LB_type_error) // _type_error:
- self.Emit("MOVQ", _ET, jit.Ptr(_SP, 0)) // MOVQ ET, (SP)
- self.call_go(_F_error_type) // CALL_GO error_type
- self.Emit("MOVQ", jit.Ptr(_SP, 8), _ET) // MOVQ 8(SP), ET
- self.Emit("MOVQ", jit.Ptr(_SP, 16), _EP) // MOVQ 16(SP), EP
- self.Sjmp("JMP" , _LB_error) // JMP _error
-}
-
-
-func (self *_Assembler) mismatch_error() {
- self.Link(_LB_mismatch_error) // _type_error:
- self.Emit("MOVQ", _VAR_et, _ET) // MOVQ _VAR_et, ET
- self.Emit("MOVQ", _VAR_ic, _EP) // MOVQ _VAR_ic, EP
- self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ _I_json_MismatchTypeError, AX
- self.Emit("CMPQ", _ET, _AX) // CMPQ ET, AX
- self.Sjmp("JE" , _LB_error) // JE _LB_error
- self.Emit("MOVQ", _ARG_sp, _AX)
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVQ", _ARG_sl, _CX)
- self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
- self.Emit("MOVQ", _VAR_ic, _AX)
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP)
- self.Emit("MOVQ", _VAR_et, _CX)
- self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24)) // MOVQ CX, 24(SP)
- self.call_go(_F_error_mismatch) // CALL_GO error_type
- self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
- self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
- self.Sjmp("JMP" , _LB_error) // JMP _error
-}
-
-func (self *_Assembler) _asm_OP_dismatch_err(p *_Instr) {
- self.Emit("MOVQ", _IC, _VAR_ic)
- self.Emit("MOVQ", jit.Type(p.vt()), _ET)
- self.Emit("MOVQ", _ET, _VAR_et)
-}
-
-func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {
- self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
- self.Xref(p.vi(), 4)
- self.Emit("MOVQ", _R9, _VAR_pc)
- self.Sjmp("JMP" , _LB_skip_one) // JMP _skip_one
-}
-
-func (self *_Assembler) skip_one() {
- self.Link(_LB_skip_one) // _skip:
- self.Emit("MOVQ", _VAR_ic, _IC) // MOVQ _VAR_ic, IC
- self.call_sf(_F_skip_one) // CALL_SF skip_one
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v
- self.Emit("MOVQ" , _VAR_pc, _R9) // MOVQ pc, R9
- self.Rjmp("JMP" , _R9) // JMP (R9)
-}
-
-
-func (self *_Assembler) skip_key_value() {
- self.Link(_LB_skip_key_value) // _skip:
- // skip the key
- self.Emit("MOVQ", _VAR_ic, _IC) // MOVQ _VAR_ic, IC
- self.call_sf(_F_skip_one) // CALL_SF skip_one
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v
- // match char ':'
- self.lspace("_global_1")
- self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(':'))
- self.Sjmp("JNE" , _LB_parsing_error_v) // JNE _parse_error_v
- self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC
- self.lspace("_global_2")
- // skip the value
- self.call_sf(_F_skip_one) // CALL_SF skip_one
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v
- // jump back to specified address
- self.Emit("MOVQ" , _VAR_pc, _R9) // MOVQ pc, R9
- self.Rjmp("JMP" , _R9) // JMP (R9)
-}
-
-func (self *_Assembler) field_error() {
- self.Link(_LB_field_error) // _field_error:
- self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0
- self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0)) // MOVOU X0, (SP)
- self.call_go(_F_error_field) // CALL_GO error_field
- self.Emit("MOVQ" , jit.Ptr(_SP, 16), _ET) // MOVQ 16(SP), ET
- self.Emit("MOVQ" , jit.Ptr(_SP, 24), _EP) // MOVQ 24(SP), EP
- self.Sjmp("JMP" , _LB_error) // JMP _error
-}
-
-func (self *_Assembler) range_error() {
- self.Link(_LB_range_error) // _range_error:
- self.slice_from(_VAR_st_Ep, 0) // SLICE st.Ep, $0
- self.Emit("MOVQ", _DI, jit.Ptr(_SP, 0)) // MOVQ DI, (SP)
- self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8)) // MOVQ SI, 8(SP)
- self.Emit("MOVQ", _ET, jit.Ptr(_SP, 16)) // MOVQ ET, 16(SP)
- self.Emit("MOVQ", _EP, jit.Ptr(_SP, 24)) // MOVQ EP, 24(SP)
- self.call_go(_F_error_value) // CALL_GO error_value
- self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
- self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
- self.Sjmp("JMP" , _LB_error) // JMP _error
-}
-
-func (self *_Assembler) stack_error() {
- self.Link(_LB_stack_error) // _stack_error:
- self.Emit("MOVQ", _V_stackOverflow, _EP) // MOVQ ${_V_stackOverflow}, EP
- self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ ${_I_json_UnsupportedValueError}, ET
- self.Sjmp("JMP" , _LB_error) // JMP _error
-}
-
-func (self *_Assembler) base64_error() {
- self.Link(_LB_base64_error)
- self.Emit("NEGQ", _AX) // NEGQ AX
- self.Emit("SUBQ", jit.Imm(1), _AX) // SUBQ $1, AX
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.call_go(_F_convT64) // CALL_GO convT64
- self.Emit("MOVQ", jit.Ptr(_SP, 8), _EP) // MOVQ 8(SP), EP
- self.Emit("MOVQ", _I_base64_CorruptInputError, _ET) // MOVQ ${itab(base64.CorruptInputError)}, ET
- self.Sjmp("JMP" , _LB_error) // JMP _error
-}
-
-func (self *_Assembler) parsing_error() {
- self.Link(_LB_eof_error) // _eof_error:
- self.Emit("MOVQ" , _IL, _IC) // MOVQ IL, IC
- self.Emit("MOVL" , jit.Imm(int64(types.ERR_EOF)), _EP) // MOVL ${types.ERR_EOF}, EP
- self.Sjmp("JMP" , _LB_parsing_error) // JMP _parsing_error
- self.Link(_LB_unquote_error) // _unquote_error:
- self.Emit("SUBQ" , _VAR_sr, _SI) // SUBQ sr, SI
- self.Emit("SUBQ" , _SI, _IC) // SUBQ IL, IC
- self.Link(_LB_parsing_error_v) // _parsing_error_v:
- self.Emit("MOVQ" , _AX, _EP) // MOVQ AX, EP
- self.Emit("NEGQ" , _EP) // NEGQ EP
- self.Sjmp("JMP" , _LB_parsing_error) // JMP _parsing_error
- self.Link(_LB_char_m3_error) // _char_m3_error:
- self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
- self.Link(_LB_char_m2_error) // _char_m2_error:
- self.Emit("SUBQ" , jit.Imm(2), _IC) // SUBQ $2, IC
- self.Sjmp("JMP" , _LB_char_0_error) // JMP _char_0_error
- self.Link(_LB_im_error) // _im_error:
- self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPB CX, (IP)(IC)
- self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error
- self.Emit("SHRL" , jit.Imm(8), _CX) // SHRL $8, CX
- self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 1)) // CMPB CX, 1(IP)(IC)
- self.Sjmp("JNE" , _LB_char_1_error) // JNE _char_1_error
- self.Emit("SHRL" , jit.Imm(8), _CX) // SHRL $8, CX
- self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 2)) // CMPB CX, 2(IP)(IC)
- self.Sjmp("JNE" , _LB_char_2_error) // JNE _char_2_error
- self.Sjmp("JMP" , _LB_char_3_error) // JNE _char_3_error
- self.Link(_LB_char_4_error) // _char_4_error:
- self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
- self.Link(_LB_char_3_error) // _char_3_error:
- self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
- self.Link(_LB_char_2_error) // _char_2_error:
- self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
- self.Link(_LB_char_1_error) // _char_1_error:
- self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
- self.Link(_LB_char_0_error) // _char_0_error:
- self.Emit("MOVL" , jit.Imm(int64(types.ERR_INVALID_CHAR)), _EP) // MOVL ${types.ERR_INVALID_CHAR}, EP
- self.Link(_LB_parsing_error) // _parsing_error:
- self.Emit("MOVOU", _ARG_s, _X0) // MOVOU s, X0
- self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0)) // MOVOU X0, (SP)
- self.Emit("MOVQ" , _IC, jit.Ptr(_SP, 16)) // MOVQ IC, 16(SP)
- self.Emit("MOVQ" , _EP, jit.Ptr(_SP, 24)) // MOVQ EP, 24(SP)
- self.call_go(_F_error_wrap) // CALL_GO error_wrap
- self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
- self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
- self.Sjmp("JMP" , _LB_error) // JMP _error
-}
-
-/** Memory Management Routines **/
-
-var (
- _T_byte = jit.Type(byteType)
- _F_mallocgc = jit.Func(mallocgc)
-)
-
-func (self *_Assembler) malloc(nb obj.Addr, ret obj.Addr) {
- self.Emit("XORL", _AX, _AX) // XORL AX, AX
- self.Emit("MOVQ", _T_byte, _CX) // MOVQ ${type(byte)}, CX
- self.Emit("MOVQ", nb, jit.Ptr(_SP, 0)) // MOVQ ${nb}, (SP)
- self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP)
- self.call_go(_F_mallocgc) // CALL_GO mallocgc
- self.Emit("MOVQ", jit.Ptr(_SP, 24), ret) // MOVQ 24(SP), ${ret}
-}
-
-func (self *_Assembler) valloc(vt reflect.Type, ret obj.Addr) {
- self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX) // MOVQ ${vt.Size()}, AX
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVQ", jit.Type(vt), _AX) // MOVQ ${vt}, AX
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
- self.Emit("MOVB", jit.Imm(1), jit.Ptr(_SP, 16)) // MOVB $1, 16(SP)
- self.call_go(_F_mallocgc) // CALL_GO mallocgc
- self.Emit("MOVQ", jit.Ptr(_SP, 24), ret) // MOVQ 24(SP), ${ret}
-}
-
-func (self *_Assembler) vfollow(vt reflect.Type) {
- self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JNZ" , "_end_{n}") // JNZ _end_{n}
- self.valloc(vt, _AX) // VALLOC ${vt}, AX
- self.WritePtrAX(1, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP)
- self.Link("_end_{n}") // _end_{n}:
- self.Emit("MOVQ" , _AX, _VP) // MOVQ AX, VP
-}
-
-/** Value Parsing Routines **/
-
-var (
- _F_vstring = jit.Imm(int64(native.S_vstring))
- _F_vnumber = jit.Imm(int64(native.S_vnumber))
- _F_vsigned = jit.Imm(int64(native.S_vsigned))
- _F_vunsigned = jit.Imm(int64(native.S_vunsigned))
-)
-
-func (self *_Assembler) check_err(vt reflect.Type, pin string, pin2 int) {
- self.Emit("MOVQ" , _VAR_st_Vt, _AX) // MOVQ st.Vt, AX
- self.Emit("TESTQ", _AX, _AX) // CMPQ AX, ${native.V_STRING}
- // try to skip the value
- if vt != nil {
- self.Sjmp("JNS" , "_check_err_{n}") // JNE _parsing_error_v
- self.Emit("MOVQ", jit.Type(vt), _ET)
- self.Emit("MOVQ", _ET, _VAR_et)
- if pin2 != -1 {
- self.Emit("SUBQ", jit.Imm(1), _BP)
- self.Emit("MOVQ", _BP, _VAR_ic)
- self.Byte(0x4c , 0x8d, 0x0d) // LEAQ (PC), R9
- self.Xref(pin2, 4)
- self.Emit("MOVQ", _R9, _VAR_pc)
- self.Sjmp("JMP" , _LB_skip_key_value)
- } else {
- self.Emit("MOVQ", _BP, _VAR_ic)
- self.Byte(0x4c , 0x8d, 0x0d) // LEAQ (PC), R9
- self.Sref(pin, 4)
- self.Emit("MOVQ", _R9, _VAR_pc)
- self.Sjmp("JMP" , _LB_skip_one)
- }
- self.Link("_check_err_{n}")
- } else {
- self.Sjmp("JS" , _LB_parsing_error_v) // JNE _parsing_error_v
- }
-}
-
-func (self *_Assembler) check_eof(d int64) {
- if d == 1 {
- self.Emit("CMPQ", _IC, _IL) // CMPQ IC, IL
- self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error
- } else {
- self.Emit("LEAQ", jit.Ptr(_IC, d), _AX) // LEAQ ${d}(IC), AX
- self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL
- self.Sjmp("JA" , _LB_eof_error) // JA _eof_error
- }
-}
-
-func (self *_Assembler) parse_string() { // parse_string has a validate flag params in the last
- self.Emit("MOVQ", _ARG_fv, _CX)
- self.call_vf(_F_vstring)
- self.check_err(nil, "", -1)
-}
-
-func (self *_Assembler) parse_number(vt reflect.Type, pin string, pin2 int) {
- self.Emit("MOVQ", _IC, _BP)
- self.call_vf(_F_vnumber) // call vnumber
- self.check_err(vt, pin, pin2)
-}
-
-func (self *_Assembler) parse_signed(vt reflect.Type, pin string, pin2 int) {
- self.Emit("MOVQ", _IC, _BP)
- self.call_vf(_F_vsigned)
- self.check_err(vt, pin, pin2)
-}
-
-func (self *_Assembler) parse_unsigned(vt reflect.Type, pin string, pin2 int) {
- self.Emit("MOVQ", _IC, _BP)
- self.call_vf(_F_vunsigned)
- self.check_err(vt, pin, pin2)
-}
-
-// Pointer: DI, Size: SI, Return: R9
-func (self *_Assembler) copy_string() {
- self.Link("_copy_string")
- self.Emit("MOVQ", _DI, _VAR_bs_p)
- self.Emit("MOVQ", _SI, _VAR_bs_n)
- self.Emit("MOVQ", _R9, _VAR_bs_LR)
- self.malloc(_SI, _AX)
- self.Emit("MOVQ", _AX, _VAR_sv_p)
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))
- self.Emit("MOVQ", _VAR_bs_p, _DI)
- self.Emit("MOVQ", _DI, jit.Ptr(_SP, 8))
- self.Emit("MOVQ", _VAR_bs_n, _SI)
- self.Emit("MOVQ", _SI, jit.Ptr(_SP, 16))
- self.call_go(_F_memmove)
- self.Emit("MOVQ", _VAR_sv_p, _DI)
- self.Emit("MOVQ", _VAR_bs_n, _SI)
- self.Emit("MOVQ", _VAR_bs_LR, _R9)
- self.Rjmp("JMP", _R9)
-}
-
-// Pointer: DI, Size: SI, Return: R9
-func (self *_Assembler) escape_string() {
- self.Link("_escape_string")
- self.Emit("MOVQ" , _DI, _VAR_bs_p)
- self.Emit("MOVQ" , _SI, _VAR_bs_n)
- self.Emit("MOVQ" , _R9, _VAR_bs_LR)
- self.malloc(_SI, _DX) // MALLOC SI, DX
- self.Emit("MOVQ" , _DX, _VAR_sv_p)
- self.Emit("MOVQ" , _VAR_bs_p, _DI)
- self.Emit("MOVQ" , _VAR_bs_n, _SI)
- self.Emit("LEAQ" , _VAR_sr, _CX) // LEAQ sr, CX
- self.Emit("XORL" , _R8, _R8) // XORL R8, R8
- self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv) // BTQ ${_F_disable_urc}, fv
- self.Emit("SETCC", _R8) // SETCC R8
- self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ ${types.B_UNICODE_REPLACE}, R8
- self.call(_F_unquote) // CALL unquote
- self.Emit("MOVQ" , _VAR_bs_n, _SI) // MOVQ ${n}, SI
- self.Emit("ADDQ" , jit.Imm(1), _SI) // ADDQ $1, SI
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JS" , _LB_unquote_error) // JS _unquote_error
- self.Emit("MOVQ" , _AX, _SI)
- self.Emit("MOVQ" , _VAR_sv_p, _DI)
- self.Emit("MOVQ" , _VAR_bs_LR, _R9)
- self.Rjmp("JMP", _R9)
-}
-
-func (self *_Assembler) escape_string_twice() {
- self.Link("_escape_string_twice")
- self.Emit("MOVQ" , _DI, _VAR_bs_p)
- self.Emit("MOVQ" , _SI, _VAR_bs_n)
- self.Emit("MOVQ" , _R9, _VAR_bs_LR)
- self.malloc(_SI, _DX) // MALLOC SI, DX
- self.Emit("MOVQ" , _DX, _VAR_sv_p)
- self.Emit("MOVQ" , _VAR_bs_p, _DI)
- self.Emit("MOVQ" , _VAR_bs_n, _SI)
- self.Emit("LEAQ" , _VAR_sr, _CX) // LEAQ sr, CX
- self.Emit("MOVL" , jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8
- self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv) // BTQ ${_F_disable_urc}, AX
- self.Emit("XORL" , _AX, _AX) // XORL AX, AX
- self.Emit("SETCC", _AX) // SETCC AX
- self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _AX) // SHLQ ${types.B_UNICODE_REPLACE}, AX
- self.Emit("ORQ" , _AX, _R8) // ORQ AX, R8
- self.call(_F_unquote) // CALL unquote
- self.Emit("MOVQ" , _VAR_bs_n, _SI) // MOVQ ${n}, SI
- self.Emit("ADDQ" , jit.Imm(3), _SI) // ADDQ $3, SI
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JS" , _LB_unquote_error) // JS _unquote_error
- self.Emit("MOVQ" , _AX, _SI)
- self.Emit("MOVQ" , _VAR_sv_p, _DI)
- self.Emit("MOVQ" , _VAR_bs_LR, _R9)
- self.Rjmp("JMP", _R9)
-}
-
-/** Range Checking Routines **/
-
-var (
- _V_max_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_max_f32))))
- _V_min_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_min_f32))))
-)
-
-var (
- _Vp_max_f32 = new(float32)
- _Vp_min_f32 = new(float32)
-)
-
-func init() {
- *_Vp_max_f32 = math.MaxFloat32
- *_Vp_min_f32 = -math.MaxFloat32
-}
-
-func (self *_Assembler) range_single() {
- self.Emit("CVTSD2SS", _VAR_st_Dv, _X0) // CVTSD2SS st.Dv, X0
- self.Emit("MOVQ" , _V_max_f32, _AX) // MOVQ _max_f32, AX
- self.Emit("MOVQ" , jit.Gitab(_I_float32), _ET) // MOVQ ${itab(float32)}, ET
- self.Emit("MOVQ" , jit.Gtype(_T_float32), _EP) // MOVQ ${type(float32)}, EP
- self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0) // UCOMISS (AX), X0
- self.Sjmp("JA" , _LB_range_error) // JA _range_error
- self.Emit("MOVQ" , _V_min_f32, _AX) // MOVQ _min_f32, AX
- self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0) // UCOMISS (AX), X0
- self.Sjmp("JB" , _LB_range_error) // JB _range_error
-}
-
-func (self *_Assembler) range_signed(i *rt.GoItab, t *rt.GoType, a int64, b int64) {
- self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX
- self.Emit("MOVQ", jit.Gitab(i), _ET) // MOVQ ${i}, ET
- self.Emit("MOVQ", jit.Gtype(t), _EP) // MOVQ ${t}, EP
- self.Emit("CMPQ", _AX, jit.Imm(a)) // CMPQ AX, ${a}
- self.Sjmp("JL" , _LB_range_error) // JL _range_error
- self.Emit("CMPQ", _AX, jit.Imm(b)) // CMPQ AX, ${B}
- self.Sjmp("JG" , _LB_range_error) // JG _range_error
-}
-
-func (self *_Assembler) range_unsigned(i *rt.GoItab, t *rt.GoType, v uint64) {
- self.Emit("MOVQ" , _VAR_st_Iv, _AX) // MOVQ st.Iv, AX
- self.Emit("MOVQ" , jit.Gitab(i), _ET) // MOVQ ${i}, ET
- self.Emit("MOVQ" , jit.Gtype(t), _EP) // MOVQ ${t}, EP
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JS" , _LB_range_error) // JS _range_error
- self.Emit("CMPQ" , _AX, jit.Imm(int64(v))) // CMPQ AX, ${a}
- self.Sjmp("JA" , _LB_range_error) // JA _range_error
-}
-
-/** String Manipulating Routines **/
-
-var (
- _F_unquote = jit.Imm(int64(native.S_unquote))
-)
-
-func (self *_Assembler) slice_from(p obj.Addr, d int64) {
- self.Emit("MOVQ", p, _SI) // MOVQ ${p}, SI
- self.slice_from_r(_SI, d) // SLICE_R SI, ${d}
-}
-
-func (self *_Assembler) slice_from_r(p obj.Addr, d int64) {
- self.Emit("LEAQ", jit.Sib(_IP, p, 1, 0), _DI) // LEAQ (IP)(${p}), DI
- self.Emit("NEGQ", p) // NEGQ ${p}
- self.Emit("LEAQ", jit.Sib(_IC, p, 1, d), _SI) // LEAQ d(IC)(${p}), SI
-}
-
-func (self *_Assembler) unquote_once(p obj.Addr, n obj.Addr, stack bool, copy bool) {
- self.slice_from(_VAR_st_Iv, -1) // SLICE st.Iv, $-1
- self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1)) // CMPQ st.Ep, $-1
- self.Sjmp("JE" , "_noescape_{n}") // JE _noescape_{n}
- self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
- self.Sref("_unquote_once_write_{n}", 4)
- self.Sjmp("JMP" , "_escape_string")
- self.Link("_noescape_{n}") // _noescape_{n}:
- if copy {
- self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv)
- self.Sjmp("JNC", "_unquote_once_write_{n}")
- self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
- self.Sref("_unquote_once_write_{n}", 4)
- self.Sjmp("JMP", "_copy_string")
- }
- self.Link("_unquote_once_write_{n}")
- self.Emit("MOVQ" , _SI, n) // MOVQ SI, ${n}
- if stack {
- self.Emit("MOVQ", _DI, p)
- } else {
- self.WriteRecNotAX(10, _DI, p, false, false)
- }
-}
-
-func (self *_Assembler) unquote_twice(p obj.Addr, n obj.Addr, stack bool) {
- self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1)) // CMPQ st.Ep, $-1
- self.Sjmp("JE" , _LB_eof_error) // JE _eof_error
- self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -3), jit.Imm('\\')) // CMPB -3(IP)(IC), $'\\'
- self.Sjmp("JNE" , _LB_char_m3_error) // JNE _char_m3_error
- self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -2), jit.Imm('"')) // CMPB -2(IP)(IC), $'"'
- self.Sjmp("JNE" , _LB_char_m2_error) // JNE _char_m2_error
- self.slice_from(_VAR_st_Iv, -3) // SLICE st.Iv, $-3
- self.Emit("MOVQ" , _SI, _AX) // MOVQ SI, AX
- self.Emit("ADDQ" , _VAR_st_Iv, _AX) // ADDQ st.Iv, AX
- self.Emit("CMPQ" , _VAR_st_Ep, _AX) // CMPQ st.Ep, AX
- self.Sjmp("JE" , "_noescape_{n}") // JE _noescape_{n}
- self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
- self.Sref("_unquote_twice_write_{n}", 4)
- self.Sjmp("JMP" , "_escape_string_twice")
- self.Link("_noescape_{n}") // _noescape_{n}:
- self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv)
- self.Sjmp("JNC", "_unquote_twice_write_{n}")
- self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
- self.Sref("_unquote_twice_write_{n}", 4)
- self.Sjmp("JMP", "_copy_string")
- self.Link("_unquote_twice_write_{n}")
- self.Emit("MOVQ" , _SI, n) // MOVQ SI, ${n}
- if stack {
- self.Emit("MOVQ", _DI, p)
- } else {
- self.WriteRecNotAX(12, _DI, p, false, false)
- }
-}
-
-/** Memory Clearing Routines **/
-
-var (
- _F_memclrHasPointers = jit.Func(memclrHasPointers)
- _F_memclrNoHeapPointers = jit.Func(memclrNoHeapPointers)
-)
-
-func (self *_Assembler) mem_clear_fn(ptrfree bool) {
- if !ptrfree {
- self.call_go(_F_memclrHasPointers)
- } else {
- self.call_go(_F_memclrNoHeapPointers)
- }
-}
-
-func (self *_Assembler) mem_clear_rem(size int64, ptrfree bool) {
- self.Emit("MOVQ", jit.Imm(size), _CX) // MOVQ ${size}, CX
- self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
- self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _AX) // MOVQ (ST)(AX), AX
- self.Emit("SUBQ", _VP, _AX) // SUBQ VP, AX
- self.Emit("ADDQ", _AX, _CX) // ADDQ AX, CX
- self.Emit("MOVQ", _VP, jit.Ptr(_SP, 0)) // MOVQ VP, (SP)
- self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
- self.mem_clear_fn(ptrfree) // CALL_GO memclr{Has,NoHeap}Pointers
-}
-
-/** Map Assigning Routines **/
-
-var (
- _F_mapassign = jit.Func(mapassign)
- _F_mapassign_fast32 = jit.Func(mapassign_fast32)
- _F_mapassign_faststr = jit.Func(mapassign_faststr)
- _F_mapassign_fast64ptr = jit.Func(mapassign_fast64ptr)
-)
-
-var (
- _F_decodeJsonUnmarshaler obj.Addr
- _F_decodeTextUnmarshaler obj.Addr
-)
-
-func init() {
- _F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
- _F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
-}
-
-func (self *_Assembler) mapaccess_ptr(t reflect.Type) {
- if rt.MapType(rt.UnpackType(t)).IndirectElem() {
- self.vfollow(t.Elem())
- }
-}
-
-func (self *_Assembler) mapassign_std(t reflect.Type, v obj.Addr) {
- self.Emit("LEAQ", v, _AX) // LEAQ ${v}, AX
- self.mapassign_call(t, _F_mapassign) // MAPASSIGN ${t}, mapassign
-}
-
-func (self *_Assembler) mapassign_str_fast(t reflect.Type, p obj.Addr, n obj.Addr) {
- self.Emit("MOVQ", jit.Type(t), _AX) // MOVQ ${t}, AX
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8)) // MOVQ VP, 8(SP)
- self.Emit("MOVQ", p, jit.Ptr(_SP, 16)) // MOVQ ${p}, 16(SP)
- self.Emit("MOVQ", n, jit.Ptr(_SP, 24)) // MOVQ ${n}, 24(SP)
- self.call_go(_F_mapassign_faststr) // CALL_GO ${fn}
- self.Emit("MOVQ", jit.Ptr(_SP, 32), _VP) // MOVQ 32(SP), VP
- self.mapaccess_ptr(t)
-}
-
-func (self *_Assembler) mapassign_call(t reflect.Type, fn obj.Addr) {
- self.Emit("MOVQ", jit.Type(t), _SI) // MOVQ ${t}, SI
- self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0)) // MOVQ SI, (SP)
- self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8)) // MOVQ VP, 8(SP)
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP)
- self.call_go(fn) // CALL_GO ${fn}
- self.Emit("MOVQ", jit.Ptr(_SP, 24), _VP) // MOVQ 24(SP), VP
-}
-
-func (self *_Assembler) mapassign_fastx(t reflect.Type, fn obj.Addr) {
- self.mapassign_call(t, fn)
- self.mapaccess_ptr(t)
-}
-
-func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
- pv := false
- vk := t.Key()
- tk := t.Key()
-
- /* deref pointer if needed */
- if vk.Kind() == reflect.Ptr {
- pv = true
- vk = vk.Elem()
- }
-
- /* addressable value with pointer receiver */
- if addressable {
- pv = false
- tk = reflect.PtrTo(tk)
- }
-
- /* allocate the key, and call the unmarshaler */
- self.valloc(vk, _DI) // VALLOC ${vk}, DI
- // must spill vk pointer since next call_go may invoke GC
- self.Emit("MOVQ" , _DI, _VAR_vk)
- self.Emit("MOVQ" , jit.Type(tk), _AX) // MOVQ ${tk}, AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVQ" , _DI, jit.Ptr(_SP, 8)) // MOVQ DI, 8(SP)
- self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0
- self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16)) // MOVOU X0, 16(SP)
- self.call_go(_F_decodeTextUnmarshaler) // CALL_GO decodeTextUnmarshaler
- self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
- self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
- self.Emit("MOVQ" , _VAR_vk, _AX)
-
- /* select the correct assignment function */
- if !pv {
- self.mapassign_call(t, _F_mapassign)
- } else {
- self.mapassign_fastx(t, _F_mapassign_fast64ptr)
- }
-}
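
Stripped of the JIT plumbing, mapassign_utext is a three-step reflect dance: allocate a fresh key (valloc), let the key's UnmarshalText fill it in, then insert into the map, taking the fast64ptr path when the key itself is a pointer. A hedged pure-Go sketch of the same flow (assignTextKey is a hypothetical helper, not sonic's API):

    package main

    import (
        "encoding"
        "fmt"
        "net/netip"
        "reflect"
    )

    // assignTextKey allocates a key, fills it via UnmarshalText and stores
    // it, which is what the emitted code does with raw runtime calls.
    func assignTextKey(m reflect.Value, raw []byte, elem reflect.Value) error {
        kp := reflect.New(m.Type().Key()) // valloc(vk, DI)
        if err := kp.Interface().(encoding.TextUnmarshaler).UnmarshalText(raw); err != nil {
            return err // JNZ _error
        }
        m.SetMapIndex(kp.Elem(), elem) // mapassign / mapassign_fast64ptr
        return nil
    }

    func main() {
        m := map[netip.Addr]int{}
        _ = assignTextKey(reflect.ValueOf(m), []byte("127.0.0.1"), reflect.ValueOf(1))
        fmt.Println(m)
    }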
-
-/** External Unmarshaler Routines **/
-
-var (
- _F_skip_one = jit.Imm(int64(native.S_skip_one))
- _F_skip_number = jit.Imm(int64(native.S_skip_number))
-)
-
-func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
- self.call_sf(_F_skip_one) // CALL_SF skip_one
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
-    self.Sjmp("JS"   , _LB_parsing_error_v)             // JS _parsing_error_v
- self.slice_from_r(_AX, 0) // SLICE_R AX, $0
- self.Emit("MOVQ" , _DI, _VAR_sv_p) // MOVQ DI, sv.p
- self.Emit("MOVQ" , _SI, _VAR_sv_n) // MOVQ SI, sv.n
- self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref) // UNMARSHAL json, ${t}, ${deref}
-}
-
-func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
- self.parse_string() // PARSE STRING
- self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE once, sv.p, sv.n
- self.unmarshal_func(t, _F_decodeTextUnmarshaler, deref) // UNMARSHAL text, ${t}, ${deref}
-}
-
-func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) {
- pt := t
- vk := t.Kind()
-
- /* allocate the field if needed */
- if deref && vk == reflect.Ptr {
- self.Emit("MOVQ" , _VP, _AX) // MOVQ VP, AX
- self.Emit("MOVQ" , jit.Ptr(_AX, 0), _AX) // MOVQ (AX), AX
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JNZ" , "_deref_{n}") // JNZ _deref_{n}
- self.valloc(t.Elem(), _AX) // VALLOC ${t.Elem()}, AX
- self.WritePtrAX(3, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP)
- self.Link("_deref_{n}") // _deref_{n}:
- }
-
- /* set value type */
- self.Emit("MOVQ", jit.Type(pt), _CX) // MOVQ ${pt}, CX
- self.Emit("MOVQ", _CX, jit.Ptr(_SP, 0)) // MOVQ CX, (SP)
-
- /* set value pointer */
- if deref && vk == reflect.Ptr {
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
- } else {
- self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8)) // MOVQ VP, 8(SP)
- }
-
- /* set the source string and call the unmarshaler */
- self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0
- self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16)) // MOVOU X0, 16(SP)
- self.call_go(fn) // CALL_GO ${fn}
- self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
- self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
-}
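
The deref branch exists because a *T destination may still be nil when the unmarshaler runs: the emitted code allocates the pointee once, stores it through a write barrier, and only then makes the call. A reflect-based sketch of the same contract (callUnmarshalJSON is illustrative, not sonic's API):

    package main

    import (
        "encoding/json"
        "fmt"
        "reflect"
    )

    // callUnmarshalJSON allocates the pointee of a nil *T first (valloc +
    // WritePtrAX above), then hands the value to its UnmarshalJSON.
    func callUnmarshalJSON(dst reflect.Value, src []byte) error {
        if dst.Kind() == reflect.Ptr && dst.IsNil() {
            dst.Set(reflect.New(dst.Type().Elem()))
        }
        return dst.Interface().(json.Unmarshaler).UnmarshalJSON(src)
    }

    func main() {
        var n *json.RawMessage
        _ = callUnmarshalJSON(reflect.ValueOf(&n).Elem(), []byte(`{"a":1}`))
        fmt.Println(string(*n)) // {"a":1}
    }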
-
-/** Dynamic Decoding Routine **/
-
-var (
- _F_decodeTypedPointer obj.Addr
-)
-
-func init() {
- _F_decodeTypedPointer = jit.Func(decodeTypedPointer)
-}
-
-func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
- self.Emit("MOVQ" , _ARG_fv, _CX) // MOVQ fv, CX
- self.Emit("MOVOU", _ARG_sp, _X0) // MOVOU sp, X0
- self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0)) // MOVOU X0, (SP)
- self.Emit("MOVQ" , _IC, jit.Ptr(_SP, 16)) // MOVQ IC, 16(SP)
- self.Emit("MOVQ" , vt, jit.Ptr(_SP, 24)) // MOVQ ${vt}, 24(SP)
- self.Emit("MOVQ" , vp, jit.Ptr(_SP, 32)) // MOVQ ${vp}, 32(SP)
- self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 40)) // MOVQ ST, 40(SP)
- self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 48)) // MOVQ CX, 48(SP)
- self.call_go(_F_decodeTypedPointer) // CALL_GO decodeTypedPointer
- self.Emit("MOVQ" , jit.Ptr(_SP, 64), _ET) // MOVQ 64(SP), ET
- self.Emit("MOVQ" , jit.Ptr(_SP, 72), _EP) // MOVQ 72(SP), EP
- self.Emit("MOVQ" , jit.Ptr(_SP, 56), _IC) // MOVQ 56(SP), IC
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
-    self.Sjmp("JE", "_decode_dynamic_end_{n}")          // JE _decode_dynamic_end_{n}
-    self.Emit("MOVQ", _I_json_MismatchTypeError, _AX)   // MOVQ _I_json_MismatchTypeError, AX
-    self.Emit("CMPQ", _ET, _AX)                         // CMPQ ET, AX
-    self.Sjmp("JNE" , _LB_error)                        // JNE _error
- self.Emit("MOVQ", _EP, _VAR_ic) // MOVQ EP, VAR_ic
- self.Emit("MOVQ", _ET, _VAR_et) // MOVQ ET, VAR_et
-    self.Link("_decode_dynamic_end_{n}")
-}
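
The tail of decode_dynamic triages errors: a MismatchTypeError is parked in VAR_et/VAR_ic so decoding can continue, while anything else aborts through _LB_error. Roughly, using a local stand-in for the real error type:

    package main

    import (
        "errors"
        "fmt"
    )

    // mismatchErr stands in for sonic's MismatchTypeError.
    type mismatchErr struct{ pos int }

    func (e *mismatchErr) Error() string { return fmt.Sprintf("mismatch at %d", e.pos) }

    // triage records a type mismatch and lets decoding continue; any other
    // error is fatal (the JNE to _LB_error above).
    func triage(err error, saved *error) error {
        var me *mismatchErr
        if err == nil {
            return nil
        }
        if errors.As(err, &me) {
            *saved = err // MOVQ ET, VAR_et / MOVQ EP, VAR_ic
            return nil
        }
        return err
    }

    func main() {
        var saved error
        fmt.Println(triage(&mismatchErr{pos: 3}, &saved), saved)
    }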
-
-/** OpCode Assembler Functions **/
-
-var (
- _F_memequal = jit.Func(memequal)
- _F_memmove = jit.Func(memmove)
- _F_growslice = jit.Func(growslice)
- _F_makeslice = jit.Func(makeslice)
- _F_makemap_small = jit.Func(makemap_small)
- _F_mapassign_fast64 = jit.Func(mapassign_fast64)
-)
-
-var (
- _F_lspace = jit.Imm(int64(native.S_lspace))
- _F_strhash = jit.Imm(int64(caching.S_strhash))
-)
-
-var (
- _F_b64decode = jit.Imm(int64(_subr__b64decode))
- _F_decodeValue = jit.Imm(int64(_subr_decode_value))
-)
-
-var (
- _F_skip_array = jit.Imm(int64(native.S_skip_array))
- _F_skip_object = jit.Imm(int64(native.S_skip_object))
-)
-
-var (
- _F_FieldMap_GetCaseInsensitive obj.Addr
- _Empty_Slice = make([]byte, 0)
- _Zero_Base = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr))
-)
-
-const (
- _MODE_AVX2 = 1 << 2
-)
-
-const (
- _Fe_ID = int64(unsafe.Offsetof(caching.FieldEntry{}.ID))
- _Fe_Name = int64(unsafe.Offsetof(caching.FieldEntry{}.Name))
- _Fe_Hash = int64(unsafe.Offsetof(caching.FieldEntry{}.Hash))
-)
-
-const (
- _Vk_Ptr = int64(reflect.Ptr)
- _Gt_KindFlags = int64(unsafe.Offsetof(rt.GoType{}.KindFlags))
-)
-
-func init() {
- _F_FieldMap_GetCaseInsensitive = jit.Func((*caching.FieldMap).GetCaseInsensitive)
-}
-
-func (self *_Assembler) _asm_OP_any(_ *_Instr) {
- self.Emit("MOVQ" , jit.Ptr(_VP, 8), _CX) // MOVQ 8(VP), CX
- self.Emit("TESTQ" , _CX, _CX) // TESTQ CX, CX
- self.Sjmp("JZ" , "_decode_{n}") // JZ _decode_{n}
- self.Emit("CMPQ" , _CX, _VP) // CMPQ CX, VP
- self.Sjmp("JE" , "_decode_{n}") // JE _decode_{n}
- self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX
- self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX) // MOVBLZX _Gt_KindFlags(AX), DX
- self.Emit("ANDL" , jit.Imm(rt.F_kind_mask), _DX) // ANDL ${F_kind_mask}, DX
- self.Emit("CMPL" , _DX, jit.Imm(_Vk_Ptr)) // CMPL DX, ${reflect.Ptr}
- self.Sjmp("JNE" , "_decode_{n}") // JNE _decode_{n}
- self.Emit("LEAQ" , jit.Ptr(_VP, 8), _DI) // LEAQ 8(VP), DI
- self.decode_dynamic(_AX, _DI) // DECODE AX, DI
- self.Sjmp("JMP" , "_decode_end_{n}") // JMP _decode_end_{n}
- self.Link("_decode_{n}") // _decode_{n}:
- self.Emit("MOVQ" , _ARG_fv, _DF) // MOVQ fv, DF
- self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 0)) // MOVQ _ST, (SP)
- self.call(_F_decodeValue) // CALL decodeValue
- self.Emit("TESTQ" , _EP, _EP) // TESTQ EP, EP
- self.Sjmp("JNZ" , _LB_parsing_error) // JNZ _parsing_error
- self.Link("_decode_end_{n}") // _decode_end_{n}:
-}
-
-func (self *_Assembler) _asm_OP_dyn(p *_Instr) {
- self.Emit("MOVQ" , jit.Type(p.vt()), _ET) // MOVQ ${p.vt()}, ET
- self.Emit("CMPQ" , jit.Ptr(_VP, 8), jit.Imm(0)) // CMPQ 8(VP), $0
- self.Sjmp("JE" , _LB_type_error) // JE _type_error
- self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX
- self.Emit("MOVQ" , jit.Ptr(_AX, 8), _AX) // MOVQ 8(AX), AX
- self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX) // MOVBLZX _Gt_KindFlags(AX), DX
- self.Emit("ANDL" , jit.Imm(rt.F_kind_mask), _DX) // ANDL ${F_kind_mask}, DX
- self.Emit("CMPL" , _DX, jit.Imm(_Vk_Ptr)) // CMPL DX, ${reflect.Ptr}
- self.Sjmp("JNE" , _LB_type_error) // JNE _type_error
- self.Emit("LEAQ" , jit.Ptr(_VP, 8), _DI) // LEAQ 8(VP), DI
- self.decode_dynamic(_AX, _DI) // DECODE AX, DI
- self.Link("_decode_end_{n}") // _decode_end_{n}:
-}
-
-func (self *_Assembler) _asm_OP_str(_ *_Instr) {
- self.parse_string() // PARSE STRING
- self.unquote_once(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false, true) // UNQUOTE once, (VP), 8(VP)
-}
-
-func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
- self.parse_string() // PARSE STRING
- self.slice_from(_VAR_st_Iv, -1) // SLICE st.Iv, $-1
- self.Emit("MOVQ" , _DI, jit.Ptr(_VP, 0)) // MOVQ DI, (VP)
- self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 8)) // MOVQ SI, 8(VP)
- self.Emit("SHRQ" , jit.Imm(2), _SI) // SHRQ $2, SI
- self.Emit("LEAQ" , jit.Sib(_SI, _SI, 2, 0), _SI) // LEAQ (SI)(SI*2), SI
- self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16)) // MOVQ SI, 16(VP)
- self.malloc(_SI, _SI) // MALLOC SI, SI
-
-    // TODO: due to a bug in base64x, only the _MODE_JSON mode is used for now
-    self.Emit("MOVL", jit.Imm(_MODE_JSON), _CX)          // MOVL $_MODE_JSON, CX
-
- /* call the decoder */
- self.Emit("XORL" , _DX, _DX) // XORL DX, DX
- self.Emit("MOVQ" , _VP, _DI) // MOVQ VP, DI
-
-    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _R9)             // MOVQ (VP), R9
- self.WriteRecNotAX(4, _SI, jit.Ptr(_VP, 0), true, false) // XCHGQ SI, (VP)
- self.Emit("MOVQ" , _R9, _SI)
-
- self.Emit("XCHGQ", _DX, jit.Ptr(_VP, 8)) // XCHGQ DX, 8(VP)
- self.call(_F_b64decode) // CALL b64decode
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JS" , _LB_base64_error) // JS _base64_error
- self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP)
-}
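
The SHRQ/LEAQ pair above is the decoded-capacity formula for base64: cap = (n / 4) * 3, a shift followed by a single LEA. In plain Go:

    package main

    import "fmt"

    // b64DecodedCap mirrors "SHRQ $2, SI; LEAQ (SI)(SI*2), SI".
    func b64DecodedCap(n int) int { return (n >> 2) * 3 }

    func main() {
        fmt.Println(b64DecodedCap(8)) // 6
    }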
-
-func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
- self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX) // LEAQ 4(IC), AX
- self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL
- self.Sjmp("JA" , _LB_eof_error) // JA _eof_error
- self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('f')) // CMPB (IP)(IC), $'f'
- self.Sjmp("JE" , "_false_{n}") // JE _false_{n}
- self.Emit("MOVL", jit.Imm(_IM_true), _CX) // MOVL $"true", CX
- self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPL CX, (IP)(IC)
- self.Sjmp("JE" , "_bool_true_{n}")
-
- // try to skip the value
- self.Emit("MOVQ", _IC, _VAR_ic)
- self.Emit("MOVQ", _T_bool, _ET)
- self.Emit("MOVQ", _ET, _VAR_et)
- self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
- self.Sref("_end_{n}", 4)
- self.Emit("MOVQ", _R9, _VAR_pc)
- self.Sjmp("JMP" , _LB_skip_one)
-
- self.Link("_bool_true_{n}")
- self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
- self.Emit("MOVB", jit.Imm(1), jit.Ptr(_VP, 0)) // MOVB $1, (VP)
- self.Sjmp("JMP" , "_end_{n}") // JMP _end_{n}
- self.Link("_false_{n}") // _false_{n}:
- self.Emit("ADDQ", jit.Imm(1), _AX) // ADDQ $1, AX
- self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC
- self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL
- self.Sjmp("JA" , _LB_eof_error) // JA _eof_error
- self.Emit("MOVL", jit.Imm(_IM_alse), _CX) // MOVL $"alse", CX
- self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPL CX, (IP)(IC)
- self.Sjmp("JNE" , _LB_im_error) // JNE _im_error
- self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
- self.Emit("XORL", _AX, _AX) // XORL AX, AX
- self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP)
- self.Link("_end_{n}") // _end_{n}:
-}
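
The literal matching here leans on 32-bit immediates: "true" is one CMPL, and "false" is a CMPB of 'f' followed by a CMPL of "alse" (hence _IM_alse). The immediates are just little-endian byte packings:

    package main

    import (
        "encoding/binary"
        "fmt"
    )

    // pack4 shows how _IM_true and _IM_alse are derived.
    func pack4(s string) uint32 { return binary.LittleEndian.Uint32([]byte(s)) }

    func main() {
        fmt.Printf("_IM_true = %#x, _IM_alse = %#x\n", pack4("true"), pack4("alse"))
    }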
-
-func (self *_Assembler) _asm_OP_num(_ *_Instr) {
- self.Emit("MOVQ", jit.Imm(0), _VAR_fl)
- self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
- self.Emit("MOVQ", _IC, _BP)
- self.Sjmp("JNE", "_skip_number_{n}")
- self.Emit("MOVQ", jit.Imm(1), _VAR_fl)
- self.Emit("ADDQ", jit.Imm(1), _IC)
- self.Link("_skip_number_{n}")
-
- /* call skip_number */
-    self.call_sf(_F_skip_number)                         // CALL_SF skip_number
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JNS" , "_num_next_{n}")
-
- /* call skip one */
- self.Emit("MOVQ", _BP, _VAR_ic)
- self.Emit("MOVQ", _T_number, _ET)
- self.Emit("MOVQ", _ET, _VAR_et)
-    self.Byte(0x4c, 0x8d, 0x0d)                          // LEAQ (PC), R9
- self.Sref("_num_end_{n}", 4)
- self.Emit("MOVQ", _R9, _VAR_pc)
- self.Sjmp("JMP" , _LB_skip_one)
-
-    /* assign string */
- self.Link("_num_next_{n}")
- self.slice_from_r(_AX, 0)
- self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
- self.Sjmp("JNC", "_num_write_{n}")
- self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9
- self.Sref("_num_write_{n}", 4)
- self.Sjmp("JMP", "_copy_string")
- self.Link("_num_write_{n}")
- self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8)) // MOVQ SI, 8(VP)
- self.WriteRecNotAX(13, _DI, jit.Ptr(_VP, 0), false, false)
-
- /* check if quoted */
- self.Emit("CMPQ", _VAR_fl, jit.Imm(1))
- self.Sjmp("JNE", "_num_end_{n}")
- self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
- self.Sjmp("JNE", _LB_char_0_error)
- self.Emit("ADDQ", jit.Imm(1), _IC)
- self.Link("_num_end_{n}")
-}
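
Note that _asm_OP_num also tolerates string-quoted numbers: a leading '"' sets VAR_fl and advances the cursor, and once the digits are consumed the closing quote must follow or it is a character error. The same check in plain Go terms (a sketch, not sonic's parser):

    package main

    import "fmt"

    // trimNumberQuotes models the VAR_fl handling: strip a balanced pair
    // of quotes around a number, reject an unbalanced one.
    func trimNumberQuotes(src string) (string, error) {
        if len(src) == 0 || src[0] != '"' {
            return src, nil // fl = 0: bare number
        }
        if len(src) < 2 || src[len(src)-1] != '"' {
            return "", fmt.Errorf("missing closing quote") // _LB_char_0_error
        }
        return src[1 : len(src)-1], nil
    }

    func main() {
        fmt.Println(trimNumberQuotes(`"12.5"`))
    }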
-
-func (self *_Assembler) _asm_OP_i8(ins *_Instr) {
- var pin = "_i8_end_{n}"
- self.parse_signed(int8Type, pin, -1) // PARSE int8
- self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8) // RANGE int8
- self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP)
- self.Link(pin)
-}
-
-func (self *_Assembler) _asm_OP_i16(ins *_Instr) {
- var pin = "_i16_end_{n}"
- self.parse_signed(int16Type, pin, -1) // PARSE int16
- self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16) // RANGE int16
- self.Emit("MOVW", _AX, jit.Ptr(_VP, 0)) // MOVW AX, (VP)
- self.Link(pin)
-}
-
-func (self *_Assembler) _asm_OP_i32(ins *_Instr) {
- var pin = "_i32_end_{n}"
- self.parse_signed(int32Type, pin, -1) // PARSE int32
- self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32) // RANGE int32
- self.Emit("MOVL", _AX, jit.Ptr(_VP, 0)) // MOVL AX, (VP)
- self.Link(pin)
-}
-
-func (self *_Assembler) _asm_OP_i64(ins *_Instr) {
- var pin = "_i64_end_{n}"
- self.parse_signed(int64Type, pin, -1) // PARSE int64
- self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX
- self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP)
- self.Link(pin)
-}
-
-func (self *_Assembler) _asm_OP_u8(ins *_Instr) {
- var pin = "_u8_end_{n}"
- self.parse_unsigned(uint8Type, pin, -1) // PARSE uint8
- self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8) // RANGE uint8
- self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP)
- self.Link(pin)
-}
-
-func (self *_Assembler) _asm_OP_u16(ins *_Instr) {
- var pin = "_u16_end_{n}"
- self.parse_unsigned(uint16Type, pin, -1) // PARSE uint16
- self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16) // RANGE uint16
- self.Emit("MOVW", _AX, jit.Ptr(_VP, 0)) // MOVW AX, (VP)
- self.Link(pin)
-}
-
-func (self *_Assembler) _asm_OP_u32(ins *_Instr) {
- var pin = "_u32_end_{n}"
- self.parse_unsigned(uint32Type, pin, -1) // PARSE uint32
- self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32) // RANGE uint32
- self.Emit("MOVL", _AX, jit.Ptr(_VP, 0)) // MOVL AX, (VP)
- self.Link(pin)
-}
-
-func (self *_Assembler) _asm_OP_u64(ins *_Instr) {
- var pin = "_u64_end_{n}"
- self.parse_unsigned(uint64Type, pin, -1) // PARSE uint64
- self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX
- self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP)
- self.Link(pin)
-}
-
-func (self *_Assembler) _asm_OP_f32(ins *_Instr) {
- var pin = "_f32_end_{n}"
- self.parse_number(float32Type, pin, -1) // PARSE NUMBER
- self.range_single() // RANGE float32
- self.Emit("MOVSS", _X0, jit.Ptr(_VP, 0)) // MOVSS X0, (VP)
- self.Link(pin)
-}
-
-func (self *_Assembler) _asm_OP_f64(ins *_Instr) {
- var pin = "_f64_end_{n}"
- self.parse_number(float64Type, pin, -1) // PARSE NUMBER
- self.Emit("MOVSD", _VAR_st_Dv, _X0) // MOVSD st.Dv, X0
- self.Emit("MOVSD", _X0, jit.Ptr(_VP, 0)) // MOVSD X0, (VP)
- self.Link(pin)
-}
-
-func (self *_Assembler) _asm_OP_unquote(ins *_Instr) {
- self.check_eof(2)
- self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('\\')) // CMPB (IP)(IC), $'\\'
- self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error
- self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 1), jit.Imm('"')) // CMPB 1(IP)(IC), $'"'
- self.Sjmp("JNE" , _LB_char_1_error) // JNE _char_1_error
- self.Emit("ADDQ", jit.Imm(2), _IC) // ADDQ $2, IC
- self.parse_string() // PARSE STRING
- self.unquote_twice(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false) // UNQUOTE twice, (VP), 8(VP)
-}
-
-func (self *_Assembler) _asm_OP_nil_1(_ *_Instr) {
- self.Emit("XORL", _AX, _AX) // XORL AX, AX
- self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP)
-}
-
-func (self *_Assembler) _asm_OP_nil_2(_ *_Instr) {
- self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
- self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
-}
-
-func (self *_Assembler) _asm_OP_nil_3(_ *_Instr) {
- self.Emit("XORL" , _AX, _AX) // XORL AX, AX
- self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
- self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
-    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 16))            // MOVQ AX, 16(VP)
-}
-
-func (self *_Assembler) _asm_OP_deref(p *_Instr) {
- self.vfollow(p.vt())
-}
-
-func (self *_Assembler) _asm_OP_index(p *_Instr) {
- self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ ${p.vi()}, AX
- self.Emit("ADDQ", _AX, _VP) // ADDQ _AX, _VP
-}
-
-func (self *_Assembler) _asm_OP_is_null(p *_Instr) {
- self.Emit("LEAQ" , jit.Ptr(_IC, 4), _AX) // LEAQ 4(IC), AX
- self.Emit("CMPQ" , _AX, _IL) // CMPQ AX, IL
- self.Sjmp("JA" , "_not_null_{n}") // JA _not_null_{n}
- self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null)) // CMPL (IP)(IC), $"null"
- self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC
- self.Xjmp("JE" , p.vi()) // JE {p.vi()}
- self.Link("_not_null_{n}") // _not_null_{n}:
-}
-
-func (self *_Assembler) _asm_OP_is_null_quote(p *_Instr) {
-    self.Emit("LEAQ"   , jit.Ptr(_IC, 5), _AX)              // LEAQ 5(IC), AX
- self.Emit("CMPQ" , _AX, _IL) // CMPQ AX, IL
- self.Sjmp("JA" , "_not_null_quote_{n}") // JA _not_null_quote_{n}
- self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null)) // CMPL (IP)(IC), $"null"
- self.Sjmp("JNE" , "_not_null_quote_{n}") // JNE _not_null_quote_{n}
- self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 4), jit.Imm('"')) // CMPB 4(IP)(IC), $'"'
- self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC
- self.Xjmp("JE" , p.vi()) // JE {p.vi()}
- self.Link("_not_null_quote_{n}") // _not_null_quote_{n}:
-}
-
-func (self *_Assembler) _asm_OP_map_init(_ *_Instr) {
- self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JNZ" , "_end_{n}") // JNZ _end_{n}
- self.call_go(_F_makemap_small) // CALL_GO makemap_small
- self.Emit("MOVQ" , jit.Ptr(_SP, 0), _AX) // MOVQ (SP), AX
- self.WritePtrAX(6, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP)
- self.Link("_end_{n}") // _end_{n}:
- self.Emit("MOVQ" , _AX, _VP) // MOVQ AX, VP
-}
-
-func (self *_Assembler) _asm_OP_map_key_i8(p *_Instr) {
- self.parse_signed(int8Type, "", p.vi()) // PARSE int8
- self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8) // RANGE int8
- self.match_char('"')
- self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN int8, mapassign, st.Iv
-}
-
-func (self *_Assembler) _asm_OP_map_key_i16(p *_Instr) {
- self.parse_signed(int16Type, "", p.vi()) // PARSE int16
- self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16) // RANGE int16
- self.match_char('"')
- self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN int16, mapassign, st.Iv
-}
-
-func (self *_Assembler) _asm_OP_map_key_i32(p *_Instr) {
- self.parse_signed(int32Type, "", p.vi()) // PARSE int32
- self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32) // RANGE int32
- self.match_char('"')
- if vt := p.vt(); !mapfast(vt) {
- self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN int32, mapassign, st.Iv
- } else {
- self.mapassign_fastx(vt, _F_mapassign_fast32) // MAPASSIGN int32, mapassign_fast32
- }
-}
-
-func (self *_Assembler) _asm_OP_map_key_i64(p *_Instr) {
- self.parse_signed(int64Type, "", p.vi()) // PARSE int64
- self.match_char('"')
- if vt := p.vt(); !mapfast(vt) {
- self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN int64, mapassign, st.Iv
- } else {
- self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX
- self.mapassign_fastx(vt, _F_mapassign_fast64) // MAPASSIGN int64, mapassign_fast64
- }
-}
-
-func (self *_Assembler) _asm_OP_map_key_u8(p *_Instr) {
- self.parse_unsigned(uint8Type, "", p.vi()) // PARSE uint8
- self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8) // RANGE uint8
- self.match_char('"')
-    self.mapassign_std(p.vt(), _VAR_st_Iv)                          // MAPASSIGN uint8, mapassign, st.Iv
-}
-
-func (self *_Assembler) _asm_OP_map_key_u16(p *_Instr) {
- self.parse_unsigned(uint16Type, "", p.vi()) // PARSE uint16
- self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16) // RANGE uint16
- self.match_char('"')
-    self.mapassign_std(p.vt(), _VAR_st_Iv)                          // MAPASSIGN uint16, mapassign, st.Iv
-}
-
-func (self *_Assembler) _asm_OP_map_key_u32(p *_Instr) {
- self.parse_unsigned(uint32Type, "", p.vi()) // PARSE uint32
- self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32) // RANGE uint32
- self.match_char('"')
- if vt := p.vt(); !mapfast(vt) {
-        self.mapassign_std(vt, _VAR_st_Iv)              // MAPASSIGN uint32, mapassign, st.Iv
- } else {
- self.mapassign_fastx(vt, _F_mapassign_fast32) // MAPASSIGN uint32, mapassign_fast32
- }
-}
-
-func (self *_Assembler) _asm_OP_map_key_u64(p *_Instr) {
- self.parse_unsigned(uint64Type, "", p.vi()) // PARSE uint64
- self.match_char('"')
- if vt := p.vt(); !mapfast(vt) {
-        self.mapassign_std(vt, _VAR_st_Iv)              // MAPASSIGN uint64, mapassign, st.Iv
- } else {
- self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX
- self.mapassign_fastx(vt, _F_mapassign_fast64) // MAPASSIGN uint64, mapassign_fast64
- }
-}
-
-func (self *_Assembler) _asm_OP_map_key_f32(p *_Instr) {
- self.parse_number(float32Type, "", p.vi()) // PARSE NUMBER
- self.range_single() // RANGE float32
- self.Emit("MOVSS", _X0, _VAR_st_Dv) // MOVSS X0, st.Dv
- self.match_char('"')
- self.mapassign_std(p.vt(), _VAR_st_Dv) // MAPASSIGN ${p.vt()}, mapassign, st.Dv
-}
-
-func (self *_Assembler) _asm_OP_map_key_f64(p *_Instr) {
- self.parse_number(float64Type, "", p.vi()) // PARSE NUMBER
- self.match_char('"')
- self.mapassign_std(p.vt(), _VAR_st_Dv) // MAPASSIGN ${p.vt()}, mapassign, st.Dv
-}
-
-func (self *_Assembler) _asm_OP_map_key_str(p *_Instr) {
- self.parse_string() // PARSE STRING
- self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE once, sv.p, sv.n
- if vt := p.vt(); !mapfast(vt) {
- self.valloc(vt.Key(), _DI)
- self.Emit("MOVOU", _VAR_sv, _X0)
- self.Emit("MOVOU", _X0, jit.Ptr(_DI, 0))
- self.mapassign_std(vt, jit.Ptr(_DI, 0))
- } else {
- self.Emit("MOVQ", _VAR_sv_p, _DI) // MOVQ sv.p, DI
- self.Emit("MOVQ", _VAR_sv_n, _SI) // MOVQ sv.n, SI
- self.mapassign_str_fast(vt, _DI, _SI) // MAPASSIGN string, DI, SI
- }
-}
-
-func (self *_Assembler) _asm_OP_map_key_utext(p *_Instr) {
- self.parse_string() // PARSE STRING
- self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE once, sv.p, sv.n
- self.mapassign_utext(p.vt(), false) // MAPASSIGN utext, ${p.vt()}, false
-}
-
-func (self *_Assembler) _asm_OP_map_key_utext_p(p *_Instr) {
- self.parse_string() // PARSE STRING
- self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false) // UNQUOTE once, sv.p, sv.n
- self.mapassign_utext(p.vt(), true) // MAPASSIGN utext, ${p.vt()}, true
-}
-
-func (self *_Assembler) _asm_OP_array_skip(_ *_Instr) {
- self.call_sf(_F_skip_array) // CALL_SF skip_array
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
-    self.Sjmp("JS"   , _LB_parsing_error_v)             // JS _parsing_error_v
-}
-
-func (self *_Assembler) _asm_OP_array_clear(p *_Instr) {
- self.mem_clear_rem(p.i64(), true)
-}
-
-func (self *_Assembler) _asm_OP_array_clear_p(p *_Instr) {
- self.mem_clear_rem(p.i64(), false)
-}
-
-func (self *_Assembler) _asm_OP_slice_init(p *_Instr) {
- self.Emit("XORL" , _AX, _AX) // XORL AX, AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP)
- self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX) // MOVQ 16(VP), AX
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JNZ" , "_done_{n}") // JNZ _done_{n}
- self.Emit("MOVQ" , jit.Imm(_MinSlice), _CX) // MOVQ ${_MinSlice}, CX
- self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16)) // MOVQ CX, 16(VP)
- self.Emit("MOVQ" , jit.Type(p.vt()), _DX) // MOVQ ${p.vt()}, DX
- self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP)
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
- self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 16)) // MOVQ CX, 16(SP)
- self.call_go(_F_makeslice) // CALL_GO makeslice
- self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX) // MOVQ 24(SP), AX
- self.WritePtrAX(7, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP)
- self.Link("_done_{n}") // _done_{n}:
- self.Emit("XORL" , _AX, _AX) // XORL AX, AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP)
-}
-
-func (self *_Assembler) _asm_OP_check_empty(p *_Instr) {
- rbracket := p.vb()
- if rbracket == ']' {
- self.check_eof(1)
- self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX) // LEAQ 1(IC), AX
- self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket))) // CMPB (IP)(IC), ']'
- self.Sjmp("JNE" , "_not_empty_array_{n}") // JNE _not_empty_array_{n}
- self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
- self.Emit("MOVQ", jit.Imm(_Zero_Base), _AX)
- self.WritePtrAX(9, jit.Ptr(_VP, 0), false)
- self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
- self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8)) // MOVOU X0, 8(VP)
- self.Xjmp("JMP" , p.vi()) // JMP {p.vi()}
- self.Link("_not_empty_array_{n}")
- } else {
-        panic("only empty-array checking is implemented here")
- }
-}
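
The empty-array fast path never allocates: for "[]" it stores a slice header whose data pointer is the shared base of a global empty slice (_Zero_Base above), yielding a non-nil, zero-length result. The observable behaviour, sketched:

    package main

    import "fmt"

    // emptyBase plays the role of _Empty_Slice: one global base pointer
    // shared by every empty slice the decoder hands out.
    var emptyBase = make([]byte, 0)

    func emptySlice() []byte { return emptyBase }

    func main() {
        s := emptySlice()
        fmt.Println(s == nil, len(s), cap(s)) // false 0 0
    }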
-
-func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
- self.Emit("MOVQ" , jit.Ptr(_VP, 8), _AX) // MOVQ 8(VP), AX
- self.Emit("CMPQ" , _AX, jit.Ptr(_VP, 16)) // CMPQ AX, 16(VP)
- self.Sjmp("JB" , "_index_{n}") // JB _index_{n}
- self.Emit("MOVQ" , jit.Type(p.vt()), _AX) // MOVQ ${p.vt()}, AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVOU", jit.Ptr(_VP, 0), _X0) // MOVOU (VP), X0
- self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP)
- self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX) // MOVQ 16(VP), AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 24)) // MOVQ AX, 24(SP)
- self.Emit("SHLQ" , jit.Imm(1), _AX) // SHLQ $1, AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP)
- self.call_go(_F_growslice) // CALL_GO growslice
- self.Emit("MOVQ" , jit.Ptr(_SP, 40), _DI) // MOVQ 40(SP), DI
- self.Emit("MOVQ" , jit.Ptr(_SP, 48), _AX) // MOVQ 48(SP), AX
- self.Emit("MOVQ" , jit.Ptr(_SP, 56), _SI) // MOVQ 56(SP), SI
- self.WriteRecNotAX(8, _DI, jit.Ptr(_VP, 0), true, true)// MOVQ DI, (VP)
- self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP)
- self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16)) // MOVQ SI, 16(VP)
-
-    // growslice does not zero the region between the old length and the new
-    // capacity when the element type has no pointer data, so we must zero it
-    // ourselves to avoid decoding stale bytes as random values.
- if rt.UnpackType(p.vt()).PtrData == 0 {
-        self.Emit("SUBQ" , _AX, _SI)                        // SUBQ AX, SI
-
- self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8)) // ADDQ $1, 8(VP)
- self.Emit("MOVQ" , _DI, _VP) // MOVQ DI, VP
- self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX) // MOVQ ${p.vlen()}, CX
- self.From("MULQ" , _CX) // MULQ CX
- self.Emit("ADDQ" , _AX, _VP) // ADDQ AX, VP
-
- self.Emit("MOVQ" , _SI, _AX) // MOVQ SI, AX
- self.From("MULQ" , _CX) // MULQ CX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
-
- self.Emit("MOVQ" , _VP, jit.Ptr(_SP, 0)) // MOVQ VP, (SP)
- self.mem_clear_fn(true) // CALL_GO memclr{Has,NoHeap}
- self.Sjmp("JMP", "_append_slice_end_{n}") // JMP _append_slice_end_{n}
- }
-
- self.Link("_index_{n}") // _index_{n}:
- self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8)) // ADDQ $1, 8(VP)
- self.Emit("MOVQ" , jit.Ptr(_VP, 0), _VP) // MOVQ (VP), VP
- self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX) // MOVQ ${p.vlen()}, CX
- self.From("MULQ" , _CX) // MULQ CX
- self.Emit("ADDQ" , _AX, _VP) // ADDQ AX, VP
- self.Link("_append_slice_end_{n}")
-}
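
The MULQ sequences above convert element indices into byte offsets: the dirty region left by growslice runs from newLen*esz to newCap*esz (counted from the new data pointer) and is handed to memclrNoHeapPointers. The arithmetic, isolated:

    package main

    import "fmt"

    // staleRegion returns the byte offset and length that
    // _asm_OP_slice_append zeroes after a pointer-free growslice.
    func staleRegion(newLen, newCap, esz uintptr) (off, n uintptr) {
        return newLen * esz, (newCap - newLen) * esz
    }

    func main() {
        off, n := staleRegion(5, 8, 8) // 5 of 8 int64 slots in use
        fmt.Println(off, n)            // 40 24
    }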
-
-func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
- self.call_sf(_F_skip_object) // CALL_SF skip_object
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
-    self.Sjmp("JS"   , _LB_parsing_error_v)             // JS _parsing_error_v
-}
-
-func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
- self.call_sf(_F_skip_one) // CALL_SF skip_one
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
-    self.Sjmp("JS"   , _LB_parsing_error_v)             // JS _parsing_error_v
-}
-
-func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
- assert_eq(caching.FieldEntrySize, 32, "invalid field entry size")
- self.Emit("MOVQ" , jit.Imm(-1), _AX) // MOVQ $-1, AX
- self.Emit("MOVQ" , _AX, _VAR_sr) // MOVQ AX, sr
- self.parse_string() // PARSE STRING
- self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false) // UNQUOTE once, sv.p, sv.n
- self.Emit("LEAQ" , _VAR_sv, _AX) // LEAQ sv, AX
- self.Emit("XORL" , _CX, _CX) // XORL CX, CX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
- self.call_go(_F_strhash) // CALL_GO strhash
- self.Emit("MOVQ" , jit.Ptr(_SP, 16), _AX) // MOVQ 16(SP), AX
- self.Emit("MOVQ" , _AX, _R9) // MOVQ AX, R9
- self.Emit("MOVQ" , jit.Imm(freezeFields(p.vf())), _CX) // MOVQ ${p.vf()}, CX
- self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_b), _SI) // MOVQ FieldMap.b(CX), SI
- self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_N), _CX) // MOVQ FieldMap.N(CX), CX
- self.Emit("TESTQ", _CX, _CX) // TESTQ CX, CX
- self.Sjmp("JZ" , "_try_lowercase_{n}") // JZ _try_lowercase_{n}
- self.Link("_loop_{n}") // _loop_{n}:
- self.Emit("XORL" , _DX, _DX) // XORL DX, DX
- self.From("DIVQ" , _CX) // DIVQ CX
- self.Emit("LEAQ" , jit.Ptr(_DX, 1), _AX) // LEAQ 1(DX), AX
- self.Emit("SHLQ" , jit.Imm(5), _DX) // SHLQ $5, DX
- self.Emit("LEAQ" , jit.Sib(_SI, _DX, 1, 0), _DI) // LEAQ (SI)(DX), DI
- self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Hash), _R8) // MOVQ FieldEntry.Hash(DI), R8
- self.Emit("TESTQ", _R8, _R8) // TESTQ R8, R8
- self.Sjmp("JZ" , "_try_lowercase_{n}") // JZ _try_lowercase_{n}
- self.Emit("CMPQ" , _R8, _R9) // CMPQ R8, R9
- self.Sjmp("JNE" , "_loop_{n}") // JNE _loop_{n}
- self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name + 8), _DX) // MOVQ FieldEntry.Name+8(DI), DX
- self.Emit("CMPQ" , _DX, _VAR_sv_n) // CMPQ DX, sv.n
- self.Sjmp("JNE" , "_loop_{n}") // JNE _loop_{n}
- self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_ID), _R8) // MOVQ FieldEntry.ID(DI), R8
- self.Emit("MOVQ" , _AX, _VAR_ss_AX) // MOVQ AX, ss.AX
- self.Emit("MOVQ" , _CX, _VAR_ss_CX) // MOVQ CX, ss.CX
- self.Emit("MOVQ" , _SI, _VAR_ss_SI) // MOVQ SI, ss.SI
- self.Emit("MOVQ" , _R8, _VAR_ss_R8) // MOVQ R8, ss.R8
- self.Emit("MOVQ" , _R9, _VAR_ss_R9) // MOVQ R9, ss.R9
- self.Emit("MOVQ" , _VAR_sv_p, _AX) // MOVQ _VAR_sv_p, AX
- self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name), _CX) // MOVQ FieldEntry.Name(DI), CX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
- self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 16)) // MOVQ DX, 16(SP)
- self.call_go(_F_memequal) // CALL_GO memequal
- self.Emit("MOVQ" , _VAR_ss_AX, _AX) // MOVQ ss.AX, AX
- self.Emit("MOVQ" , _VAR_ss_CX, _CX) // MOVQ ss.CX, CX
- self.Emit("MOVQ" , _VAR_ss_SI, _SI) // MOVQ ss.SI, SI
- self.Emit("MOVQ" , _VAR_ss_R9, _R9) // MOVQ ss.R9, R9
- self.Emit("MOVB" , jit.Ptr(_SP, 24), _DX) // MOVB 24(SP), DX
- self.Emit("TESTB", _DX, _DX) // TESTB DX, DX
- self.Sjmp("JZ" , "_loop_{n}") // JZ _loop_{n}
- self.Emit("MOVQ" , _VAR_ss_R8, _R8) // MOVQ ss.R8, R8
- self.Emit("MOVQ" , _R8, _VAR_sr) // MOVQ R8, sr
- self.Sjmp("JMP" , "_end_{n}") // JMP _end_{n}
- self.Link("_try_lowercase_{n}") // _try_lowercase_{n}:
- self.Emit("MOVQ" , jit.Imm(referenceFields(p.vf())), _AX) // MOVQ ${p.vf()}, AX
- self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP)
- self.call_go(_F_FieldMap_GetCaseInsensitive) // CALL_GO FieldMap::GetCaseInsensitive
- self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX) // MOVQ 24(SP), AX
- self.Emit("MOVQ" , _AX, _VAR_sr) // MOVQ AX, _VAR_sr
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JNS" , "_end_{n}") // JNS _end_{n}
- self.Emit("BTQ" , jit.Imm(_F_disable_unknown), _ARG_fv) // BTQ ${_F_disable_unknown}, fv
- self.Sjmp("JC" , _LB_field_error) // JC _field_error
- self.Link("_end_{n}") // _end_{n}:
-}
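
The probe loop above is open addressing over a 32-byte-per-entry table: slot = hash % N, then linear probing until an empty slot (hash 0, a miss that falls back to the case-insensitive lookup) or a matching hash and name. A compact Go model (fieldEntry stands in for caching.FieldEntry; the real table always keeps empty slots, so the loop terminates):

    package main

    import "fmt"

    type fieldEntry struct {
        hash uint64 // 0 marks an empty slot
        name string
        id   int
    }

    // lookup mirrors the DIVQ/probe sequence: h % N picks a slot, a miss
    // advances to slot+1, and a hit requires hash, length and bytes to
    // all match (the memequal call above).
    func lookup(b []fieldEntry, hash uint64, name string) int {
        h := hash
        for {
            slot := h % uint64(len(b))
            e := &b[slot]
            if e.hash == 0 {
                return -1 // try the case-insensitive map next
            }
            if e.hash == hash && e.name == name {
                return e.id
            }
            h = slot + 1 // LEAQ 1(DX), AX
        }
    }

    func main() {
        b := []fieldEntry{{}, {}, {}, {hash: 7, name: "id", id: 2}}
        fmt.Println(lookup(b, 7, "id")) // 2
    }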
-
-func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
- self.unmarshal_json(p.vt(), true)
-}
-
-func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
- self.unmarshal_json(p.vt(), false)
-}
-
-func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
- self.unmarshal_text(p.vt(), true)
-}
-
-func (self *_Assembler) _asm_OP_unmarshal_text_p(p *_Instr) {
- self.unmarshal_text(p.vt(), false)
-}
-
-func (self *_Assembler) _asm_OP_lspace(_ *_Instr) {
- self.lspace("_{n}")
-}
-
-func (self *_Assembler) lspace(subfix string) {
- var label = "_lspace" + subfix
-
- self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
- self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error
- self.Emit("MOVQ" , jit.Imm(_BM_space), _DX) // MOVQ _BM_space, DX
- self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
- self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
- self.Sjmp("JA" , label) // JA _nospace_{n}
- self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
- self.Sjmp("JNC" , label) // JNC _nospace_{n}
-
- /* test up to 4 characters */
- for i := 0; i < 3; i++ {
- self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
- self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
- self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error
- self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
- self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
- self.Sjmp("JA" , label) // JA _nospace_{n}
- self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
- self.Sjmp("JNC" , label) // JNC _nospace_{n}
- }
-
- /* handle over to the native function */
- self.Emit("MOVQ" , _IP, _DI) // MOVQ IP, DI
- self.Emit("MOVQ" , _IL, _SI) // MOVQ IL, SI
- self.Emit("MOVQ" , _IC, _DX) // MOVQ IC, DX
- self.call(_F_lspace) // CALL lspace
- self.Emit("TESTQ" , _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JS" , _LB_parsing_error_v) // JS _parsing_error_v
- self.Emit("CMPQ" , _AX, _IL) // CMPQ AX, IL
- self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error
- self.Emit("MOVQ" , _AX, _IC) // MOVQ AX, IC
- self.Link(label) // _nospace_{n}:
-}
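
The BTQ trick in lspace packs the four JSON whitespace characters into one 64-bit mask (_BM_space), so any byte <= ' ' is classified with a single bit test instead of four compares. Assuming the usual mask contents:

    package main

    import "fmt"

    // bmSpace has bits set at ' ', '\t', '\n' and '\r', matching what
    // _BM_space is presumed to contain.
    const bmSpace uint64 = 1<<' ' | 1<<'\t' | 1<<'\n' | 1<<'\r'

    // isSpace is the Go shape of "CMPQ AX, $' '; JA ...; BTQ AX, DX".
    func isSpace(c byte) bool { return c <= ' ' && bmSpace>>c&1 == 1 }

    func main() {
        fmt.Println(isSpace(' '), isSpace('\n'), isSpace('x')) // true true false
    }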
-
-func (self *_Assembler) _asm_OP_match_char(p *_Instr) {
- self.match_char(p.vb())
-}
-
-func (self *_Assembler) match_char(char byte) {
- self.check_eof(1)
- self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(char))) // CMPB (IP)(IC), ${p.vb()}
- self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error
- self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC
-}
-
-func (self *_Assembler) _asm_OP_check_char(p *_Instr) {
- self.check_eof(1)
- self.Emit("LEAQ" , jit.Ptr(_IC, 1), _AX) // LEAQ 1(IC), AX
- self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb()))) // CMPB (IP)(IC), ${p.vb()}
- self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC
- self.Xjmp("JE" , p.vi()) // JE {p.vi()}
-}
-
-func (self *_Assembler) _asm_OP_check_char_0(p *_Instr) {
- self.check_eof(1)
- self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb()))) // CMPB (IP)(IC), ${p.vb()}
- self.Xjmp("JE" , p.vi()) // JE {p.vi()}
-}
-
-func (self *_Assembler) _asm_OP_add(p *_Instr) {
- self.Emit("ADDQ", jit.Imm(int64(p.vi())), _IC) // ADDQ ${p.vi()}, IC
-}
-
-func (self *_Assembler) _asm_OP_load(_ *_Instr) {
- self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
- self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _VP) // MOVQ (ST)(AX), VP
-}
-
-func (self *_Assembler) _asm_OP_save(_ *_Instr) {
- self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX) // MOVQ (ST), CX
- self.Emit("CMPQ", _CX, jit.Imm(_MaxStackBytes)) // CMPQ CX, ${_MaxStackBytes}
-    self.Sjmp("JAE" , _LB_stack_error)                  // JAE _stack_error
- self.WriteRecNotAX(0 , _VP, jit.Sib(_ST, _CX, 1, 8), false, false) // MOVQ VP, 8(ST)(CX)
- self.Emit("ADDQ", jit.Imm(8), _CX) // ADDQ $8, CX
- self.Emit("MOVQ", _CX, jit.Ptr(_ST, 0)) // MOVQ CX, (ST)
-}
-
-func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
- self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
- self.Emit("SUBQ", jit.Imm(8), _AX) // SUBQ $8, AX
- self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP) // MOVQ 8(ST)(AX), VP
- self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST)
- self.Emit("XORL", _ET, _ET) // XORL ET, ET
- self.Emit("MOVQ", _ET, jit.Sib(_ST, _AX, 1, 8)) // MOVQ ET, 8(ST)(AX)
-}
-
-func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
- self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
- self.Emit("SUBQ" , jit.Imm(16), _AX) // SUBQ $16, AX
- self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _VP) // MOVQ 8(ST)(AX), VP
- self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST)
- self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
- self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX)
-}
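
save and the two drop variants implement a small pointer stack rooted at ST: save bounds-checks and pushes VP, drop pops and zeroes the vacated slot so the GC never scans a stale pointer. The same discipline, sketched (ptrStack is illustrative, not sonic's layout):

    package main

    import (
        "errors"
        "fmt"
    )

    type ptrStack struct {
        sp int
        vp []uintptr
    }

    // save mirrors _asm_OP_save: bounds check, store, bump the offset.
    func (s *ptrStack) save(p uintptr) error {
        if s.sp >= len(s.vp) {
            return errors.New("value nesting too deep") // _LB_stack_error
        }
        s.vp[s.sp] = p
        s.sp++
        return nil
    }

    // drop mirrors _asm_OP_drop: pop, then zero the slot behind it.
    func (s *ptrStack) drop() uintptr {
        s.sp--
        p := s.vp[s.sp]
        s.vp[s.sp] = 0 // XORL ET, ET; MOVQ ET, 8(ST)(AX)
        return p
    }

    func main() {
        st := &ptrStack{vp: make([]uintptr, 4)}
        _ = st.save(0xdead)
        fmt.Printf("%#x\n", st.drop())
    }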
-
-func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
- self.Emit("MOVQ", jit.Type(p.vt()), _AX) // MOVQ ${p.vt()}, AX
- self.decode_dynamic(_AX, _VP) // DECODE AX, VP
-}
-
-func (self *_Assembler) _asm_OP_goto(p *_Instr) {
- self.Xjmp("JMP", p.vi())
-}
-
-func (self *_Assembler) _asm_OP_switch(p *_Instr) {
- self.Emit("MOVQ", _VAR_sr, _AX) // MOVQ sr, AX
- self.Emit("CMPQ", _AX, jit.Imm(p.i64())) // CMPQ AX, ${len(p.vs())}
- self.Sjmp("JAE" , "_default_{n}") // JAE _default_{n}
-
- /* jump table selector */
- self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
- self.Sref("_switch_table_{n}", 4) // .... &_switch_table_{n}
- self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX) // MOVLQSX (DI)(AX*4), AX
- self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
- self.Rjmp("JMP" , _AX) // JMP AX
- self.Link("_switch_table_{n}") // _switch_table_{n}:
-
- /* generate the jump table */
- for i, v := range p.vs() {
- self.Xref(v, int64(-i) * 4)
- }
-
- /* default case */
- self.Link("_default_{n}")
- self.NOP()
-}
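
The switch op emits a PC-relative jump table: a vector of 32-bit offsets from the table base, indexed by the field id left in VAR_sr, with out-of-range ids falling through to the default case. The dispatch shape in Go terms:

    package main

    import "fmt"

    // dispatch mirrors the bounds check plus indexed jump above.
    func dispatch(table []func(), idx int, def func()) {
        if idx < 0 || idx >= len(table) {
            def() // JAE _default_{n}
            return
        }
        table[idx]() // MOVLQSX (DI)(AX*4), AX; ADDQ DI, AX; JMP AX
    }

    func main() {
        table := []func(){func() { fmt.Println("field 0") }}
        dispatch(table, 0, func() { fmt.Println("unknown field") })
    }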
-
-func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
- self.Emit("MOVQ", jit.Imm(int64(p2.op())), jit.Ptr(_SP, 16))// MOVQ $(p2.op()), 16(SP)
- self.Emit("MOVQ", jit.Imm(int64(p1.op())), jit.Ptr(_SP, 8)) // MOVQ $(p1.op()), 8(SP)
- self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0)) // MOVQ $(i), (SP)
- self.call_go(_F_println)
-}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/consts/option.go b/vendor/github.com/bytedance/sonic/internal/decoder/consts/option.go
new file mode 100644
index 000000000..4195ebda7
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/consts/option.go
@@ -0,0 +1,36 @@
+
+package consts
+
+import (
+ `github.com/bytedance/sonic/internal/native/types`
+)
+
+
+const (
+ F_use_int64 = 0
+ F_disable_urc = 2
+ F_disable_unknown = 3
+ F_copy_string = 4
+
+
+ F_use_number = types.B_USE_NUMBER
+ F_validate_string = types.B_VALIDATE_STRING
+ F_allow_control = types.B_ALLOW_CONTROL
+ F_no_validate_json = types.B_NO_VALIDATE_JSON
+)
+
+type Options uint64
+
+const (
+ OptionUseInt64 Options = 1 << F_use_int64
+ OptionUseNumber Options = 1 << F_use_number
+ OptionUseUnicodeErrors Options = 1 << F_disable_urc
+ OptionDisableUnknown Options = 1 << F_disable_unknown
+ OptionCopyString Options = 1 << F_copy_string
+ OptionValidateString Options = 1 << F_validate_string
+ OptionNoValidateJSON Options = 1 << F_no_validate_json
+)
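
Callers compose these bits with plain OR. A minimal sketch of the caller side (the shift counts below are stand-ins for the F_* indices and types.B_* values above):

    package main

    import "fmt"

    // Options mirrors consts.Options; the concrete shift for each flag
    // comes from the F_* constants, some of which are defined by the
    // native types package.
    type Options uint64

    const (
        OptionUseNumber  Options = 1 << 1 // stand-in for 1 << F_use_number
        OptionCopyString Options = 1 << 4 // stand-in for 1 << F_copy_string
    )

    func main() {
        opts := OptionUseNumber | OptionCopyString
        fmt.Println(opts&OptionCopyString != 0) // true
    }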
+
+const (
+ MaxStack = 4096
+)
\ No newline at end of file
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/errors.go b/vendor/github.com/bytedance/sonic/internal/decoder/errors/errors.go
index 4453f5cfe..9f05e8b6a 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/errors.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/errors/errors.go
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package decoder
+package errors
import (
`encoding/json`
@@ -46,7 +46,7 @@ func (self SyntaxError) Description() string {
func (self SyntaxError) description() string {
/* check for empty source */
if self.Src == "" {
- return fmt.Sprintf("no sources available: %#v", self)
+ return fmt.Sprintf("no sources available, the input json is empty: %#v", self)
}
p, x, q, y := calcBounds(len(self.Src), self.Pos)
@@ -112,12 +112,12 @@ func clamp_zero(v int) int {
/** JIT Error Helpers **/
-var stackOverflow = &json.UnsupportedValueError {
+var StackOverflow = &json.UnsupportedValueError {
Str : "Value nesting too deep",
Value : reflect.ValueOf("..."),
}
-func error_wrap(src string, pos int, code types.ParsingError) error {
+func ErrorWrap(src string, pos int, code types.ParsingError) error {
return *error_wrap_heap(src, pos, code)
}
@@ -130,7 +130,7 @@ func error_wrap_heap(src string, pos int, code types.ParsingError) *SyntaxError
}
}
-func error_type(vt *rt.GoType) error {
+func ErrorType(vt *rt.GoType) error {
return &json.UnmarshalTypeError{Type: vt.Pack()}
}
@@ -171,7 +171,7 @@ func (self MismatchTypeError) Description() string {
return fmt.Sprintf("Mismatch type %s with value %s %s", self.Type.String(), swithchJSONType(self.Src, self.Pos), se.description())
}
-func error_mismatch(src string, pos int, vt *rt.GoType) error {
+func ErrorMismatch(src string, pos int, vt *rt.GoType) error {
return &MismatchTypeError {
Pos : pos,
Src : src,
@@ -179,11 +179,11 @@ func error_mismatch(src string, pos int, vt *rt.GoType) error {
}
}
-func error_field(name string) error {
+func ErrorField(name string) error {
return errors.New("json: unknown field " + strconv.Quote(name))
}
-func error_value(value string, vtype reflect.Type) error {
+func ErrorValue(value string, vtype reflect.Type) error {
return &json.UnmarshalTypeError {
Type : vtype,
Value : value,
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64.go b/vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64.go
deleted file mode 100644
index 8ce5c2926..000000000
--- a/vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64.go
+++ /dev/null
@@ -1,733 +0,0 @@
-// +build go1.16,!go1.17
-
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package decoder
-
-import (
- `encoding/json`
- `fmt`
- `reflect`
-
- `github.com/bytedance/sonic/internal/jit`
- `github.com/bytedance/sonic/internal/native`
- `github.com/bytedance/sonic/internal/native/types`
- `github.com/twitchyliquid64/golang-asm/obj`
-)
-
-/** Crucial Registers:
- *
- * ST(BX) : ro, decoder stack
- * DF(R10) : ro, decoder flags
- * EP(R11) : wo, error pointer
- * IP(R12) : ro, input pointer
- * IL(R13) : ro, input length
- * IC(R14) : rw, input cursor
- * VP(R15) : ro, value pointer (to an interface{})
- */
-
-const (
-    _VD_args   = 8      // 8 bytes for passing arguments to this function
- _VD_fargs = 64 // 64 bytes for passing arguments to other Go functions
- _VD_saves = 40 // 40 bytes for saving the registers before CALL instructions
- _VD_locals = 88 // 88 bytes for local variables
-)
-
-const (
- _VD_offs = _VD_fargs + _VD_saves + _VD_locals
- _VD_size = _VD_offs + 8 // 8 bytes for the parent frame pointer
-)
-
-var (
- _VAR_ss = _VAR_ss_Vt
- _VAR_df = jit.Ptr(_SP, _VD_fargs + _VD_saves)
-)
-
-var (
- _VAR_ss_Vt = jit.Ptr(_SP, _VD_fargs + _VD_saves + 8)
- _VAR_ss_Dv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 16)
- _VAR_ss_Iv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 24)
- _VAR_ss_Ep = jit.Ptr(_SP, _VD_fargs + _VD_saves + 32)
- _VAR_ss_Db = jit.Ptr(_SP, _VD_fargs + _VD_saves + 40)
- _VAR_ss_Dc = jit.Ptr(_SP, _VD_fargs + _VD_saves + 48)
-)
-
-var (
- _VAR_cs_LR = jit.Ptr(_SP, _VD_fargs + _VD_saves + 56)
- _VAR_cs_p = jit.Ptr(_SP, _VD_fargs + _VD_saves + 64)
- _VAR_cs_n = jit.Ptr(_SP, _VD_fargs + _VD_saves + 72)
- _VAR_cs_d = jit.Ptr(_SP, _VD_fargs + _VD_saves + 80)
-)
-
-type _ValueDecoder struct {
- jit.BaseAssembler
-}
-
-func (self *_ValueDecoder) build() uintptr {
- self.Init(self.compile)
- return *(*uintptr)(self.Load("decode_value", _VD_size, _VD_args, argPtrs_generic, localPtrs_generic))
-}
-
-/** Function Calling Helpers **/
-
-func (self *_ValueDecoder) save(r ...obj.Addr) {
- for i, v := range r {
- if i > _VD_saves / 8 - 1 {
- panic("too many registers to save")
- } else {
- self.Emit("MOVQ", v, jit.Ptr(_SP, _VD_fargs + int64(i) * 8))
- }
- }
-}
-
-func (self *_ValueDecoder) load(r ...obj.Addr) {
- for i, v := range r {
- if i > _VD_saves / 8 - 1 {
- panic("too many registers to load")
- } else {
- self.Emit("MOVQ", jit.Ptr(_SP, _VD_fargs + int64(i) * 8), v)
- }
- }
-}
-
-func (self *_ValueDecoder) call(fn obj.Addr) {
- self.Emit("MOVQ", fn, _AX) // MOVQ ${fn}, AX
- self.Rjmp("CALL", _AX) // CALL AX
-}
-
-func (self *_ValueDecoder) call_go(fn obj.Addr) {
- self.save(_REG_go...) // SAVE $REG_go
- self.call(fn) // CALL ${fn}
- self.load(_REG_go...) // LOAD $REG_go
-}
-
-/** Decoder Assembler **/
-
-const (
- _S_val = iota + 1
- _S_arr
- _S_arr_0
- _S_obj
- _S_obj_0
- _S_obj_delim
- _S_obj_sep
-)
-
-const (
- _S_omask_key = (1 << _S_obj_0) | (1 << _S_obj_sep)
- _S_omask_end = (1 << _S_obj_0) | (1 << _S_obj)
- _S_vmask = (1 << _S_val) | (1 << _S_arr_0)
-)
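
Every legality check in this state machine is a single bit test: build a mask of the states in which a construct may appear, then BTQ the current state against it (_S_vmask gates values, _S_omask_key gates object keys, and so on). The same idea in plain Go:

    package main

    import "fmt"

    // A local mirror of the state constants above.
    const (
        sVal = iota + 1
        sArr
        sArr0
        sObj
        sObj0
        sObjDelim
        sObjSep
    )

    // sVmask marks the states in which a JSON value may begin.
    const sVmask = 1<<sVal | 1<<sArr0

    func valueAllowed(st int) bool { return sVmask>>st&1 == 1 }

    func main() {
        fmt.Println(valueAllowed(sVal), valueAllowed(sObj)) // true false
    }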
-
-const (
- _A_init_len = 1
- _A_init_cap = 16
-)
-
-const (
- _ST_Sp = 0
- _ST_Vt = _PtrBytes
- _ST_Vp = _PtrBytes * (types.MAX_RECURSE + 1)
-)
-
-var (
- _V_true = jit.Imm(int64(pbool(true)))
- _V_false = jit.Imm(int64(pbool(false)))
- _F_value = jit.Imm(int64(native.S_value))
-)
-
-var (
- _V_max = jit.Imm(int64(types.V_MAX))
- _E_eof = jit.Imm(int64(types.ERR_EOF))
- _E_invalid = jit.Imm(int64(types.ERR_INVALID_CHAR))
- _E_recurse = jit.Imm(int64(types.ERR_RECURSE_EXCEED_MAX))
-)
-
-var (
- _F_convTslice = jit.Func(convTslice)
- _F_convTstring = jit.Func(convTstring)
- _F_invalid_vtype = jit.Func(invalid_vtype)
-)
-
-var (
- _T_map = jit.Type(reflect.TypeOf((map[string]interface{})(nil)))
- _T_bool = jit.Type(reflect.TypeOf(false))
- _T_int64 = jit.Type(reflect.TypeOf(int64(0)))
- _T_eface = jit.Type(reflect.TypeOf((*interface{})(nil)).Elem())
- _T_slice = jit.Type(reflect.TypeOf(([]interface{})(nil)))
- _T_string = jit.Type(reflect.TypeOf(""))
- _T_number = jit.Type(reflect.TypeOf(json.Number("")))
- _T_float64 = jit.Type(reflect.TypeOf(float64(0)))
-)
-
-var _R_tab = map[int]string {
- '[': "_decode_V_ARRAY",
- '{': "_decode_V_OBJECT",
- ':': "_decode_V_KEY_SEP",
- ',': "_decode_V_ELEM_SEP",
- ']': "_decode_V_ARRAY_END",
- '}': "_decode_V_OBJECT_END",
-}
-
-func (self *_ValueDecoder) compile() {
- self.Emit("SUBQ", jit.Imm(_VD_size), _SP) // SUBQ $_VD_size, SP
- self.Emit("MOVQ", _BP, jit.Ptr(_SP, _VD_offs)) // MOVQ BP, _VD_offs(SP)
- self.Emit("LEAQ", jit.Ptr(_SP, _VD_offs), _BP) // LEAQ _VD_offs(SP), BP
-
- /* initialize the state machine */
- self.Emit("XORL", _CX, _CX) // XORL CX, CX
- self.Emit("MOVQ", _DF, _VAR_df) // MOVQ DF, df
- /* initialize digital buffer first */
- self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_ss_Dc) // MOVQ $_MaxDigitNums, ss.Dcap
- self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX) // LEAQ _DbufOffset(ST), AX
- self.Emit("MOVQ", _AX, _VAR_ss_Db) // MOVQ AX, ss.Dbuf
- /* add ST offset */
- self.Emit("ADDQ", jit.Imm(_FsmOffset), _ST) // ADDQ _FsmOffset, _ST
- self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
- self.WriteRecNotAX(0, _VP, jit.Ptr(_ST, _ST_Vp), false) // MOVQ VP, ST.Vp[0]
- self.Emit("MOVQ", jit.Imm(_S_val), jit.Ptr(_ST, _ST_Vt)) // MOVQ _S_val, ST.Vt[0]
- self.Sjmp("JMP" , "_next") // JMP _next
-
- /* set the value from previous round */
- self.Link("_set_value") // _set_value:
- self.Emit("MOVL" , jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
- self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
- self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
- self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
- self.Sjmp("JNC" , "_vtype_error") // JNC _vtype_error
- self.Emit("XORL" , _SI, _SI) // XORL SI, SI
- self.Emit("SUBQ" , jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
- self.Emit("XCHGQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // XCHGQ ST.Vp[CX], SI
- self.Emit("MOVQ" , _R8, jit.Ptr(_SI, 0)) // MOVQ R8, (SI)
- self.WriteRecNotAX(1, _R9, jit.Ptr(_SI, 8), false) // MOVQ R9, 8(SI)
-
- /* check for value stack */
- self.Link("_next") // _next:
- self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _AX) // MOVQ ST.Sp, AX
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JS" , "_return") // JS _return
-
- /* fast path: test up to 4 characters manually */
- self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
- self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
- self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
- self.Emit("MOVQ" , jit.Imm(_BM_space), _DX) // MOVQ _BM_space, DX
- self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
- self.Sjmp("JA" , "_decode_fast") // JA _decode_fast
- self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX
- self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast
- self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
-
- /* at least 1 to 3 spaces */
- for i := 0; i < 3; i++ {
- self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
- self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
- self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
- self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
- self.Sjmp("JA" , "_decode_fast") // JA _decode_fast
- self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX
- self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast
- self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
- }
-
- /* at least 4 spaces */
- self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
- self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
- self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
-
- /* fast path: use lookup table to select decoder */
- self.Link("_decode_fast") // _decode_fast:
- self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
- self.Sref("_decode_tab", 4) // .... &_decode_tab
- self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX) // MOVLQSX (DI)(AX*4), AX
- self.Emit("TESTQ" , _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JZ" , "_decode_native") // JZ _decode_native
- self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
- self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
- self.Rjmp("JMP" , _AX) // JMP AX
-
- /* decode with native decoder */
- self.Link("_decode_native") // _decode_native:
- self.Emit("MOVQ", _IP, _DI) // MOVQ IP, DI
- self.Emit("MOVQ", _IL, _SI) // MOVQ IL, SI
- self.Emit("MOVQ", _IC, _DX) // MOVQ IC, DX
- self.Emit("LEAQ", _VAR_ss, _CX) // LEAQ ss, CX
-    self.Emit("MOVQ", _VAR_df, _R8)                     // MOVQ df, R8
-    self.Emit("BTSQ", jit.Imm(_F_allow_control), _R8)   // BTSQ ${_F_allow_control}, R8
- self.call(_F_value) // CALL value
- self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
-
- /* check for errors */
- self.Emit("MOVQ" , _VAR_ss_Vt, _AX) // MOVQ ss.Vt, AX
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JS" , "_parsing_error")
- self.Sjmp("JZ" , "_invalid_vtype") // JZ _invalid_vtype
- self.Emit("CMPQ" , _AX, _V_max) // CMPQ AX, _V_max
- self.Sjmp("JA" , "_invalid_vtype") // JA _invalid_vtype
-
- /* jump table selector */
- self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
- self.Sref("_switch_table", 4) // .... &_switch_table
- self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, -4), _AX) // MOVLQSX -4(DI)(AX*4), AX
- self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
- self.Rjmp("JMP" , _AX) // JMP AX
-
- /** V_EOF **/
- self.Link("_decode_V_EOF") // _decode_V_EOF:
- self.Emit("MOVL", _E_eof, _EP) // MOVL _E_eof, EP
- self.Sjmp("JMP" , "_error") // JMP _error
-
- /** V_NULL **/
- self.Link("_decode_V_NULL") // _decode_V_NULL:
- self.Emit("XORL", _R8, _R8) // XORL R8, R8
- self.Emit("XORL", _R9, _R9) // XORL R9, R9
- self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI
- self.Sjmp("JMP" , "_set_value") // JMP _set_value
-
- /** V_TRUE **/
- self.Link("_decode_V_TRUE") // _decode_V_TRUE:
- self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8
-    // TODO: may be modified by users?
- self.Emit("MOVQ", _V_true, _R9) // MOVQ _V_true, R9
- self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI
- self.Sjmp("JMP" , "_set_value") // JMP _set_value
-
- /** V_FALSE **/
- self.Link("_decode_V_FALSE") // _decode_V_FALSE:
- self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8
- self.Emit("MOVQ", _V_false, _R9) // MOVQ _V_false, R9
- self.Emit("LEAQ", jit.Ptr(_IC, -5), _DI) // LEAQ -5(IC), DI
- self.Sjmp("JMP" , "_set_value") // JMP _set_value
-
- /** V_ARRAY **/
- self.Link("_decode_V_ARRAY") // _decode_V_ARRAY
- self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
- self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
- self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
- self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
- self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char
-
- /* create a new array */
- self.Emit("MOVQ", _T_eface, _AX) // MOVQ _T_eface, AX
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVQ", jit.Imm(_A_init_len), jit.Ptr(_SP, 8)) // MOVQ _A_init_len, 8(SP)
- self.Emit("MOVQ", jit.Imm(_A_init_cap), jit.Ptr(_SP, 16)) // MOVQ _A_init_cap, 16(SP)
- self.call_go(_F_makeslice) // CALL_GO runtime.makeslice
- self.Emit("MOVQ", jit.Ptr(_SP, 24), _DX) // MOVQ 24(SP), DX
-
- /* pack into an interface */
- self.Emit("MOVQ", _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP)
- self.Emit("MOVQ", jit.Imm(_A_init_len), jit.Ptr(_SP, 8)) // MOVQ _A_init_len, 8(SP)
- self.Emit("MOVQ", jit.Imm(_A_init_cap), jit.Ptr(_SP, 16)) // MOVQ _A_init_cap, 16(SP)
- self.call_go(_F_convTslice) // CALL_GO runtime.convTslice
- self.Emit("MOVQ", jit.Ptr(_SP, 24), _R8) // MOVQ 24(SP), R8
-
- /* replace current state with an array */
- self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
- self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
- self.Emit("MOVQ", jit.Imm(_S_arr), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr, ST.Vt[CX]
- self.Emit("MOVQ", _T_slice, _AX) // MOVQ _T_slice, AX
- self.Emit("MOVQ", _AX, jit.Ptr(_SI, 0)) // MOVQ AX, (SI)
- self.WriteRecNotAX(2, _R8, jit.Ptr(_SI, 8), false) // MOVQ R8, 8(SI)
-
- /* add a new slot for the first element */
- self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
- self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
-    self.Sjmp("JAE" , "_stack_overflow")                // JAE _stack_overflow
- self.Emit("MOVQ", jit.Ptr(_R8, 0), _AX) // MOVQ (R8), AX
- self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
- self.WritePtrAX(3, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX]
- self.Emit("MOVQ", jit.Imm(_S_arr_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr_0, ST.Vt[CX]
- self.Sjmp("JMP" , "_next") // JMP _next
-
- /** V_OBJECT **/
- self.Link("_decode_V_OBJECT") // _decode_V_OBJECT:
- self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
- self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
- self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
- self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
- self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char
- self.call_go(_F_makemap_small) // CALL_GO runtime.makemap_small
- self.Emit("MOVQ", jit.Ptr(_SP, 0), _AX) // MOVQ (SP), AX
- self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.Emit("MOVQ", jit.Imm(_S_obj_0), jit.Sib(_ST, _CX, 8, _ST_Vt))  // MOVQ _S_obj_0, ST.Vt[CX]
- self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
- self.Emit("MOVQ", _T_map, _DX) // MOVQ _T_map, DX
- self.Emit("MOVQ", _DX, jit.Ptr(_SI, 0)) // MOVQ DX, (SI)
- self.WritePtrAX(4, jit.Ptr(_SI, 8), false) // MOVQ AX, 8(SI)
- self.Sjmp("JMP" , "_next") // JMP _next
-
- /** V_STRING **/
- self.Link("_decode_V_STRING") // _decode_V_STRING:
- self.Emit("MOVQ", _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX
- self.Emit("MOVQ", _IC, _AX) // MOVQ IC, AX
- self.Emit("SUBQ", _CX, _AX) // SUBQ CX, AX
-
- /* check for escapes */
- self.Emit("CMPQ", _VAR_ss_Ep, jit.Imm(-1)) // CMPQ ss.Ep, $-1
- self.Sjmp("JNE" , "_unquote") // JNE _unquote
- self.Emit("SUBQ", jit.Imm(1), _AX) // SUBQ $1, AX
- self.Emit("LEAQ", jit.Sib(_IP, _CX, 1, 0), _R8) // LEAQ (IP)(CX), R8
- self.Byte(0x48, 0x8d, 0x3d) // LEAQ (PC), DI
- self.Sref("_copy_string_end", 4)
- self.Emit("BTQ", jit.Imm(_F_copy_string), _VAR_df)
- self.Sjmp("JC", "copy_string")
- self.Link("_copy_string_end")
- self.Emit("XORL", _DX, _DX) // XORL DX, DX
- /* strings with no escape sequences */
- self.Link("_noescape") // _noescape:
-    self.Emit("MOVL", jit.Imm(_S_omask_key), _DI)       // MOVL _S_omask_key, DI
- self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
- self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _SI) // MOVQ ST.Vt[CX], SI
- self.Emit("BTQ" , _SI, _DI) // BTQ SI, DI
- self.Sjmp("JC" , "_object_key") // JC _object_key
-
- /* check for pre-packed strings, avoid 1 allocation */
- self.Emit("TESTQ", _DX, _DX) // TESTQ DX, DX
- self.Sjmp("JNZ" , "_packed_str") // JNZ _packed_str
- self.Emit("MOVQ" , _R8, jit.Ptr(_SP, 0)) // MOVQ R8, (SP)
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
- self.call_go(_F_convTstring) // CALL_GO runtime.convTstring
- self.Emit("MOVQ" , jit.Ptr(_SP, 16), _R9) // MOVQ 16(SP), R9
-
- /* packed string already in R9 */
- self.Link("_packed_str") // _packed_str:
- self.Emit("MOVQ", _T_string, _R8) // MOVQ _T_string, R8
- self.Emit("MOVQ", _VAR_ss_Iv, _DI) // MOVQ ss.Iv, DI
- self.Emit("SUBQ", jit.Imm(1), _DI) // SUBQ $1, DI
- self.Sjmp("JMP" , "_set_value") // JMP _set_value
-
- /* the string is an object key, get the map */
- self.Link("_object_key")
- self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
- self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
- self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
-
- /* add a new delimiter */
- self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
- self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
- self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
- self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
- self.Emit("MOVQ", jit.Imm(_S_obj_delim), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj_delim, ST.Vt[CX]
-
- /* add a new slot in the map */
- self.Emit("MOVQ", _T_map, _DX) // MOVQ _T_map, DX
- self.Emit("MOVQ", _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP)
- self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8)) // MOVQ SI, 8(SP)
- self.Emit("MOVQ", _R8, jit.Ptr(_SP, 16)) // MOVQ R9, 16(SP)
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 24)) // MOVQ AX, 24(SP)
- self.call_go(_F_mapassign_faststr) // CALL_GO runtime.mapassign_faststr
- self.Emit("MOVQ", jit.Ptr(_SP, 32), _AX) // MOVQ 32(SP), AX
-
- /* add to the pointer stack */
- self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
- self.WritePtrAX(6, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX]
- self.Sjmp("JMP" , "_next") // JMP _next
-
- /* allocate memory to store the string header and unquoted result */
- self.Link("_unquote") // _unquote:
- self.Emit("ADDQ", jit.Imm(15), _AX) // ADDQ $15, AX
- self.Emit("MOVQ", _T_byte, _CX) // MOVQ _T_byte, CX
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
- self.Emit("MOVB", jit.Imm(0), jit.Ptr(_SP, 16)) // MOVB $0, 16(SP)
- self.call_go(_F_mallocgc) // CALL_GO runtime.mallocgc
- self.Emit("MOVQ", jit.Ptr(_SP, 24), _R9) // MOVQ 24(SP), R9
-
- /* prepare the unquoting parameters */
- self.Emit("MOVQ" , _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX
- self.Emit("LEAQ" , jit.Sib(_IP, _CX, 1, 0), _DI) // LEAQ (IP)(CX), DI
- self.Emit("NEGQ" , _CX) // NEGQ CX
- self.Emit("LEAQ" , jit.Sib(_IC, _CX, 1, -1), _SI) // LEAQ -1(IC)(CX), SI
- self.Emit("LEAQ" , jit.Ptr(_R9, 16), _DX) // LEAQ 16(R8), DX
- self.Emit("LEAQ" , _VAR_ss_Ep, _CX) // LEAQ ss.Ep, CX
- self.Emit("XORL" , _R8, _R8) // XORL R8, R8
- self.Emit("BTQ" , jit.Imm(_F_disable_urc), _VAR_df) // BTQ ${_F_disable_urc}, fv
- self.Emit("SETCC", _R8) // SETCC R8
- self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ ${types.B_UNICODE_REPLACE}, R8
-
- /* unquote the string, with R9 preserved */
- self.save(_R9) // SAVE R9
- self.call(_F_unquote) // CALL unquote
- self.load(_R9) // LOAD R9
-
- /* check for errors */
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JS" , "_unquote_error") // JS _unquote_error
- self.Emit("MOVL" , jit.Imm(1), _DX) // MOVL $1, DX
- self.Emit("LEAQ" , jit.Ptr(_R9, 16), _R8) // ADDQ $16, R8
- self.Emit("MOVQ" , _R8, jit.Ptr(_R9, 0)) // MOVQ R8, (R9)
- self.Emit("MOVQ" , _AX, jit.Ptr(_R9, 8)) // MOVQ AX, 8(R9)
- self.Sjmp("JMP" , "_noescape") // JMP _noescape
-
- /** V_DOUBLE **/
- self.Link("_decode_V_DOUBLE") // _decode_V_DOUBLE:
- self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df
- self.Sjmp("JC" , "_use_number") // JC _use_number
- self.Emit("MOVSD", _VAR_ss_Dv, _X0) // MOVSD ss.Dv, X0
- self.Sjmp("JMP" , "_use_float64") // JMP _use_float64
-
- /** V_INTEGER **/
- self.Link("_decode_V_INTEGER") // _decode_V_INTEGER:
- self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df
- self.Sjmp("JC" , "_use_number") // JC _use_number
- self.Emit("BTQ" , jit.Imm(_F_use_int64), _VAR_df) // BTQ _F_use_int64, df
- self.Sjmp("JC" , "_use_int64") // JC _use_int64
- self.Emit("MOVQ" , _VAR_ss_Iv, _AX) // MOVQ ss.Iv, AX
- self.Emit("CVTSQ2SD", _AX, _X0) // CVTSQ2SD AX, X0
-
- /* represent numbers as `float64` */
- self.Link("_use_float64") // _use_float64:
- self.Emit("MOVSD", _X0, jit.Ptr(_SP, 0)) // MOVSD X0, (SP)
- self.call_go(_F_convT64) // CALL_GO runtime.convT64
- self.Emit("MOVQ" , _T_float64, _R8) // MOVQ _T_float64, R8
- self.Emit("MOVQ" , jit.Ptr(_SP, 8), _R9) // MOVQ 8(SP), R9
- self.Emit("MOVQ" , _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
- self.Sjmp("JMP" , "_set_value") // JMP _set_value
-
- /* represent numbers as `json.Number` */
- self.Link("_use_number") // _use_number
- self.Emit("MOVQ", _VAR_ss_Ep, _AX) // MOVQ ss.Ep, AX
- self.Emit("LEAQ", jit.Sib(_IP, _AX, 1, 0), _SI) // LEAQ (IP)(AX), SI
- self.Emit("MOVQ", _IC, _CX) // MOVQ IC, CX
- self.Emit("SUBQ", _AX, _CX) // SUBQ AX, CX
- self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0)) // MOVQ SI, (SP)
- self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
- self.call_go(_F_convTstring) // CALL_GO runtime.convTstring
- self.Emit("MOVQ", _T_number, _R8) // MOVQ _T_number, R8
- self.Emit("MOVQ", jit.Ptr(_SP, 16), _R9) // MOVQ 16(SP), R9
- self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
- self.Sjmp("JMP" , "_set_value") // JMP _set_value
-
- /* represent numbers as `int64` */
- self.Link("_use_int64") // _use_int64:
- self.Emit("MOVQ", _VAR_ss_Iv, _AX) // MOVQ ss.Iv, AX
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.call_go(_F_convT64) // CALL_GO runtime.convT64
- self.Emit("MOVQ", _T_int64, _R8) // MOVQ _T_int64, R8
- self.Emit("MOVQ", jit.Ptr(_SP, 8), _R9) // MOVQ 8(SP), R9
- self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
- self.Sjmp("JMP" , "_set_value") // JMP _set_value
-
- /** V_KEY_SEP **/
- self.Link("_decode_V_KEY_SEP") // _decode_V_KEY_SEP:
- // self.Byte(0xcc)
- self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
- self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
- self.Emit("CMPQ", _AX, jit.Imm(_S_obj_delim)) // CMPQ AX, _S_obj_delim
- self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
- self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX]
- self.Emit("MOVQ", jit.Imm(_S_obj), jit.Sib(_ST, _CX, 8, _ST_Vt - 8)) // MOVQ _S_obj, ST.Vt[CX - 1]
- self.Sjmp("JMP" , "_next") // JMP _next
-
- /** V_ELEM_SEP **/
- self.Link("_decode_V_ELEM_SEP") // _decode_V_ELEM_SEP:
- self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
- self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
- self.Emit("CMPQ" , _AX, jit.Imm(_S_arr)) // CMPQ _AX, _S_arr
- self.Sjmp("JE" , "_array_sep") // JZ _next
- self.Emit("CMPQ" , _AX, jit.Imm(_S_obj)) // CMPQ _AX, _S_arr
- self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
- self.Emit("MOVQ" , jit.Imm(_S_obj_sep), jit.Sib(_ST, _CX, 8, _ST_Vt))
- self.Sjmp("JMP" , "_next") // JMP _next
-
- /* arrays */
- self.Link("_array_sep")
- self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
- self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
- self.Emit("MOVQ", jit.Ptr(_SI, 8), _DX) // MOVQ 8(SI), DX
- self.Emit("CMPQ", _DX, jit.Ptr(_SI, 16)) // CMPQ DX, 16(SI)
- self.Sjmp("JAE" , "_array_more") // JAE _array_more
-
- /* add a slot for the new element */
- self.Link("_array_append") // _array_append:
- self.Emit("ADDQ", jit.Imm(1), jit.Ptr(_SI, 8)) // ADDQ $1, 8(SI)
- self.Emit("MOVQ", jit.Ptr(_SI, 0), _SI) // MOVQ (SI), SI
- self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
- self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
- self.Sjmp("JAE" , "_stack_overflow")
- self.Emit("SHLQ", jit.Imm(1), _DX) // SHLQ $1, DX
- self.Emit("LEAQ", jit.Sib(_SI, _DX, 8, 0), _SI) // LEAQ (SI)(DX*8), SI
- self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
- self.WriteRecNotAX(7 , _SI, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ SI, ST.Vp[CX]
- self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX}
- self.Sjmp("JMP" , "_next") // JMP _next
-
- /** V_ARRAY_END **/
- self.Link("_decode_V_ARRAY_END") // _decode_V_ARRAY_END:
- self.Emit("XORL", _DX, _DX) // XORL DX, DX
- self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
- self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
- self.Emit("CMPQ", _AX, jit.Imm(_S_arr_0)) // CMPQ AX, _S_arr_0
- self.Sjmp("JE" , "_first_item") // JE _first_item
- self.Emit("CMPQ", _AX, jit.Imm(_S_arr)) // CMPQ AX, _S_arr
- self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
- self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
- self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX]
- self.Sjmp("JMP" , "_next") // JMP _next
-
- /* first element of an array */
- self.Link("_first_item") // _first_item:
- self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
- self.Emit("SUBQ", jit.Imm(2), jit.Ptr(_ST, _ST_Sp)) // SUBQ $2, ST.Sp
- self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp - 8), _SI) // MOVQ ST.Vp[CX - 1], SI
- self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
- self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp - 8)) // MOVQ DX, ST.Vp[CX - 1]
- self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX]
- self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI)
- self.Sjmp("JMP" , "_next") // JMP _next
-
- /** V_OBJECT_END **/
- self.Link("_decode_V_OBJECT_END") // _decode_V_OBJECT_END:
- self.Emit("MOVL", jit.Imm(_S_omask_end), _DX) // MOVL _S_omask, DI
- self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
- self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
- self.Emit("BTQ" , _AX, _DX)
- self.Sjmp("JNC" , "_invalid_char") // JNE _invalid_char
- self.Emit("XORL", _AX, _AX) // XORL AX, AX
- self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
- self.Emit("MOVQ", _AX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ AX, ST.Vp[CX]
- self.Sjmp("JMP" , "_next") // JMP _next
-
- /* return from decoder */
- self.Link("_return") // _return:
- self.Emit("XORL", _EP, _EP) // XORL EP, EP
- self.Emit("MOVQ", _EP, jit.Ptr(_ST, _ST_Vp)) // MOVQ EP, ST.Vp[0]
- self.Link("_epilogue") // _epilogue:
- self.Emit("SUBQ", jit.Imm(_FsmOffset), _ST) // SUBQ _FsmOffset, _ST
- self.Emit("MOVQ", jit.Ptr(_SP, _VD_offs), _BP) // MOVQ _VD_offs(SP), BP
- self.Emit("ADDQ", jit.Imm(_VD_size), _SP) // ADDQ $_VD_size, SP
- self.Emit("RET") // RET
-
- /* array expand */
- self.Link("_array_more") // _array_more:
- self.Emit("MOVQ" , _T_eface, _AX) // MOVQ _T_eface, AX
- self.Emit("MOVOU", jit.Ptr(_SI, 0), _X0) // MOVOU (SI), X0
- self.Emit("MOVQ" , jit.Ptr(_SI, 16), _DX) // MOVQ 16(SI), DX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP)
- self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 24)) // MOVQ DX, 24(SP)
- self.Emit("SHLQ" , jit.Imm(1), _DX) // SHLQ $1, DX
- self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 32)) // MOVQ DX, 32(SP)
- self.call_go(_F_growslice) // CALL_GO runtime.growslice
- self.Emit("MOVQ" , jit.Ptr(_SP, 40), _DI) // MOVOU 40(SP), DI
- self.Emit("MOVQ" , jit.Ptr(_SP, 48), _DX) // MOVOU 48(SP), DX
- self.Emit("MOVQ" , jit.Ptr(_SP, 56), _AX) // MOVQ 56(SP), AX
-
- /* update the slice */
- self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
- self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
- self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
- self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI)
- self.Emit("MOVQ", _AX, jit.Ptr(_SI, 16)) // MOVQ AX, 16(AX)
- self.WriteRecNotAX(8 , _DI, jit.Ptr(_SI, 0), false) // MOVQ DI, (SI)
- self.Sjmp("JMP" , "_array_append") // JMP _array_append
-
- /* copy string */
- self.Link("copy_string") // pointer: R8, length: AX, return addr: DI
- // self.Byte(0xcc)
- self.Emit("MOVQ", _R8, _VAR_cs_p)
- self.Emit("MOVQ", _AX, _VAR_cs_n)
- self.Emit("MOVQ", _DI, _VAR_cs_LR)
- self.Emit("MOVQ", _T_byte, _R8)
- self.Emit("MOVQ", _R8, jit.Ptr(_SP, 0))
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))
- self.call_go(_F_makeslice)
- self.Emit("MOVQ", jit.Ptr(_SP, 24), _R8)
- self.Emit("MOVQ", _R8, _VAR_cs_d)
- self.Emit("MOVQ", _R8, jit.Ptr(_SP, 0))
- self.Emit("MOVQ", _VAR_cs_p, _R8)
- self.Emit("MOVQ", _R8, jit.Ptr(_SP, 8))
- self.Emit("MOVQ", _VAR_cs_n, _AX)
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))
- self.call_go(_F_memmove)
- self.Emit("MOVQ", _VAR_cs_d, _R8)
- self.Emit("MOVQ", _VAR_cs_n, _AX)
- self.Emit("MOVQ", _VAR_cs_LR, _DI)
- // self.Byte(0xcc)
- self.Rjmp("JMP", _DI)
-
- /* error handlers */
- self.Link("_stack_overflow")
- self.Emit("MOVL" , _E_recurse, _EP) // MOVQ _E_recurse, EP
- self.Sjmp("JMP" , "_error") // JMP _error
- self.Link("_vtype_error") // _vtype_error:
- self.Emit("MOVQ" , _DI, _IC) // MOVQ DI, IC
- self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP
- self.Sjmp("JMP" , "_error") // JMP _error
- self.Link("_invalid_char") // _invalid_char:
- self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
- self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP
- self.Sjmp("JMP" , "_error") // JMP _error
- self.Link("_unquote_error") // _unquote_error:
- self.Emit("MOVQ" , _VAR_ss_Iv, _IC) // MOVQ ss.Iv, IC
- self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
- self.Link("_parsing_error") // _parsing_error:
- self.Emit("NEGQ" , _AX) // NEGQ AX
- self.Emit("MOVQ" , _AX, _EP) // MOVQ AX, EP
- self.Link("_error") // _error:
- self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
- self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
- self.Sjmp("JMP" , "_epilogue") // JMP _epilogue
-
- /* invalid value type, never returns */
- self.Link("_invalid_vtype")
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.call(_F_invalid_vtype) // CALL invalid_vtype
- self.Emit("UD2") // UD2
-
- /* switch jump table */
- self.Link("_switch_table") // _switch_table:
- self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0
- self.Sref("_decode_V_NULL", -4) // SREF &_decode_V_NULL, $-4
- self.Sref("_decode_V_TRUE", -8) // SREF &_decode_V_TRUE, $-8
- self.Sref("_decode_V_FALSE", -12) // SREF &_decode_V_FALSE, $-12
- self.Sref("_decode_V_ARRAY", -16) // SREF &_decode_V_ARRAY, $-16
- self.Sref("_decode_V_OBJECT", -20) // SREF &_decode_V_OBJECT, $-20
- self.Sref("_decode_V_STRING", -24) // SREF &_decode_V_STRING, $-24
- self.Sref("_decode_V_DOUBLE", -28) // SREF &_decode_V_DOUBLE, $-28
- self.Sref("_decode_V_INTEGER", -32) // SREF &_decode_V_INTEGER, $-32
- self.Sref("_decode_V_KEY_SEP", -36) // SREF &_decode_V_KEY_SEP, $-36
- self.Sref("_decode_V_ELEM_SEP", -40) // SREF &_decode_V_ELEM_SEP, $-40
- self.Sref("_decode_V_ARRAY_END", -44) // SREF &_decode_V_ARRAY_END, $-44
- self.Sref("_decode_V_OBJECT_END", -48) // SREF &_decode_V_OBJECT_END, $-48
-
- /* fast character lookup table */
- self.Link("_decode_tab") // _decode_tab:
- self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0
-
- /* generate the rest of the table */
- for i := 1; i < 256; i++ {
- if to, ok := _R_tab[i]; ok {
- self.Sref(to, -int64(i) * 4)
- } else {
- self.Byte(0x00, 0x00, 0x00, 0x00)
- }
- }
-}
-
-/** Generic Decoder **/
-
-var (
- _subr_decode_value = new(_ValueDecoder).build()
-)
-
-//go:nosplit
-func invalid_vtype(vt types.ValueType) {
- throw(fmt.Sprintf("invalid value type: %d", vt))
-}
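Note on the deleted generic decoder above: every push onto its FSM stack is guarded by the same "ADDQ $1, CX; CMPQ CX, MAX_RECURSE; JAE _stack_overflow" sequence before ST.Sp is advanced. A minimal Go sketch of that push discipline, with hypothetical names standing in for the ST.Sp/ST.Vt slots (the real state machine is types.StateMachine in the native package, and maxRecurse is an assumed stand-in for types.MAX_RECURSE):

    package fsmsketch

    import "errors"

    const maxRecurse = 4096 // assumption: mirrors types.MAX_RECURSE

    type fsm struct {
        sp int                // ST.Sp
        vt [maxRecurse]uint64 // ST.Vt: state tags such as _S_arr_0, _S_obj_delim
    }

    // push bounds-checks before advancing, exactly like the JAE guard above.
    func (m *fsm) push(state uint64) error {
        if m.sp+1 >= maxRecurse {
            return errors.New("exceeded max depth") // the _E_recurse error path
        }
        m.sp++
        m.vt[m.sp] = state
        return nil
    }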
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64_test.s b/vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64_test.s
deleted file mode 100644
index 4e509c2f8..000000000
--- a/vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64_test.s
+++ /dev/null
@@ -1,37 +0,0 @@
-// +build go1.16,!go1.17
-
-//
-// Copyright 2021 ByteDance Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-#include "go_asm.h"
-#include "funcdata.h"
-#include "textflag.h"
-
-TEXT ·decodeValueStub(SB), NOSPLIT, $0 - 72
- NO_LOCAL_POINTERS
- PXOR X0, X0
- MOVOU X0, rv+48(FP)
- MOVQ st+0(FP), BX
- MOVQ sp+8(FP), R12
- MOVQ sn+16(FP), R13
- MOVQ ic+24(FP), R14
- MOVQ vp+32(FP), R15
- MOVQ df+40(FP), R10
- MOVQ ·_subr_decode_value(SB), AX
- CALL AX
- MOVQ R14, rp+48(FP)
- MOVQ R11, ex+56(FP)
- RET
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/asm.s b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm.s
index e69de29bb..e69de29bb 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/asm.s
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm.s
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go117.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go117.go
index b0125a792..48f73e5bf 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go117.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go117.go
@@ -14,7 +14,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package decoder
+package jitdec
import (
`strconv`
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go121.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go121.go
index 6adeac0cf..cbec3d248 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go121.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go121.go
@@ -1,4 +1,4 @@
-// +build go1.21,!go1.23
+// +build go1.21,!go1.24
// Copyright 2023 CloudWeGo Authors
//
@@ -14,7 +14,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package decoder
+package jitdec
import (
`strconv`
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/assembler_regabi_amd64.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/assembler_regabi_amd64.go
index 6c6fde91a..76eef333b 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/assembler_regabi_amd64.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/assembler_regabi_amd64.go
@@ -1,4 +1,5 @@
-// +build go1.17,!go1.23
+//go:build go1.17 && !go1.24
+// +build go1.17,!go1.24
/*
* Copyright 2021 ByteDance Inc.
@@ -16,21 +17,22 @@
* limitations under the License.
*/
-package decoder
+package jitdec
import (
- `encoding/json`
- `fmt`
- `math`
- `reflect`
- `unsafe`
-
- `github.com/bytedance/sonic/internal/caching`
- `github.com/bytedance/sonic/internal/jit`
- `github.com/bytedance/sonic/internal/native`
- `github.com/bytedance/sonic/internal/native/types`
- `github.com/bytedance/sonic/internal/rt`
- `github.com/twitchyliquid64/golang-asm/obj`
+ "encoding/json"
+ "fmt"
+ "math"
+ "reflect"
+ "strings"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/caching"
+ "github.com/bytedance/sonic/internal/jit"
+ "github.com/bytedance/sonic/internal/native"
+ "github.com/bytedance/sonic/internal/native/types"
+ "github.com/bytedance/sonic/internal/rt"
+ "github.com/twitchyliquid64/golang-asm/obj"
)
/** Register Allocations
@@ -292,7 +294,6 @@ var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
_OP_array_clear_p : (*_Assembler)._asm_OP_array_clear_p,
_OP_slice_init : (*_Assembler)._asm_OP_slice_init,
_OP_slice_append : (*_Assembler)._asm_OP_slice_append,
- _OP_object_skip : (*_Assembler)._asm_OP_object_skip,
_OP_object_next : (*_Assembler)._asm_OP_object_next,
_OP_struct_field : (*_Assembler)._asm_OP_struct_field,
_OP_unmarshal : (*_Assembler)._asm_OP_unmarshal,
@@ -312,6 +313,7 @@ var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
_OP_check_char_0 : (*_Assembler)._asm_OP_check_char_0,
_OP_dismatch_err : (*_Assembler)._asm_OP_dismatch_err,
_OP_go_skip : (*_Assembler)._asm_OP_go_skip,
+ _OP_skip_emtpy : (*_Assembler)._asm_OP_skip_empty,
_OP_add : (*_Assembler)._asm_OP_add,
_OP_check_empty : (*_Assembler)._asm_OP_check_empty,
_OP_debug : (*_Assembler)._asm_OP_debug,
@@ -385,7 +387,7 @@ func (self *_Assembler) prologue() {
var (
_REG_go = []obj.Addr { _ST, _VP, _IP, _IL, _IC }
- _REG_rt = []obj.Addr { _ST, _VP, _IP, _IL, _IC, _IL }
+ _REG_rt = []obj.Addr { _ST, _VP, _IP, _IL, _IC }
)
func (self *_Assembler) save(r ...obj.Addr) {
@@ -481,6 +483,7 @@ var (
_V_stackOverflow = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow))))
_I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError)))
_I_json_MismatchTypeError = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError)))
+ _I_json_MismatchQuotedError = jit.Itab(_T_error, reflect.TypeOf(new(MismatchQuotedError)))
)
func (self *_Assembler) type_error() {
@@ -492,9 +495,9 @@ func (self *_Assembler) type_error() {
func (self *_Assembler) mismatch_error() {
self.Link(_LB_mismatch_error) // _type_error:
self.Emit("MOVQ", _VAR_et, _ET) // MOVQ _VAR_et, ET
- self.Emit("MOVQ", _VAR_ic, _EP) // MOVQ _VAR_ic, EP
self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ _I_json_MismatchType, CX
self.Emit("CMPQ", _ET, _CX) // CMPQ ET, CX
+ self.Emit("MOVQ", jit.Ptr(_ST, _EpOffset), _EP) // MOVQ stack.Ep, EP
self.Sjmp("JE" , _LB_error) // JE _LB_error
self.Emit("MOVQ", _ARG_sp, _AX)
self.Emit("MOVQ", _ARG_sl, _BX)
@@ -600,6 +603,28 @@ func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {
self.Sjmp("JMP" , _LB_skip_one) // JMP _skip_one
}
+var _F_IndexByte = jit.Func(strings.IndexByte)
+
+func (self *_Assembler) _asm_OP_skip_empty(p *_Instr) {
+ // self.Byte(0xcc)
+ self.call_sf(_F_skip_one) // CALL_SF skip_one
+ // self.Byte(0xcc)
+ self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
+ self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v
+ self.Emit("BTQ", jit.Imm(_F_disable_unknown), _ARG_fv)
+ self.Xjmp("JNC", p.vi())
+ self.Emit("LEAQ", jit.Sib(_IC, _AX, 1, 0), _BX)
+ self.Emit("MOVQ", _BX, _ARG_sv_n)
+ self.Emit("LEAQ", jit.Sib(_IP, _AX, 1, 0), _AX)
+ self.Emit("MOVQ", _AX, _ARG_sv_p)
+ self.Emit("MOVQ", jit.Imm(':'), _CX)
+ self.call_go(_F_IndexByte)
+ // self.Byte(0xcc)
+ self.Emit("TESTQ", _AX, _AX)
+ // disallow unknown field
+ self.Sjmp("JNS", _LB_field_error)
+}
+
func (self *_Assembler) skip_one() {
self.Link(_LB_skip_one) // _skip:
self.Emit("MOVQ", _VAR_ic, _IC) // MOVQ _VAR_ic, IC
@@ -972,11 +997,13 @@ var (
var (
_F_decodeJsonUnmarshaler obj.Addr
+ _F_decodeJsonUnmarshalerQuoted obj.Addr
_F_decodeTextUnmarshaler obj.Addr
)
func init() {
_F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
+ _F_decodeJsonUnmarshalerQuoted = jit.Func(decodeJsonUnmarshalerQuoted)
_F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
}
@@ -1057,18 +1084,18 @@ func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
var (
_F_skip_one = jit.Imm(int64(native.S_skip_one))
_F_skip_array = jit.Imm(int64(native.S_skip_array))
- _F_skip_object = jit.Imm(int64(native.S_skip_object))
_F_skip_number = jit.Imm(int64(native.S_skip_number))
)
-func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
+func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool, f obj.Addr) {
self.call_sf(_F_skip_one) // CALL_SF skip_one
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v
+ self.Emit("MOVQ", _IC, _VAR_ic) // store for mismatche error skip
self.slice_from_r(_AX, 0) // SLICE_R AX, $0
self.Emit("MOVQ" , _DI, _ARG_sv_p) // MOVQ DI, sv.p
self.Emit("MOVQ" , _SI, _ARG_sv_n) // MOVQ SI, sv.n
- self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref) // UNMARSHAL json, ${t}, ${deref}
+ self.unmarshal_func(t, f, deref) // UNMARSHAL json, ${t}, ${deref}
}
func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
@@ -1103,7 +1130,19 @@ func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool)
self.Emit("MOVQ" , _ARG_sv_n, _DI) // MOVQ sv.n, DI
self.call_go(fn) // CALL_GO ${fn}
self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
+ if fn == _F_decodeJsonUnmarshalerQuoted {
+ self.Sjmp("JZ" , "_unmarshal_func_end_{n}") // JZ _unmarshal_func_end_{n}
+ self.Emit("MOVQ", _I_json_MismatchQuotedError, _CX) // MOVQ _I_json_MismatchQuotedError, CX
+ self.Emit("CMPQ", _ET, _CX) // check if MismatchQuotedError
+ self.Sjmp("JNE" , _LB_error) // JNE _error
+ self.Emit("MOVQ", jit.Type(t), _CX) // store current type
+ self.Emit("MOVQ", _CX, _VAR_et) // store current type as mismatched type
+ self.Emit("MOVQ", _VAR_ic, _IC) // recover the pos at mismatched, continue to parse
+ self.Emit("XORL", _ET, _ET) // clear ET
+ self.Link("_unmarshal_func_end_{n}")
+ } else {
+ self.Sjmp("JNE" , _LB_error) // JNE _error
+ }
}
/** Dynamic Decoding Routine **/
@@ -1136,8 +1175,8 @@ func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ _I_json_MismatchTypeError, CX
self.Emit("CMPQ", _ET, _CX) // CMPQ ET, CX
self.Sjmp("JNE", _LB_error) // JNE LB_error
- self.Emit("MOVQ", _EP, _VAR_ic) // MOVQ EP, VAR_ic
self.Emit("MOVQ", _ET, _VAR_et) // MOVQ ET, VAR_et
+ self.WriteRecNotAX(14, _EP, jit.Ptr(_ST, _EpOffset), false, false) // MOVQ EP, stack.Ep
self.Link("_decode_dynamic_end_{n}")
}
@@ -1146,7 +1185,7 @@ func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
var (
_F_memequal = jit.Func(memequal)
_F_memmove = jit.Func(memmove)
- _F_growslice = jit.Func(growslice)
+ _F_growslice = jit.Func(rt.GrowSlice)
_F_makeslice = jit.Func(makeslice)
_F_makemap_small = jit.Func(makemap_small)
_F_mapassign_fast64 = jit.Func(mapassign_fast64)
@@ -1698,12 +1737,6 @@ func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
self.Link("_append_slice_end_{n}")
}
-func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
- self.call_sf(_F_skip_object) // CALL_SF skip_object
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v
-}
-
func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
self.call_sf(_F_skip_one) // CALL_SF skip_one
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
@@ -1774,11 +1807,19 @@ func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
}
func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
- self.unmarshal_json(p.vt(), true)
+ if iv := p.i64(); iv != 0 {
+ self.unmarshal_json(p.vt(), true, _F_decodeJsonUnmarshalerQuoted)
+ } else {
+ self.unmarshal_json(p.vt(), true, _F_decodeJsonUnmarshaler)
+ }
}
func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
- self.unmarshal_json(p.vt(), false)
+ if iv := p.i64(); iv != 0 {
+ self.unmarshal_json(p.vt(), false, _F_decodeJsonUnmarshalerQuoted)
+ } else {
+ self.unmarshal_json(p.vt(), false, _F_decodeJsonUnmarshaler)
+ }
}
func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
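For the quoted variant, unmarshal_func above converts a MismatchQuotedError into "record the mismatched type, rewind to the position saved in _VAR_ic, keep parsing", while any other error still aborts. The same control flow restated in Go as a sketch (savedIC and recordMismatch are hypothetical stand-ins for _VAR_ic and _VAR_et; MismatchQuotedError is defined in primitives.go below):

    // sketch of the JZ / CMPQ-itab / JNE sequence emitted above
    func callQuotedUnmarshaler(run func() error, savedIC int, ic *int, recordMismatch func()) error {
        err := run()
        if err == nil {
            return nil
        }
        if _, ok := err.(*MismatchQuotedError); !ok {
            return err // any other error takes the _LB_error path
        }
        recordMismatch() // stored like _VAR_et and surfaced after decoding ends
        *ic = savedIC    // recover the position and continue parsing
        return nil
    }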
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/compiler.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/compiler.go
index b350c0461..a097d171d 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/compiler.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/compiler.go
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package decoder
+package jitdec
import (
`encoding/json`
@@ -77,7 +77,6 @@ const (
_OP_array_clear_p
_OP_slice_init
_OP_slice_append
- _OP_object_skip
_OP_object_next
_OP_struct_field
_OP_unmarshal
@@ -97,6 +96,7 @@ const (
_OP_check_char_0
_OP_dismatch_err
_OP_go_skip
+ _OP_skip_emtpy
_OP_add
_OP_check_empty
_OP_debug
@@ -155,7 +155,6 @@ var _OpNames = [256]string {
_OP_array_skip : "array_skip",
_OP_slice_init : "slice_init",
_OP_slice_append : "slice_append",
- _OP_object_skip : "object_skip",
_OP_object_next : "object_next",
_OP_struct_field : "struct_field",
_OP_unmarshal : "unmarshal",
@@ -271,6 +270,13 @@ func newInsVt(op _Op, vt reflect.Type) _Instr {
}
}
+func newInsVtI(op _Op, vt reflect.Type, iv int) _Instr {
+ return _Instr {
+ u: packOp(op) | rt.PackInt(iv),
+ p: unsafe.Pointer(rt.UnpackType(vt)),
+ }
+}
+
func newInsVf(op _Op, vf *caching.FieldMap) _Instr {
return _Instr {
u: packOp(op),
@@ -452,6 +458,10 @@ func (self *_Program) rtt(op _Op, vt reflect.Type) {
*self = append(*self, newInsVt(op, vt))
}
+func (self *_Program) rtti(op _Op, vt reflect.Type, iv int) {
+ *self = append(*self, newInsVtI(op, vt, iv))
+}
+
func (self *_Program) fmv(op _Op, vf *caching.FieldMap) {
*self = append(*self, newInsVf(op, vf))
}
@@ -527,35 +537,54 @@ func (self *_Compiler) compile(vt reflect.Type) (ret _Program, err error) {
return
}
-func (self *_Compiler) checkMarshaler(p *_Program, vt reflect.Type) bool {
+const (
+ checkMarshalerFlags_quoted = 1
+)
+
+func (self *_Compiler) checkMarshaler(p *_Program, vt reflect.Type, flags int, exec bool) bool {
pt := reflect.PtrTo(vt)
/* check for `json.Unmarshaler` with pointer receiver */
if pt.Implements(jsonUnmarshalerType) {
- p.rtt(_OP_unmarshal_p, pt)
+ if exec {
+ p.add(_OP_lspace)
+ p.rtti(_OP_unmarshal_p, pt, flags)
+ }
return true
}
/* check for `json.Unmarshaler` */
if vt.Implements(jsonUnmarshalerType) {
- p.add(_OP_lspace)
- self.compileUnmarshalJson(p, vt)
+ if exec {
+ p.add(_OP_lspace)
+ self.compileUnmarshalJson(p, vt, flags)
+ }
return true
}
+ if flags == checkMarshalerFlags_quoted {
+ // text unmarshalers are not supported for quoted (",string") fields
+ return false
+ }
+
/* check for `encoding.TextUnmarshaler` with pointer receiver */
if pt.Implements(encodingTextUnmarshalerType) {
- p.add(_OP_lspace)
- self.compileUnmarshalTextPtr(p, pt)
+ if exec {
+ p.add(_OP_lspace)
+ self.compileUnmarshalTextPtr(p, pt, flags)
+ }
return true
}
/* check for `encoding.TextUnmarshaler` */
if vt.Implements(encodingTextUnmarshalerType) {
- p.add(_OP_lspace)
- self.compileUnmarshalText(p, vt)
+ if exec {
+ p.add(_OP_lspace)
+ self.compileUnmarshalText(p, vt, flags)
+ }
return true
}
+
return false
}
@@ -567,7 +596,7 @@ func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type) {
return
}
- if self.checkMarshaler(p, vt) {
+ if self.checkMarshaler(p, vt, 0, true) {
return
}
@@ -690,7 +719,7 @@ func (self *_Compiler) compilePtr(p *_Program, sp int, et reflect.Type) {
/* dereference all the way down */
for et.Kind() == reflect.Ptr {
- if self.checkMarshaler(p, et) {
+ if self.checkMarshaler(p, et, 0, true) {
return
}
et = et.Elem()
@@ -872,7 +901,24 @@ func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
n := p.pc()
p.add(_OP_is_null)
- skip := self.checkIfSkip(p, vt, '{')
+ j := p.pc()
+ p.chr(_OP_check_char_0, '{')
+ p.rtt(_OP_dismatch_err, vt)
+
+ /* special case for empty object */
+ if len(fv) == 0 {
+ p.pin(j)
+ s := p.pc()
+ p.add(_OP_skip_emtpy)
+ p.pin(s)
+ p.pin(n)
+ return
+ }
+
+ skip := p.pc()
+ p.add(_OP_go_skip)
+ p.pin(j)
+ p.int(_OP_add, 1)
p.add(_OP_save)
p.add(_OP_lspace)
@@ -890,11 +936,6 @@ func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
p.chr(_OP_check_char, '}')
p.chr(_OP_match_char, ',')
- /* special case of an empty struct */
- if len(fv) == 0 {
- p.add(_OP_object_skip)
- goto end_of_object
- }
/* match the remaining fields */
p.add(_OP_lspace)
@@ -930,7 +971,6 @@ func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
p.int(_OP_goto, y0)
}
-end_of_object:
p.pin(x)
p.pin(y1)
p.add(_OP_drop)
@@ -938,7 +978,22 @@ end_of_object:
p.pin(skip)
}
+func (self *_Compiler) compileStructFieldStrUnmarshal(p *_Program, vt reflect.Type) {
+ p.add(_OP_lspace)
+ n0 := p.pc()
+ p.add(_OP_is_null)
+ self.checkMarshaler(p, vt, checkMarshalerFlags_quoted, true)
+ p.pin(n0)
+}
+
func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) {
+ // according to encoding/json, json.Unmarshaler must be called before the ",string" option is applied
+ // see https://github.com/bytedance/sonic/issues/670
+ if self.checkMarshaler(p, vt, checkMarshalerFlags_quoted, false) {
+ self.compileStructFieldStrUnmarshal(p, vt)
+ return
+ }
+
n1 := -1
ft := vt
sv := false
@@ -1106,7 +1161,7 @@ func (self *_Compiler) compileUnmarshalEnd(p *_Program, vt reflect.Type, i int)
p.pin(j)
}
-func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type) {
+func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type, flags int) {
i := p.pc()
v := _OP_unmarshal
p.add(_OP_is_null)
@@ -1117,11 +1172,11 @@ func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type) {
}
/* call the unmarshaler */
- p.rtt(v, vt)
+ p.rtti(v, vt, flags)
self.compileUnmarshalEnd(p, vt, i)
}
-func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type) {
+func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type, iv int) {
i := p.pc()
v := _OP_unmarshal_text
p.add(_OP_is_null)
@@ -1134,15 +1189,15 @@ func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type) {
}
/* call the unmarshaler */
- p.rtt(v, vt)
+ p.rtti(v, vt, iv)
self.compileUnmarshalEnd(p, vt, i)
}
-func (self *_Compiler) compileUnmarshalTextPtr(p *_Program, vt reflect.Type) {
+func (self *_Compiler) compileUnmarshalTextPtr(p *_Program, vt reflect.Type, iv int) {
i := p.pc()
p.add(_OP_is_null)
p.chr(_OP_match_char, '"')
- p.rtt(_OP_unmarshal_text_p, vt)
+ p.rtti(_OP_unmarshal_text_p, vt, iv)
p.pin(i)
}
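The new empty-object special case compiles struct{} targets down to a single skip instead of the old per-field loop; the observable behavior is meant to match the stdlib, which skips unknown fields unless they are disallowed. A reference sketch using only encoding/json (shown for the expected semantics, not sonic's own option API):

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
    )

    func main() {
        var v struct{}
        fmt.Println(json.Unmarshal([]byte(`{"a":1}`), &v)) // <nil>: fields skipped

        dec := json.NewDecoder(bytes.NewReader([]byte(`{"a":1}`)))
        dec.DisallowUnknownFields() // the stdlib analogue of F_disable_unknown
        fmt.Println(dec.Decode(&v)) // error: unknown field "a"
    }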
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/debug.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/debug.go
index d5537ed9a..b59a3e571 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/debug.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/debug.go
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package decoder
+package jitdec
import (
`os`
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/decoder.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/decoder.go
new file mode 100644
index 000000000..bbb4b4b61
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/decoder.go
@@ -0,0 +1,140 @@
+package jitdec
+
+import (
+ `unsafe`
+ `encoding/json`
+ `reflect`
+ `runtime`
+
+ `github.com/bytedance/sonic/internal/decoder/consts`
+ `github.com/bytedance/sonic/internal/decoder/errors`
+ `github.com/bytedance/sonic/internal/rt`
+ `github.com/bytedance/sonic/utf8`
+ `github.com/bytedance/sonic/option`
+)
+
+type (
+ MismatchTypeError = errors.MismatchTypeError
+ SyntaxError = errors.SyntaxError
+)
+
+const (
+ _F_allow_control = consts.F_allow_control
+ _F_copy_string = consts.F_copy_string
+ _F_disable_unknown = consts.F_disable_unknown
+ _F_disable_urc = consts.F_disable_urc
+ _F_use_int64 = consts.F_use_int64
+ _F_use_number = consts.F_use_number
+ _F_no_validate_json = consts.F_no_validate_json
+ _F_validate_string = consts.F_validate_string
+)
+
+var (
+ error_wrap = errors.ErrorWrap
+ error_type = errors.ErrorType
+ error_field = errors.ErrorField
+ error_value = errors.ErrorValue
+ error_mismatch = errors.ErrorMismatch
+ stackOverflow = errors.StackOverflow
+)
+
+
+// Decode parses the JSON-encoded data from the current position and stores the result
+// in the value pointed to by val.
+func Decode(s *string, i *int, f uint64, val interface{}) error {
+ /* validate json if needed */
+ if (f & (1 << _F_validate_string)) != 0 && !utf8.ValidateString(*s){
+ dbuf := utf8.CorrectWith(nil, rt.Str2Mem(*s), "\ufffd")
+ *s = rt.Mem2Str(dbuf)
+ }
+
+ vv := rt.UnpackEface(val)
+ vp := vv.Value
+
+ /* check for nil type */
+ if vv.Type == nil {
+ return &json.InvalidUnmarshalError{}
+ }
+
+ /* must be a non-nil pointer */
+ if vp == nil || vv.Type.Kind() != reflect.Ptr {
+ return &json.InvalidUnmarshalError{Type: vv.Type.Pack()}
+ }
+
+ etp := rt.PtrElem(vv.Type)
+
+ /* check the defined pointer type for issue 379 */
+ if vv.Type.IsNamed() {
+ newp := vp
+ etp = vv.Type
+ vp = unsafe.Pointer(&newp)
+ }
+
+ /* create a new stack, and call the decoder */
+ sb := newStack()
+ nb, err := decodeTypedPointer(*s, *i, etp, vp, sb, f)
+ /* return the stack to the pool */
+ *i = nb
+ freeStack(sb)
+
+ /* keep val alive until decoding completes */
+ runtime.KeepAlive(vv)
+ return err
+}
+
+
+// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
+// order to reduce the first-hit latency.
+//
+// Opts are the compile options; for example, "option.WithCompileRecursiveDepth"
+// sets the recursive compile depth for nested struct types.
+func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
+ cfg := option.DefaultCompileOptions()
+ for _, opt := range opts {
+ opt(&cfg)
+ }
+ return pretouchRec(map[reflect.Type]bool{vt:true}, cfg)
+}
+
+func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) {
+ /* compile function */
+ compiler := newCompiler().apply(opts)
+ decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
+ if pp, err := compiler.compile(_vt); err != nil {
+ return nil, err
+ } else {
+ as := newAssembler(pp)
+ as.name = _vt.String()
+ return as.Load(), nil
+ }
+ }
+
+ /* find or compile */
+ vt := rt.UnpackType(_vt)
+ if val := programCache.Get(vt); val != nil {
+ return nil, nil
+ } else if _, err := programCache.Compute(vt, decoder); err == nil {
+ return compiler.rec, nil
+ } else {
+ return nil, err
+ }
+}
+
+func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error {
+ if opts.RecursiveDepth < 0 || len(vtm) == 0 {
+ return nil
+ }
+ next := make(map[reflect.Type]bool)
+ for vt := range(vtm) {
+ sub, err := pretouchType(vt, opts)
+ if err != nil {
+ return err
+ }
+ for svt := range(sub) {
+ next[svt] = true
+ }
+ }
+ opts.RecursiveDepth -= 1
+ return pretouchRec(next, opts)
+}
+
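Decode is position-based: *i is both where parsing starts and, on return, where it stopped, which is what lets callers such as the stream decoder resume after each value. A hedged usage sketch within the same package (the zero flags value is an assumption meaning "default options"):

    // sketch: decode one value and observe the cursor advancing
    func decodeOne() error {
        src := `{"a":1} {"b":2}`
        pos := 0
        var v map[string]int
        if err := Decode(&src, &pos, 0, &v); err != nil {
            return err
        }
        // pos now points just past the first '}', so the caller can keep
        // scanning src[pos:] for the next value.
        return nil
    }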
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/generic_regabi_amd64.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64.go
index c7514cb41..e6d5e3e84 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/generic_regabi_amd64.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64.go
@@ -1,4 +1,4 @@
-// +build go1.17,!go1.23
+// +build go1.17,!go1.24
/*
* Copyright 2021 ByteDance Inc.
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package decoder
+package jitdec
import (
`encoding/json`
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/generic_regabi_amd64_test.s b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64_test.s
index b4b0de183..19ed3752f 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/generic_regabi_amd64_test.s
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64_test.s
@@ -1,4 +1,4 @@
-// +build go1.17,!go1.23
+// +build go1.17,!go1.24
//
// Copyright 2021 ByteDance Inc.
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/pools.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/pools.go
index bcd14cc64..01868cb2f 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/pools.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/pools.go
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package decoder
+package jitdec
import (
`sync`
@@ -36,6 +36,7 @@ const (
_PtrBytes = _PTR_SIZE / 8
_FsmOffset = (_MaxStack + 1) * _PtrBytes
_DbufOffset = _FsmOffset + int64(unsafe.Sizeof(types.StateMachine{})) + types.MAX_RECURSE * _PtrBytes
+ _EpOffset = _DbufOffset + _MaxDigitNums
_StackSize = unsafe.Sizeof(_Stack{})
)
@@ -53,6 +54,7 @@ type _Stack struct {
mm types.StateMachine
vp [types.MAX_RECURSE]unsafe.Pointer
dp [_MaxDigitNums]byte
+ ep unsafe.Pointer
}
type _Decoder func(
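The new ep field sits directly after the digit buffer, which is exactly what _EpOffset encodes (_DbufOffset + _MaxDigitNums). Because the JIT stores the error pointer there with a write barrier (see WriteRecNotAX in the assembler), it lives in a GC-visible slot rather than a raw register spill. A hypothetical layout guard one could keep beside these constants:

    // sketch: assert the hand-computed offset matches the real field layout
    var _ = func() struct{} {
        if unsafe.Offsetof(_Stack{}.ep) != uintptr(_EpOffset) {
            panic("_EpOffset is out of sync with _Stack")
        }
        return struct{}{}
    }()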
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/primitives.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/primitives.go
index 1c9ce1fa9..9de885007 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/primitives.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/primitives.go
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package decoder
+package jitdec
import (
`encoding`
@@ -39,6 +39,20 @@ func decodeJsonUnmarshaler(vv interface{}, s string) error {
return vv.(json.Unmarshaler).UnmarshalJSON(rt.Str2Mem(s))
}
+// used to distinguish MismatchQuotedError from other mismatched-type errors, see issues #670 and #716
+type MismatchQuotedError struct {}
+
+func (*MismatchQuotedError) Error() string {
+ return "mismatch quoted"
+}
+
+func decodeJsonUnmarshalerQuoted(vv interface{}, s string) error {
+ if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' {
+ return &MismatchQuotedError{}
+ }
+ return vv.(json.Unmarshaler).UnmarshalJSON(rt.Str2Mem(s[1:len(s)-1]))
+}
+
func decodeTextUnmarshaler(vv interface{}, s string) error {
return vv.(encoding.TextUnmarshaler).UnmarshalText(rt.Str2Mem(s))
}
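The contract of decodeJsonUnmarshalerQuoted above, spelled out with a throwaway unmarshaler (rawCapture is hypothetical): a quoted token reaches UnmarshalJSON with its quotes stripped, and anything else yields *MismatchQuotedError so the quoted path in the assembler can rewind.

    type rawCapture struct{ b []byte }

    func (r *rawCapture) UnmarshalJSON(b []byte) error {
        r.b = append(r.b[:0], b...) // record exactly what the decoder passed in
        return nil
    }

    // decodeJsonUnmarshalerQuoted(&rawCapture{}, `"123"`) -> UnmarshalJSON sees 123
    // decodeJsonUnmarshalerQuoted(&rawCapture{}, `123`)   -> *MismatchQuotedError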
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/stubs_go116.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go116.go
index b02eb2b8b..8fa7c32fc 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/stubs_go116.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go116.go
@@ -1,4 +1,4 @@
-// +build go1.16,!go1.20
+// +build go1.17,!go1.20
/*
* Copyright 2021 ByteDance Inc.
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package decoder
+package jitdec
import (
`unsafe`
@@ -72,11 +72,6 @@ func mallocgc(size uintptr, typ *rt.GoType, needzero bool) unsafe.Pointer
//goland:noinspection GoUnusedParameter
func makeslice(et *rt.GoType, len int, cap int) unsafe.Pointer
-//go:noescape
-//go:linkname growslice runtime.growslice
-//goland:noinspection GoUnusedParameter
-func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
-
//go:linkname makemap_small runtime.makemap_small
func makemap_small() unsafe.Pointer
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/stubs_go120.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go120.go
index 870e25390..a6dad26d7 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/stubs_go120.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go120.go
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package decoder
+package jitdec
import (
`unsafe`
@@ -72,11 +72,6 @@ func mallocgc(size uintptr, typ *rt.GoType, needzero bool) unsafe.Pointer
//goland:noinspection GoUnusedParameter
func makeslice(et *rt.GoType, len int, cap int) unsafe.Pointer
-//go:noescape
-//go:linkname growslice reflect.growslice
-//goland:noinspection GoUnusedParameter
-func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
-
//go:linkname makemap_small runtime.makemap_small
func makemap_small() unsafe.Pointer
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/types.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/types.go
index 6fc0e706c..c196eb5b7 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/types.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/types.go
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package decoder
+package jitdec
import (
`encoding`
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/utils.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/utils.go
index 23ee5d501..0a7a20289 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/utils.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/utils.go
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package decoder
+package jitdec
import (
`unsafe`
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compile_struct.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compile_struct.go
new file mode 100644
index 000000000..713fb6561
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compile_struct.go
@@ -0,0 +1,174 @@
+package optdec
+
+import (
+ "fmt"
+ "reflect"
+
+ caching "github.com/bytedance/sonic/internal/optcaching"
+ "github.com/bytedance/sonic/internal/rt"
+ "github.com/bytedance/sonic/internal/resolver"
+)
+
+const (
+ _MAX_FIELDS = 50 // structs with this many fields or more are not inlined
+)
+
+func (c *compiler) compileIntStringOption(vt reflect.Type) decFunc {
+ switch vt.Size() {
+ case 4:
+ switch vt.Kind() {
+ case reflect.Uint:
+ fallthrough
+ case reflect.Uintptr:
+ return &u32StringDecoder{}
+ case reflect.Int:
+ return &i32StringDecoder{}
+ }
+ case 8:
+ switch vt.Kind() {
+ case reflect.Uint:
+ fallthrough
+ case reflect.Uintptr:
+ return &u64StringDecoder{}
+ case reflect.Int:
+ return &i64StringDecoder{}
+ }
+ default:
+ panic("not supported pointer size: " + fmt.Sprint(vt.Size()))
+ }
+ panic("unreachable")
+}
+
+func isInteger(vt reflect.Type) bool {
+ switch vt.Kind() {
+ case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint, reflect.Uintptr, reflect.Int: return true
+ default: return false
+ }
+}
+
+func (c *compiler) assertStringOptTypes(vt reflect.Type) {
+ if c.depth > _CompileMaxDepth {
+ panic(*stackOverflow)
+ }
+
+ c.depth += 1
+ defer func () {
+ c.depth -= 1
+ }()
+
+ if isInteger(vt) {
+ return
+ }
+
+ switch vt.Kind() {
+ case reflect.String, reflect.Bool, reflect.Float32, reflect.Float64:
+ return
+ case reflect.Ptr: c.assertStringOptTypes(vt.Elem())
+ default:
+ panicForInvalidStrType(vt)
+ }
+}
+
+func (c *compiler) compileFieldStringOption(vt reflect.Type) decFunc {
+ c.assertStringOptTypes(vt)
+ unmDec := c.tryCompilePtrUnmarshaler(vt, true)
+ if unmDec != nil {
+ return unmDec
+ }
+
+ switch vt.Kind() {
+ case reflect.String:
+ if vt == jsonNumberType {
+ return &numberStringDecoder{}
+ }
+ return &strStringDecoder{}
+ case reflect.Bool:
+ return &boolStringDecoder{}
+ case reflect.Int8:
+ return &i8StringDecoder{}
+ case reflect.Int16:
+ return &i16StringDecoder{}
+ case reflect.Int32:
+ return &i32StringDecoder{}
+ case reflect.Int64:
+ return &i64StringDecoder{}
+ case reflect.Uint8:
+ return &u8StringDecoder{}
+ case reflect.Uint16:
+ return &u16StringDecoder{}
+ case reflect.Uint32:
+ return &u32StringDecoder{}
+ case reflect.Uint64:
+ return &u64StringDecoder{}
+ case reflect.Float32:
+ return &f32StringDecoder{}
+ case reflect.Float64:
+ return &f64StringDecoder{}
+ case reflect.Uint:
+ fallthrough
+ case reflect.Uintptr:
+ fallthrough
+ case reflect.Int:
+ return c.compileIntStringOption(vt)
+ case reflect.Ptr:
+ return &ptrStrDecoder{
+ typ: rt.UnpackType(vt.Elem()),
+ deref: c.compileFieldStringOption(vt.Elem()),
+ }
+ default:
+ panicForInvalidStrType(vt)
+ return nil
+ }
+}
+
+func (c *compiler) compileStruct(vt reflect.Type) decFunc {
+ c.enter(vt)
+ defer c.exit(vt)
+ if c.namedPtr {
+ c.namedPtr = false
+ return c.compileStructBody(vt)
+ }
+
+ if c.depth >= c.opts.MaxInlineDepth + 1 || (c.counts > 0 && vt.NumField() >= _MAX_FIELDS) {
+ return &recuriveDecoder{
+ typ: rt.UnpackType(vt),
+ }
+ } else {
+ return c.compileStructBody(vt)
+ }
+}
+
+func (c *compiler) compileStructBody(vt reflect.Type) decFunc {
+ fv := resolver.ResolveStruct(vt)
+ entries := make([]fieldEntry, 0, len(fv))
+
+ for _, f := range fv {
+ var dec decFunc
+ /* dealt with field tag options */
+ if f.Opts&resolver.F_stringize != 0 {
+ dec = c.compileFieldStringOption(f.Type)
+ } else {
+ dec = c.compile(f.Type)
+ }
+
+ /* deal with embedded pointer fields */
+ if f.Path[0].Kind == resolver.F_deref {
+ dec = &embeddedFieldPtrDecoder{
+ field: f,
+ fieldDec: dec,
+ fieldName: f.Name,
+ }
+ }
+
+ entries = append(entries, fieldEntry{
+ FieldMeta: f,
+ fieldDec: dec,
+ })
+ }
+ return &structDecoder{
+ fieldMap: caching.NewFieldLookup(fv),
+ fields: entries,
+ structName: vt.Name(),
+ typ: vt,
+ }
+}
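The *StringDecoder family selected above implements the stdlib ",string" semantics: the JSON value arrives quoted and the inner text is parsed as the field's kind. For reference, the behavior these decoders must reproduce (stdlib only):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    type T struct {
        N int64   `json:"n,string"` // the i64StringDecoder case
        F float64 `json:"f,string"` // the f64StringDecoder case
    }

    func main() {
        var t T
        _ = json.Unmarshal([]byte(`{"n":"42","f":"2.5"}`), &t)
        fmt.Println(t.N, t.F) // 42 2.5
    }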
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compiler.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compiler.go
new file mode 100644
index 000000000..fd164af93
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compiler.go
@@ -0,0 +1,449 @@
+package optdec
+
+import (
+ "encoding/json"
+ "fmt"
+ "reflect"
+
+ "github.com/bytedance/sonic/option"
+ "github.com/bytedance/sonic/internal/rt"
+ "github.com/bytedance/sonic/internal/caching"
+)
+
+var (
+ programCache = caching.CreateProgramCache()
+)
+
+func findOrCompile(vt *rt.GoType) (decFunc, error) {
+ makeDecoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
+ ret, err := newCompiler().compileType(vt.Pack())
+ return ret, err
+ }
+ if val := programCache.Get(vt); val != nil {
+ return val.(decFunc), nil
+ } else if ret, err := programCache.Compute(vt, makeDecoder); err == nil {
+ return ret.(decFunc), nil
+ } else {
+ return nil, err
+ }
+}
+
+type compiler struct {
+ visited map[reflect.Type]bool
+ depth int
+ counts int
+ opts option.CompileOptions
+ namedPtr bool
+}
+
+func newCompiler() *compiler {
+ return &compiler{
+ visited: make(map[reflect.Type]bool),
+ opts: option.DefaultCompileOptions(),
+ }
+}
+
+func (self *compiler) apply(opts option.CompileOptions) *compiler {
+ self.opts = opts
+ return self
+}
+
+const _CompileMaxDepth = 4096
+
+func (c *compiler) enter(vt reflect.Type) {
+ c.visited[vt] = true
+ c.depth += 1
+
+ if c.depth > _CompileMaxDepth {
+ panic(*stackOverflow)
+ }
+}
+
+func (c *compiler) exit(vt reflect.Type) {
+ c.visited[vt] = false
+ c.depth -= 1
+}
+
+func (c *compiler) compileInt(vt reflect.Type) decFunc {
+ switch vt.Size() {
+ case 4:
+ switch vt.Kind() {
+ case reflect.Uint:
+ fallthrough
+ case reflect.Uintptr:
+ return &u32Decoder{}
+ case reflect.Int:
+ return &i32Decoder{}
+ }
+ case 8:
+ switch vt.Kind() {
+ case reflect.Uint:
+ fallthrough
+ case reflect.Uintptr:
+ return &u64Decoder{}
+ case reflect.Int:
+ return &i64Decoder{}
+ }
+ default:
+ panic("not supported pointer size: " + fmt.Sprint(vt.Size()))
+ }
+ panic("unreachable")
+}
+
+func (c *compiler) rescue(ep *error) {
+ if val := recover(); val != nil {
+ if err, ok := val.(error); ok {
+ *ep = err
+ } else {
+ panic(val)
+ }
+ }
+}
+
+func (c *compiler) compileType(vt reflect.Type) (rt decFunc, err error) {
+ defer c.rescue(&err)
+ rt = c.compile(vt)
+ return rt, err
+}
+
+func (c *compiler) compile(vt reflect.Type) decFunc {
+ if c.visited[vt] {
+ return &recuriveDecoder{
+ typ: rt.UnpackType(vt),
+ }
+ }
+
+ dec := c.tryCompilePtrUnmarshaler(vt, false)
+ if dec != nil {
+ return dec
+ }
+
+ return c.compileBasic(vt)
+}
+
+func (c *compiler) compileBasic(vt reflect.Type) decFunc {
+ defer func() {
+ c.counts += 1
+ }()
+ switch vt.Kind() {
+ case reflect.Bool:
+ return &boolDecoder{}
+ case reflect.Int8:
+ return &i8Decoder{}
+ case reflect.Int16:
+ return &i16Decoder{}
+ case reflect.Int32:
+ return &i32Decoder{}
+ case reflect.Int64:
+ return &i64Decoder{}
+ case reflect.Uint8:
+ return &u8Decoder{}
+ case reflect.Uint16:
+ return &u16Decoder{}
+ case reflect.Uint32:
+ return &u32Decoder{}
+ case reflect.Uint64:
+ return &u64Decoder{}
+ case reflect.Float32:
+ return &f32Decoder{}
+ case reflect.Float64:
+ return &f64Decoder{}
+ case reflect.Uint:
+ fallthrough
+ case reflect.Uintptr:
+ fallthrough
+ case reflect.Int:
+ return c.compileInt(vt)
+ case reflect.String:
+ return c.compileString(vt)
+ case reflect.Array:
+ return c.compileArray(vt)
+ case reflect.Interface:
+ return c.compileInterface(vt)
+ case reflect.Map:
+ return c.compileMap(vt)
+ case reflect.Ptr:
+ return c.compilePtr(vt)
+ case reflect.Slice:
+ return c.compileSlice(vt)
+ case reflect.Struct:
+ return c.compileStruct(vt)
+ default:
+ panic(&json.UnmarshalTypeError{Type: vt})
+ }
+}
+
+func (c *compiler) compilePtr(vt reflect.Type) decFunc {
+ c.enter(vt)
+ defer c.exit(vt)
+
+ // special logic for named pointer types, issue 379
+ if reflect.PtrTo(vt.Elem()) != vt {
+ c.namedPtr = true
+ return &ptrDecoder{
+ typ: rt.UnpackType(vt.Elem()),
+ deref: c.compileBasic(vt.Elem()),
+ }
+ }
+
+ return &ptrDecoder{
+ typ: rt.UnpackType(vt.Elem()),
+ deref: c.compile(vt.Elem()),
+ }
+}
+
+func (c *compiler) compileArray(vt reflect.Type) decFunc {
+ c.enter(vt)
+ defer c.exit(vt)
+ return &arrayDecoder{
+ len: vt.Len(),
+ elemType: rt.UnpackType(vt.Elem()),
+ elemDec: c.compile(vt.Elem()),
+ typ: vt,
+ }
+}
+
+func (c *compiler) compileString(vt reflect.Type) decFunc {
+ if vt == jsonNumberType {
+ return &numberDecoder{}
+ }
+ return &stringDecoder{}
+
+}
+
+func (c *compiler) tryCompileSliceUnmarshaler(vt reflect.Type) decFunc {
+ pt := reflect.PtrTo(vt.Elem())
+ if pt.Implements(jsonUnmarshalerType) {
+ return &sliceDecoder{
+ elemType: rt.UnpackType(vt.Elem()),
+ elemDec: c.compile(vt.Elem()),
+ typ: vt,
+ }
+ }
+
+ if pt.Implements(encodingTextUnmarshalerType) {
+ return &sliceDecoder{
+ elemType: rt.UnpackType(vt.Elem()),
+ elemDec: c.compile(vt.Elem()),
+ typ: vt,
+ }
+ }
+ return nil
+}
+
+func (c *compiler) compileSlice(vt reflect.Type) decFunc {
+ c.enter(vt)
+ defer c.exit(vt)
+
+ // for common slice element types, use a specialized decoder to avoid indirect calls
+ et := rt.UnpackType(vt.Elem())
+
+ /* check for `[]byte` first */
+ if et.Kind() == reflect.Uint8 /* []byte */ {
+ return c.compileSliceBytes(vt)
+ }
+
+ dec := c.tryCompileSliceUnmarshaler(vt)
+ if dec != nil {
+ return dec
+ }
+
+ if vt == reflect.TypeOf([]interface{}{}) {
+ return &sliceEfaceDecoder{}
+ }
+ if et.IsInt32() {
+ return &sliceI32Decoder{}
+ }
+ if et.IsInt64() {
+ return &sliceI64Decoder{}
+ }
+ if et.IsUint32() {
+ return &sliceU32Decoder{}
+ }
+ if et.IsUint64() {
+ return &sliceU64Decoder{}
+ }
+ if et.Kind() == reflect.String {
+ return &sliceStringDecoder{}
+ }
+
+ return &sliceDecoder{
+ elemType: rt.UnpackType(vt.Elem()),
+ elemDec: c.compile(vt.Elem()),
+ typ: vt,
+ }
+}
+
+func (c *compiler) compileSliceBytes(vt reflect.Type) decFunc {
+ ep := reflect.PtrTo(vt.Elem())
+
+ if ep.Implements(jsonUnmarshalerType) {
+ return &sliceBytesUnmarshalerDecoder{
+ elemType: rt.UnpackType(vt.Elem()),
+ elemDec: c.compile(vt.Elem()),
+ typ: vt,
+ }
+ }
+
+ if ep.Implements(encodingTextUnmarshalerType) {
+ return &sliceBytesUnmarshalerDecoder{
+ elemType: rt.UnpackType(vt.Elem()),
+ elemDec: c.compile(vt.Elem()),
+ typ: vt,
+ }
+ }
+
+ return &sliceBytesDecoder{}
+}
+
+func (c *compiler) compileInterface(vt reflect.Type) decFunc {
+ c.enter(vt)
+ defer c.exit(vt)
+ if vt.NumMethod() == 0 {
+ return &efaceDecoder{}
+ }
+
+ if vt.Implements(jsonUnmarshalerType) {
+ return &unmarshalJSONDecoder{
+ typ: rt.UnpackType(vt),
+ }
+ }
+
+ if vt.Implements(encodingTextUnmarshalerType) {
+ return &unmarshalTextDecoder{
+ typ: rt.UnpackType(vt),
+ }
+ }
+
+ return &ifaceDecoder{
+ typ: rt.UnpackType(vt),
+ }
+}
+
+func (c *compiler) compileMap(vt reflect.Type) decFunc {
+ c.enter(vt)
+ defer c.exit(vt)
+ // check for a key unmarshaler first
+ decKey := tryCompileKeyUnmarshaler(vt)
+ if decKey != nil {
+ return &mapDecoder{
+ mapType: rt.MapType(rt.UnpackType(vt)),
+ keyDec: decKey,
+ elemDec: c.compile(vt.Elem()),
+ }
+ }
+
+ // for the most common map types, use a specialized decoder to avoid indirect calls
+ if vt == reflect.TypeOf(map[string]interface{}{}) {
+ return &mapEfaceDecoder{}
+ } else if vt == reflect.TypeOf(map[string]string{}) {
+ return &mapStringDecoder{}
+ }
+
+ // then handle common integer-keyed maps
+ mt := rt.MapType(rt.UnpackType(vt))
+
+ if mt.Key.Kind() == reflect.String {
+ return &mapStrKeyDecoder{
+ mapType: mt,
+ assign: rt.GetMapStrAssign(vt),
+ elemDec: c.compile(vt.Elem()),
+ }
+ }
+
+ if mt.Key.IsInt64() {
+ return &mapI64KeyDecoder{
+ mapType: mt,
+ elemDec: c.compile(vt.Elem()),
+ assign: rt.GetMap64Assign(vt),
+ }
+ }
+
+ if mt.Key.IsInt32() {
+ return &mapI32KeyDecoder{
+ mapType: mt,
+ elemDec: c.compile(vt.Elem()),
+ assign: rt.GetMap32Assign(vt),
+ }
+ }
+
+ if mt.Key.IsUint64() {
+ return &mapU64KeyDecoder{
+ mapType: mt,
+ elemDec: c.compile(vt.Elem()),
+ assign: rt.GetMap64Assign(vt),
+ }
+ }
+
+ if mt.Key.IsUint32() {
+ return &mapU32KeyDecoder{
+ mapType: mt,
+ elemDec: c.compile(vt.Elem()),
+ assign: rt.GetMap32Assign(vt),
+ }
+ }
+
+ // Generic map
+ return &mapDecoder{
+ mapType: mt,
+ keyDec: c.compileMapKey(vt),
+ elemDec: c.compile(vt.Elem()),
+ }
+}
+
+func tryCompileKeyUnmarshaler(vt reflect.Type) decKey {
+ pt := reflect.PtrTo(vt.Key())
+
+ /* check for `encoding.TextUnmarshaler` with pointer receiver */
+ if pt.Implements(encodingTextUnmarshalerType) {
+ return decodeKeyTextUnmarshaler
+ }
+
+ /* map keys implementing `json.Unmarshaler` are not supported */
+ return nil
+}
+
+func (c *compiler) compileMapKey(vt reflect.Type) decKey {
+ switch vt.Key().Kind() {
+ case reflect.Int8:
+ return decodeKeyI8
+ case reflect.Int16:
+ return decodeKeyI16
+ case reflect.Uint8:
+ return decodeKeyU8
+ case reflect.Uint16:
+ return decodeKeyU16
+ default:
+ panic(&json.UnmarshalTypeError{Type: vt})
+ }
+}
+
+// vt may be a named type whose unmarshaler has a pointer receiver; see issue 379
+func (c *compiler) tryCompilePtrUnmarshaler(vt reflect.Type, strOpt bool) decFunc {
+ pt := reflect.PtrTo(vt)
+
+ /* check for `json.Unmarshaler` with pointer receiver */
+ if pt.Implements(jsonUnmarshalerType) {
+ return &unmarshalJSONDecoder{
+ typ: rt.UnpackType(pt),
+ strOpt: strOpt,
+ }
+ }
+
+ /* check for `encoding.TextUnmarshaler` with pointer receiver */
+ if pt.Implements(encodingTextUnmarshalerType) {
+ /* TextUnmarshaler does not support the ",string" tag */
+ if strOpt {
+ panicForInvalidStrType(vt)
+ }
+ return &unmarshalTextDecoder{
+ typ: rt.UnpackType(pt),
+ }
+ }
+
+ return nil
+}
+
+func panicForInvalidStrType(vt reflect.Type) {
+ panic(error_type(rt.UnpackType(vt)))
+}
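+
+// Illustrative note (not part of the original source): the pointer-receiver
+// probe in tryCompilePtrUnmarshaler matters for named types such as:
+//
+//	type ID int64
+//	func (p *ID) UnmarshalJSON(b []byte) error { *p = ID(len(b)); return nil }
+//
+// reflect.TypeOf(ID(0)) does not implement json.Unmarshaler, but
+// reflect.PtrTo(reflect.TypeOf(ID(0))) does, which is exactly the issue-379
+// case handled above.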
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/const.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/const.go
new file mode 100644
index 000000000..77879fafe
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/const.go
@@ -0,0 +1,60 @@
+package optdec
+
+import "math"
+
+/*
+Copied from sonic-rs
+// JSON Value Type
+const NULL: u64 = 0;
+const BOOL: u64 = 2;
+const FALSE: u64 = BOOL;
+const TRUE: u64 = (1 << 3) | BOOL;
+const NUMBER: u64 = 3;
+const UINT: u64 = NUMBER;
+const SINT: u64 = (1 << 3) | NUMBER;
+const REAL: u64 = (2 << 3) | NUMBER;
+const RAWNUMBER: u64 = (3 << 3) | NUMBER;
+const STRING: u64 = 4;
+const STRING_COMMON: u64 = STRING;
+const STRING_HASESCAPED: u64 = (1 << 3) | STRING;
+const OBJECT: u64 = 6;
+const ARRAY: u64 = 7;
+
+/// JSON Type Mask
+const POS_MASK: u64 = (!0) << 32;
+const POS_BITS: u64 = 32;
+const TYPE_MASK: u64 = 0xFF;
+const TYPE_BITS: u64 = 8;
+
+*/
+
+const (
+ // BasicType: 3 bits
+ KNull = 0 // xxxxx000
+ KBool = 2 // xxxxx010
+ KNumber = 3 // xxxxx011
+ KString = 4 // xxxxx100
+ KRaw = 5 // xxxxx101
+ KObject = 6 // xxxxx110
+ KArray = 7 // xxxxx111
+
+ // SubType: 2 bits
+ KFalse = (0 << 3) | KBool // xxx00_010, 2
+ KTrue = (1 << 3) | KBool // xxx01_010, 10
+ KUint = (0 << 3) | KNumber // xxx00_011, 3
+ KSint = (1 << 3) | KNumber // xxx01_011, 11
+ KReal = (2 << 3) | KNumber // xxx10_011, 19
+ KRawNumber = (3 << 3) | KNumber // xxx11_011, 27
+ KStringCommon = KString // xxx00_100, 4
+ KStringEscaped = (1 << 3) | KString // xxx01_100, 12
+)
+
+const (
+ PosMask = math.MaxUint64 << 32
+ PosBits = 32
+ TypeMask = 0xFF
+ TypeBits = 8
+
+ ConLenMask = uint64(math.MaxUint32)
+ ConLenBits = 32
+)
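+
+// Illustrative example (not part of the original source): a node header packs
+// the value kind into the low 8 bits of its 64-bit type word and the source
+// position into the high bits, so both decode with a single mask or shift:
+//
+//	kind := typ & TypeMask // e.g. KTrue == 10 (subtype 1, basic type KBool)
+//	base := kind & 7       // 3-bit basic type, e.g. KBool == 2
+//	pos  := typ >> PosBits // byte offset of the value in the JSON input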
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/context.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/context.go
new file mode 100644
index 000000000..93ed9b7e0
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/context.go
@@ -0,0 +1,3 @@
+package optdec
+
+type context = Context
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/decoder.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/decoder.go
new file mode 100644
index 000000000..81eed34ea
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/decoder.go
@@ -0,0 +1,160 @@
+package optdec
+
+import (
+ "reflect"
+ "unsafe"
+
+ "encoding/json"
+ "github.com/bytedance/sonic/internal/rt"
+ "github.com/bytedance/sonic/option"
+ "github.com/bytedance/sonic/internal/decoder/errors"
+ "github.com/bytedance/sonic/internal/decoder/consts"
+)
+
+
+type (
+ MismatchTypeError = errors.MismatchTypeError
+ SyntaxError = errors.SyntaxError
+)
+
+const (
+ _F_allow_control = consts.F_allow_control
+ _F_copy_string = consts.F_copy_string
+ _F_disable_unknown = consts.F_disable_unknown
+ _F_disable_urc = consts.F_disable_urc
+ _F_use_int64 = consts.F_use_int64
+ _F_use_number = consts.F_use_number
+ _F_validate_string = consts.F_validate_string
+)
+
+type Options = consts.Options
+
+const (
+ OptionUseInt64 = consts.OptionUseInt64
+ OptionUseNumber = consts.OptionUseNumber
+ OptionUseUnicodeErrors = consts.OptionUseUnicodeErrors
+ OptionDisableUnknown = consts.OptionDisableUnknown
+ OptionCopyString = consts.OptionCopyString
+ OptionValidateString = consts.OptionValidateString
+)
+
+
+func Decode(s *string, i *int, f uint64, val interface{}) error {
+ vv := rt.UnpackEface(val)
+ vp := vv.Value
+
+ /* check for nil type */
+ if vv.Type == nil {
+ return &json.InvalidUnmarshalError{}
+ }
+
+ /* must be a non-nil pointer */
+ if vp == nil || vv.Type.Kind() != reflect.Ptr {
+ return &json.InvalidUnmarshalError{Type: vv.Type.Pack()}
+ }
+
+ etp := rt.PtrElem(vv.Type)
+
+ /* check the defined pointer type for issue 379 */
+ if vv.Type.IsNamed() {
+ newp := vp
+ etp = vv.Type
+ vp = unsafe.Pointer(&newp)
+ }
+
+ dec, err := findOrCompile(etp)
+ if err != nil {
+ return err
+ }
+
+ /* parse into document */
+ ctx, err := NewContext(*s, *i, uint64(f), etp)
+ defer ctx.Delete()
+ if ctx.Parser.Utf8Inv {
+ *s = ctx.Parser.Json
+ }
+ if err != nil {
+ goto fix_error;
+ }
+ err = dec.FromDom(vp, ctx.Root(), &ctx)
+
+fix_error:
+ err = fix_error(*s, *i, err)
+
+ // finally, update the read position
+ *i += ctx.Parser.Pos()
+ return err
+}
+
+func fix_error(json string, pos int, err error) error {
+ if e, ok := err.(SyntaxError); ok {
+ return SyntaxError{
+ Pos: int(e.Pos) + pos,
+ Src: json,
+ Msg: e.Msg,
+ }
+ }
+
+ if e, ok := err.(MismatchTypeError); ok {
+ return &MismatchTypeError {
+ Pos: int(e.Pos) + pos,
+ Src: json,
+ Type: e.Type,
+ }
+ }
+
+ return err
+}
+
+// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
+// order to reduce the first-hit latency.
+//
+// opts are the compile options; for example, option.WithCompileRecursiveDepth
+// sets the recursion depth used when compiling nested struct types.
+func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
+ cfg := option.DefaultCompileOptions()
+ for _, opt := range opts {
+ opt(&cfg)
+ }
+ return pretouchRec(map[reflect.Type]bool{vt:true}, cfg)
+}
+
+func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) {
+ /* compile function */
+ compiler := newCompiler().apply(opts)
+ decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
+ if f, err := compiler.compileType(_vt); err != nil {
+ return nil, err
+ } else {
+ return f, nil
+ }
+ }
+
+ /* find or compile */
+ vt := rt.UnpackType(_vt)
+ if val := programCache.Get(vt); val != nil {
+ return nil, nil
+ } else if _, err := programCache.Compute(vt, decoder); err == nil {
+ return compiler.visited, nil
+ } else {
+ return nil, err
+ }
+}
+
+func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error {
+ if opts.RecursiveDepth < 0 || len(vtm) == 0 {
+ return nil
+ }
+ next := make(map[reflect.Type]bool)
+ for vt := range(vtm) {
+ sub, err := pretouchType(vt, opts)
+ if err != nil {
+ return err
+ }
+ for svt := range(sub) {
+ next[svt] = true
+ }
+ }
+ opts.RecursiveDepth -= 1
+ return pretouchRec(next, opts)
+}
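+
+// Usage sketch (illustrative, not part of the original change): pre-compile a
+// nested type at startup so the first Decode call avoids compilation latency.
+//
+//	type Inner struct{ B string }
+//	type Outer struct{ A Inner }
+//
+//	func init() {
+//		_ = Pretouch(reflect.TypeOf(Outer{}), option.WithCompileRecursiveDepth(2))
+//	}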
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/errors.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/errors.go
new file mode 100644
index 000000000..db0af547b
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/errors.go
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ package optdec
+
+ import (
+ "encoding/json"
+ "errors"
+ "reflect"
+ "strconv"
+
+ "github.com/bytedance/sonic/internal/rt"
+ )
+
+ /** JIT Error Helpers **/
+
+ var stackOverflow = &json.UnsupportedValueError{
+ Str: "Value nesting too deep",
+ Value: reflect.ValueOf("..."),
+ }
+
+ func error_type(vt *rt.GoType) error {
+ return &json.UnmarshalTypeError{Type: vt.Pack()}
+ }
+
+ func error_mismatch(node Node, ctx *context, typ reflect.Type) error {
+ return MismatchTypeError{
+ Pos: node.Position(),
+ Src: ctx.Parser.Json,
+ Type: typ,
+ }
+ }
+
+ func newUnmatched(pos int, vt *rt.GoType) error {
+ return MismatchTypeError{
+ Pos: pos,
+ Src: "",
+ Type: vt.Pack(),
+ }
+ }
+
+ func error_field(name string) error {
+ return errors.New("json: unknown field " + strconv.Quote(name))
+ }
+
+ func error_value(value string, vtype reflect.Type) error {
+ return &json.UnmarshalTypeError{
+ Type: vtype,
+ Value: value,
+ }
+ }
+
+ func error_syntax(pos int, src string, msg string) error {
+ return SyntaxError{
+ Pos: pos,
+ Src: src,
+ Msg: msg,
+ }
+ }
+ \ No newline at end of file
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/functor.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/functor.go
new file mode 100644
index 000000000..2a0523d5e
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/functor.go
@@ -0,0 +1,281 @@
+package optdec
+
+import (
+ "encoding/json"
+ "math"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/rt"
+ "github.com/bytedance/sonic/internal/resolver"
+)
+
+type decFunc interface {
+ FromDom(vp unsafe.Pointer, node Node, ctx *context) error
+}
+
+type ptrDecoder struct {
+ typ *rt.GoType
+ deref decFunc
+}
+
+// the pointer value is allocated by the caller
+func (d *ptrDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*unsafe.Pointer)(vp) = nil
+ return nil
+ }
+
+ if *(*unsafe.Pointer)(vp) == nil {
+ *(*unsafe.Pointer)(vp) = rt.Mallocgc(d.typ.Size, d.typ, true)
+ }
+
+ return d.deref.FromDom(*(*unsafe.Pointer)(vp), node, ctx)
+}
+
+type embeddedFieldPtrDecoder struct {
+ field resolver.FieldMeta
+ fieldDec decFunc
+ fieldName string
+}
+
+// the pointer value is allocated by the caller
+func (d *embeddedFieldPtrDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ // walk the embedded-field path; the initial subtraction cancels the first offset added in the loop
+ vp = unsafe.Pointer(uintptr(vp) - uintptr(d.field.Path[0].Size))
+ for _, f := range d.field.Path {
+ deref := rt.UnpackType(f.Type)
+ vp = unsafe.Pointer(uintptr(vp) + f.Size)
+ if f.Kind == resolver.F_deref {
+ if *(*unsafe.Pointer)(vp) == nil {
+ *(*unsafe.Pointer)(vp) = rt.Mallocgc(deref.Size, deref, true)
+ }
+ vp = *(*unsafe.Pointer)(vp)
+ }
+ }
+ return d.fieldDec.FromDom(vp, node, ctx)
+}
+
+type i8Decoder struct{}
+
+func (d *i8Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ ret, ok := node.AsI64(ctx)
+ if !ok || ret > math.MaxInt8 || ret < math.MinInt8 {
+ return error_mismatch(node, ctx, int8Type)
+ }
+
+ *(*int8)(vp) = int8(ret)
+ return nil
+}
+
+type i16Decoder struct{}
+
+func (d *i16Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ ret, ok := node.AsI64(ctx)
+ if !ok || ret > math.MaxInt16 || ret < math.MinInt16 {
+ return error_mismatch(node, ctx, int16Type)
+ }
+
+ *(*int16)(vp) = int16(ret)
+ return nil
+}
+
+type i32Decoder struct{}
+
+func (d *i32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ ret, ok := node.AsI64(ctx)
+ if !ok || ret > math.MaxInt32 || ret < math.MinInt32 {
+ return error_mismatch(node, ctx, int32Type)
+ }
+
+ *(*int32)(vp) = int32(ret)
+ return nil
+}
+
+type i64Decoder struct{}
+
+func (d *i64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ ret, ok := node.AsI64(ctx)
+ if !ok {
+ return error_mismatch(node, ctx, int64Type)
+ }
+
+ *(*int64)(vp) = int64(ret)
+ return nil
+}
+
+type u8Decoder struct{}
+
+func (d *u8Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ ret, ok := node.AsU64(ctx)
+ if !ok || ret > math.MaxUint8 {
+ err := error_mismatch(node, ctx, uint8Type)
+ return err
+ }
+
+ *(*uint8)(vp) = uint8(ret)
+ return nil
+}
+
+type u16Decoder struct{}
+
+func (d *u16Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ ret, ok := node.AsU64(ctx)
+ if !ok || ret > math.MaxUint16 {
+ return error_mismatch(node, ctx, uint16Type)
+ }
+ *(*uint16)(vp) = uint16(ret)
+ return nil
+}
+
+type u32Decoder struct{}
+
+func (d *u32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ ret, ok := node.AsU64(ctx)
+ if !ok || ret > math.MaxUint32 {
+ return error_mismatch(node, ctx, uint32Type)
+ }
+
+ *(*uint32)(vp) = uint32(ret)
+ return nil
+}
+
+type u64Decoder struct{}
+
+func (d *u64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ ret, ok := node.AsU64(ctx)
+ if !ok {
+ return error_mismatch(node, ctx, uint64Type)
+ }
+
+ *(*uint64)(vp) = uint64(ret)
+ return nil
+}
+
+type f32Decoder struct{}
+
+func (d *f32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ ret, ok := node.AsF64(ctx)
+ if !ok || ret > math.MaxFloat32 || ret < -math.MaxFloat32 {
+ return error_mismatch(node, ctx, float32Type)
+ }
+
+ *(*float32)(vp) = float32(ret)
+ return nil
+}
+
+type f64Decoder struct{}
+
+func (d *f64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ ret, ok := node.AsF64(ctx)
+ if !ok {
+ return error_mismatch(node, ctx, float64Type)
+ }
+
+ *(*float64)(vp) = float64(ret)
+ return nil
+}
+
+type boolDecoder struct {
+}
+
+func (d *boolDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ ret, ok := node.AsBool()
+ if !ok {
+ return error_mismatch(node, ctx, boolType)
+ }
+
+ *(*bool)(vp) = bool(ret)
+ return nil
+}
+
+type stringDecoder struct {
+}
+
+func (d *stringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ ret, ok := node.AsStr(ctx)
+ if !ok {
+ return error_mismatch(node, ctx, stringType)
+ }
+ *(*string)(vp) = ret
+ return nil
+}
+
+type numberDecoder struct {
+}
+
+func (d *numberDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ num, ok := node.AsNumber(ctx)
+ if !ok {
+ return error_mismatch(node, ctx, jsonNumberType)
+ }
+ *(*json.Number)(vp) = num
+ return nil
+}
+
+type recuriveDecoder struct {
+ typ *rt.GoType
+}
+
+func (d *recuriveDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ dec, err := findOrCompile(d.typ)
+ if err != nil {
+ return err
+ }
+ return dec.FromDom(vp, node, ctx)
+}
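+
+// Illustrative note (not part of the original source): resolving the decoder
+// via findOrCompile at decode time, rather than at compile time, is what
+// breaks cycles for self-referential types, e.g.:
+//
+//	type Tree struct {
+//		Kids []*Tree `json:"kids"`
+//	}
+//
+// Compiling *Tree eagerly would recurse forever; the recursive decoder defers
+// the lookup until the subtree is actually decoded.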
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/helper.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/helper.go
new file mode 100644
index 000000000..7bf8a8f39
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/helper.go
@@ -0,0 +1,110 @@
+package optdec
+
+import (
+ "encoding/json"
+ "strconv"
+
+ "github.com/bytedance/sonic/internal/native"
+ "github.com/bytedance/sonic/internal/utils"
+ "github.com/bytedance/sonic/internal/native/types"
+)
+
+
+func SkipNumberFast(json string, start int) (int, bool) {
+ // find the end of the number; the input was already parsed natively, so it is always valid
+ pos := start
+ for pos < len(json) && json[pos] != ']' && json[pos] != '}' && json[pos] != ',' {
+ if json[pos] >= '0' && json[pos] <= '9' || json[pos] == '.' || json[pos] == '-' || json[pos] == '+' || json[pos] == 'e' || json[pos] == 'E' {
+ pos += 1
+ } else {
+ break
+ }
+ }
+
+ // if no number was found, return false
+ if pos == start {
+ return pos, false
+ }
+ return pos, true
+}
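+
+// Illustrative example (not part of the original source):
+//
+//	end, ok := SkipNumberFast(`12.5e3,"next"`, 0) // end == 6, ok == true
+//	end, ok =  SkipNumberFast(`,`, 0)             // end == 0, ok == false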
+
+
+func isSpace(c byte) bool {
+ return c == ' ' || c == '\t' || c == '\n' || c == '\r'
+}
+
+// ValidNumberFast reports whether raw contains exactly one valid JSON number
+func ValidNumberFast(raw string) bool {
+ ret := utils.SkipNumber(raw, 0)
+ if ret < 0 {
+ return false
+ }
+
+ // check trailing chars
+ for ret < len(raw) {
+ return false
+ }
+
+ return true
+}
+
+func SkipOneFast2(json string, pos *int) (int, error) {
+ // skip one JSON value and return its start offset; the input was already parsed natively
+ start := native.SkipOneFast(&json, pos)
+ if start < 0 {
+ return -1, error_syntax(*pos, json, types.ParsingError(-start).Error())
+ }
+ return start, nil
+}
+
+func SkipOneFast(json string, pos int) (string, error) {
+ // skip one JSON value and return its raw text; the input was already parsed natively
+ start := native.SkipOneFast(&json, &pos)
+ if start < 0 {
+ // TODO: details error code
+ return "", error_syntax(pos, json, types.ParsingError(-start).Error())
+ }
+ return json[start:pos], nil
+}
+
+func ParseI64(raw string) (int64, error) {
+ i64, err := strconv.ParseInt(raw, 10, 64)
+ if err != nil {
+ return 0, err
+ }
+ return i64, nil
+}
+
+func ParseBool(raw string) (bool, error) {
+ var b bool
+ err := json.Unmarshal([]byte(raw), &b)
+ if err != nil {
+ return false, err
+ }
+ return b, nil
+}
+
+func ParseU64(raw string) (uint64, error) {
+ u64, err := strconv.ParseUint(raw, 10, 64)
+ if err != nil {
+ return 0, err
+ }
+ return u64, nil
+}
+
+func ParseF64(raw string) (float64, error) {
+ f64, err := strconv.ParseFloat(raw, 64)
+ if err != nil {
+ return 0, err
+ }
+ return f64, nil
+}
+
+func Unquote(raw string) (string, error) {
+ var u string
+ err := json.Unmarshal([]byte(raw), &u)
+ if err != nil {
+ return "", err
+ }
+ return u, nil
+}
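+
+// Illustrative example (not part of the original source): these helpers reuse
+// encoding/json semantics, so string inputs carry their JSON quoting:
+//
+//	s, _ := Unquote(`"a\nb"`)  // s == "a\nb" with a real newline
+//	b, _ := ParseBool("true")  // b == true
+//	i, _ := ParseI64("-42")    // i == int64(-42)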
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/interface.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/interface.go
new file mode 100644
index 000000000..0c063d55f
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/interface.go
@@ -0,0 +1,169 @@
+package optdec
+
+import (
+ "encoding"
+ "encoding/json"
+ "unsafe"
+ "reflect"
+
+ "github.com/bytedance/sonic/internal/rt"
+)
+
+type efaceDecoder struct {
+}
+
+func (d *efaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*interface{})(vp) = interface{}(nil)
+ return nil
+ }
+
+ eface := *(*rt.GoEface)(vp)
+
+ // not pointer type, or nil pointer, or *interface{}
+ if eface.Value == nil || eface.Type.Kind() != reflect.Ptr || rt.PtrElem(eface.Type) == anyType {
+ ret, err := node.AsEface(ctx)
+ if err != nil {
+ return err
+ }
+
+ *(*interface{})(vp) = ret
+ return nil
+ }
+
+ etp := rt.PtrElem(eface.Type)
+ vp = eface.Value
+
+ /* check the defined pointer type for issue 379 */
+ if eface.Type.IsNamed() {
+ newp := vp
+ etp = eface.Type
+ vp = unsafe.Pointer(&newp)
+ }
+
+ dec, err := findOrCompile(etp)
+ if err != nil {
+ return err
+ }
+
+ return dec.FromDom(vp, node, ctx)
+}
+
+type ifaceDecoder struct {
+ typ *rt.GoType
+}
+
+func (d *ifaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*unsafe.Pointer)(vp) = nil
+ return nil
+ }
+
+ iface := *(*rt.GoIface)(vp)
+ if iface.Itab == nil {
+ return error_type(d.typ)
+ }
+
+ vt := iface.Itab.Vt
+
+ // not pointer type, or nil pointer, or *interface{}
+ if vp == nil || vt.Kind() != reflect.Ptr || rt.PtrElem(vt) == anyType {
+ ret, err := node.AsEface(ctx)
+ if err != nil {
+ return err
+ }
+
+ *(*interface{})(vp) = ret
+ return nil
+ }
+
+
+ etp := rt.PtrElem(vt)
+ vp = iface.Value
+
+ /* check the defined pointer type for issue 379 */
+ if vt.IsNamed() {
+ newp := vp
+ etp = vt
+ vp = unsafe.Pointer(&newp)
+ }
+
+ dec, err := findOrCompile(etp)
+ if err != nil {
+ return err
+ }
+
+ return dec.FromDom(vp, node, ctx)
+}
+
+type unmarshalTextDecoder struct {
+ typ *rt.GoType
+}
+
+func (d *unmarshalTextDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*unsafe.Pointer)(vp) = nil
+ return nil
+ }
+
+ txt, ok := node.AsStringText(ctx)
+ if !ok {
+ return error_mismatch(node, ctx, d.typ.Pack())
+ }
+
+ v := *(*interface{})(unsafe.Pointer(&rt.GoEface{
+ Type: d.typ,
+ Value: vp,
+ }))
+
+ // fast path
+ if u, ok := v.(encoding.TextUnmarshaler); ok {
+ return u.UnmarshalText(txt)
+ }
+
+ // slow path
+ rv := reflect.ValueOf(v)
+ if u, ok := rv.Interface().(encoding.TextUnmarshaler); ok {
+ return u.UnmarshalText(txt)
+ }
+
+ return error_type(d.typ)
+}
+
+type unmarshalJSONDecoder struct {
+ typ *rt.GoType
+ strOpt bool
+}
+
+func (d *unmarshalJSONDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ v := *(*interface{})(unsafe.Pointer(&rt.GoEface{
+ Type: d.typ,
+ Value: vp,
+ }))
+
+ var input []byte
+ if d.strOpt && node.IsNull() {
+ input = []byte("null")
+ } else if d.strOpt {
+ s, ok := node.AsStringText(ctx)
+ if !ok {
+ return error_mismatch(node, ctx, d.typ.Pack())
+ }
+ input = s
+ } else {
+ input = []byte(node.AsRaw(ctx))
+ }
+
+ // fast path
+ if u, ok := v.(json.Unmarshaler); ok {
+ return u.UnmarshalJSON((input))
+ }
+
+ // slow path
+ rv := reflect.ValueOf(v)
+ if u, ok := rv.Interface().(json.Unmarshaler); ok {
+ return u.UnmarshalJSON(input)
+ }
+
+ return error_type(d.typ)
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/map.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/map.go
new file mode 100644
index 000000000..1a2bda8f3
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/map.go
@@ -0,0 +1,430 @@
+package optdec
+
+import (
+ "encoding"
+ "encoding/json"
+ "math"
+ "reflect"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/rt"
+)
+
+/** Decoder for most common map types: map[string]interface{}, map[string]string **/
+
+type mapEfaceDecoder struct {
+}
+
+func (d *mapEfaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*map[string]interface{})(vp) = nil
+ return nil
+ }
+
+ return node.AsMapEface(ctx, vp)
+}
+
+type mapStringDecoder struct {
+}
+
+func (d *mapStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*map[string]string)(vp) = nil
+ return nil
+ }
+
+ return node.AsMapString(ctx, vp)
+}
+
+/** Decoder for map with string key **/
+
+type mapStrKeyDecoder struct {
+ mapType *rt.GoMapType
+ elemDec decFunc
+ assign rt.MapStrAssign
+ typ reflect.Type
+}
+
+func (d *mapStrKeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*unsafe.Pointer)(vp) = nil
+ return nil
+ }
+
+ obj, ok := node.AsObj()
+ if !ok {
+ return error_mismatch(node, ctx, d.mapType.Pack())
+ }
+
+ // allocate map
+ m := *(*unsafe.Pointer)(vp)
+ if m == nil {
+ m = rt.Makemap(&d.mapType.GoType, obj.Len())
+ }
+
+ var gerr error
+ next := obj.Children()
+ for i := 0; i < obj.Len(); i++ {
+ keyn := NewNode(next)
+ key, _ := keyn.AsStr(ctx)
+
+ valn := NewNode(PtrOffset(next, 1))
+ valp := d.assign(d.mapType, m, key)
+ err := d.elemDec.FromDom(valp, valn, ctx)
+ if gerr == nil && err != nil {
+ gerr = err
+ }
+ next = valn.Next()
+ }
+
+ *(*unsafe.Pointer)(vp) = m
+ return gerr
+}
+
+/** Decoder for map with int32 or int64 key **/
+
+type mapI32KeyDecoder struct {
+ mapType *rt.GoMapType
+ elemDec decFunc
+ assign rt.Map32Assign
+}
+
+func (d *mapI32KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*unsafe.Pointer)(vp) = nil
+ return nil
+ }
+
+ obj, ok := node.AsObj()
+ if !ok {
+ return error_mismatch(node, ctx, d.mapType.Pack())
+ }
+
+ // allocate map
+ m := *(*unsafe.Pointer)(vp)
+ if m == nil {
+ m = rt.Makemap(&d.mapType.GoType, obj.Len())
+ }
+
+ next := obj.Children()
+ var gerr error
+ for i := 0; i < obj.Len(); i++ {
+ keyn := NewNode(next)
+ k, ok := keyn.ParseI64(ctx)
+ if !ok || k > math.MaxInt32 || k < math.MinInt32 {
+ if gerr == nil {
+ gerr = error_mismatch(keyn, ctx, d.mapType.Pack())
+ }
+ valn := NewNode(PtrOffset(next, 1))
+ next = valn.Next()
+ continue
+ }
+
+ key := int32(k)
+ ku32 := *(*uint32)(unsafe.Pointer(&key))
+ valn := NewNode(PtrOffset(next, 1))
+ valp := d.assign(d.mapType, m, ku32)
+ err := d.elemDec.FromDom(valp, valn, ctx)
+ if gerr == nil && err != nil {
+ gerr = err
+ }
+
+ next = valn.Next()
+ }
+
+ *(*unsafe.Pointer)(vp) = m
+ return gerr
+}
+
+type mapI64KeyDecoder struct {
+ mapType *rt.GoMapType
+ elemDec decFunc
+ assign rt.Map64Assign
+}
+
+func (d *mapI64KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*unsafe.Pointer)(vp) = nil
+ return nil
+ }
+
+ obj, ok := node.AsObj()
+ if !ok {
+ return error_mismatch(node, ctx, d.mapType.Pack())
+ }
+
+ // allocate map
+ m := *(*unsafe.Pointer)(vp)
+ if m == nil {
+ m = rt.Makemap(&d.mapType.GoType, obj.Len())
+ }
+
+ var gerr error
+ next := obj.Children()
+ for i := 0; i < obj.Len(); i++ {
+ keyn := NewNode(next)
+ key, ok := keyn.ParseI64(ctx)
+
+ if !ok {
+ if gerr == nil {
+ gerr = error_mismatch(keyn, ctx, d.mapType.Pack())
+ }
+ valn := NewNode(PtrOffset(next, 1))
+ next = valn.Next()
+ continue
+ }
+
+ ku64 := *(*uint64)(unsafe.Pointer(&key))
+ valn := NewNode(PtrOffset(next, 1))
+ valp := d.assign(d.mapType, m, ku64)
+ err := d.elemDec.FromDom(valp, valn, ctx)
+ if gerr == nil && err != nil {
+ gerr = err
+ }
+ next = valn.Next()
+ }
+
+ *(*unsafe.Pointer)(vp) = m
+ return gerr
+}
+
+/** Decoder for map with uint32 or uint64 key **/
+
+type mapU32KeyDecoder struct {
+ mapType *rt.GoMapType
+ elemDec decFunc
+ assign rt.Map32Assign
+}
+
+func (d *mapU32KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*unsafe.Pointer)(vp) = nil
+ return nil
+ }
+
+ obj, ok := node.AsObj()
+ if !ok {
+ return error_mismatch(node, ctx, d.mapType.Pack())
+ }
+
+ // allocate map
+ m := *(*unsafe.Pointer)(vp)
+ if m == nil {
+ m = rt.Makemap(&d.mapType.GoType, obj.Len())
+ }
+
+ var gerr error
+ next := obj.Children()
+ for i := 0; i < obj.Len(); i++ {
+ keyn := NewNode(next)
+ k, ok := keyn.ParseU64(ctx)
+ if !ok || k > math.MaxUint32 {
+ if gerr == nil {
+ gerr = error_mismatch(keyn, ctx, d.mapType.Pack())
+ }
+ valn := NewNode(PtrOffset(next, 1))
+ next = valn.Next()
+ continue
+ }
+
+ key := uint32(k)
+ valn := NewNode(PtrOffset(next, 1))
+ valp := d.assign(d.mapType, m, key)
+ err := d.elemDec.FromDom(valp, valn, ctx)
+ if gerr == nil && err != nil {
+ gerr = err
+ }
+ next = valn.Next()
+ }
+
+ *(*unsafe.Pointer)(vp) = m
+ return gerr
+}
+
+type mapU64KeyDecoder struct {
+ mapType *rt.GoMapType
+ elemDec decFunc
+ assign rt.Map64Assign
+}
+
+func (d *mapU64KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*unsafe.Pointer)(vp) = nil
+ return nil
+ }
+
+ obj, ok := node.AsObj()
+ if !ok {
+ return error_mismatch(node, ctx, d.mapType.Pack())
+ }
+ // allocate map
+ m := *(*unsafe.Pointer)(vp)
+ if m == nil {
+ m = rt.Makemap(&d.mapType.GoType, obj.Len())
+ }
+
+ var gerr error
+ next := obj.Children()
+ for i := 0; i < obj.Len(); i++ {
+ keyn := NewNode(next)
+ key, ok := keyn.ParseU64(ctx)
+ if !ok {
+ if gerr == nil {
+ gerr = error_mismatch(keyn, ctx, d.mapType.Pack())
+ }
+ valn := NewNode(PtrOffset(next, 1))
+ next = valn.Next()
+ continue
+ }
+
+ valn := NewNode(PtrOffset(next, 1))
+ valp := d.assign(d.mapType, m, key)
+ err := d.elemDec.FromDom(valp, valn, ctx)
+ if gerr == nil && err != nil {
+ gerr = err
+ }
+ next = valn.Next()
+ }
+
+ *(*unsafe.Pointer)(vp) = m
+ return gerr
+}
+
+/** Decoder for generic cases */
+
+type decKey func(dec *mapDecoder, raw string, ctx *context) (interface{}, error)
+
+func decodeKeyU8(dec *mapDecoder, raw string, ctx *context) (interface{}, error) {
+ key, err := Unquote(raw)
+ if err != nil {
+ return nil, err
+ }
+ ret, err := ParseU64(key)
+ if err != nil {
+ return nil, err
+ }
+ if ret > math.MaxUint8 {
+ return nil, error_value(key, dec.mapType.Key.Pack())
+ }
+ return uint8(ret), nil
+}
+
+func decodeKeyU16(dec *mapDecoder, raw string, ctx *context) (interface{}, error) {
+ key, err := Unquote(raw)
+ if err != nil {
+ return nil, err
+ }
+ ret, err := ParseU64(key)
+ if err != nil {
+ return nil, err
+ }
+ if ret > math.MaxUint16 {
+ return nil, error_value(key, dec.mapType.Key.Pack())
+ }
+ return uint16(ret), nil
+}
+
+func decodeKeyI8(dec *mapDecoder, raw string, ctx *context) (interface{}, error) {
+ key, err := Unquote(raw)
+ if err != nil {
+ return nil, err
+ }
+ ret, err := ParseI64(key)
+ if err != nil {
+ return nil, err
+ }
+ if ret > math.MaxInt8 || ret < math.MinInt8 {
+ return nil, error_value(key, dec.mapType.Key.Pack())
+ }
+ return int8(ret), nil
+}
+
+func decodeKeyI16(dec *mapDecoder, raw string, ctx *context) (interface{}, error) {
+ key, err := Unquote(raw)
+ if err != nil {
+ return nil, err
+ }
+ ret, err := ParseI64(key)
+ if err != nil {
+ return nil, err
+ }
+ if ret > math.MaxInt16 || ret < math.MinInt16 {
+ return nil, error_value(key, dec.mapType.Key.Pack())
+ }
+ return int16(ret), nil
+}
+
+func decodeKeyJSONUnmarshaler(dec *mapDecoder, raw string, _ *context) (interface{}, error) {
+ ret := reflect.New(dec.mapType.Key.Pack()).Interface()
+ err := ret.(json.Unmarshaler).UnmarshalJSON([]byte(raw))
+ if err != nil {
+ return nil, err
+ }
+ return ret, nil
+}
+
+func decodeKeyTextUnmarshaler(dec *mapDecoder, raw string, ctx *context) (interface{}, error) {
+ key, err := Unquote(raw)
+ if err != nil {
+ return nil, err
+ }
+ ret := reflect.New(dec.mapType.Key.Pack()).Interface()
+ err = ret.(encoding.TextUnmarshaler).UnmarshalText([]byte(key))
+ if err != nil {
+ return nil, err
+ }
+ return ret, nil
+}
+
+type mapDecoder struct {
+ mapType *rt.GoMapType
+ keyDec decKey
+ elemDec decFunc
+}
+
+func (d *mapDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*unsafe.Pointer)(vp) = nil
+ return nil
+ }
+
+ obj, ok := node.AsObj()
+ if !ok {
+ return error_mismatch(node, ctx, d.mapType.Pack())
+ }
+
+ // allocate map
+ m := *(*unsafe.Pointer)(vp)
+ if m == nil {
+ m = rt.Makemap(&d.mapType.GoType, obj.Len())
+ }
+
+ next := obj.Children()
+ var gerr error
+ for i := 0; i < obj.Len(); i++ {
+ keyn := NewNode(next)
+ raw := keyn.AsRaw(ctx)
+ key, err := d.keyDec(d, raw, ctx)
+ if err != nil {
+ if gerr == nil {
+ gerr = error_mismatch(keyn, ctx, d.mapType.Pack())
+ }
+ valn := NewNode(PtrOffset(next, 1))
+ next = valn.Next()
+ continue
+ }
+
+ valn := NewNode(PtrOffset(next, 1))
+ keyp := rt.UnpackEface(key).Value
+ valp := rt.Mapassign(d.mapType, m, keyp)
+ err = d.elemDec.FromDom(valp, valn, ctx)
+ if gerr == nil && err != nil {
+ gerr = err
+ }
+
+ next = valn.Next()
+ }
+
+ *(*unsafe.Pointer)(vp) = m
+ return gerr
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/native.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/native.go
new file mode 100644
index 000000000..29a0136ae
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/native.go
@@ -0,0 +1,269 @@
+package optdec
+
+import (
+ "fmt"
+ "reflect"
+ "unsafe"
+
+ "sync"
+
+ "github.com/bytedance/sonic/internal/native"
+ "github.com/bytedance/sonic/internal/native/types"
+ "github.com/bytedance/sonic/internal/rt"
+ "github.com/bytedance/sonic/utf8"
+)
+
+
+type ErrorCode int
+
+const (
+ SONIC_OK = 0;
+ SONIC_CONTROL_CHAR = 1;
+ SONIC_INVALID_ESCAPED = 2;
+ SONIC_INVALID_NUM = 3;
+ SONIC_FLOAT_INF = 4;
+ SONIC_EOF = 5;
+ SONIC_INVALID_CHAR = 6;
+ SONIC_EXPECT_KEY = 7;
+ SONIC_EXPECT_COLON = 8;
+ SONIC_EXPECT_OBJ_COMMA_OR_END = 9;
+ SONIC_EXPECT_ARR_COMMA_OR_END = 10;
+ SONIC_VISIT_FAILED = 11;
+ SONIC_INVALID_ESCAPED_UTF = 12;
+ SONIC_INVALID_LITERAL = 13;
+ SONIC_STACK_OVERFLOW = 14;
+)
+
+var ParsingErrors = []string{
+ SONIC_OK : "ok",
+ SONIC_CONTROL_CHAR : "control chars in string",
+ SONIC_INVALID_ESCAPED : "invalid escaped chars in string",
+ SONIC_INVALID_NUM : "invalid number",
+ SONIC_FLOAT_INF : "float infinity",
+ SONIC_EOF : "eof",
+ SONIC_INVALID_CHAR : "invalid chars",
+ SONIC_EXPECT_KEY : "expect a json key",
+ SONIC_EXPECT_COLON : "expect a `:`",
+ SONIC_EXPECT_OBJ_COMMA_OR_END : "expect a `,` or `}`",
+ SONIC_EXPECT_ARR_COMMA_OR_END : "expect a `,` or `]`",
+ SONIC_VISIT_FAILED : "failed in json visitor",
+ SONIC_INVALID_ESCAPED_UTF : "invalid escaped unicodes",
+ SONIC_INVALID_LITERAL : "invalid literal(true/false/null)",
+ SONIC_STACK_OVERFLOW : "json exceeds max depth 4096, causing stack overflow",
+}
+
+func (code ErrorCode) Error() string {
+ return ParsingErrors[code]
+}
+
+type node struct {
+ typ uint64
+ val uint64
+}
+
+// must be kept consistent with native/parser.c
+type _nospaceBlock struct {
+ _ [8]byte
+ _ [8]byte
+}
+
+// must be kept consistent with native/parser.c
+type nodeBuf struct {
+ ncur uintptr
+ parent int64
+ depth uint64
+ nstart uintptr
+ nend uintptr
+ stat jsonStat
+}
+
+func (self *nodeBuf) init(nodes []node) {
+ self.ncur = uintptr(unsafe.Pointer(&nodes[0]))
+ self.nstart = self.ncur
+ self.nend = self.ncur + uintptr(cap(nodes)) * unsafe.Sizeof(node{})
+ self.parent = -1
+}
+
+// must be kept consistent with native/parser.c
+type Parser struct {
+ Json string
+ padded []byte
+ nodes []node
+ dbuf []byte
+ backup []node
+
+ options uint64
+ // JSON cursor
+ start uintptr
+ cur uintptr
+ end uintptr
+ _nbk _nospaceBlock
+
+ // node buffer cursor
+ nbuf nodeBuf
+ Utf8Inv bool
+ isEface bool
+}
+
+// only needed when parsing non-empty objects/arrays.
+type jsonStat struct {
+ object uint32
+ array uint32
+ str uint32
+ number uint32
+ array_elems uint32
+ object_keys uint32
+ max_depth uint32
+}
+
+
+var (
+ defaultJsonPaddedCap uintptr = 1 << 20 // 1 MiB
+ defaultNodesCap uintptr = (1 << 20) / unsafe.Sizeof(node{}) // 1 MiB
+)
+
+var parsePool sync.Pool = sync.Pool {
+ New: func () interface{} {
+ return &Parser{
+ options: 0,
+ padded: make([]byte, 0, defaultJsonPaddedCap),
+ nodes: make([]node, defaultNodesCap, defaultNodesCap),
+ dbuf: make([]byte, types.MaxDigitNums, types.MaxDigitNums),
+ }
+ },
+}
+
+var padding string = "x\"x\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
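+
+// Illustrative note (not part of the original source): appending this padding
+// after the input lets the native (SIMD) scanner read fixed-width chunks past
+// the logical end of the JSON without per-byte bounds checks; the logical
+// length is restored by slicing the padding back off in newParser below.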
+
+func newParser(data string, pos int, opt uint64) *Parser {
+ p := parsePool.Get().(*Parser)
+
+ /* validate json if needed */
+ if (opt & (1 << _F_validate_string)) != 0 && !utf8.ValidateString(data){
+ dbuf := utf8.CorrectWith(nil, rt.Str2Mem(data[pos:]), "\ufffd")
+ dbuf = append(dbuf, padding...)
+ p.Json = rt.Mem2Str(dbuf[:len(dbuf) - len(padding)])
+ p.Utf8Inv = true
+ p.start = uintptr((*rt.GoString)(unsafe.Pointer(&p.Json)).Ptr)
+ } else {
+ p.Json = data
+ // TODO: prevent too large JSON
+ p.padded = append(p.padded, data[pos:]...)
+ p.padded = append(p.padded, padding...)
+ p.start = uintptr((*rt.GoSlice)(unsafe.Pointer(&p.padded)).Ptr)
+ }
+
+ p.cur = p.start
+ p.end = p.cur + uintptr(len(p.Json))
+ p.options = opt
+ p.nbuf.init(p.nodes)
+ return p
+}
+
+
+func (p *Parser) Pos() int {
+ return int(p.cur - p.start)
+}
+
+func (p *Parser) JsonBytes() []byte {
+ if p.Utf8Inv {
+ return (rt.Str2Mem(p.Json))
+ } else {
+ return p.padded
+ }
+}
+
+var nodeType = rt.UnpackType(reflect.TypeOf(node{}))
+
+//go:inline
+func calMaxNodeCap(jsonSize int) int {
+ return jsonSize / 2 + 2
+}
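+
+// Illustrative reasoning (not part of the original source): the bound holds
+// because the densest valid JSON produces at most one node per two input
+// bytes, e.g. `[1,1,1]`: every `1,` pair yields one number node, plus the
+// enclosing array node and one slot of slack, hence jsonSize/2 + 2.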
+
+func (p *Parser) parse() ErrorCode {
+ // when decoding into a concrete struct, parse numbers eagerly rather than keeping json.Number
+ old := p.options
+ if !p.isEface {
+ p.options &^= 1 << _F_use_number
+ }
+
+ // fast path with limited node buffer
+ err := ErrorCode(native.ParseWithPadding(unsafe.Pointer(p)))
+ if err != SONIC_VISIT_FAILED {
+ p.options = old
+ return err
+ }
+
+ // check OoB here
+ offset := p.nbuf.ncur - p.nbuf.nstart
+ curLen := offset / unsafe.Sizeof(node{})
+ if curLen != uintptr(len(p.nodes)) {
+ panic(fmt.Sprintf("current len: %d, real len: %d cap: %d", curLen, len(p.nodes), cap(p.nodes)))
+ }
+
+ // the node buffer was not large enough, continue parsing;
+ // maxCap is always sufficient for any valid JSON input
+ maxCap := calMaxNodeCap(len(p.Json))
+ slice := rt.GoSlice{
+ Ptr: rt.Mallocgc(uintptr(maxCap) * nodeType.Size, nodeType, false),
+ Len: maxCap,
+ Cap: maxCap,
+ }
+ rt.Memmove(unsafe.Pointer(slice.Ptr), unsafe.Pointer(&p.nodes[0]), offset)
+ p.backup = p.nodes
+ p.nodes = *(*[]node)(unsafe.Pointer(&slice))
+
+ // update node cursor
+ p.nbuf.nstart = uintptr(unsafe.Pointer(&p.nodes[0]))
+ p.nbuf.nend = p.nbuf.nstart + uintptr(cap(p.nodes)) * unsafe.Sizeof(node{})
+ p.nbuf.ncur = p.nbuf.nstart + offset
+
+ // continue parse json
+ err = ErrorCode(native.ParseWithPadding(unsafe.Pointer(p)))
+ p.options = old
+ return err
+}
+
+func (p *Parser) reset() {
+ p.options = 0
+ p.padded = p.padded[:0]
+ // the grown nodes buffer is too large to keep; restore the small backup buffer instead
+ if p.backup != nil {
+ p.nodes = p.backup
+ p.backup = nil
+ }
+ p.start = 0
+ p.cur = 0
+ p.end = 0
+ p.Json = ""
+ p.nbuf = nodeBuf{}
+ p._nbk = _nospaceBlock{}
+ p.Utf8Inv = false
+ p.isEface = false
+}
+
+func (p *Parser) free() {
+ p.reset()
+ parsePool.Put(p)
+}
+
+//go:noinline
+func (p *Parser) fixError(code ErrorCode) error {
+ if code == SONIC_OK {
+ return nil
+ }
+
+ if p.Pos() == 0 {
+ code = SONIC_EOF;
+ }
+
+ pos := p.Pos() - 1
+ return error_syntax(pos, p.Json, ParsingErrors[code])
+}
+
+func Parse(data string, opt uint64) error {
+ p := newParser(data, 0, opt)
+ err := p.parse()
+ p.free()
+ return err
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/node.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/node.go
new file mode 100644
index 000000000..3f60a3368
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/node.go
@@ -0,0 +1,1278 @@
+package optdec
+
+import (
+ "encoding/json"
+ "math"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/envs"
+ "github.com/bytedance/sonic/internal/rt"
+)
+
+type Context struct {
+ Parser *Parser
+ efacePool *efacePool
+ Stack bounedStack
+ Utf8Inv bool
+}
+
+func (ctx *Context) Options() uint64 {
+ return ctx.Parser.options
+}
+
+/************************* Stack and Pool Helper *******************/
+
+type parentStat struct {
+ con unsafe.Pointer
+ remain uint64
+}
+type bounedStack struct {
+ stack []parentStat
+ index int
+}
+
+func newStack(size int) bounedStack {
+ return bounedStack{
+ stack: make([]parentStat, size + 2),
+ index: 0,
+ }
+}
+
+//go:nosplit
+func (s *bounedStack) Pop() (unsafe.Pointer, int, bool){
+ s.index--
+ con := s.stack[s.index].con
+ remain := s.stack[s.index].remain &^ (uint64(1) << 63)
+ isObj := (s.stack[s.index].remain & (uint64(1) << 63)) != 0
+ s.stack[s.index].con = nil
+ s.stack[s.index].remain = 0
+ return con, int(remain), isObj
+}
+
+//go:nosplit
+func (s *bounedStack) Push(p unsafe.Pointer, remain int, isObj bool) {
+ s.stack[s.index].con = p
+ s.stack[s.index].remain = uint64(remain)
+ if isObj {
+ s.stack[s.index].remain |= (uint64(1) << 63)
+ }
+ s.index++
+}
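+
+// Illustrative note (not part of the original source): Push steals bit 63 of
+// `remain` to record whether the container is an object, so Pop recovers both
+// the remaining-element count and the container kind from a single word:
+//
+//	s.Push(p, 5, true)     // remain word == (1<<63) | 5
+//	_, n, isObj := s.Pop() // n == 5, isObj == true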
+
+type efacePool struct{
+ t64 rt.T64Pool
+ tslice rt.TslicePool
+ tstring rt.TstringPool
+ efaceSlice rt.SlicePool
+}
+
+func newEfacePool(stat *jsonStat, useNumber bool) *efacePool {
+ strs := int(stat.str)
+ nums := 0
+ if useNumber {
+ strs += int(stat.number)
+ } else {
+ nums = int(stat.number)
+ }
+
+ return &efacePool{
+ t64: rt.NewT64Pool(nums),
+ tslice: rt.NewTslicePool(int(stat.array)),
+ tstring: rt.NewTstringPool(strs),
+ efaceSlice: rt.NewPool(rt.AnyType, int(stat.array_elems)),
+ }
+}
+
+func (self *efacePool) GetMap(hint int) unsafe.Pointer {
+ m := make(map[string]interface{}, hint)
+ return *(*unsafe.Pointer)(unsafe.Pointer(&m))
+}
+
+func (self *efacePool) GetSlice(hint int) unsafe.Pointer {
+ return unsafe.Pointer(self.efaceSlice.GetSlice(hint))
+}
+
+func (self *efacePool) ConvTSlice(val rt.GoSlice, typ *rt.GoType, dst unsafe.Pointer) {
+ self.tslice.Conv(val, typ, (*interface{})(dst))
+}
+
+func (self *efacePool) ConvF64(val float64, dst unsafe.Pointer) {
+ self.t64.Conv(castU64(val), rt.Float64Type, (*interface{})(dst))
+}
+
+func (self *efacePool) ConvTstring(val string, dst unsafe.Pointer) {
+ self.tstring.Conv(val, (*interface{})(dst))
+}
+
+func (self *efacePool) ConvTnum(val json.Number, dst unsafe.Pointer) {
+ self.tstring.ConvNum(val, (*interface{})(dst))
+}
+
+/********************************************************/
+
+func canUseFastMap(opts uint64, root *rt.GoType) bool {
+ return envs.UseFastMap && (opts & (1 << _F_copy_string)) == 0 && (opts & (1 << _F_use_int64)) == 0 && (root == rt.AnyType || root == rt.MapEfaceType || root == rt.SliceEfaceType)
+}
+
+func NewContext(json string, pos int, opts uint64, root *rt.GoType) (Context, error) {
+ ctx := Context{
+ Parser: newParser(json, pos, opts),
+ }
+ if root == rt.AnyType || root == rt.MapEfaceType || root == rt.SliceEfaceType {
+ ctx.Parser.isEface = true
+ }
+
+ ecode := ctx.Parser.parse()
+
+ if ecode != 0 {
+ return ctx, ctx.Parser.fixError(ecode)
+ }
+
+ useNumber := (opts & (1 << _F_use_number )) != 0
+ if canUseFastMap(opts, root) {
+ ctx.efacePool = newEfacePool(&ctx.Parser.nbuf.stat, useNumber)
+ ctx.Stack = newStack(int(ctx.Parser.nbuf.stat.max_depth))
+ }
+
+ return ctx, nil
+}
+
+func (ctx *Context) Delete() {
+ ctx.Parser.free()
+ ctx.Parser = nil
+}
+
+type Node struct {
+ cptr uintptr
+}
+
+func NewNode(cptr uintptr) Node {
+ return Node{cptr: cptr}
+}
+
+type Dom struct {
+ cdom uintptr
+}
+
+func (ctx *Context) Root() Node {
+ root := (uintptr)(((*rt.GoSlice)(unsafe.Pointer(&ctx.Parser.nodes))).Ptr)
+ return Node{cptr: root}
+}
+
+type Array struct {
+ cptr uintptr
+}
+
+type Object struct {
+ cptr uintptr
+}
+
+func (obj Object) Len() int {
+ cobj := ptrCast(obj.cptr)
+ return int(uint64(cobj.val) & ConLenMask)
+}
+
+func (arr Array) Len() int {
+ carr := ptrCast(arr.cptr)
+ return int(uint64(carr.val) & ConLenMask)
+}
+
+// Helper functions to eliminate CGO calls
+func (val Node) Type() uint8 {
+ ctype := ptrCast(val.cptr)
+ return uint8(ctype.typ & TypeMask)
+}
+
+func (val Node) Next() uintptr {
+ if val.Type() != KObject && val.Type() != KArray {
+ return PtrOffset(val.cptr, 1)
+ }
+ cobj := ptrCast(val.cptr)
+ offset := int64(uint64(cobj.val) >> ConLenBits)
+ return PtrOffset(val.cptr, offset)
+}
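+
+// Illustrative note (not part of the original source): container nodes pack
+// two counts into `val`: the low 32 bits hold the element count (see Len
+// above), and the high 32 bits hold the distance, in nodes, to the sibling
+// after the container, which lets Next skip an entire subtree in O(1).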
+
+func (val *Node) next() {
+ *val = NewNode(val.Next())
+}
+
+type NodeIter struct {
+ next uintptr
+}
+
+func NewNodeIter(node Node) NodeIter {
+ return NodeIter{next: node.cptr}
+}
+
+func (iter *NodeIter) Next() Node {
+ ret := NewNode(iter.next)
+ iter.next = PtrOffset(iter.next, 1)
+ return ret
+}
+
+
+func (iter *NodeIter) Peek() Node {
+ return NewNode(iter.next)
+}
+
+func (val Node) U64() uint64 {
+ cnum := ptrCast(val.cptr)
+ return *(*uint64)((unsafe.Pointer)(&(cnum.val)))
+}
+
+func (val Node) I64() int64 {
+ cnum := ptrCast(val.cptr)
+ return *(*int64)((unsafe.Pointer)(&(cnum.val)))
+}
+
+func (val Node) IsNull() bool {
+ return val.Type() == KNull
+}
+
+func (val Node) IsNumber() bool {
+ return val.Type() & KNumber != 0
+}
+
+func (val Node) F64() float64 {
+ cnum := ptrCast(val.cptr)
+ return *(*float64)((unsafe.Pointer)(&(cnum.val)))
+}
+
+func (val Node) Bool() bool {
+ return val.Type() == KTrue
+}
+
+func (self Node) AsU64(ctx *Context) (uint64, bool) {
+ if self.Type() == KUint {
+ return self.U64(), true
+ } else if self.Type() == KRawNumber {
+ num, err := ParseU64(self.Raw(ctx))
+ if err != nil {
+ return 0, false
+ }
+ return num, true
+ } else {
+ return 0, false
+ }
+}
+
+func (val *Node) AsObj() (Object, bool) {
+ var ret Object
+ if val.Type() != KObject {
+ return ret, false
+ }
+ return Object{
+ cptr: val.cptr,
+ }, true
+}
+
+func (val Node) Obj() Object {
+ return Object{cptr: val.cptr}
+}
+
+func (val Node) Arr() Array {
+ return Array{cptr: val.cptr}
+}
+
+func (val *Node) AsArr() (Array, bool) {
+ var ret Array
+ if val.Type() != KArray {
+ return ret, false
+ }
+ return Array{
+ cptr: val.cptr,
+ }, true
+}
+
+func (self Node) AsI64(ctx *Context) (int64, bool) {
+ typ := self.Type()
+ if typ == KUint && self.U64() <= math.MaxInt64 {
+ return int64(self.U64()), true
+ } else if typ == KSint {
+ return self.I64(), true
+ } else if typ == KRawNumber {
+ val, err := self.Number(ctx).Int64()
+ if err != nil {
+ return 0, false
+ }
+ return val, true
+ } else {
+ return 0, false
+ }
+}
+
+/********* Parse Node String into Value ***************/
+
+func (val Node) ParseI64(ctx *Context) (int64, bool) {
+ s, ok := val.AsStrRef(ctx)
+ if !ok {
+ return 0, false
+ }
+
+ if s == "null" {
+ return 0, true
+ }
+
+ i, err := ParseI64(s)
+ if err != nil {
+ return 0, false
+ }
+ return i, true
+}
+
+func (val Node) ParseBool(ctx *Context) (bool, bool) {
+ s, ok := val.AsStrRef(ctx)
+ if !ok {
+ return false, false
+ }
+
+ if s == "null" {
+ return false, true
+ }
+
+ b, err := ParseBool(s)
+ if err != nil {
+ return false, false
+ }
+ return b, true
+}
+
+func (val Node) ParseU64(ctx *Context) (uint64, bool) {
+ s, ok := val.AsStrRef(ctx)
+ if !ok {
+ return 0, false
+ }
+
+ if s == "null" {
+ return 0, true
+ }
+
+ i, err := ParseU64(s)
+ if err != nil {
+ return 0, false
+ }
+ return i, true
+}
+
+func (val Node) ParseF64(ctx *Context) (float64, bool) {
+ s, ok := val.AsStrRef(ctx)
+ if !ok {
+ return 0, false
+ }
+
+ if s == "null" {
+ return 0, true
+ }
+
+ i, err := ParseF64(s)
+ if err != nil {
+ return 0, false
+ }
+ return i, true
+}
+
+func (val Node) ParseString(ctx *Context) (string, bool) {
+ // should not use AsStrRef here, the string may outlive the pooled parser buffer
+ s, ok := val.AsStr(ctx)
+ if !ok {
+ return "", false
+ }
+
+ if s == "null" {
+ return "", true
+ }
+
+ s, err := Unquote(s)
+ if err != nil {
+ return "", false
+ }
+ return s, true
+}
+
+
+func (val Node) ParseNumber(ctx *Context) (json.Number, bool) {
+ // should not use AsStrRef here, the string may outlive the pooled parser buffer
+ s, ok := val.AsStr(ctx)
+ if !ok {
+ return json.Number(""), false
+ }
+
+ if s == "null" {
+ return json.Number(""), true
+ }
+
+ end, ok := SkipNumberFast(s, 0)
+ // has error or trailing chars
+ if !ok || end != len(s) {
+ return json.Number(""), false
+ }
+ return json.Number(s), true
+}
+
+
+
+func (val Node) AsF64(ctx *Context) (float64, bool) {
+ switch val.Type() {
+ case KUint: return float64(val.U64()), true
+ case KSint: return float64(val.I64()), true
+ case KReal: return float64(val.F64()), true
+ case KRawNumber: f, err := val.Number(ctx).Float64(); return f, err == nil
+ default: return 0, false
+ }
+}
+
+func (val Node) AsBool() (bool, bool) {
+ switch val.Type() {
+ case KTrue: return true, true
+ case KFalse: return false, true
+ default: return false, false
+ }
+}
+
+func (val Node) AsStr(ctx *Context) (string, bool) {
+ switch val.Type() {
+ case KStringCommon:
+ s := val.StringRef(ctx)
+ if (ctx.Options() & (1 << _F_copy_string) == 0) {
+ return s, true
+ }
+ return string(rt.Str2Mem(s)), true
+ case KStringEscaped:
+ return val.StringCopyEsc(ctx), true
+ default: return "", false
+ }
+}
+
+func (val Node) AsStrRef(ctx *Context) (string, bool) {
+ switch val.Type() {
+ case KStringEscaped:
+ node := ptrCast(val.cptr)
+ offset := val.Position()
+ len := int(node.val)
+ return rt.Mem2Str(ctx.Parser.JsonBytes()[offset : offset + len]), true
+ case KStringCommon:
+ return val.StringRef(ctx), true
+ default:
+ return "", false
+ }
+}
+
+func (val Node) AsBytesRef(ctx *Context) ([]byte, bool) {
+ switch val.Type() {
+ case KStringEscaped:
+ node := ptrCast(val.cptr)
+ offset := val.Position()
+ len := int(node.val)
+ return ctx.Parser.JsonBytes()[offset : offset + len], true
+ case KStringCommon:
+ return rt.Str2Mem(val.StringRef(ctx)), true
+ default:
+ return nil, false
+ }
+}
+
+func (val Node) AsStringText(ctx *Context) ([]byte, bool) {
+ if !val.IsStr() {
+ return nil, false
+ }
+
+ // clone to new bytes
+ s, b := val.AsStrRef(ctx)
+ return []byte(s), b
+}
+
+func (val Node) IsStr() bool {
+ return (val.Type() == KStringCommon) || (val.Type() == KStringEscaped)
+}
+
+func (val Node) IsRawNumber() bool {
+ return val.Type() == KRawNumber
+}
+
+func (val Node) Number(ctx *Context) json.Number {
+ return json.Number(val.Raw(ctx))
+}
+
+func (val Node) Raw(ctx *Context) string {
+ node := ptrCast(val.cptr)
+ len := int(node.val)
+ offset := val.Position()
+ return ctx.Parser.Json[offset:int(offset+len)]
+}
+
+func (val Node) Position() int {
+ node := ptrCast(val.cptr)
+ return int(node.typ >> PosBits)
+}
+
+func (val Node) AsNumber(ctx *Context) (json.Number, bool) {
+ // parse JSON string as number
+ if val.IsStr() {
+ s, _ := val.AsStr(ctx)
+ if !ValidNumberFast(s) {
+ return "", false
+ } else {
+ return json.Number(s), true
+ }
+ }
+
+ return val.NonstrAsNumber(ctx)
+}
+
+func (val Node) NonstrAsNumber(ctx *Context) (json.Number, bool) {
+ // deal with raw number
+ if val.IsRawNumber() {
+ return val.Number(ctx), true
+ }
+
+ // deal with already-parsed numbers
+ if !val.IsNumber() {
+ return json.Number(""), false
+ }
+
+ start := val.Position()
+ end, ok := SkipNumberFast(ctx.Parser.Json, start)
+ if !ok {
+ return "", false
+ }
+ return json.Number(ctx.Parser.Json[start:end]), true
+}
+
+func (val Node) AsRaw(ctx *Context) string {
+ // fast path for unescaped strings
+ switch val.Type() {
+ case KNull:
+ return "null"
+ case KTrue:
+ return "true"
+ case KFalse:
+ return "false"
+ case KStringCommon:
+ node := ptrCast(val.cptr)
+ len := int(node.val)
+ offset := val.Position()
+ // include the opening and closing quotes
+ ref := rt.Str2Mem(ctx.Parser.Json)[offset-1 : offset+len+1]
+ return rt.Mem2Str(ref)
+ case KRawNumber: fallthrough
+ case KRaw: return val.Raw(ctx)
+ case KStringEscaped:
+ raw, _ := SkipOneFast(ctx.Parser.Json, val.Position() - 1)
+ return raw
+ default:
+ raw, err := SkipOneFast(ctx.Parser.Json, val.Position())
+ if err != nil {
+ break
+ }
+ return raw
+ }
+ panic("should always be valid json here")
+}
+
+// StringRef references the input JSON directly where possible
+func (val Node) StringRef(ctx *Context) string {
+ return val.Raw(ctx)
+}
+
+//go:nocheckptr
+func ptrCast(p uintptr) *node {
+ return (*node)(unsafe.Pointer(p))
+}
+
+func (val Node) StringCopyEsc(ctx *Context) string {
+ // escaped strings were decoded into the padded buffer, so copy from there
+ node := ptrCast(val.cptr)
+ len := int(node.val)
+ offset := val.Position()
+ return string(ctx.Parser.JsonBytes()[offset : offset + len])
+}
+
+func (val Node) Object() Object {
+ return Object{cptr: val.cptr}
+}
+
+func (val Node) Array() Array {
+ return Array{cptr: val.cptr}
+}
+
+func (val *Array) Children() uintptr {
+ return PtrOffset(val.cptr, 1)
+}
+
+func (val *Object) Children() uintptr {
+ return PtrOffset(val.cptr, 1)
+}
+
+func (val *Node) Equal(ctx *Context, lhs string) bool {
+ // check whether escaped
+ cstr := ptrCast(val.cptr)
+ offset := int(val.Position())
+ len := int(cstr.val)
+ return lhs == ctx.Parser.Json[offset:offset+len]
+}
+
+func (node *Node) AsMapEface(ctx *Context, vp unsafe.Pointer) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ obj, ok := node.AsObj()
+ if !ok {
+ return newUnmatched(node.Position(), rt.MapEfaceType)
+ }
+
+ var err, gerr error
+ size := obj.Len()
+
+ var m map[string]interface{}
+ if *(*unsafe.Pointer)(vp) == nil {
+ if ctx.efacePool != nil {
+ p := ctx.efacePool.GetMap(size)
+ m = *(*map[string]interface{})(unsafe.Pointer(&p))
+ } else {
+ m = make(map[string]interface{}, size)
+ }
+ } else {
+ m = *(*map[string]interface{})(vp)
+ }
+
+ next := obj.Children()
+ for i := 0; i < size; i++ {
+ knode := NewNode(next)
+ key, _ := knode.AsStr(ctx)
+ val := NewNode(PtrOffset(next, 1))
+ m[key], err = val.AsEface(ctx)
+ next = val.cptr
+ if gerr == nil && err != nil {
+ gerr = err
+ }
+ }
+
+ *(*map[string]interface{})(vp) = m
+ return gerr
+}
+
+func (node *Node) AsMapString(ctx *Context, vp unsafe.Pointer) error {
+ obj, ok := node.AsObj()
+ if !ok {
+ return newUnmatched(node.Position(), rt.MapStringType)
+ }
+
+ size := obj.Len()
+
+ var m map[string]string
+ if *(*unsafe.Pointer)(vp) == nil {
+ m = make(map[string]string, size)
+ } else {
+ m = *(*map[string]string)(vp)
+ }
+
+ var gerr error
+ next := obj.Children()
+ for i := 0; i < size; i++ {
+ knode := NewNode(next)
+ key, _ := knode.AsStr(ctx)
+ val := NewNode(PtrOffset(next, 1))
+ m[key], ok = val.AsStr(ctx)
+ if !ok {
+ if gerr == nil {
+ gerr = newUnmatched(val.Position(), rt.StringType)
+ }
+ next = val.Next()
+ } else {
+ next = PtrOffset(val.cptr, 1)
+ }
+ }
+
+ *(*map[string]string)(vp) = m
+ return gerr
+}
+
+func (node *Node) AsSliceEface(ctx *Context, vp unsafe.Pointer) error {
+ arr, ok := node.AsArr()
+ if !ok {
+ return newUnmatched(node.Position(), rt.SliceEfaceType)
+ }
+
+ size := arr.Len()
+ var s []interface{}
+ if size != 0 && ctx.efacePool != nil {
+ slice := rt.GoSlice {
+ Ptr: ctx.efacePool.GetSlice(size),
+ Len: size,
+ Cap: size,
+ }
+ *(*rt.GoSlice)(unsafe.Pointer(&s)) = slice
+ } else {
+ s = *(*[]interface{})((unsafe.Pointer)(rt.MakeSlice(vp, rt.AnyType, size)))
+ }
+
+ *node = NewNode(arr.Children())
+
+ var err, gerr error
+ for i := 0; i < size; i++ {
+ s[i], err = node.AsEface(ctx)
+ if gerr == nil && err != nil {
+ gerr = err
+ }
+ }
+
+ *(*[]interface{})(vp) = s
+ return gerr
+}
+
+func (node *Node) AsSliceI32(ctx *Context, vp unsafe.Pointer) error {
+ arr, ok := node.AsArr()
+ if !ok {
+ return newUnmatched(node.Position(), rt.SliceI32Type)
+ }
+
+ size := arr.Len()
+ s := *(*[]int32)((unsafe.Pointer)(rt.MakeSlice(vp, rt.Int32Type, size)))
+ next := arr.Children()
+
+ var gerr error
+ for i := 0; i < size; i++ {
+ val := NewNode(next)
+ ret, ok := val.AsI64(ctx)
+ if !ok || ret > math.MaxInt32 || ret < math.MinInt32 {
+ if gerr == nil {
+ gerr = newUnmatched(val.Position(), rt.Int32Type)
+ }
+ next = val.Next()
+ } else {
+ s[i] = int32(ret)
+ next = PtrOffset(val.cptr, 1)
+ }
+ }
+
+ *(*[]int32)(vp) = s
+ return gerr
+}
+
+func (node *Node) AsSliceI64(ctx *Context, vp unsafe.Pointer) error {
+ arr, ok := node.AsArr()
+ if !ok {
+ return newUnmatched(node.Position(), rt.SliceI64Type)
+ }
+
+ size := arr.Len()
+ s := *(*[]int64)((unsafe.Pointer)(rt.MakeSlice(vp, rt.Int64Type, size)))
+ next := arr.Children()
+
+ var gerr error
+ for i := 0; i < size; i++ {
+ val := NewNode(next)
+
+ ret, ok := val.AsI64(ctx)
+ if !ok {
+ if gerr == nil {
+ gerr = newUnmatched(val.Position(), rt.Int64Type)
+ }
+ next = val.Next()
+ } else {
+ s[i] = ret
+ next = PtrOffset(val.cptr, 1)
+ }
+ }
+
+ *(*[]int64)(vp) = s
+ return gerr
+}
+
+func (node *Node) AsSliceU32(ctx *Context, vp unsafe.Pointer) error {
+ arr, ok := node.AsArr()
+ if !ok {
+ return newUnmatched(node.Position(), rt.SliceU32Type)
+ }
+
+ size := arr.Len()
+ next := arr.Children()
+ s := *(*[]uint32)((unsafe.Pointer)(rt.MakeSlice(vp, rt.Uint32Type, size)))
+
+ var gerr error
+ for i := 0; i < size; i++ {
+ val := NewNode(next)
+ ret, ok := val.AsU64(ctx)
+ if !ok || ret > math.MaxUint32 {
+ if gerr == nil {
+ gerr = newUnmatched(val.Position(), rt.Uint32Type)
+ }
+ next = val.Next()
+ } else {
+ s[i] = uint32(ret)
+ next = PtrOffset(val.cptr, 1)
+ }
+ }
+
+ *(*[]uint32)(vp) = s
+ return gerr
+}
+
+func (node *Node) AsSliceU64(ctx *Context, vp unsafe.Pointer) error {
+ arr, ok := node.AsArr()
+ if !ok {
+ return newUnmatched(node.Position(), rt.SliceU64Type)
+ }
+
+ size := arr.Len()
+ next := arr.Children()
+
+ s := *(*[]uint64)((unsafe.Pointer)(rt.MakeSlice(vp, rt.Uint64Type, size)))
+ var gerr error
+ for i := 0; i < size; i++ {
+ val := NewNode(next)
+ ret, ok := val.AsU64(ctx)
+ if !ok {
+ if gerr == nil {
+ gerr = newUnmatched(val.Position(), rt.Uint64Type)
+ }
+ next = val.Next()
+ } else {
+ s[i] = ret
+ next = PtrOffset(val.cptr, 1)
+ }
+ }
+
+ *(*[]uint64)(vp) = s
+ return gerr
+}
+
+func (node *Node) AsSliceString(ctx *Context, vp unsafe.Pointer) error {
+ arr, ok := node.AsArr()
+ if !ok {
+ return newUnmatched(node.Position(), rt.SliceStringType)
+ }
+
+ size := arr.Len()
+ next := arr.Children()
+ s := *(*[]string)((unsafe.Pointer)(rt.MakeSlice(vp, rt.StringType, size)))
+
+ var gerr error
+ for i := 0; i < size; i++ {
+ val := NewNode(next)
+ ret, ok := val.AsStr(ctx)
+ if !ok {
+ if gerr == nil {
+ gerr = newUnmatched(val.Position(), rt.StringType)
+ }
+ next = val.Next()
+ } else {
+ s[i] = ret
+ next = PtrOffset(val.cptr, 1)
+ }
+ }
+
+ *(*[]string)(vp) = s
+ return gerr
+}
+
+func (node *Node) AsSliceBytes(ctx *Context) ([]byte, error) {
+ b, ok := node.AsBytesRef(ctx)
+ if !ok {
+ return nil, newUnmatched(node.Position(), rt.BytesType)
+ }
+
+ b64, err := rt.DecodeBase64(b)
+ if err != nil {
+ return nil, newUnmatched(node.Position(), rt.BytesType)
+ }
+ return b64, nil
+}
+
+// AsEface always succeeds here, because the JSON was already validated by the native parser.
+func (node *Node) AsEface(ctx *Context) (interface{}, error) {
+ if ctx.efacePool != nil {
+ iter := NewNodeIter(*node)
+ v := AsEfaceFast(&iter, ctx)
+ *node = iter.Peek()
+ return v, nil
+ } else {
+ return node.AsEfaceFallback(ctx)
+ }
+}
+
+func parseSingleNode(node Node, ctx *Context) interface{} {
+ var v interface{}
+ switch node.Type() {
+ case KObject: v = map[string]interface{}{}
+ case KArray: v = []interface{}{}
+ case KStringCommon: v = node.StringRef(ctx)
+ case KStringEscaped: v = node.StringCopyEsc(ctx)
+ case KTrue: v = true
+ case KFalse: v = false
+ case KNull: v = nil
+ case KUint: v = float64(node.U64())
+ case KSint: v = float64(node.I64())
+ case KReal: v = node.F64()
+ case KRawNumber: v = node.Number(ctx)
+ default: panic("unreachable for as eface")
+ }
+ return v
+}
+
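+// castU64 reinterprets the bits of a float64 as a uint64; no numeric
+// conversion is performed.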
+func castU64(val float64) uint64 {
+ return *((*uint64)(unsafe.Pointer((&val))))
+}
+
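+// AsEfaceFast converts the parsed DOM into an interface{} tree iteratively:
+// nested containers are saved on an explicit stack (ctx.Stack) and control
+// jumps between the _object_key and _arr_val labels, so deeply nested input
+// cannot overflow the goroutine stack, and eface allocations are served from
+// ctx.efacePool.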
+func AsEfaceFast(iter *NodeIter, ctx *Context) interface{} {
+ var mp, sp, parent unsafe.Pointer // current container pointer
+ var node Node
+ var size int
+ var isObj bool
+ var slice rt.GoSlice
+ var val unsafe.Pointer
+ var vt **rt.GoType
+ var vp *unsafe.Pointer
+ var rootM unsafe.Pointer
+ var rootS rt.GoSlice
+ var root interface{}
+ var key string
+
+ node = iter.Next()
+
+ switch node.Type() {
+ case KObject:
+ size = node.Object().Len()
+ if size != 0 {
+ ctx.Stack.Push(nil, 0, true)
+ mp = ctx.efacePool.GetMap(size)
+ rootM = mp
+ isObj = true
+ goto _object_key
+ } else {
+ return rt.GoEface{
+ Type: rt.MapEfaceType,
+ Value: ctx.efacePool.GetMap(0),
+ }.Pack()
+ }
+ case KArray:
+ size = node.Array().Len()
+ if size != 0 {
+ ctx.Stack.Push(nil, 0, false)
+ sp = ctx.efacePool.GetSlice(size)
+ slice = rt.GoSlice{
+ Ptr: sp,
+ Len: size,
+ Cap: size,
+ }
+ rootS = slice
+ isObj = false
+ val = sp
+ goto _arr_val
+ } else {
+ ctx.efacePool.ConvTSlice(rt.EmptySlice, rt.SliceEfaceType, unsafe.Pointer(&root))
+ }
+ case KStringCommon: ctx.efacePool.ConvTstring(node.StringRef(ctx), unsafe.Pointer(&root))
+ case KStringEscaped: ctx.efacePool.ConvTstring(node.StringCopyEsc(ctx), unsafe.Pointer(&root))
+ case KTrue: root = true
+ case KFalse: root = false
+ case KNull: root = nil
+ case KUint: ctx.efacePool.ConvF64(float64(node.U64()), unsafe.Pointer(&root))
+ case KSint: ctx.efacePool.ConvF64(float64(node.I64()), unsafe.Pointer(&root))
+ case KReal: ctx.efacePool.ConvF64(node.F64(), unsafe.Pointer(&root))
+ case KRawNumber: ctx.efacePool.ConvTnum(node.Number(ctx), unsafe.Pointer(&root))
+ default: panic("unreachable for as eface")
+ }
+ return root
+
+_object_key:
+ node = iter.Next()
+ if node.Type() == KStringCommon {
+ key = node.StringRef(ctx)
+ } else {
+ key = node.StringCopyEsc(ctx)
+ }
+
+ // interface{} slot in map bucket
+ val = rt.Mapassign_faststr(rt.MapEfaceMapType, mp, key)
+ vt = &(*rt.GoEface)(val).Type
+ vp = &(*rt.GoEface)(val).Value
+
+ // parse value node
+ node = iter.Next()
+ switch node.Type() {
+ case KObject:
+ newSize := node.Object().Len()
+ newMp := ctx.efacePool.GetMap(newSize)
+ *vt = rt.MapEfaceType
+ *vp = newMp
+ remain := size - 1
+ isObj = true
+ if newSize != 0 {
+ if remain > 0 {
+ ctx.Stack.Push(mp, remain, true)
+ }
+ mp = newMp
+ size = newSize
+ goto _object_key
+ }
+ case KArray:
+ newSize := node.Array().Len()
+ if newSize == 0 {
+ ctx.efacePool.ConvTSlice(rt.EmptySlice, rt.SliceEfaceType, val)
+ break
+ }
+
+ newSp := ctx.efacePool.GetSlice(newSize)
+ // pack to []interface{}
+ ctx.efacePool.ConvTSlice(rt.GoSlice{
+ Ptr: newSp,
+ Len: newSize,
+ Cap: newSize,
+ }, rt.SliceEfaceType, val)
+ remain := size - 1
+ if remain > 0 {
+ ctx.Stack.Push(mp, remain, true)
+ }
+ val = newSp
+ isObj = false
+ size = newSize
+ goto _arr_val
+ case KStringCommon:
+ ctx.efacePool.ConvTstring(node.StringRef(ctx), val)
+ case KStringEscaped:
+ ctx.efacePool.ConvTstring(node.StringCopyEsc(ctx), val)
+ case KTrue:
+ rt.ConvTBool(true, (*interface{})(val))
+ case KFalse:
+ rt.ConvTBool(false, (*interface{})(val))
+ case KNull: /* skip */
+ case KUint:
+ ctx.efacePool.ConvF64(float64(node.U64()), val)
+ case KSint:
+ ctx.efacePool.ConvF64(float64(node.I64()), val)
+ case KReal:
+ ctx.efacePool.ConvF64(node.F64(), val)
+ case KRawNumber:
+ ctx.efacePool.ConvTnum(node.Number(ctx), val)
+ default:
+ panic("unreachable for as eface")
+ }
+
+ // one member decoded; continue with the next key if any remain
+ size -= 1
+ if size != 0 {
+ goto _object_key
+ }
+
+ parent, size, isObj = ctx.Stack.Pop()
+
+ // no parent container left: we are back at the root
+ if parent == nil {
+ if isObj {
+ return rt.GoEface{
+ Type: rt.MapEfaceType,
+ Value: rootM,
+ }.Pack()
+ } else {
+ ctx.efacePool.ConvTSlice(rootS, rt.SliceEfaceType, (unsafe.Pointer)(&root))
+ return root
+ }
+ }
+
+ // continue parsing the parent container
+ if isObj {
+ mp = parent
+ goto _object_key
+ } else {
+ val = rt.PtrAdd(parent, rt.AnyType.Size)
+ goto _arr_val
+ }
+
+_arr_val:
+ // interface{} slot in slice
+ vt = &(*rt.GoEface)(val).Type
+ vp = &(*rt.GoEface)(val).Value
+
+ // parse value node
+ node = iter.Next()
+ switch node.Type() {
+ case KObject:
+ newSize := node.Object().Len()
+ newMp := ctx.efacePool.GetMap(newSize)
+ *vt = rt.MapEfaceType
+ *vp = newMp
+ remain := size - 1
+ if newSize != 0 {
+ // save the rest of this array so decoding resumes here after the nested object
+ if remain > 0 {
+ ctx.Stack.Push(val, remain, false)
+ }
+ mp = newMp
+ size = newSize
+ isObj = true
+ goto _object_key
+ }
+ case KArray:
+ newSize := node.Array().Len()
+ if newSize == 0 {
+ ctx.efacePool.ConvTSlice(rt.EmptySlice, rt.SliceEfaceType, val)
+ break
+ }
+
+ newSp := ctx.efacePool.GetSlice(newSize)
+ // pack to []interface{}
+ ctx.efacePool.ConvTSlice(rt.GoSlice{
+ Ptr: newSp,
+ Len: newSize,
+ Cap: newSize,
+ }, rt.SliceEfaceType, val)
+
+ remain := size - 1
+ if remain > 0 {
+ ctx.Stack.Push(val, remain, false)
+ }
+
+ val = newSp
+ isObj = false
+ size = newSize
+ goto _arr_val
+ case KStringCommon:
+ ctx.efacePool.ConvTstring(node.StringRef(ctx), val)
+ case KStringEscaped:
+ ctx.efacePool.ConvTstring(node.StringCopyEsc(ctx), val)
+ case KTrue:
+ rt.ConvTBool(true, (*interface{})(val))
+ case KFalse:
+ rt.ConvTBool(false, (*interface{})(val))
+ case KNull: /* skip */
+ case KUint:
+ ctx.efacePool.ConvF64(float64(node.U64()), val)
+ case KSint:
+ ctx.efacePool.ConvF64(float64(node.I64()), val)
+ case KReal:
+ ctx.efacePool.ConvF64(node.F64(), val)
+ case KRawNumber:
+ ctx.efacePool.ConvTnum(node.Number(ctx), val)
+ default: panic("unreachable for as eface")
+ }
+
+ // one element decoded; advance to the next slot if any remain
+ size -= 1
+ if size != 0 {
+ val = rt.PtrAdd(val, rt.AnyType.Size)
+ goto _arr_val
+ }
+
+ parent, size, isObj = ctx.Stack.Pop()
+
+ // no parent container left: we are back at the root
+ if parent == nil {
+ if isObj {
+ return rt.GoEface{
+ Type: rt.MapEfaceType,
+ Value: rootM,
+ }.Pack()
+ } else {
+ ctx.efacePool.ConvTSlice(rootS, rt.SliceEfaceType, unsafe.Pointer(&root))
+ return root
+ }
+ }
+
+ // continue parsing the parent container
+ if isObj {
+ mp = parent
+ goto _object_key
+ } else {
+ val = rt.PtrAdd(parent, rt.AnyType.Size)
+ goto _arr_val
+ }
+}
+
+func (node *Node) AsEfaceFallback(ctx *Context) (interface{}, error) {
+ switch node.Type() {
+ case KObject:
+ obj := node.Object()
+ size := obj.Len()
+ m := make(map[string]interface{}, size)
+ *node = NewNode(obj.Children())
+ var gerr, err error
+ for i := 0; i < size; i++ {
+ key, _ := node.AsStr(ctx)
+ *node = NewNode(PtrOffset(node.cptr, 1))
+ m[key], err = node.AsEfaceFallback(ctx)
+ if gerr == nil && err != nil {
+ gerr = err
+ }
+ }
+ return m, gerr
+ case KArray:
+ arr := node.Array()
+ size := arr.Len()
+ a := make([]interface{}, size)
+ *node = NewNode(arr.Children())
+ var gerr, err error
+ for i := 0; i < size; i++ {
+ a[i], err = node.AsEfaceFallback(ctx)
+ if gerr == nil && err != nil {
+ gerr = err
+ }
+ }
+ return a, gerr
+ case KStringCommon:
+ str, _ := node.AsStr(ctx)
+ *node = NewNode(PtrOffset(node.cptr, 1))
+ return str, nil
+ case KStringEscaped:
+ str := node.StringCopyEsc(ctx)
+ *node = NewNode(PtrOffset(node.cptr, 1))
+ return str, nil
+ case KTrue:
+ *node = NewNode(PtrOffset(node.cptr, 1))
+ return true, nil
+ case KFalse:
+ *node = NewNode(PtrOffset(node.cptr, 1))
+ return false, nil
+ case KNull:
+ *node = NewNode(PtrOffset(node.cptr, 1))
+ return nil, nil
+ default:
+ // use float64
+ if ctx.Parser.options & (1 << _F_use_number) != 0 {
+ num, ok := node.AsNumber(ctx)
+ if !ok {
+ // skip the unmatched type
+ *node = NewNode(node.Next())
+ return nil, newUnmatched(node.Position(), rt.JsonNumberType)
+ } else {
+ *node = NewNode(PtrOffset(node.cptr, 1))
+ return num, nil
+ }
+ } else if ctx.Parser.options & (1 << _F_use_int64) != 0 {
+ // first try int64
+ i, ok := node.AsI64(ctx)
+ if ok {
+ *node = NewNode(PtrOffset(node.cptr, 1))
+ return i, nil
+ }
+
+ // is not integer, then use float64
+ f, ok := node.AsF64(ctx)
+ if ok {
+ *node = NewNode(PtrOffset(node.cptr, 1))
+ return f, nil
+ }
+
+ // skip the unmatched type
+ *node = NewNode(node.Next())
+ return nil, newUnmatched(node.Position(), rt.Int64Type)
+ } else {
+ num, ok := node.AsF64(ctx)
+ if !ok {
+ // skip the unmatched type
+ *node = NewNode(node.Next())
+ return nil, newUnmatched(node.Position(), rt.Float64Type)
+ } else {
+ *node = NewNode(PtrOffset(node.cptr, 1))
+ return num, nil
+ }
+ }
+ }
+}
+
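+// PtrOffset advances ptr by off node-sized slots; the stride is
+// unsafe.Sizeof(node{}), so an offset of 1 moves to the next node in the
+// parsed node array.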
+//go:nosplit
+func PtrOffset(ptr uintptr, off int64) uintptr {
+ return uintptr(int64(ptr) + off * int64(unsafe.Sizeof(node{})))
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/slice.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/slice.go
new file mode 100644
index 000000000..a94e422b3
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/slice.go
@@ -0,0 +1,224 @@
+package optdec
+
+import (
+ "reflect"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/rt"
+)
+
+type sliceDecoder struct {
+ elemType *rt.GoType
+ elemDec decFunc
+ typ reflect.Type
+}
+
+var (
+ emptyPtr = &struct{}{}
+)
+
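+// FromDom decodes a JSON array into the Go slice at vp. A mismatched element
+// does not abort decoding: the first element error is remembered and returned
+// after the whole array has been processed.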
+func (d *sliceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*rt.GoSlice)(vp) = rt.GoSlice{}
+ return nil
+ }
+
+ arr, ok := node.AsArr()
+ if !ok {
+ return error_mismatch(node, ctx, d.typ)
+ }
+
+ slice := rt.MakeSlice(vp, d.elemType, arr.Len())
+ elems := slice.Ptr
+ next := arr.Children()
+
+ var gerr error
+ for i := 0; i < arr.Len(); i++ {
+ val := NewNode(next)
+ elem := unsafe.Pointer(uintptr(elems) + uintptr(i)*d.elemType.Size)
+ err := d.elemDec.FromDom(elem, val, ctx)
+ if gerr == nil && err != nil {
+ gerr = err
+ }
+ next = val.Next()
+ }
+
+ *(*rt.GoSlice)(vp) = *slice
+ return gerr
+}
+
+type arrayDecoder struct {
+ len int
+ elemType *rt.GoType
+ elemDec decFunc
+ typ reflect.Type
+}
+
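+// FromDom decodes a JSON array into a fixed-length Go array: surplus JSON
+// elements are ignored, and any unfilled trailing slots are zeroed.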
+//go:nocheckptr
+func (d *arrayDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ arr, ok := node.AsArr()
+ if !ok {
+ return error_mismatch(node, ctx, d.typ)
+ }
+
+ next := arr.Children()
+ i := 0
+
+ var gerr error
+ for ; i < d.len && i < arr.Len(); i++ {
+ elem := unsafe.Pointer(uintptr(vp) + uintptr(i)*d.elemType.Size)
+ val := NewNode(next)
+ err := d.elemDec.FromDom(elem, val, ctx)
+ if gerr == nil && err != nil {
+ gerr = err
+ }
+ next = val.Next()
+ }
+
+ /* zero rest of array */
+ ptr := unsafe.Pointer(uintptr(vp) + uintptr(i)*d.elemType.Size)
+ n := uintptr(d.len-i) * d.elemType.Size
+ rt.ClearMemory(d.elemType, ptr, n)
+ return gerr
+}
+
+type sliceEfaceDecoder struct {
+}
+
+func (d *sliceEfaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*rt.GoSlice)(vp) = rt.GoSlice{}
+ return nil
+ }
+
+ return node.AsSliceEface(ctx, vp)
+}
+
+type sliceI32Decoder struct {
+}
+
+func (d *sliceI32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*rt.GoSlice)(vp) = rt.GoSlice{}
+ return nil
+ }
+
+ return node.AsSliceI32(ctx, vp)
+}
+
+type sliceI64Decoder struct {
+}
+
+func (d *sliceI64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*rt.GoSlice)(vp) = rt.GoSlice{}
+ return nil
+ }
+
+ return node.AsSliceI64(ctx, vp)
+}
+
+type sliceU32Decoder struct {
+}
+
+func (d *sliceU32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*rt.GoSlice)(vp) = rt.GoSlice{}
+ return nil
+ }
+
+ return node.AsSliceU32(ctx, vp)
+}
+
+type sliceU64Decoder struct {
+}
+
+func (d *sliceU64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*rt.GoSlice)(vp) = rt.GoSlice{}
+ return nil
+ }
+
+ return node.AsSliceU64(ctx, vp)
+}
+
+type sliceStringDecoder struct {
+}
+
+func (d *sliceStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*rt.GoSlice)(vp) = rt.GoSlice{}
+ return nil
+ }
+
+ return node.AsSliceString(ctx, vp)
+}
+
+type sliceBytesDecoder struct {
+}
+
+func (d *sliceBytesDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*rt.GoSlice)(vp) = rt.GoSlice{}
+ return nil
+ }
+
+ s, err := node.AsSliceBytes(ctx)
+ if err != nil {
+ return err
+ }
+
+ *(*[]byte)(vp) = s
+ return nil
+}
+
+type sliceBytesUnmarshalerDecoder struct {
+ elemType *rt.GoType
+ elemDec decFunc
+ typ reflect.Type
+}
+
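+// FromDom decodes into a byte slice whose element type implements
+// json.Unmarshaler: a JSON string is decoded as base64, while a JSON array is
+// decoded element by element through the unmarshaler.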
+func (d *sliceBytesUnmarshalerDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*rt.GoSlice)(vp) = rt.GoSlice{}
+ return nil
+ }
+
+ /* parse JSON string into `[]byte` */
+ if node.IsStr() {
+ slice, err := node.AsSliceBytes(ctx)
+ if err != nil {
+ return err
+ }
+ *(*[]byte)(vp) = slice
+ return nil
+ }
+
+ /* parse JSON array into `[]byte` */
+ arr, ok := node.AsArr()
+ if !ok {
+ return error_mismatch(node, ctx, d.typ)
+ }
+
+ slice := rt.MakeSlice(vp, d.elemType, arr.Len())
+ elems := slice.Ptr
+
+ var gerr error
+ next := arr.Children()
+ for i := 0; i < arr.Len(); i++ {
+ child := NewNode(next)
+ elem := unsafe.Pointer(uintptr(elems) + uintptr(i)*d.elemType.Size)
+ err := d.elemDec.FromDom(elem, child, ctx)
+ if gerr == nil && err != nil {
+ gerr = err
+ }
+ next = child.Next()
+ }
+
+ *(*rt.GoSlice)(vp) = *slice
+ return gerr
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/stringopts.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/stringopts.go
new file mode 100644
index 000000000..5af8c97e2
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/stringopts.go
@@ -0,0 +1,360 @@
+package optdec
+
+import (
+ "encoding/json"
+ "math"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/rt"
+)
+
+type ptrStrDecoder struct {
+ typ *rt.GoType
+ deref decFunc
+}
+
+// The pointer slot at vp is provided by the caller; the pointee is allocated lazily below if the slot is still nil.
+func (d *ptrStrDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ *(*unsafe.Pointer)(vp) = nil
+ return nil
+ }
+
+ s, ok := node.AsStrRef(ctx)
+ if !ok {
+ return error_mismatch(node, ctx, stringType)
+ }
+
+ if s == "null" {
+ *(*unsafe.Pointer)(vp) = nil
+ return nil
+ }
+
+ if *(*unsafe.Pointer)(vp) == nil {
+ *(*unsafe.Pointer)(vp) = rt.Mallocgc(d.typ.Size, d.typ, true)
+ }
+
+ return d.deref.FromDom(*(*unsafe.Pointer)(vp), node, ctx)
+}
+
+type boolStringDecoder struct {
+}
+
+func (d *boolStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ s, ok := node.AsStrRef(ctx)
+ if !ok {
+ return error_mismatch(node, ctx, stringType)
+ }
+
+ if s == "null" {
+ return nil
+ }
+
+ b, err := ParseBool(s)
+ if err != nil {
+ return error_mismatch(node, ctx, boolType)
+ }
+
+ *(*bool)(vp) = b
+ return nil
+}
+
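+// parseI64 parses a string-quoted integer; the trailing bool reports that the
+// value was null (either JSON null or the string "null") and should be skipped.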
+func parseI64(node Node, ctx *context) (int64, error, bool) {
+ if node.IsNull() {
+ return 0, nil, true
+ }
+
+ s, ok := node.AsStrRef(ctx)
+ if !ok {
+ return 0, error_mismatch(node, ctx, stringType), false
+ }
+
+ if s == "null" {
+ return 0, nil, true
+ }
+
+ ret, err := ParseI64(s)
+ return ret, err, false
+}
+
+type i8StringDecoder struct{}
+
+func (d *i8StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ ret, err, null := parseI64(node, ctx)
+ if null {
+ return nil
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if ret > math.MaxInt8 || ret < math.MinInt8 {
+ return error_mismatch(node, ctx, int8Type)
+ }
+
+ *(*int8)(vp) = int8(ret)
+ return nil
+}
+
+type i16StringDecoder struct{}
+
+func (d *i16StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ ret, err, null := parseI64(node, ctx)
+ if null {
+ return nil
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if ret > math.MaxInt16 || ret < math.MinInt16 {
+ return error_mismatch(node, ctx, int16Type)
+ }
+
+ *(*int16)(vp) = int16(ret)
+ return nil
+}
+
+type i32StringDecoder struct{}
+
+func (d *i32StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ ret, err, null := parseI64(node, ctx)
+ if null {
+ return nil
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if ret > math.MaxInt32 || ret < math.MinInt32 {
+ return error_mismatch(node, ctx, int32Type)
+ }
+
+ *(*int32)(vp) = int32(ret)
+ return nil
+}
+
+type i64StringDecoder struct{}
+
+func (d *i64StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ ret, err, null := parseI64(node, ctx)
+ if null {
+ return nil
+ }
+
+ if err != nil {
+ return err
+ }
+
+ *(*int64)(vp) = ret
+ return nil
+}
+
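+// parseU64 is the unsigned counterpart of parseI64; the trailing bool again
+// flags a null value.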
+func parseU64(node Node, ctx *context) (uint64, error, bool) {
+ if node.IsNull() {
+ return 0, nil, true
+ }
+
+ s, ok := node.AsStrRef(ctx)
+ if !ok {
+ return 0, error_mismatch(node, ctx, stringType), false
+ }
+
+ if s == "null" {
+ return 0, nil, true
+ }
+
+ ret, err := ParseU64(s)
+ return ret, err, false
+}
+
+type u8StringDecoder struct{}
+
+func (d *u8StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ ret, err, null := parseU64(node, ctx)
+ if null {
+ return nil
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if ret > math.MaxUint8 {
+ return error_mismatch(node, ctx, uint8Type)
+ }
+
+ *(*uint8)(vp) = uint8(ret)
+ return nil
+}
+
+type u16StringDecoder struct{}
+
+func (d *u16StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ ret, err, null := parseU64(node, ctx)
+ if null {
+ return nil
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if ret > math.MaxUint16 {
+ return error_mismatch(node, ctx, uint16Type)
+ }
+
+ *(*uint16)(vp) = uint16(ret)
+ return nil
+}
+
+type u32StringDecoder struct{}
+
+func (d *u32StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ ret, err, null := parseU64(node, ctx)
+ if null {
+ return nil
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if ret > math.MaxUint32 {
+ return error_mismatch(node, ctx, uint32Type)
+ }
+
+ *(*uint32)(vp) = uint32(ret)
+ return nil
+}
+
+type u64StringDecoder struct{}
+
+func (d *u64StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ ret, err, null := parseU64(node, ctx)
+ if null {
+ return nil
+ }
+
+ if err != nil {
+ return err
+ }
+
+ *(*uint64)(vp) = ret
+ return nil
+}
+
+type f32StringDecoder struct{}
+
+func (d *f32StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ s, ok := node.AsStrRef(ctx)
+ if !ok {
+ return error_mismatch(node, ctx, stringType)
+ }
+
+ if s == "null" {
+ return nil
+ }
+
+ ret, err := ParseF64(s)
+ if err != nil || ret > math.MaxFloat32 || ret < -math.MaxFloat32 {
+ return error_mismatch(node, ctx, float32Type)
+ }
+
+ *(*float32)(vp) = float32(ret)
+ return nil
+}
+
+type f64StringDecoder struct{}
+
+func (d *f64StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ s, ok := node.AsStrRef(ctx)
+ if !ok {
+ return error_mismatch(node, ctx, stringType)
+ }
+
+ if s == "null" {
+ return nil
+ }
+
+ ret, err := ParseF64(s)
+ if err != nil {
+ return error_mismatch(node, ctx, float64Type)
+ }
+
+ *(*float64)(vp) = ret
+ return nil
+}
+
+/* parse string field with string options */
+type strStringDecoder struct{}
+
+func (d *strStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ s, ok := node.AsStrRef(ctx)
+ if !ok {
+ return error_mismatch(node, ctx, stringType)
+ }
+
+ if s == "null" {
+ return nil
+ }
+
+ s, err := Unquote(s)
+ if err != nil {
+ return error_mismatch(node, ctx, stringType)
+ }
+
+ *(*string)(vp) = s
+ return nil
+}
+
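+// numberStringDecoder decodes a string-quoted json.Number, re-validating the
+// literal so that trailing characters inside the quotes are rejected.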
+type numberStringDecoder struct{}
+
+func (d *numberStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ s, ok := node.AsStrRef(ctx)
+ if !ok {
+ return error_mismatch(node, ctx, stringType)
+ }
+
+ if s == "null" {
+ return nil
+ }
+
+ num, ok := node.ParseNumber(ctx)
+ if !ok {
+ return error_mismatch(node, ctx, jsonNumberType)
+ }
+
+ end, ok := SkipNumberFast(s, 0)
+ // has error or trailing chars
+ if !ok || end != len(s) {
+ return error_mismatch(node, ctx, jsonNumberType)
+ }
+
+ *(*json.Number)(vp) = json.Number(num)
+ return nil
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/structs.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/structs.go
new file mode 100644
index 000000000..bce2758f1
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/structs.go
@@ -0,0 +1,61 @@
+package optdec
+
+import (
+ "reflect"
+ "unsafe"
+
+ caching "github.com/bytedance/sonic/internal/optcaching"
+ "github.com/bytedance/sonic/internal/resolver"
+)
+
+type fieldEntry struct {
+ resolver.FieldMeta
+ fieldDec decFunc
+}
+
+type structDecoder struct {
+ fieldMap caching.FieldLookup
+ fields []fieldEntry
+ structName string
+ typ reflect.Type
+}
+
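+// FromDom walks the object's key/value pairs, resolves each key through the
+// cached field-lookup table, and decodes the value into the matching field.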
+func (d *structDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ var gerr error
+ obj, ok := node.AsObj()
+ if !ok {
+ return error_mismatch(node, ctx, d.typ)
+ }
+
+ next := obj.Children()
+ for i := 0; i < obj.Len(); i++ {
+ key, _ := NewNode(next).AsStrRef(ctx)
+ val := NewNode(PtrOffset(next, 1))
+ next = val.Next()
+
+ // find field idx
+ idx := d.fieldMap.Get(key)
+ if idx == -1 {
+ if Options(ctx.Options())&OptionDisableUnknown != 0 {
+ return error_field(key)
+ }
+ continue
+ }
+
+ offset := d.fields[idx].Path[0].Size
+ elem := unsafe.Pointer(uintptr(vp) + offset)
+ err := d.fields[idx].fieldDec.FromDom(elem, val, ctx)
+
+ // record the first mismatched-type error but keep decoding the remaining fields
+ if gerr == nil && err != nil {
+ // TODO: better error info
+ gerr = err
+ }
+ }
+ return gerr
+}
+
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/types.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/types.go
new file mode 100644
index 000000000..fe1433eec
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/types.go
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package optdec
+
+import (
+ "encoding"
+ "encoding/base64"
+ "encoding/json"
+ "reflect"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/rt"
+)
+
+var (
+ boolType = reflect.TypeOf(bool(false))
+ byteType = reflect.TypeOf(byte(0))
+ intType = reflect.TypeOf(int(0))
+ int8Type = reflect.TypeOf(int8(0))
+ int16Type = reflect.TypeOf(int16(0))
+ int32Type = reflect.TypeOf(int32(0))
+ int64Type = reflect.TypeOf(int64(0))
+ uintType = reflect.TypeOf(uint(0))
+ uint8Type = reflect.TypeOf(uint8(0))
+ uint16Type = reflect.TypeOf(uint16(0))
+ uint32Type = reflect.TypeOf(uint32(0))
+ uint64Type = reflect.TypeOf(uint64(0))
+ float32Type = reflect.TypeOf(float32(0))
+ float64Type = reflect.TypeOf(float64(0))
+ stringType = reflect.TypeOf("")
+ bytesType = reflect.TypeOf([]byte(nil))
+ jsonNumberType = reflect.TypeOf(json.Number(""))
+ base64CorruptInputError = reflect.TypeOf(base64.CorruptInputError(0))
+ anyType = rt.UnpackType(reflect.TypeOf((*interface{})(nil)).Elem())
+)
+
+var (
+ errorType = reflect.TypeOf((*error)(nil)).Elem()
+ jsonUnmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()
+ encodingTextUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
+)
+
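+// rtype unpacks a reflect.Type interface value into its itab and runtime type
+// descriptor.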
+func rtype(t reflect.Type) (*rt.GoItab, *rt.GoType) {
+ p := (*rt.GoIface)(unsafe.Pointer(&t))
+ return p.Itab, (*rt.GoType)(p.Value)
+}