Diffstat (limited to 'vendor/github.com/bytedance/sonic/internal/encoder')
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/alg/mapiter.go (renamed from vendor/github.com/bytedance/sonic/internal/encoder/mapiter.go)  89
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/alg/opts.go  31
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/alg/primitives.go  95
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/alg/sort.go (renamed from vendor/github.com/bytedance/sonic/internal/encoder/sort.go)  2
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/alg/spec.go  198
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/alg/spec_compat.go  148
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/asm.s  0
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go116.go  51
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/assembler_regabi_amd64.go  1176
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/assembler_stkabi_amd64.go  1175
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/compiler.go  1517
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/debug_go117.go  205
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/encode_norace.go  24
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/encode_race.go  54
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/encoder.go  171
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/ir/op.go  473
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/pools.go  193
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/pools_amd64.go  97
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/pools_compt.go  24
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/primitives.go  167
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/stream.go  28
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/stubs_go116.go  61
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/stubs_go117.go  62
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/stubs_go120.go  62
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/stubs_go121.go  62
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/utils.go  52
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/vars/cache.go  48
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/vars/const.go  42
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/vars/errors.go (renamed from vendor/github.com/bytedance/sonic/internal/encoder/errors.go)  20
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/vars/stack.go  146
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/vars/types.go (renamed from vendor/github.com/bytedance/sonic/internal/encoder/types.go)  20
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/vm/stbus.go  45
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/vm/vm.go  374
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/x86/asm_stubs_amd64_go117.go (renamed from vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go117.go)  20
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/x86/asm_stubs_amd64_go121.go (renamed from vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go121.go)  22
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/x86/assembler_regabi_amd64.go  1195
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/x86/debug_go116.go (renamed from vendor/github.com/bytedance/sonic/internal/encoder/debug_go116.go)  4
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/x86/debug_go117.go  201
-rw-r--r--  vendor/github.com/bytedance/sonic/internal/encoder/x86/stbus.go  54
39 files changed, 4079 insertions, 4329 deletions
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/mapiter.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/mapiter.go
index 8a322b3af..5d9956a90 100644
--- a/vendor/github.com/bytedance/sonic/internal/encoder/mapiter.go
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/alg/mapiter.go
@@ -14,15 +14,16 @@
* limitations under the License.
*/
-package encoder
+package alg
import (
"encoding"
"reflect"
+ "strconv"
"sync"
"unsafe"
- "github.com/bytedance/sonic/internal/native"
+ "github.com/bytedance/sonic/internal/encoder/vars"
"github.com/bytedance/sonic/internal/rt"
)
@@ -32,8 +33,8 @@ type _MapPair struct {
m [32]byte
}
-type _MapIterator struct {
- it rt.GoMapIterator // must be the first field
+type MapIterator struct {
+ It rt.GoMapIterator // must be the first field
kv rt.GoSlice // slice of _MapPair
ki int
}
@@ -44,43 +45,43 @@ var (
)
func init() {
- if unsafe.Offsetof(_MapIterator{}.it) != 0 {
+ if unsafe.Offsetof(MapIterator{}.It) != 0 {
panic("_MapIterator.it is not the first field")
}
}
-func newIterator() *_MapIterator {
+func newIterator() *MapIterator {
if v := iteratorPool.Get(); v == nil {
- return new(_MapIterator)
+ return new(MapIterator)
} else {
- return resetIterator(v.(*_MapIterator))
+ return resetIterator(v.(*MapIterator))
}
}
-func resetIterator(p *_MapIterator) *_MapIterator {
+func resetIterator(p *MapIterator) *MapIterator {
p.ki = 0
- p.it = rt.GoMapIterator{}
+ p.It = rt.GoMapIterator{}
p.kv.Len = 0
return p
}
-func (self *_MapIterator) at(i int) *_MapPair {
+func (self *MapIterator) at(i int) *_MapPair {
return (*_MapPair)(unsafe.Pointer(uintptr(self.kv.Ptr) + uintptr(i) * unsafe.Sizeof(_MapPair{})))
}
-func (self *_MapIterator) add() (p *_MapPair) {
+func (self *MapIterator) add() (p *_MapPair) {
p = self.at(self.kv.Len)
self.kv.Len++
return
}
-func (self *_MapIterator) data() (p []_MapPair) {
+func (self *MapIterator) data() (p []_MapPair) {
*(*rt.GoSlice)(unsafe.Pointer(&p)) = self.kv
return
}
-func (self *_MapIterator) append(t *rt.GoType, k unsafe.Pointer, v unsafe.Pointer) (err error) {
+func (self *MapIterator) append(t *rt.GoType, k unsafe.Pointer, v unsafe.Pointer) (err error) {
p := self.add()
p.v = v
@@ -94,26 +95,26 @@ func (self *_MapIterator) append(t *rt.GoType, k unsafe.Pointer, v unsafe.Pointe
return nil
}
-func (self *_MapIterator) appendGeneric(p *_MapPair, t *rt.GoType, v reflect.Kind, k unsafe.Pointer) error {
+func (self *MapIterator) appendGeneric(p *_MapPair, t *rt.GoType, v reflect.Kind, k unsafe.Pointer) error {
switch v {
- case reflect.Int : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int)(k)))]) ; return nil
- case reflect.Int8 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int8)(k)))]) ; return nil
- case reflect.Int16 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int16)(k)))]) ; return nil
- case reflect.Int32 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int32)(k)))]) ; return nil
- case reflect.Int64 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], *(*int64)(k))]) ; return nil
- case reflect.Uint : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint)(k)))]) ; return nil
- case reflect.Uint8 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint8)(k)))]) ; return nil
- case reflect.Uint16 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint16)(k)))]) ; return nil
- case reflect.Uint32 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint32)(k)))]) ; return nil
- case reflect.Uint64 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], *(*uint64)(k))]) ; return nil
- case reflect.Uintptr : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uintptr)(k)))]) ; return nil
+ case reflect.Int : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int)(k)), 10)) ; return nil
+ case reflect.Int8 : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int8)(k)), 10)) ; return nil
+ case reflect.Int16 : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int16)(k)), 10)) ; return nil
+ case reflect.Int32 : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int32)(k)), 10)) ; return nil
+ case reflect.Int64 : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int64)(k)), 10)) ; return nil
+ case reflect.Uint : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint)(k)), 10)) ; return nil
+ case reflect.Uint8 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint8)(k)), 10)) ; return nil
+ case reflect.Uint16 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint16)(k)), 10)) ; return nil
+ case reflect.Uint32 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint32)(k)), 10)) ; return nil
+ case reflect.Uint64 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint64)(k)), 10)) ; return nil
+ case reflect.Uintptr : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uintptr)(k)), 10)) ; return nil
case reflect.Interface : return self.appendInterface(p, t, k)
case reflect.Struct, reflect.Ptr : return self.appendConcrete(p, t, k)
default : panic("unexpected map key type")
}
}
-func (self *_MapIterator) appendConcrete(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
+func (self *MapIterator) appendConcrete(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
// compiler has already checked that the type implements the encoding.MarshalText interface
if !t.Indirect() {
k = *(*unsafe.Pointer)(k)
@@ -127,7 +128,7 @@ func (self *_MapIterator) appendConcrete(p *_MapPair, t *rt.GoType, k unsafe.Poi
return
}
-func (self *_MapIterator) appendInterface(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
+func (self *MapIterator) appendInterface(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
if len(rt.IfaceType(t).Methods) == 0 {
panic("unexpected map key type")
} else if p.k, err = asText(k); err == nil {
@@ -137,17 +138,17 @@ func (self *_MapIterator) appendInterface(p *_MapPair, t *rt.GoType, k unsafe.Po
}
}
-func iteratorStop(p *_MapIterator) {
+func IteratorStop(p *MapIterator) {
iteratorPool.Put(p)
}
-func iteratorNext(p *_MapIterator) {
+func IteratorNext(p *MapIterator) {
i := p.ki
- t := &p.it
+ t := &p.It
/* check for unordered iteration */
if i < 0 {
- mapiternext(t)
+ rt.Mapiternext(t)
return
}
@@ -164,25 +165,25 @@ func iteratorNext(p *_MapIterator) {
p.ki++
}
-func iteratorStart(t *rt.GoMapType, m *rt.GoMap, fv uint64) (*_MapIterator, error) {
+func IteratorStart(t *rt.GoMapType, m *rt.GoMap, fv uint64) (*MapIterator, error) {
it := newIterator()
- mapiterinit(t, m, &it.it)
+ rt.Mapiterinit(t, m, &it.It)
/* check for key-sorting, empty map don't need sorting */
- if m.Count == 0 || (fv & uint64(SortMapKeys)) == 0 {
+ if m.Count == 0 || (fv & (1<<BitSortMapKeys)) == 0 {
it.ki = -1
return it, nil
}
/* pre-allocate space if needed */
if m.Count > it.kv.Cap {
- it.kv = growslice(iteratorPair, it.kv, m.Count)
+ it.kv = rt.GrowSlice(iteratorPair, it.kv, m.Count)
}
/* dump all the key-value pairs */
- for ; it.it.K != nil; mapiternext(&it.it) {
- if err := it.append(t.Key, it.it.K, it.it.V); err != nil {
- iteratorStop(it)
+ for ; it.It.K != nil; rt.Mapiternext(&it.It) {
+ if err := it.append(t.Key, it.It.K, it.It.V); err != nil {
+ IteratorStop(it)
return nil, err
}
}
@@ -193,7 +194,13 @@ func iteratorStart(t *rt.GoMapType, m *rt.GoMap, fv uint64) (*_MapIterator, erro
}
/* load the first pair into iterator */
- it.it.V = it.at(0).v
- it.it.K = unsafe.Pointer(&it.at(0).k)
+ it.It.V = it.at(0).v
+ it.It.K = unsafe.Pointer(&it.at(0).k)
return it, nil
}
+
+func asText(v unsafe.Pointer) (string, error) {
+ text := rt.AssertI2I(rt.UnpackType(vars.EncodingTextMarshalerType), *(*rt.GoIface)(v))
+ r, e := (*(*encoding.TextMarshaler)(unsafe.Pointer(&text))).MarshalText()
+ return rt.Mem2Str(r), e
+}
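
The appendGeneric hunk above replaces native.I64toa/U64toa with strconv.AppendInt/AppendUint, formatting each integer key into the pair's fixed 32-byte scratch array and then aliasing those bytes via rt.Mem2Str. A minimal sketch of that scratch-buffer pattern (names hypothetical; a safe string copy stands in for rt.Mem2Str):

```go
package main

import (
	"fmt"
	"strconv"
)

// pair mirrors the role of _MapPair: a key string plus a fixed
// scratch array that the formatted key is written into.
type pair struct {
	k string
	m [32]byte
}

func (p *pair) setIntKey(v int64) {
	// AppendInt writes into the backing array of p.m, so the
	// formatting itself never allocates while the digits fit in
	// 32 bytes. sonic then aliases those bytes with rt.Mem2Str;
	// here a plain string conversion (which copies) keeps the
	// sketch safe.
	p.k = string(strconv.AppendInt(p.m[:0], v, 10))
}

func main() {
	var p pair
	p.setIntKey(-42)
	fmt.Println(p.k) // -42
}
```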
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/opts.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/opts.go
new file mode 100644
index 000000000..c19e2de4e
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/alg/opts.go
@@ -0,0 +1,31 @@
+/**
+ * Copyright 2024 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package alg
+
+const (
+ BitSortMapKeys = iota
+ BitEscapeHTML
+ BitCompactMarshaler
+ BitNoQuoteTextMarshaler
+ BitNoNullSliceOrMap
+ BitValidateString
+ BitNoValidateJSONMarshaler
+ BitNoEncoderNewline
+ BitEncodeNullForInfOrNan
+
+ BitPointerValue = 63
+)
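
These constants are bit positions rather than masks, which is why the mapiter hunk above tests `fv & (1<<BitSortMapKeys)`. A small sketch of composing and testing such an option word:

```go
package main

import "fmt"

// Bit positions, as in alg/opts.go.
const (
	BitSortMapKeys = iota
	BitEscapeHTML
	BitCompactMarshaler
)

func main() {
	// Compose an option word by shifting each enabled bit.
	fv := uint64(1)<<BitSortMapKeys | uint64(1)<<BitCompactMarshaler

	// Test individual options with a mask.
	fmt.Println(fv&(1<<BitSortMapKeys) != 0)      // true
	fmt.Println(fv&(1<<BitEscapeHTML) != 0)       // false
	fmt.Println(fv&(1<<BitCompactMarshaler) != 0) // true
}
```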
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/primitives.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/primitives.go
new file mode 100644
index 000000000..63fa01890
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/alg/primitives.go
@@ -0,0 +1,95 @@
+/**
+ * Copyright 2024 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package alg
+
+import (
+ "encoding"
+ "encoding/json"
+
+ "github.com/bytedance/sonic/internal/encoder/vars"
+ "github.com/bytedance/sonic/internal/rt"
+)
+
+func Compact(p *[]byte, v []byte) error {
+ buf := vars.NewBuffer()
+ err := json.Compact(buf, v)
+
+ /* check for errors */
+ if err != nil {
+ return err
+ }
+
+ /* add to result */
+ v = buf.Bytes()
+ *p = append(*p, v...)
+
+ /* return the buffer into pool */
+ vars.FreeBuffer(buf)
+ return nil
+}
+
+func EncodeNil(rb *[]byte) error {
+ *rb = append(*rb, 'n', 'u', 'l', 'l')
+ return nil
+}
+
+// func Make_EncodeTypedPointer(computor func(*rt.GoType, ...interface{}) (interface{}, error)) func(*[]byte, *rt.GoType, *unsafe.Pointer, *vars.Stack, uint64) error {
+// return func(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *vars.Stack, fv uint64) error {
+// if vt == nil {
+// return EncodeNil(buf)
+// } else if fn, err := vars.FindOrCompile(vt, (fv&(1<<BitPointerValue)) != 0, computor); err != nil {
+// return err
+// } else if vt.Indirect() {
+// err := fn(buf, *vp, sb, fv)
+// return err
+// } else {
+// err := fn(buf, unsafe.Pointer(vp), sb, fv)
+// return err
+// }
+// }
+// }
+
+func EncodeJsonMarshaler(buf *[]byte, val json.Marshaler, opt uint64) error {
+ if ret, err := val.MarshalJSON(); err != nil {
+ return err
+ } else {
+ if opt&(1<<BitCompactMarshaler) != 0 {
+ return Compact(buf, ret)
+ }
+ if opt&(1<<BitNoValidateJSONMarshaler) == 0 {
+ if ok, s := Valid(ret); !ok {
+ return vars.Error_marshaler(ret, s)
+ }
+ }
+ *buf = append(*buf, ret...)
+ return nil
+ }
+}
+
+func EncodeTextMarshaler(buf *[]byte, val encoding.TextMarshaler, opt uint64) error {
+ if ret, err := val.MarshalText(); err != nil {
+ return err
+ } else {
+ if opt&(1<<BitNoQuoteTextMarshaler) != 0 {
+ *buf = append(*buf, ret...)
+ return nil
+ }
+ *buf = Quote(*buf, rt.Mem2Str(ret), false)
+ return nil
+ }
+}
\ No newline at end of file
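
Compact above runs json.Compact through a pooled scratch buffer before appending to the caller's slice. A self-contained sketch of the same pattern, with a sync.Pool standing in for vars.NewBuffer/vars.FreeBuffer (an assumption about what that pair does):

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"sync"
)

// bufPool plays the role assumed for vars.NewBuffer/FreeBuffer:
// reusable scratch buffers for json.Compact.
var bufPool = sync.Pool{New: func() interface{} { return new(bytes.Buffer) }}

// compact appends a whitespace-stripped copy of v onto *p.
func compact(p *[]byte, v []byte) error {
	buf := bufPool.Get().(*bytes.Buffer)
	buf.Reset()
	if err := json.Compact(buf, v); err != nil {
		return err // buffer dropped on error, matching alg.Compact
	}
	*p = append(*p, buf.Bytes()...)
	bufPool.Put(buf)
	return nil
}

func main() {
	var out []byte
	if err := compact(&out, []byte(`{ "a" : 1 }`)); err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"a":1}
}
```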
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/sort.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/sort.go
index b1a67598b..9b69bce9a 100644
--- a/vendor/github.com/bytedance/sonic/internal/encoder/sort.go
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/alg/sort.go
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package encoder
+package alg
// Algorithm 3-way Radix Quicksort, d means the radix.
// Reference: https://algs4.cs.princeton.edu/51radix/Quick3string.java.html
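
The renamed sort.go keeps the 3-way radix quicksort referenced above for ordering map keys. A compact illustration of the algorithm (not sonic's tuned version): partition by the d-th byte into less/equal/greater groups, recurse on the outer groups, and advance d only within the equal group.

```go
package main

import "fmt"

// charAt returns the d-th byte of s, or -1 past the end,
// so shorter strings order first.
func charAt(s string, d int) int {
	if d < len(s) {
		return int(s[d])
	}
	return -1
}

// quick3string sorts a[lo:hi+1] by the d-th character onward.
func quick3string(a []string, lo, hi, d int) {
	if hi <= lo {
		return
	}
	lt, gt, i := lo, hi, lo+1
	v := charAt(a[lo], d)
	for i <= gt {
		t := charAt(a[i], d)
		switch {
		case t < v: // move to the "less" group
			a[lt], a[i] = a[i], a[lt]
			lt++
			i++
		case t > v: // move to the "greater" group
			a[gt], a[i] = a[i], a[gt]
			gt--
		default: // equal: stays in the middle
			i++
		}
	}
	quick3string(a, lo, lt-1, d)
	if v >= 0 {
		quick3string(a, lt, gt, d+1) // equal group: next radix
	}
	quick3string(a, gt+1, hi, d)
}

func main() {
	keys := []string{"banana", "apple", "app", "cherry", "apricot"}
	quick3string(keys, 0, len(keys)-1, 0)
	fmt.Println(keys) // [app apple apricot banana cherry]
}
```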
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec.go
new file mode 100644
index 000000000..bff943626
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec.go
@@ -0,0 +1,198 @@
+//go:build (amd64 && go1.16 && !go1.24) || (arm64 && go1.20 && !go1.24)
+// +build amd64,go1.16,!go1.24 arm64,go1.20,!go1.24
+
+/**
+ * Copyright 2024 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package alg
+
+import (
+ "runtime"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/native"
+ "github.com/bytedance/sonic/internal/native/types"
+ "github.com/bytedance/sonic/internal/rt"
+)
+
+// Valid validates json and returns first non-blank character position,
+// if it is only one valid json value.
+// Otherwise returns invalid character position using start.
+//
+// Note: it does not check for the invalid UTF-8 characters.
+func Valid(data []byte) (ok bool, start int) {
+ n := len(data)
+ if n == 0 {
+ return false, -1
+ }
+ s := rt.Mem2Str(data)
+ p := 0
+ m := types.NewStateMachine()
+ ret := native.ValidateOne(&s, &p, m, 0)
+ types.FreeStateMachine(m)
+
+ if ret < 0 {
+ return false, p-1
+ }
+
+ /* check for trailing spaces */
+ for ;p < n; p++ {
+ if (types.SPACE_MASK & (1 << data[p])) == 0 {
+ return false, p
+ }
+ }
+
+ return true, ret
+}
+
+var typeByte = rt.UnpackEface(byte(0)).Type
+
+//go:nocheckptr
+func Quote(buf []byte, val string, double bool) []byte {
+ if len(val) == 0 {
+ if double {
+ return append(buf, `"\"\""`...)
+ }
+ return append(buf, `""`...)
+ }
+
+ if double {
+ buf = append(buf, `"\"`...)
+ } else {
+ buf = append(buf, `"`...)
+ }
+ sp := rt.IndexChar(val, 0)
+ nb := len(val)
+ b := (*rt.GoSlice)(unsafe.Pointer(&buf))
+
+ // input buffer
+ for nb > 0 {
+ // output buffer
+ dp := unsafe.Pointer(uintptr(b.Ptr) + uintptr(b.Len))
+ dn := b.Cap - b.Len
+ // call native.Quote; on return dn holds the number of bytes written
+ opts := uint64(0)
+ if double {
+ opts = types.F_DOUBLE_UNQUOTE
+ }
+ ret := native.Quote(sp, nb, dp, &dn, opts)
+ // update *buf length
+ b.Len += dn
+
+ // quoting finished, no more output needed
+ if ret >= 0 {
+ break
+ }
+
+ // double buf size
+ *b = rt.GrowSlice(typeByte, *b, b.Cap*2)
+ // ret is the complement of consumed input
+ ret = ^ret
+ // update input buffer
+ nb -= ret
+ sp = unsafe.Pointer(uintptr(sp) + uintptr(ret))
+ }
+
+ runtime.KeepAlive(buf)
+ runtime.KeepAlive(sp)
+ if double {
+ buf = append(buf, `\""`...)
+ } else {
+ buf = append(buf, `"`...)
+ }
+
+ return buf
+}
+
+func HtmlEscape(dst []byte, src []byte) []byte {
+ var sidx int
+
+ dst = append(dst, src[:0]...) // avoid check nil dst
+ sbuf := (*rt.GoSlice)(unsafe.Pointer(&src))
+ dbuf := (*rt.GoSlice)(unsafe.Pointer(&dst))
+
+ /* grow dst if it is shorter */
+ if cap(dst)-len(dst) < len(src)+types.BufPaddingSize {
+ cap := len(src)*3/2 + types.BufPaddingSize
+ *dbuf = rt.GrowSlice(typeByte, *dbuf, cap)
+ }
+
+ for sidx < sbuf.Len {
+ sp := rt.Add(sbuf.Ptr, uintptr(sidx))
+ dp := rt.Add(dbuf.Ptr, uintptr(dbuf.Len))
+
+ sn := sbuf.Len - sidx
+ dn := dbuf.Cap - dbuf.Len
+ nb := native.HTMLEscape(sp, sn, dp, &dn)
+
+ /* check for errors */
+ if dbuf.Len += dn; nb >= 0 {
+ break
+ }
+
+ /* not enough space, grow the slice and try again */
+ sidx += ^nb
+ *dbuf = rt.GrowSlice(typeByte, *dbuf, dbuf.Cap*2)
+ }
+ return dst
+}
+
+func F64toa(buf []byte, v float64) ([]byte) {
+ if v == 0 {
+ return append(buf, '0')
+ }
+ buf = rt.GuardSlice2(buf, 64)
+ ret := native.F64toa((*byte)(rt.IndexByte(buf, len(buf))), v)
+ if ret > 0 {
+ return buf[:len(buf)+ret]
+ } else {
+ return buf
+ }
+}
+
+func F32toa(buf []byte, v float32) ([]byte) {
+ if v == 0 {
+ return append(buf, '0')
+ }
+ buf = rt.GuardSlice2(buf, 64)
+ ret := native.F32toa((*byte)(rt.IndexByte(buf, len(buf))), v)
+ if ret > 0 {
+ return buf[:len(buf)+ret]
+ } else {
+ return buf
+ }
+}
+
+func I64toa(buf []byte, v int64) ([]byte) {
+ buf = rt.GuardSlice2(buf, 32)
+ ret := native.I64toa((*byte)(rt.IndexByte(buf, len(buf))), v)
+ if ret > 0 {
+ return buf[:len(buf)+ret]
+ } else {
+ return buf
+ }
+}
+
+func U64toa(buf []byte, v uint64) ([]byte) {
+ buf = rt.GuardSlice2(buf, 32)
+ ret := native.U64toa((*byte)(rt.IndexByte(buf, len(buf))), v)
+ if ret > 0 {
+ return buf[:len(buf)+ret]
+ } else {
+ return buf
+ }
+}
+
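Quote's loop above relies on a retry convention spelled out in its comments: a non-negative return means the output fit, while a negative return is the bitwise complement of the bytes consumed, so the caller grows the buffer and resumes from where it left off. A sketch of that convention with a hypothetical writeSome standing in for native.Quote:

```go
package main

import "fmt"

// writeSome is a stand-in (hypothetical) for native.Quote's return
// convention: it copies src into dst, returning the count on full
// success, or the bitwise complement ^n of the n bytes it managed
// to consume when dst runs out of room.
func writeSome(src, dst []byte) int {
	n := copy(dst, src)
	if n == len(src) {
		return n
	}
	return ^n
}

func main() {
	src := []byte("hello, quoting world")
	buf := make([]byte, 0, 8)

	for len(src) > 0 {
		free := buf[len(buf):cap(buf)] // spare capacity
		ret := writeSome(src, free)
		if ret >= 0 { // everything fit
			buf = buf[:len(buf)+ret]
			break
		}
		n := ^ret // complement encodes the consumed byte count
		buf = buf[:len(buf)+n]
		src = src[n:]
		// double the capacity and retry, as Quote does via rt.GrowSlice
		grown := make([]byte, len(buf), 2*cap(buf))
		copy(grown, buf)
		buf = grown
	}
	fmt.Println(string(buf)) // hello, quoting world
}
```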
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec_compat.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec_compat.go
new file mode 100644
index 000000000..c15cbf7d8
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec_compat.go
@@ -0,0 +1,148 @@
+// +build !amd64,!arm64 go1.24 !go1.16 arm64,!go1.20
+
+/**
+ * Copyright 2024 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package alg
+
+import (
+ _ "unsafe"
+ "unicode/utf8"
+ "strconv"
+ "bytes"
+ "encoding/json"
+
+ "github.com/bytedance/sonic/internal/rt"
+)
+
+// Valid reports whether data contains exactly one valid JSON value,
+// returning the position of its first non-blank character on success.
+// Otherwise, start is the position of the first invalid character.
+//
+// Note: it does not check for invalid UTF-8.
+func Valid(data []byte) (ok bool, start int) {
+ ok = json.Valid(data)
+ return ok, 0
+}
+
+var typeByte = rt.UnpackEface(byte(0)).Type
+
+func Quote(e []byte, s string, double bool) []byte {
+ if len(s) == 0 {
+ if double {
+ return append(e, `"\"\""`...)
+ }
+ return append(e, `""`...)
+ }
+
+ b := e
+ ss := len(e)
+ e = append(e, '"')
+ start := 0
+
+ for i := 0; i < len(s); {
+ if b := s[i]; b < utf8.RuneSelf {
+ if rt.SafeSet[b] {
+ i++
+ continue
+ }
+ if start < i {
+ e = append(e, s[start:i]...)
+ }
+ e = append(e, '\\')
+ switch b {
+ case '\\', '"':
+ e = append(e, b)
+ case '\n':
+ e = append(e, 'n')
+ case '\r':
+ e = append(e, 'r')
+ case '\t':
+ e = append(e, 't')
+ default:
+ // This encodes bytes < 0x20 except for \t, \n and \r.
+ // If escapeHTML is set, it also escapes <, >, and &
+ // because they can lead to security holes when
+ // user-controlled strings are rendered into JSON
+ // and served to some browsers.
+ e = append(e, `u00`...)
+ e = append(e, rt.Hex[b>>4])
+ e = append(e, rt.Hex[b&0xF])
+ }
+ i++
+ start = i
+ continue
+ }
+ c, size := utf8.DecodeRuneInString(s[i:])
+ // if correct && c == utf8.RuneError && size == 1 {
+ // if start < i {
+ // e = append(e, s[start:i]...)
+ // }
+ // e = append(e, `\ufffd`...)
+ // i += size
+ // start = i
+ // continue
+ // }
+ if c == '\u2028' || c == '\u2029' {
+ if start < i {
+ e = append(e, s[start:i]...)
+ }
+ e = append(e, `\u202`...)
+ e = append(e, rt.Hex[c&0xF])
+ i += size
+ start = i
+ continue
+ }
+ i += size
+ }
+
+ if start < len(s) {
+ e = append(e, s[start:]...)
+ }
+ e = append(e, '"')
+
+ if double {
+ return strconv.AppendQuote(b, string(e[ss:]))
+ } else {
+ return e
+ }
+}
+
+func HtmlEscape(dst []byte, src []byte) []byte {
+ buf := bytes.NewBuffer(dst)
+ json.HTMLEscape(buf, src)
+ return buf.Bytes()
+}
+
+func F64toa(buf []byte, v float64) ([]byte) {
+ bs := bytes.NewBuffer(buf)
+ _ = json.NewEncoder(bs).Encode(v)
+ return bs.Bytes()
+}
+
+func F32toa(buf []byte, v float32) ([]byte) {
+ bs := bytes.NewBuffer(buf)
+ _ = json.NewEncoder(bs).Encode(v)
+ return bs.Bytes()
+}
+
+func I64toa(buf []byte, v int64) ([]byte) {
+ return strconv.AppendInt(buf, int64(v), 10)
+}
+
+func U64toa(buf []byte, v uint64) ([]byte) {
+ return strconv.AppendUint(buf, v, 10)
+}
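
For the double path, this fallback Quote re-quotes the already-escaped literal with strconv.AppendQuote, which for ASCII input matches encoding a JSON string twice — the shape used for `json:",string"` fields. A minimal illustration of that equivalence, assuming that reading of the double branch:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// One round of encoding produces a JSON string literal;
	// a second round quotes that literal itself, which is what
	// double=true is taken to emit here.
	inner, _ := json.Marshal("say \"hi\"") // "say \"hi\""
	outer, _ := json.Marshal(string(inner))
	fmt.Println(string(outer)) // "\"say \\\"hi\\\"\""
}
```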
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/asm.s b/vendor/github.com/bytedance/sonic/internal/encoder/asm.s
deleted file mode 100644
index e69de29bb..000000000
--- a/vendor/github.com/bytedance/sonic/internal/encoder/asm.s
+++ /dev/null
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go116.go b/vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go116.go
deleted file mode 100644
index 0a99f30ab..000000000
--- a/vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go116.go
+++ /dev/null
@@ -1,51 +0,0 @@
-// +build go1.16,!go1.17
-
-// Copyright 2023 CloudWeGo Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package encoder
-
-import (
- `strconv`
-
- `github.com/bytedance/sonic/internal/jit`
- `github.com/twitchyliquid64/golang-asm/obj`
- `github.com/twitchyliquid64/golang-asm/obj/x86`
-)
-
-var (
- _V_writeBarrier = jit.Imm(int64(_runtime_writeBarrier))
-
- _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
-)
-
-func (self *_Assembler) WritePtr(i int, ptr obj.Addr, rec obj.Addr) {
- if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
- panic("rec contains AX!")
- }
- self.Emit("MOVQ", _V_writeBarrier, _R10)
- self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
- self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
- self.Emit("MOVQ", ptr, _AX)
- self.xsave(_DI)
- self.Emit("LEAQ", rec, _DI)
- self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
- self.Rjmp("CALL", _R10)
- self.xload(_DI)
- self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
- self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
- self.Emit("MOVQ", ptr, rec)
- self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-}
-
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/assembler_regabi_amd64.go b/vendor/github.com/bytedance/sonic/internal/encoder/assembler_regabi_amd64.go
deleted file mode 100644
index 330b68817..000000000
--- a/vendor/github.com/bytedance/sonic/internal/encoder/assembler_regabi_amd64.go
+++ /dev/null
@@ -1,1176 +0,0 @@
-// +build go1.17,!go1.23
-
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package encoder
-
-import (
- `fmt`
- `reflect`
- `strconv`
- `unsafe`
-
- `github.com/bytedance/sonic/internal/cpu`
- `github.com/bytedance/sonic/internal/jit`
- `github.com/bytedance/sonic/internal/native/types`
- `github.com/twitchyliquid64/golang-asm/obj`
- `github.com/twitchyliquid64/golang-asm/obj/x86`
-
- `github.com/bytedance/sonic/internal/native`
- `github.com/bytedance/sonic/internal/rt`
-)
-
-/** Register Allocations
- *
- * State Registers:
- *
- * %rbx : stack base
- * %rdi : result pointer
- * %rsi : result length
- * %rdx : result capacity
- * %r12 : sp->p
- * %r13 : sp->q
- * %r14 : sp->x
- * %r15 : sp->f
- *
- * Error Registers:
- *
- * %r10 : error type register
- * %r11 : error pointer register
- */
-
-/** Function Prototype & Stack Map
- *
- * func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error)
- *
- * buf : (FP)
- * p : 8(FP)
- * sb : 16(FP)
- * fv : 24(FP)
- * err.vt : 32(FP)
- * err.vp : 40(FP)
- */
-
-const (
- _S_cond = iota
- _S_init
-)
-
-const (
- _FP_args = 32 // 32 bytes for spill registers of arguments
- _FP_fargs = 40 // 40 bytes for passing arguments to other Go functions
- _FP_saves = 64 // 64 bytes for saving the registers before CALL instructions
- _FP_locals = 24 // 24 bytes for local variables
-)
-
-const (
- _FP_loffs = _FP_fargs + _FP_saves
- _FP_offs = _FP_loffs + _FP_locals
- // _FP_offs = _FP_loffs + _FP_locals + _FP_debug
- _FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer
- _FP_base = _FP_size + 8 // 8 bytes for the return address
-)
-
-const (
- _FM_exp32 = 0x7f800000
- _FM_exp64 = 0x7ff0000000000000
-)
-
-const (
- _IM_null = 0x6c6c756e // 'null'
- _IM_true = 0x65757274 // 'true'
- _IM_fals = 0x736c6166 // 'fals' ('false' without the 'e')
- _IM_open = 0x00225c22 // '"\"∅'
- _IM_array = 0x5d5b // '[]'
- _IM_object = 0x7d7b // '{}'
- _IM_mulv = -0x5555555555555555
-)
-
-const (
- _LB_more_space = "_more_space"
- _LB_more_space_return = "_more_space_return_"
-)
-
-const (
- _LB_error = "_error"
- _LB_error_too_deep = "_error_too_deep"
- _LB_error_invalid_number = "_error_invalid_number"
- _LB_error_nan_or_infinite = "_error_nan_or_infinite"
- _LB_panic = "_panic"
-)
-
-var (
- _AX = jit.Reg("AX")
- _BX = jit.Reg("BX")
- _CX = jit.Reg("CX")
- _DX = jit.Reg("DX")
- _DI = jit.Reg("DI")
- _SI = jit.Reg("SI")
- _BP = jit.Reg("BP")
- _SP = jit.Reg("SP")
- _R8 = jit.Reg("R8")
- _R9 = jit.Reg("R9")
-)
-
-var (
- _X0 = jit.Reg("X0")
- _X15 = jit.Reg("X15")
- _Y0 = jit.Reg("Y0")
-)
-
-var (
- _ST = jit.Reg("R15") // can't use R14 since it's always scratched by Go...
- _RP = jit.Reg("DI")
- _RL = jit.Reg("SI")
- _RC = jit.Reg("DX")
-)
-
-var (
- _LR = jit.Reg("R9")
- _ET = jit.Reg("AX")
- _EP = jit.Reg("BX")
-)
-
-var (
- _SP_p = jit.Reg("R10") // saved on BX when call_c
- _SP_q = jit.Reg("R11") // saved on BP when call_c
- _SP_x = jit.Reg("R12")
- _SP_f = jit.Reg("R13")
-)
-
-var (
- _ARG_rb = jit.Ptr(_SP, _FP_base)
- _ARG_vp = jit.Ptr(_SP, _FP_base + 8)
- _ARG_sb = jit.Ptr(_SP, _FP_base + 16)
- _ARG_fv = jit.Ptr(_SP, _FP_base + 24)
-)
-
-var (
- _RET_et = _ET
- _RET_ep = _EP
-)
-
-var (
- _VAR_sp = jit.Ptr(_SP, _FP_fargs + _FP_saves)
- _VAR_dn = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
- _VAR_vp = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
-)
-
-var (
- _REG_ffi = []obj.Addr{ _RP, _RL, _RC, _SP_q}
- _REG_b64 = []obj.Addr{_SP_p, _SP_q}
-
- _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC}
- _REG_ms = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR}
- _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL}
-)
-
-type _Assembler struct {
- jit.BaseAssembler
- p _Program
- x int
- name string
-}
-
-func newAssembler(p _Program) *_Assembler {
- return new(_Assembler).Init(p)
-}
-
-/** Assembler Interface **/
-
-func (self *_Assembler) Load() _Encoder {
- return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
-}
-
-func (self *_Assembler) Init(p _Program) *_Assembler {
- self.p = p
- self.BaseAssembler.Init(self.compile)
- return self
-}
-
-func (self *_Assembler) compile() {
- self.prologue()
- self.instrs()
- self.epilogue()
- self.builtins()
-}
-
-/** Assembler Stages **/
-
-var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
- _OP_null : (*_Assembler)._asm_OP_null,
- _OP_empty_arr : (*_Assembler)._asm_OP_empty_arr,
- _OP_empty_obj : (*_Assembler)._asm_OP_empty_obj,
- _OP_bool : (*_Assembler)._asm_OP_bool,
- _OP_i8 : (*_Assembler)._asm_OP_i8,
- _OP_i16 : (*_Assembler)._asm_OP_i16,
- _OP_i32 : (*_Assembler)._asm_OP_i32,
- _OP_i64 : (*_Assembler)._asm_OP_i64,
- _OP_u8 : (*_Assembler)._asm_OP_u8,
- _OP_u16 : (*_Assembler)._asm_OP_u16,
- _OP_u32 : (*_Assembler)._asm_OP_u32,
- _OP_u64 : (*_Assembler)._asm_OP_u64,
- _OP_f32 : (*_Assembler)._asm_OP_f32,
- _OP_f64 : (*_Assembler)._asm_OP_f64,
- _OP_str : (*_Assembler)._asm_OP_str,
- _OP_bin : (*_Assembler)._asm_OP_bin,
- _OP_quote : (*_Assembler)._asm_OP_quote,
- _OP_number : (*_Assembler)._asm_OP_number,
- _OP_eface : (*_Assembler)._asm_OP_eface,
- _OP_iface : (*_Assembler)._asm_OP_iface,
- _OP_byte : (*_Assembler)._asm_OP_byte,
- _OP_text : (*_Assembler)._asm_OP_text,
- _OP_deref : (*_Assembler)._asm_OP_deref,
- _OP_index : (*_Assembler)._asm_OP_index,
- _OP_load : (*_Assembler)._asm_OP_load,
- _OP_save : (*_Assembler)._asm_OP_save,
- _OP_drop : (*_Assembler)._asm_OP_drop,
- _OP_drop_2 : (*_Assembler)._asm_OP_drop_2,
- _OP_recurse : (*_Assembler)._asm_OP_recurse,
- _OP_is_nil : (*_Assembler)._asm_OP_is_nil,
- _OP_is_nil_p1 : (*_Assembler)._asm_OP_is_nil_p1,
- _OP_is_zero_1 : (*_Assembler)._asm_OP_is_zero_1,
- _OP_is_zero_2 : (*_Assembler)._asm_OP_is_zero_2,
- _OP_is_zero_4 : (*_Assembler)._asm_OP_is_zero_4,
- _OP_is_zero_8 : (*_Assembler)._asm_OP_is_zero_8,
- _OP_is_zero_map : (*_Assembler)._asm_OP_is_zero_map,
- _OP_goto : (*_Assembler)._asm_OP_goto,
- _OP_map_iter : (*_Assembler)._asm_OP_map_iter,
- _OP_map_stop : (*_Assembler)._asm_OP_map_stop,
- _OP_map_check_key : (*_Assembler)._asm_OP_map_check_key,
- _OP_map_write_key : (*_Assembler)._asm_OP_map_write_key,
- _OP_map_value_next : (*_Assembler)._asm_OP_map_value_next,
- _OP_slice_len : (*_Assembler)._asm_OP_slice_len,
- _OP_slice_next : (*_Assembler)._asm_OP_slice_next,
- _OP_marshal : (*_Assembler)._asm_OP_marshal,
- _OP_marshal_p : (*_Assembler)._asm_OP_marshal_p,
- _OP_marshal_text : (*_Assembler)._asm_OP_marshal_text,
- _OP_marshal_text_p : (*_Assembler)._asm_OP_marshal_text_p,
- _OP_cond_set : (*_Assembler)._asm_OP_cond_set,
- _OP_cond_testc : (*_Assembler)._asm_OP_cond_testc,
-}
-
-func (self *_Assembler) instr(v *_Instr) {
- if fn := _OpFuncTab[v.op()]; fn != nil {
- fn(self, v)
- } else {
- panic(fmt.Sprintf("invalid opcode: %d", v.op()))
- }
-}
-
-func (self *_Assembler) instrs() {
- for i, v := range self.p {
- self.Mark(i)
- self.instr(&v)
- self.debug_instr(i, &v)
- }
-}
-
-func (self *_Assembler) builtins() {
- self.more_space()
- self.error_too_deep()
- self.error_invalid_number()
- self.error_nan_or_infinite()
- self.go_panic()
-}
-
-func (self *_Assembler) epilogue() {
- self.Mark(len(self.p))
- self.Emit("XORL", _ET, _ET)
- self.Emit("XORL", _EP, _EP)
- self.Link(_LB_error)
- self.Emit("MOVQ", _ARG_rb, _CX) // MOVQ rb<>+0(FP), CX
- self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8)) // MOVQ RL, 8(CX)
- self.Emit("MOVQ", jit.Imm(0), _ARG_rb) // MOVQ AX, rb<>+0(FP)
- self.Emit("MOVQ", jit.Imm(0), _ARG_vp) // MOVQ BX, vp<>+8(FP)
- self.Emit("MOVQ", jit.Imm(0), _ARG_sb) // MOVQ CX, sb<>+16(FP)
- self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP
- self.Emit("ADDQ", jit.Imm(_FP_size), _SP) // ADDQ $_FP_size, SP
- self.Emit("RET") // RET
-}
-
-func (self *_Assembler) prologue() {
- self.Emit("SUBQ", jit.Imm(_FP_size), _SP) // SUBQ $_FP_size, SP
- self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP)
- self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP
- self.Emit("MOVQ", _AX, _ARG_rb) // MOVQ AX, rb<>+0(FP)
- self.Emit("MOVQ", _BX, _ARG_vp) // MOVQ BX, vp<>+8(FP)
- self.Emit("MOVQ", _CX, _ARG_sb) // MOVQ CX, sb<>+16(FP)
- self.Emit("MOVQ", _DI, _ARG_fv) // MOVQ DI, rb<>+24(FP)
- self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP) // MOVQ (AX) , DI
- self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL) // MOVQ 8(AX) , SI
- self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), DX
- self.Emit("MOVQ", _BX, _SP_p) // MOVQ BX, R10
- self.Emit("MOVQ", _CX, _ST) // MOVQ CX, R8
- self.Emit("XORL", _SP_x, _SP_x) // XORL R10, R12
- self.Emit("XORL", _SP_f, _SP_f) // XORL R11, R13
- self.Emit("XORL", _SP_q, _SP_q) // XORL R13, R11
-}
-
-/** Assembler Inline Functions **/
-
-func (self *_Assembler) xsave(reg ...obj.Addr) {
- for i, v := range reg {
- if i > _FP_saves / 8 - 1 {
- panic("too many registers to save")
- } else {
- self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))
- }
- }
-}
-
-func (self *_Assembler) xload(reg ...obj.Addr) {
- for i, v := range reg {
- if i > _FP_saves / 8 - 1 {
- panic("too many registers to load")
- } else {
- self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)
- }
- }
-}
-
-func (self *_Assembler) rbuf_di() {
- if _RP.Reg != x86.REG_DI {
- panic("register allocation messed up: RP != DI")
- } else {
- self.Emit("ADDQ", _RL, _RP)
- }
-}
-
-func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) {
- self.check_size(nd)
- self.save_c() // SAVE $C_regs
- self.rbuf_di() // MOVQ RP, DI
- self.Emit(ins, jit.Ptr(_SP_p, 0), _SI) // $ins (SP.p), SI
- self.call_c(fn) // CALL_C $fn
- self.Emit("ADDQ", _AX, _RL) // ADDQ AX, RL
-}
-
-func (self *_Assembler) store_str(s string) {
- i := 0
- m := rt.Str2Mem(s)
-
- /* 8-byte stores */
- for i <= len(m) - 8 {
- self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX) // MOVQ $s[i:], AX
- self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i))) // MOVQ AX, i(RP)(RL)
- i += 8
- }
-
- /* 4-byte stores */
- if i <= len(m) - 4 {
- self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVL $s[i:], i(RP)(RL)
- i += 4
- }
-
- /* 2-byte stores */
- if i <= len(m) - 2 {
- self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVW $s[i:], i(RP)(RL)
- i += 2
- }
-
- /* last byte */
- if i < len(m) {
- self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i))) // MOVB $s[i:], i(RP)(RL)
- }
-}
-
-func (self *_Assembler) check_size(n int) {
- self.check_size_rl(jit.Ptr(_RL, int64(n)))
-}
-
-func (self *_Assembler) check_size_r(r obj.Addr, d int) {
- self.check_size_rl(jit.Sib(_RL, r, 1, int64(d)))
-}
-
-func (self *_Assembler) check_size_rl(v obj.Addr) {
- idx := self.x
- key := _LB_more_space_return + strconv.Itoa(idx)
-
- /* the following code relies on LR == R9 to work */
- if _LR.Reg != x86.REG_R9 {
- panic("register allocation messed up: LR != R9")
- }
-
- /* check for buffer capacity */
- self.x++
- self.Emit("LEAQ", v, _AX) // LEAQ $v, AX
- self.Emit("CMPQ", _AX, _RC) // CMPQ AX, RC
- self.Sjmp("JBE" , key) // JBE _more_space_return_{n}
- self.slice_grow_ax(key) // GROW $key
- self.Link(key) // _more_space_return_{n}:
-}
-
-func (self *_Assembler) slice_grow_ax(ret string) {
- self.Byte(0x4c, 0x8d, 0x0d) // LEAQ ?(PC), R9
- self.Sref(ret, 4) // .... &ret
- self.Sjmp("JMP" , _LB_more_space) // JMP _more_space
-}
-
-/** State Stack Helpers **/
-
-const (
- _StateSize = int64(unsafe.Sizeof(_State{}))
- _StackLimit = _MaxStack * _StateSize
-)
-
-func (self *_Assembler) save_state() {
- self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX) // MOVQ (ST), CX
- self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R9) // LEAQ _StateSize(CX), R9
- self.Emit("CMPQ", _R9, jit.Imm(_StackLimit)) // CMPQ R9, $_StackLimit
- self.Sjmp("JAE" , _LB_error_too_deep) // JA _error_too_deep
- self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8)) // MOVQ SP.x, 8(ST)(CX)
- self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16)) // MOVQ SP.f, 16(ST)(CX)
- self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24)) // MOVQ SP.p, 24(ST)(CX)
- self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32)) // MOVQ SP.q, 32(ST)(CX)
- self.Emit("MOVQ", _R9, jit.Ptr(_ST, 0)) // MOVQ R9, (ST)
-}
-
-func (self *_Assembler) drop_state(decr int64) {
- self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
- self.Emit("SUBQ" , jit.Imm(decr), _AX) // SUBQ $decr, AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST)
- self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _SP_x) // MOVQ 8(ST)(AX), SP.x
- self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 16), _SP_f) // MOVQ 16(ST)(AX), SP.f
- self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 24), _SP_p) // MOVQ 24(ST)(AX), SP.p
- self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 32), _SP_q) // MOVQ 32(ST)(AX), SP.q
- self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
- self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX)
- self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24)) // MOVOU X0, 24(ST)(AX)
-}
-
-/** Buffer Helpers **/
-
-func (self *_Assembler) add_char(ch byte) {
- self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVB $ch, (RP)(RL)
- self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL
-}
-
-func (self *_Assembler) add_long(ch uint32, n int64) {
- self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVL $ch, (RP)(RL)
- self.Emit("ADDQ", jit.Imm(n), _RL) // ADDQ $n, RL
-}
-
-func (self *_Assembler) add_text(ss string) {
- self.store_str(ss) // TEXT $ss
- self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL) // ADDQ ${len(ss)}, RL
-}
-
-// get *buf at AX
-func (self *_Assembler) prep_buffer_AX() {
- self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX
- self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX)
-}
-
-func (self *_Assembler) save_buffer() {
- self.Emit("MOVQ", _ARG_rb, _CX) // MOVQ rb<>+0(FP), CX
- self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0)) // MOVQ RP, (CX)
- self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8)) // MOVQ RL, 8(CX)
- self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16)) // MOVQ RC, 16(CX)
-}
-
-// get *buf at AX
-func (self *_Assembler) load_buffer_AX() {
- self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX
- self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP) // MOVQ (AX), RP
- self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL) // MOVQ 8(AX), RL
- self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), RC
-}
-
-/** Function Interface Helpers **/
-
-func (self *_Assembler) call(pc obj.Addr) {
- self.Emit("MOVQ", pc, _LR) // MOVQ $pc, AX
- self.Rjmp("CALL", _LR) // CALL AX
-}
-
-func (self *_Assembler) save_c() {
- self.xsave(_REG_ffi...) // SAVE $REG_ffi
-}
-
-func (self *_Assembler) call_b64(pc obj.Addr) {
- self.xsave(_REG_b64...) // SAVE $REG_all
- self.call(pc) // CALL $pc
- self.xload(_REG_b64...) // LOAD $REG_ffi
-}
-
-func (self *_Assembler) call_c(pc obj.Addr) {
- self.Emit("XCHGQ", _SP_p, _BX)
- self.call(pc) // CALL $pc
- self.xload(_REG_ffi...) // LOAD $REG_ffi
- self.Emit("XCHGQ", _SP_p, _BX)
- self.Emit("XORPS", _X15, _X15)
-}
-
-func (self *_Assembler) call_go(pc obj.Addr) {
- self.xsave(_REG_all...) // SAVE $REG_all
- self.call(pc) // CALL $pc
- self.xload(_REG_all...) // LOAD $REG_all
-}
-
-func (self *_Assembler) call_more_space(pc obj.Addr) {
- self.xsave(_REG_ms...) // SAVE $REG_all
- self.call(pc) // CALL $pc
- self.xload(_REG_ms...) // LOAD $REG_all
-}
-
-func (self *_Assembler) call_encoder(pc obj.Addr) {
- self.xsave(_REG_enc...) // SAVE $REG_all
- self.call(pc) // CALL $pc
- self.xload(_REG_enc...) // LOAD $REG_all
-}
-
-func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) {
- switch vt.Kind() {
- case reflect.Interface : self.call_marshaler_i(fn, it)
- case reflect.Ptr, reflect.Map : self.call_marshaler_v(fn, it, vt, true)
- // struct/array of 1 direct iface type can be direct
- default : self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect())
- }
-}
-
-func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) {
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JZ" , "_null_{n}") // JZ _null_{n}
- self.Emit("MOVQ" , _AX, _BX) // MOVQ AX, BX
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX) // MOVQ 8(SP.p), CX
- self.Emit("MOVQ" , jit.Gtype(it), _AX) // MOVQ $it, AX
- self.call_go(_F_assertI2I) // CALL_GO assertI2I
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JZ" , "_null_{n}") // JZ _null_{n}
- self.Emit("MOVQ", _BX, _CX) // MOVQ BX, CX
- self.Emit("MOVQ", _AX, _BX) // MOVQ AX, BX
- self.prep_buffer_AX()
- self.Emit("MOVQ", _ARG_fv, _DI) // MOVQ ARG.fv, DI
- self.call_go(fn) // CALL $fn
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
- self.load_buffer_AX()
- self.Sjmp("JMP" , "_done_{n}") // JMP _done_{n}
- self.Link("_null_{n}") // _null_{n}:
- self.check_size(4) // SIZE $4
- self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1)
- self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL
- self.Link("_done_{n}") // _done_{n}:
-}
-
-func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) {
- self.prep_buffer_AX() // MOVE {buf}, (SP)
- self.Emit("MOVQ", jit.Itab(it, vt), _BX) // MOVQ $(itab(it, vt)), BX
-
- /* dereference the pointer if needed */
- if !deref {
- self.Emit("MOVQ", _SP_p, _CX) // MOVQ SP.p, CX
- } else {
- self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX) // MOVQ 0(SP.p), CX
- }
-
- /* call the encoder, and perform error checks */
- self.Emit("MOVQ", _ARG_fv, _DI) // MOVQ ARG.fv, DI
- self.call_go(fn) // CALL $fn
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
- self.load_buffer_AX()
-}
-
-/** Builtin: _more_space **/
-
-var (
- _T_byte = jit.Type(byteType)
- _F_growslice = jit.Func(growslice)
-)
-
-// AX must saving n
-func (self *_Assembler) more_space() {
- self.Link(_LB_more_space)
- self.Emit("MOVQ", _RP, _BX) // MOVQ DI, BX
- self.Emit("MOVQ", _RL, _CX) // MOVQ SI, CX
- self.Emit("MOVQ", _RC, _DI) // MOVQ DX, DI
- self.Emit("MOVQ", _AX, _SI) // MOVQ AX, SI
- self.Emit("MOVQ", _T_byte, _AX) // MOVQ $_T_byte, AX
- self.call_more_space(_F_growslice) // CALL $pc
- self.Emit("MOVQ", _AX, _RP) // MOVQ AX, DI
- self.Emit("MOVQ", _BX, _RL) // MOVQ BX, SI
- self.Emit("MOVQ", _CX, _RC) // MOVQ CX, DX
- self.save_buffer() // SAVE {buf}
- self.Rjmp("JMP" , _LR) // JMP LR
-}
-
-/** Builtin Errors **/
-
-var (
- _V_ERR_too_deep = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep))))
- _V_ERR_nan_or_infinite = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite))))
- _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType)
-)
-
-func (self *_Assembler) error_too_deep() {
- self.Link(_LB_error_too_deep)
- self.Emit("MOVQ", _V_ERR_too_deep, _EP) // MOVQ $_V_ERR_too_deep, EP
- self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET
- self.Sjmp("JMP" , _LB_error) // JMP _error
-}
-
-func (self *_Assembler) error_invalid_number() {
- self.Link(_LB_error_invalid_number)
- self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ 0(SP), AX
- self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX) // MOVQ 8(SP), BX
- self.call_go(_F_error_number) // CALL_GO error_number
- self.Sjmp("JMP" , _LB_error) // JMP _error
-}
-
-func (self *_Assembler) error_nan_or_infinite() {
- self.Link(_LB_error_nan_or_infinite)
- self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP) // MOVQ $_V_ERR_nan_or_infinite, EP
- self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET
- self.Sjmp("JMP" , _LB_error) // JMP _error
-}
-
-/** String Encoding Routine **/
-
-var (
- _F_quote = jit.Imm(int64(native.S_quote))
- _F_panic = jit.Func(goPanic)
-)
-
-func (self *_Assembler) go_panic() {
- self.Link(_LB_panic)
- self.Emit("MOVQ", _SP_p, _BX)
- self.call_go(_F_panic)
-}
-
-func (self *_Assembler) encode_string(doubleQuote bool) {
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JZ" , "_str_empty_{n}") // JZ _str_empty_{n}
- self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
- self.Sjmp("JNE" , "_str_next_{n}")
- self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX)
- self.Sjmp("JMP", _LB_panic)
- self.Link("_str_next_{n}")
-
- /* openning quote, check for double quote */
- if !doubleQuote {
- self.check_size_r(_AX, 2) // SIZE $2
- self.add_char('"') // CHAR $'"'
- } else {
- self.check_size_r(_AX, 6) // SIZE $6
- self.add_long(_IM_open, 3) // TEXT $`"\"`
- }
-
- /* quoting loop */
- self.Emit("XORL", _AX, _AX) // XORL AX, AX
- self.Emit("MOVQ", _AX, _VAR_sp) // MOVQ AX, sp
- self.Link("_str_loop_{n}") // _str_loop_{n}:
- self.save_c() // SAVE $REG_ffi
-
- /* load the output buffer first, and then input buffer,
- * because the parameter registers collide with RP / RL / RC */
- self.Emit("MOVQ", _RC, _CX) // MOVQ RC, CX
- self.Emit("SUBQ", _RL, _CX) // SUBQ RL, CX
- self.Emit("MOVQ", _CX, _VAR_dn) // MOVQ CX, dn
- self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX) // LEAQ (RP)(RL), DX
- self.Emit("LEAQ", _VAR_dn, _CX) // LEAQ dn, CX
- self.Emit("MOVQ", _VAR_sp, _AX) // MOVQ sp, AX
- self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI) // MOVQ (SP.p), DI
- self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI) // MOVQ 8(SP.p), SI
- self.Emit("ADDQ", _AX, _DI) // ADDQ AX, DI
- self.Emit("SUBQ", _AX, _SI) // SUBQ AX, SI
-
- /* set the flags based on `doubleQuote` */
- if !doubleQuote {
- self.Emit("XORL", _R8, _R8) // XORL R8, R8
- } else {
- self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8
- }
-
- /* call the native quoter */
- self.call_c(_F_quote) // CALL quote
- self.Emit("ADDQ" , _VAR_dn, _RL) // ADDQ dn, RL
-
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JS" , "_str_space_{n}") // JS _str_space_{n}
-
- /* close the string, check for double quote */
- if !doubleQuote {
- self.check_size(1) // SIZE $1
- self.add_char('"') // CHAR $'"'
- self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n}
- } else {
- self.check_size(3) // SIZE $3
- self.add_text("\\\"\"") // TEXT $'\""'
- self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n}
- }
-
- /* not enough space to contain the quoted string */
- self.Link("_str_space_{n}") // _str_space_{n}:
- self.Emit("NOTQ", _AX) // NOTQ AX
- self.Emit("ADDQ", _AX, _VAR_sp) // ADDQ AX, sp
- self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX) // LEAQ (RC)(RC), AX
- self.slice_grow_ax("_str_loop_{n}") // GROW _str_loop_{n}
-
- /* empty string, check for double quote */
- if !doubleQuote {
- self.Link("_str_empty_{n}") // _str_empty_{n}:
- self.check_size(2) // SIZE $2
- self.add_text("\"\"") // TEXT $'""'
- self.Link("_str_end_{n}") // _str_end_{n}:
- } else {
- self.Link("_str_empty_{n}") // _str_empty_{n}:
- self.check_size(6) // SIZE $6
- self.add_text("\"\\\"\\\"\"") // TEXT $'"\"\""'
- self.Link("_str_end_{n}") // _str_end_{n}:
- }
-}
-
-/** OpCode Assembler Functions **/
-
-var (
- _T_json_Marshaler = rt.UnpackType(jsonMarshalerType)
- _T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType)
-)
-
-var (
- _F_f64toa = jit.Imm(int64(native.S_f64toa))
- _F_f32toa = jit.Imm(int64(native.S_f32toa))
- _F_i64toa = jit.Imm(int64(native.S_i64toa))
- _F_u64toa = jit.Imm(int64(native.S_u64toa))
- _F_b64encode = jit.Imm(int64(_subr__b64encode))
-)
-
-var (
- _F_memmove = jit.Func(memmove)
- _F_error_number = jit.Func(error_number)
- _F_isValidNumber = jit.Func(isValidNumber)
-)
-
-var (
- _F_iteratorStop = jit.Func(iteratorStop)
- _F_iteratorNext = jit.Func(iteratorNext)
- _F_iteratorStart = jit.Func(iteratorStart)
-)
-
-var (
- _F_encodeTypedPointer obj.Addr
- _F_encodeJsonMarshaler obj.Addr
- _F_encodeTextMarshaler obj.Addr
-)
-
-const (
- _MODE_AVX2 = 1 << 2
-)
-
-func init() {
- _F_encodeTypedPointer = jit.Func(encodeTypedPointer)
- _F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler)
- _F_encodeTextMarshaler = jit.Func(encodeTextMarshaler)
-}
-
-func (self *_Assembler) _asm_OP_null(_ *_Instr) {
- self.check_size(4)
- self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1)
- self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL
-}
-
-func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) {
- self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
- self.Sjmp("JC", "_empty_arr_{n}")
- self._asm_OP_null(nil)
- self.Sjmp("JMP", "_empty_arr_end_{n}")
- self.Link("_empty_arr_{n}")
- self.check_size(2)
- self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0))
- self.Emit("ADDQ", jit.Imm(2), _RL)
- self.Link("_empty_arr_end_{n}")
-}
-
-func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) {
- self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
- self.Sjmp("JC", "_empty_obj_{n}")
- self._asm_OP_null(nil)
- self.Sjmp("JMP", "_empty_obj_end_{n}")
- self.Link("_empty_obj_{n}")
- self.check_size(2)
- self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0))
- self.Emit("ADDQ", jit.Imm(2), _RL)
- self.Link("_empty_obj_end_{n}")
-}
-
-func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
- self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0
- self.Sjmp("JE" , "_false_{n}") // JE _false_{n}
- self.check_size(4) // SIZE $4
- self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'true', (RP)(RL*1)
- self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL
- self.Sjmp("JMP" , "_end_{n}") // JMP _end_{n}
- self.Link("_false_{n}") // _false_{n}:
- self.check_size(5) // SIZE $5
- self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'fals', (RP)(RL*1)
- self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4)) // MOVB $'e', 4(RP)(RL*1)
- self.Emit("ADDQ", jit.Imm(5), _RL) // ADDQ $5, RL
- self.Link("_end_{n}") // _end_{n}:
-}
-
-func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
- self.store_int(4, _F_i64toa, "MOVBQSX")
-}
-
-func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
- self.store_int(6, _F_i64toa, "MOVWQSX")
-}
-
-func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
- self.store_int(17, _F_i64toa, "MOVLQSX")
-}
-
-func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
- self.store_int(21, _F_i64toa, "MOVQ")
-}
-
-func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
- self.store_int(3, _F_u64toa, "MOVBQZX")
-}
-
-func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
- self.store_int(5, _F_u64toa, "MOVWQZX")
-}
-
-func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
- self.store_int(16, _F_u64toa, "MOVLQZX")
-}
-
-func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
- self.store_int(20, _F_u64toa, "MOVQ")
-}
-
-func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
- self.check_size(32)
- self.Emit("MOVL" , jit.Ptr(_SP_p, 0), _AX) // MOVL (SP.p), AX
- self.Emit("ANDL" , jit.Imm(_FM_exp32), _AX) // ANDL $_FM_exp32, AX
- self.Emit("XORL" , jit.Imm(_FM_exp32), _AX) // XORL $_FM_exp32, AX
- self.Sjmp("JZ" , _LB_error_nan_or_infinite) // JZ _error_nan_or_infinite
- self.save_c() // SAVE $C_regs
- self.rbuf_di() // MOVQ RP, DI
- self.Emit("MOVSS" , jit.Ptr(_SP_p, 0), _X0) // MOVSS (SP.p), X0
- self.call_c(_F_f32toa) // CALL_C f64toa
- self.Emit("ADDQ" , _AX, _RL) // ADDQ AX, RL
-}
-
-func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
- self.check_size(32)
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
- self.Emit("MOVQ" , jit.Imm(_FM_exp64), _CX) // MOVQ $_FM_exp64, CX
- self.Emit("ANDQ" , _CX, _AX) // ANDQ CX, AX
- self.Emit("XORQ" , _CX, _AX) // XORQ CX, AX
- self.Sjmp("JZ" , _LB_error_nan_or_infinite) // JZ _error_nan_or_infinite
- self.save_c() // SAVE $C_regs
- self.rbuf_di() // MOVQ RP, DI
- self.Emit("MOVSD" , jit.Ptr(_SP_p, 0), _X0) // MOVSD (SP.p), X0
- self.call_c(_F_f64toa) // CALL_C f64toa
- self.Emit("ADDQ" , _AX, _RL) // ADDQ AX, RL
-}
-
-func (self *_Assembler) _asm_OP_str(_ *_Instr) {
- self.encode_string(false)
-}
-
-func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
- self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX
- self.Emit("ADDQ", jit.Imm(2), _AX) // ADDQ $2, AX
- self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX) // MOVQ $_MF_mulv, CX
- self.Emit("MOVQ", _DX, _BX) // MOVQ DX, BX
- self.From("MULQ", _CX) // MULQ CX
- self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX) // LEAQ 1(DX)(DX), AX
- self.Emit("ORQ" , jit.Imm(2), _AX) // ORQ $2, AX
- self.Emit("MOVQ", _BX, _DX) // MOVQ BX, DX
- self.check_size_r(_AX, 0) // SIZE AX
- self.add_char('"') // CHAR $'"'
- self.Emit("MOVQ", _ARG_rb, _DI) // MOVQ rb<>+0(FP), DI
- self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8)) // MOVQ SI, 8(DI)
- self.Emit("MOVQ", _SP_p, _SI) // MOVQ SP.p, SI
-
- /* check for AVX2 support */
- if !cpu.HasAVX2 {
- self.Emit("XORL", _DX, _DX) // XORL DX, DX
- } else {
- self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX) // MOVL $_MODE_AVX2, DX
- }
-
- /* call the encoder */
- self.call_b64(_F_b64encode) // CALL b64encode
- self.load_buffer_AX() // LOAD {buf}
- self.add_char('"') // CHAR $'"'
-}
-
-func (self *_Assembler) _asm_OP_quote(_ *_Instr) {
- self.encode_string(true)
-}
-
-func (self *_Assembler) _asm_OP_number(_ *_Instr) {
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _BX) // MOVQ (SP.p), BX
- self.Emit("TESTQ", _BX, _BX) // TESTQ BX, BX
- self.Sjmp("JZ" , "_empty_{n}")
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JNZ" , "_number_next_{n}")
- self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX)
- self.Sjmp("JMP", _LB_panic)
- self.Link("_number_next_{n}")
- self.call_go(_F_isValidNumber) // CALL_GO isValidNumber
- self.Emit("CMPB" , _AX, jit.Imm(0)) // CMPB AX, $0
- self.Sjmp("JE" , _LB_error_invalid_number) // JE _error_invalid_number
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _BX) // MOVQ (SP.p), BX
- self.check_size_r(_BX, 0) // SIZE BX
- self.Emit("LEAQ" , jit.Sib(_RP, _RL, 1, 0), _AX) // LEAQ (RP)(RL), AX
- self.Emit("ADDQ" , jit.Ptr(_SP_p, 8), _RL) // ADDQ 8(SP.p), RL
- self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _BX) // MOVOU (SP.p), BX
- self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX) // MOVOU X0, 8(SP)
- self.call_go(_F_memmove) // CALL_GO memmove
- self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX
- self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX)
- self.Sjmp("JMP" , "_done_{n}") // JMP _done_{n}
- self.Link("_empty_{n}") // _empty_{n}
- self.check_size(1) // SIZE $1
- self.add_char('0') // CHAR $'0'
- self.Link("_done_{n}") // _done_{n}:
-}
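-
-/* In Go terms, OP_number behaves like this sketch (the names are
- * illustrative): an empty json.Number writes "0", a nil pointer with a
- * nonzero length panics, and anything else is validated before being
- * copied verbatim:
- *
- *   switch {
- *   case len(s) == 0:
- *       buf = append(buf, '0')
- *   case !isValidNumber(s):
- *       return errInvalidNumber // JE _error_invalid_number
- *   default:
- *       buf = append(buf, s...)
- *   }
- */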
-
-func (self *_Assembler) _asm_OP_eface(_ *_Instr) {
- self.prep_buffer_AX() // MOVE {buf}, AX
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX) // MOVQ (SP.p), BX
- self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _CX) // LEAQ 8(SP.p), CX
- self.Emit("MOVQ" , _ST, _DI) // MOVQ ST, DI
- self.Emit("MOVQ" , _ARG_fv, _SI) // MOVQ fv, AX
- self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
- self.load_buffer_AX()
-}
-
-func (self *_Assembler) _asm_OP_iface(_ *_Instr) {
- self.prep_buffer_AX() // MOVE {buf}, AX
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _CX) // MOVQ (SP.p), CX
- self.Emit("MOVQ" , jit.Ptr(_CX, 8), _BX) // MOVQ 8(CX), BX
- self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _CX) // LEAQ 8(SP.p), CX
- self.Emit("MOVQ" , _ST, _DI) // MOVQ ST, DI
- self.Emit("MOVQ" , _ARG_fv, _SI) // MOVQ fv, AX
- self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
- self.load_buffer_AX()
-}
-
-func (self *_Assembler) _asm_OP_byte(p *_Instr) {
- self.check_size(1)
- self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0)) // MOVL p.vi(), (RP)(RL*1)
- self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL
-}
-
-func (self *_Assembler) _asm_OP_text(p *_Instr) {
- self.check_size(len(p.vs())) // SIZE ${len(p.vs())}
- self.add_text(p.vs()) // TEXT ${p.vs()}
-}
-
-func (self *_Assembler) _asm_OP_deref(_ *_Instr) {
- self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p
-}
-
-func (self *_Assembler) _asm_OP_index(p *_Instr) {
- self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ $p.vi(), AX
- self.Emit("ADDQ", _AX, _SP_p) // ADDQ AX, SP.p
-}
-
-func (self *_Assembler) _asm_OP_load(_ *_Instr) {
- self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
- self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x) // MOVQ -24(ST)(AX), SP.x
- self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p) // MOVQ -8(ST)(AX), SP.p
- self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q) // MOVQ (ST)(AX), SP.q
-}
-
-func (self *_Assembler) _asm_OP_save(_ *_Instr) {
- self.save_state()
-}
-
-func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
- self.drop_state(_StateSize)
-}
-
-func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
- self.drop_state(_StateSize * 2) // DROP $(_StateSize * 2)
- self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56)) // MOVOU X0, 56(ST)(AX)
-}
-
-func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
- self.prep_buffer_AX() // MOVE {buf}, (SP)
- vt, pv := p.vp()
- self.Emit("MOVQ", jit.Type(vt), _BX) // MOVQ $(type(p.vt())), BX
-
- /* check for indirection */
- if !rt.UnpackType(vt).Indirect() {
- self.Emit("MOVQ", _SP_p, _CX) // MOVQ SP.p, CX
- } else {
- self.Emit("MOVQ", _SP_p, _VAR_vp) // MOVQ SP.p, VAR.vp
- self.Emit("LEAQ", _VAR_vp, _CX) // LEAQ VAR.vp, CX
- }
-
- /* call the encoder */
- self.Emit("MOVQ" , _ST, _DI) // MOVQ ST, DI
- self.Emit("MOVQ" , _ARG_fv, _SI) // MOVQ $fv, SI
- if pv {
- self.Emit("BTCQ", jit.Imm(bitPointerValue), _SI) // BTCQ $1, SI
- }
- self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
- self.load_buffer_AX()
-}
-
-func (self *_Assembler) _asm_OP_is_nil(p *_Instr) {
- self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0
- self.Xjmp("JE" , p.vi()) // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) {
- self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0)) // CMPQ 8(SP.p), $0
- self.Xjmp("JE" , p.vi()) // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) {
- self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0
- self.Xjmp("JE" , p.vi()) // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) {
- self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPW (SP.p), $0
- self.Xjmp("JE" , p.vi()) // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) {
- self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPL (SP.p), $0
- self.Xjmp("JE" , p.vi()) // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) {
- self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0
- self.Xjmp("JE" , p.vi()) // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) {
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Xjmp("JZ" , p.vi()) // JZ p.vi()
- self.Emit("CMPQ" , jit.Ptr(_AX, 0), jit.Imm(0)) // CMPQ (AX), $0
- self.Xjmp("JE" , p.vi()) // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_goto(p *_Instr) {
- self.Xjmp("JMP", p.vi())
-}
-
-func (self *_Assembler) _asm_OP_map_iter(p *_Instr) {
- self.Emit("MOVQ" , jit.Type(p.vt()), _AX) // MOVQ $p.vt(), AX
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX) // MOVQ (SP.p), BX
- self.Emit("MOVQ" , _ARG_fv, _CX) // MOVQ fv, CX
- self.call_go(_F_iteratorStart) // CALL_GO iteratorStart
- self.Emit("MOVQ" , _AX, _SP_q) // MOVQ AX, SP.q
- self.Emit("MOVQ" , _BX, _ET) // MOVQ 32(SP), ET
- self.Emit("MOVQ" , _CX, _EP) // MOVQ 40(SP), EP
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
-}
-
-func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) {
- self.Emit("MOVQ", _SP_q, _AX) // MOVQ SP.q, AX
- self.call_go(_F_iteratorStop) // CALL_GO iteratorStop
- self.Emit("XORL", _SP_q, _SP_q) // XORL SP.q, SP.q
-}
-
-func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) {
- self.Emit("MOVQ" , jit.Ptr(_SP_q, 0), _SP_p) // MOVQ (SP.q), SP.p
- self.Emit("TESTQ", _SP_p, _SP_p) // TESTQ SP.p, SP.p
- self.Xjmp("JZ" , p.vi()) // JNZ p.vi()
-}
-
-func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) {
- self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv) // BTQ ${SortMapKeys}, fv
- self.Sjmp("JNC", "_unordered_key_{n}") // JNC _unordered_key_{n}
- self.encode_string(false) // STR $false
- self.Xjmp("JMP", p.vi()) // JMP ${p.vi()}
- self.Link("_unordered_key_{n}") // _unordered_key_{n}:
-}
-
-func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) {
- self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p) // MOVQ 8(SP.q), SP.p
- self.Emit("MOVQ", _SP_q, _AX) // MOVQ SP.q, AX
- self.call_go(_F_iteratorNext) // CALL_GO iteratorNext
-}
-
-func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) {
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _SP_x) // MOVQ 8(SP.p), SP.x
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p
- self.Emit("ORQ" , jit.Imm(1 << _S_init), _SP_f) // ORQ $(1<<_S_init), SP.f
-}
-
-func (self *_Assembler) _asm_OP_slice_next(p *_Instr) {
- self.Emit("TESTQ" , _SP_x, _SP_x) // TESTQ SP.x, SP.x
- self.Xjmp("JZ" , p.vi()) // JZ p.vi()
- self.Emit("SUBQ" , jit.Imm(1), _SP_x) // SUBQ $1, SP.x
- self.Emit("BTRQ" , jit.Imm(_S_init), _SP_f) // BTRQ $_S_init, SP.f
- self.Emit("LEAQ" , jit.Ptr(_SP_p, int64(p.vlen())), _AX) // LEAQ $(p.vlen())(SP.p), AX
- self.Emit("CMOVQCC", _AX, _SP_p) // CMOVQNC AX, SP.p
-}
-
-func (self *_Assembler) _asm_OP_marshal(p *_Instr) {
- self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt())
-}
-
-func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) {
- if p.vk() != reflect.Ptr {
- panic("marshal_p: invalid type")
- } else {
- self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false)
- }
-}
-
-func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) {
- self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt())
-}
-
-func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) {
- if p.vk() != reflect.Ptr {
- panic("marshal_text_p: invalid type")
- } else {
- self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false)
- }
-}
-
-func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) {
- self.Emit("ORQ", jit.Imm(1 << _S_cond), _SP_f) // ORQ $(1<<_S_cond), SP.f
-}
-
-func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) {
- self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f) // BTRQ $_S_cond, SP.f
- self.Xjmp("JC" , p.vi())
-}
-
-func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
- self.Emit("MOVQ", jit.Imm(int64(p2.op())), _CX) // MOVQ $(p2.op()), AX
- self.Emit("MOVQ", jit.Imm(int64(p1.op())), _BX) // MOVQ $(p1.op()), BX
- self.Emit("MOVQ", jit.Imm(int64(i)), _AX) // MOVQ $(i), CX
- self.call_go(_F_println)
-}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/assembler_stkabi_amd64.go b/vendor/github.com/bytedance/sonic/internal/encoder/assembler_stkabi_amd64.go
deleted file mode 100644
index 89dafc84e..000000000
--- a/vendor/github.com/bytedance/sonic/internal/encoder/assembler_stkabi_amd64.go
+++ /dev/null
@@ -1,1175 +0,0 @@
-// +build go1.16,!go1.17
-
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package encoder
-
-import (
- `fmt`
- `reflect`
- `strconv`
- `unsafe`
-
- `github.com/bytedance/sonic/internal/cpu`
- `github.com/bytedance/sonic/internal/jit`
- `github.com/bytedance/sonic/internal/native/types`
- `github.com/twitchyliquid64/golang-asm/obj`
- `github.com/twitchyliquid64/golang-asm/obj/x86`
-
- `github.com/bytedance/sonic/internal/native`
- `github.com/bytedance/sonic/internal/rt`
-)
-
-/** Register Allocations
- *
- * State Registers:
- *
- * %rbx : stack base
- * %rdi : result pointer
- * %rsi : result length
- * %rdx : result capacity
- * %r12 : sp->p
- * %r13 : sp->q
- * %r14 : sp->x
- * %r15 : sp->f
- *
- * Error Registers:
- *
- * %r10 : error type register
- * %r11 : error pointer register
- */
-
-/** Function Prototype & Stack Map
- *
- * func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error)
- *
- * buf : (FP)
- * p : 8(FP)
- * sb : 16(FP)
- * fv : 24(FP)
- * err.vt : 32(FP)
- * err.vp : 40(FP)
- */
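-
-/* In Go terms, the generated code is used through this signature (it is the
- * `_Encoder` type that `Load` returns via `ptoenc`):
- *
- *   type _Encoder func(buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) error
- */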
-
-const (
- _S_cond = iota
- _S_init
-)
-
-const (
- _FP_args = 48 // 48 bytes for passing arguments to this function
- _FP_fargs = 64 // 64 bytes for passing arguments to other Go functions
- _FP_saves = 64 // 64 bytes for saving the registers before CALL instructions
- _FP_locals = 24 // 24 bytes for local variables
-)
-
-const (
- _FP_offs = _FP_fargs + _FP_saves + _FP_locals
- _FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer
- _FP_base = _FP_size + 8 // 8 bytes for the return address
-)
-
-const (
- _FM_exp32 = 0x7f800000
- _FM_exp64 = 0x7ff0000000000000
-)
-
-const (
- _IM_null = 0x6c6c756e // 'null'
- _IM_true = 0x65757274 // 'true'
- _IM_fals = 0x736c6166 // 'fals' ('false' without the 'e')
- _IM_open = 0x00225c22 // '"\"∅'
- _IM_array = 0x5d5b // '[]'
- _IM_object = 0x7d7b // '{}'
- _IM_mulv = -0x5555555555555555
-)
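-
-/* Each `_IM_*` immediate is the literal's bytes packed little-endian, so a
- * single MOVL/MOVW emits the whole token. A quick sketch of the encoding:
- *
- *   var b [4]byte
- *   binary.LittleEndian.PutUint32(b[:], _IM_null) // 0x6c6c756e
- *   fmt.Printf("%s\n", b[:])                      // prints "null"
- */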
-
-const (
- _LB_more_space = "_more_space"
- _LB_more_space_return = "_more_space_return_"
-)
-
-const (
- _LB_error = "_error"
- _LB_error_too_deep = "_error_too_deep"
- _LB_error_invalid_number = "_error_invalid_number"
- _LB_error_nan_or_infinite = "_error_nan_or_infinite"
- _LB_panic = "_panic"
-)
-
-var (
- _AX = jit.Reg("AX")
- _CX = jit.Reg("CX")
- _DX = jit.Reg("DX")
- _DI = jit.Reg("DI")
- _SI = jit.Reg("SI")
- _BP = jit.Reg("BP")
- _SP = jit.Reg("SP")
- _R8 = jit.Reg("R8")
-)
-
-var (
- _X0 = jit.Reg("X0")
- _Y0 = jit.Reg("Y0")
-)
-
-var (
- _ST = jit.Reg("BX")
- _RP = jit.Reg("DI")
- _RL = jit.Reg("SI")
- _RC = jit.Reg("DX")
-)
-
-var (
- _LR = jit.Reg("R9")
- _R10 = jit.Reg("R10") // used for gcWriterBarrier
- _ET = jit.Reg("R10")
- _EP = jit.Reg("R11")
-)
-
-var (
- _SP_p = jit.Reg("R12")
- _SP_q = jit.Reg("R13")
- _SP_x = jit.Reg("R14")
- _SP_f = jit.Reg("R15")
-)
-
-var (
- _ARG_rb = jit.Ptr(_SP, _FP_base)
- _ARG_vp = jit.Ptr(_SP, _FP_base + 8)
- _ARG_sb = jit.Ptr(_SP, _FP_base + 16)
- _ARG_fv = jit.Ptr(_SP, _FP_base + 24)
-)
-
-var (
- _RET_et = jit.Ptr(_SP, _FP_base + 32)
- _RET_ep = jit.Ptr(_SP, _FP_base + 40)
-)
-
-var (
- _VAR_sp = jit.Ptr(_SP, _FP_fargs + _FP_saves)
- _VAR_dn = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
- _VAR_vp = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
-)
-
-var (
- _REG_ffi = []obj.Addr{_RP, _RL, _RC}
- _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL}
- _REG_jsr = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR}
- _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC}
-)
-
-type _Assembler struct {
- jit.BaseAssembler
- p _Program
- x int
- name string
-}
-
-func newAssembler(p _Program) *_Assembler {
- return new(_Assembler).Init(p)
-}
-
-/** Assembler Interface **/
-func (self *_Assembler) Load() _Encoder {
- return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
-}
-
-func (self *_Assembler) Init(p _Program) *_Assembler {
- self.p = p
- self.BaseAssembler.Init(self.compile)
- return self
-}
-
-func (self *_Assembler) compile() {
- self.prologue()
- self.instrs()
- self.epilogue()
- self.builtins()
-}
-
-/** Assembler Stages **/
-
-var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
- _OP_null : (*_Assembler)._asm_OP_null,
- _OP_empty_arr : (*_Assembler)._asm_OP_empty_arr,
- _OP_empty_obj : (*_Assembler)._asm_OP_empty_obj,
- _OP_bool : (*_Assembler)._asm_OP_bool,
- _OP_i8 : (*_Assembler)._asm_OP_i8,
- _OP_i16 : (*_Assembler)._asm_OP_i16,
- _OP_i32 : (*_Assembler)._asm_OP_i32,
- _OP_i64 : (*_Assembler)._asm_OP_i64,
- _OP_u8 : (*_Assembler)._asm_OP_u8,
- _OP_u16 : (*_Assembler)._asm_OP_u16,
- _OP_u32 : (*_Assembler)._asm_OP_u32,
- _OP_u64 : (*_Assembler)._asm_OP_u64,
- _OP_f32 : (*_Assembler)._asm_OP_f32,
- _OP_f64 : (*_Assembler)._asm_OP_f64,
- _OP_str : (*_Assembler)._asm_OP_str,
- _OP_bin : (*_Assembler)._asm_OP_bin,
- _OP_quote : (*_Assembler)._asm_OP_quote,
- _OP_number : (*_Assembler)._asm_OP_number,
- _OP_eface : (*_Assembler)._asm_OP_eface,
- _OP_iface : (*_Assembler)._asm_OP_iface,
- _OP_byte : (*_Assembler)._asm_OP_byte,
- _OP_text : (*_Assembler)._asm_OP_text,
- _OP_deref : (*_Assembler)._asm_OP_deref,
- _OP_index : (*_Assembler)._asm_OP_index,
- _OP_load : (*_Assembler)._asm_OP_load,
- _OP_save : (*_Assembler)._asm_OP_save,
- _OP_drop : (*_Assembler)._asm_OP_drop,
- _OP_drop_2 : (*_Assembler)._asm_OP_drop_2,
- _OP_recurse : (*_Assembler)._asm_OP_recurse,
- _OP_is_nil : (*_Assembler)._asm_OP_is_nil,
- _OP_is_nil_p1 : (*_Assembler)._asm_OP_is_nil_p1,
- _OP_is_zero_1 : (*_Assembler)._asm_OP_is_zero_1,
- _OP_is_zero_2 : (*_Assembler)._asm_OP_is_zero_2,
- _OP_is_zero_4 : (*_Assembler)._asm_OP_is_zero_4,
- _OP_is_zero_8 : (*_Assembler)._asm_OP_is_zero_8,
- _OP_is_zero_map : (*_Assembler)._asm_OP_is_zero_map,
- _OP_goto : (*_Assembler)._asm_OP_goto,
- _OP_map_iter : (*_Assembler)._asm_OP_map_iter,
- _OP_map_stop : (*_Assembler)._asm_OP_map_stop,
- _OP_map_check_key : (*_Assembler)._asm_OP_map_check_key,
- _OP_map_write_key : (*_Assembler)._asm_OP_map_write_key,
- _OP_map_value_next : (*_Assembler)._asm_OP_map_value_next,
- _OP_slice_len : (*_Assembler)._asm_OP_slice_len,
- _OP_slice_next : (*_Assembler)._asm_OP_slice_next,
- _OP_marshal : (*_Assembler)._asm_OP_marshal,
- _OP_marshal_p : (*_Assembler)._asm_OP_marshal_p,
- _OP_marshal_text : (*_Assembler)._asm_OP_marshal_text,
- _OP_marshal_text_p : (*_Assembler)._asm_OP_marshal_text_p,
- _OP_cond_set : (*_Assembler)._asm_OP_cond_set,
- _OP_cond_testc : (*_Assembler)._asm_OP_cond_testc,
-}
-
-func (self *_Assembler) instr(v *_Instr) {
- if fn := _OpFuncTab[v.op()]; fn != nil {
- fn(self, v)
- } else {
- panic(fmt.Sprintf("invalid opcode: %d", v.op()))
- }
-}
-
-func (self *_Assembler) instrs() {
- for i, v := range self.p {
- self.Mark(i)
- self.instr(&v)
- self.debug_instr(i, &v)
- }
-}
-
-func (self *_Assembler) builtins() {
- self.more_space()
- self.error_too_deep()
- self.error_invalid_number()
- self.error_nan_or_infinite()
- self.go_panic()
-}
-
-func (self *_Assembler) epilogue() {
- self.Mark(len(self.p))
- self.Emit("XORL", _ET, _ET)
- self.Emit("XORL", _EP, _EP)
- self.Link(_LB_error)
- self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX
- self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX)
- self.Emit("MOVQ", _ET, _RET_et) // MOVQ ET, et<>+24(FP)
- self.Emit("MOVQ", _EP, _RET_ep) // MOVQ EP, ep<>+32(FP)
- self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP
- self.Emit("ADDQ", jit.Imm(_FP_size), _SP) // ADDQ $_FP_size, SP
- self.Emit("RET") // RET
-}
-
-func (self *_Assembler) prologue() {
- self.Emit("SUBQ", jit.Imm(_FP_size), _SP) // SUBQ $_FP_size, SP
- self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP)
- self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP
- self.load_buffer() // LOAD {buf}
- self.Emit("MOVQ", _ARG_vp, _SP_p) // MOVQ vp<>+8(FP), SP.p
- self.Emit("MOVQ", _ARG_sb, _ST) // MOVQ sb<>+16(FP), ST
- self.Emit("XORL", _SP_x, _SP_x) // XORL SP.x, SP.x
- self.Emit("XORL", _SP_f, _SP_f) // XORL SP.f, SP.f
- self.Emit("XORL", _SP_q, _SP_q) // XORL SP.q, SP.q
-}
-
-/** Assembler Inline Functions **/
-
-func (self *_Assembler) xsave(reg ...obj.Addr) {
- for i, v := range reg {
- if i > _FP_saves / 8 - 1 {
- panic("too many registers to save")
- } else {
- self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))
- }
- }
-}
-
-func (self *_Assembler) xload(reg ...obj.Addr) {
- for i, v := range reg {
- if i > _FP_saves / 8 - 1 {
- panic("too many registers to load")
- } else {
- self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)
- }
- }
-}
-
-func (self *_Assembler) rbuf_di() {
- if _RP.Reg != x86.REG_DI {
- panic("register allocation messed up: RP != DI")
- } else {
- self.Emit("ADDQ", _RL, _RP)
- }
-}
-
-func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) {
- self.check_size(nd)
- self.save_c() // SAVE $C_regs
- self.rbuf_di() // MOVQ RP, DI
- self.Emit(ins, jit.Ptr(_SP_p, 0), _SI) // $ins (SP.p), SI
- self.call_c(fn) // CALL_C $fn
- self.Emit("ADDQ", _AX, _RL) // ADDQ AX, RL
-}
-
-func (self *_Assembler) store_str(s string) {
- i := 0
- m := rt.Str2Mem(s)
-
- /* 8-byte stores */
- for i <= len(m) - 8 {
- self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX) // MOVQ $s[i:], AX
- self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i))) // MOVQ AX, i(RP)(RL)
- i += 8
- }
-
- /* 4-byte stores */
- if i <= len(m) - 4 {
- self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVL $s[i:], i(RP)(RL)
- i += 4
- }
-
- /* 2-byte stores */
- if i <= len(m) - 2 {
- self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVW $s[i:], i(RP)(RL)
- i += 2
- }
-
- /* last byte */
- if i < len(m) {
- self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i))) // MOVB $s[i:], i(RP)(RL)
- }
-}
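-
-/* store_str is the JIT counterpart of this pure-Go chunked copy: it peels
- * off 8/4/2/1-byte pieces so that each piece becomes one immediate store
- * (a sketch, not what actually runs):
- *
- *   func storeStr(buf []byte, off int, s string) {
- *       i := 0
- *       for ; i <= len(s)-8; i += 8 { copy(buf[off+i:], s[i:i+8]) }
- *       if i <= len(s)-4 { copy(buf[off+i:], s[i:i+4]); i += 4 }
- *       if i <= len(s)-2 { copy(buf[off+i:], s[i:i+2]); i += 2 }
- *       if i < len(s)    { buf[off+i] = s[i] }
- *   }
- */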
-
-func (self *_Assembler) check_size(n int) {
- self.check_size_rl(jit.Ptr(_RL, int64(n)))
-}
-
-func (self *_Assembler) check_size_r(r obj.Addr, d int) {
- self.check_size_rl(jit.Sib(_RL, r, 1, int64(d)))
-}
-
-func (self *_Assembler) check_size_rl(v obj.Addr) {
- idx := self.x
- key := _LB_more_space_return + strconv.Itoa(idx)
-
- /* the following code relies on LR == R9 to work */
- if _LR.Reg != x86.REG_R9 {
- panic("register allocation messed up: LR != R9")
- }
-
- /* check for buffer capacity */
- self.x++
- self.Emit("LEAQ", v, _AX) // LEAQ $v, AX
- self.Emit("CMPQ", _AX, _RC) // CMPQ AX, RC
- self.Sjmp("JBE" , key) // JBE _more_space_return_{n}
- self.slice_grow_ax(key) // GROW $key
- self.Link(key) // _more_space_return_{n}:
-}
-
-func (self *_Assembler) slice_grow_ax(ret string) {
- self.Byte(0x4c, 0x8d, 0x0d) // LEAQ ?(PC), R9
- self.Sref(ret, 4) // .... &ret
- self.Sjmp("JMP" , _LB_more_space) // JMP _more_space
-}
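-
-/* check_size_rl plus slice_grow_ax amount to an append-style capacity check;
- * roughly, in Go terms (a sketch):
- *
- *   if need := rl + n; need > rc {   // LEAQ v, AX; CMPQ AX, RC; JBE skip
- *       buf = growslice(byteType, buf, need)
- *   }
- *
- * The return label is loaded into R9 (LR) by hand so that `_more_space` can
- * come back with a plain `JMP LR` instead of a full call frame.
- */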
-
-/** State Stack Helpers **/
-
-const (
- _StateSize = int64(unsafe.Sizeof(_State{}))
- _StackLimit = _MaxStack * _StateSize
-)
-
-func (self *_Assembler) save_state() {
- self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX) // MOVQ (ST), CX
- self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R8) // LEAQ _StateSize(CX), R8
- self.Emit("CMPQ", _R8, jit.Imm(_StackLimit)) // CMPQ R8, $_StackLimit
- self.Sjmp("JAE" , _LB_error_too_deep) // JA _error_too_deep
- self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8)) // MOVQ SP.x, 8(ST)(CX)
- self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16)) // MOVQ SP.f, 16(ST)(CX)
- self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24)) // MOVQ SP.p, 24(ST)(CX)
- self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32)) // MOVQ SP.q, 32(ST)(CX)
- self.Emit("MOVQ", _R8, jit.Ptr(_ST, 0)) // MOVQ R8, (ST)
-}
-
-func (self *_Assembler) drop_state(decr int64) {
- self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
- self.Emit("SUBQ" , jit.Imm(decr), _AX) // SUBQ $decr, AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST)
- self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _SP_x) // MOVQ 8(ST)(AX), SP.x
- self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 16), _SP_f) // MOVQ 16(ST)(AX), SP.f
- self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 24), _SP_p) // MOVQ 24(ST)(AX), SP.p
- self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 32), _SP_q) // MOVQ 32(ST)(AX), SP.q
- self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
- self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX)
- self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24)) // MOVOU X0, 24(ST)(AX)
-}
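-
-/* save_state/drop_state are a hand-rolled push/pop on the encoder's state
- * stack, with the byte offset of the top kept at (ST). A sketch with
- * illustrative field names:
- *
- *   func push(st *_Stack, s _State) bool {
- *       if st.sp+_StateSize >= _StackLimit {
- *           return false // JAE _error_too_deep
- *       }
- *       st.sb[st.sp/_StateSize] = s
- *       st.sp += _StateSize
- *       return true
- *   }
- *
- * drop_state additionally zeroes the vacated slots (PXOR/MOVOU) so stale
- * pointers do not keep their referents alive for the GC.
- */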
-
-/** Buffer Helpers **/
-
-func (self *_Assembler) add_char(ch byte) {
- self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVB $ch, (RP)(RL)
- self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL
-}
-
-func (self *_Assembler) add_long(ch uint32, n int64) {
- self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVL $ch, (RP)(RL)
- self.Emit("ADDQ", jit.Imm(n), _RL) // ADDQ $n, RL
-}
-
-func (self *_Assembler) add_text(ss string) {
- self.store_str(ss) // TEXT $ss
- self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL) // ADDQ ${len(ss)}, RL
-}
-
-func (self *_Assembler) prep_buffer() {
- self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX
- self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX)
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
-}
-
-func (self *_Assembler) prep_buffer_c() {
- self.Emit("MOVQ", _ARG_rb, _DI) // MOVQ rb<>+0(FP), DI
- self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8)) // MOVQ RL, 8(DI)
-}
-
-func (self *_Assembler) save_buffer() {
- self.Emit("MOVQ", _ARG_rb, _CX) // MOVQ rb<>+0(FP), CX
- self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0)) // MOVQ RP, (CX)
- self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8)) // MOVQ RL, 8(CX)
- self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16)) // MOVQ RC, 16(CX)
-}
-
-func (self *_Assembler) load_buffer() {
- self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX
- self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP) // MOVQ (AX), RP
- self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL) // MOVQ 8(AX), RL
- self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), RC
-}
-
-/** Function Interface Helpers **/
-
-func (self *_Assembler) call(pc obj.Addr) {
- self.Emit("MOVQ", pc, _AX) // MOVQ $pc, AX
- self.Rjmp("CALL", _AX) // CALL AX
-}
-
-func (self *_Assembler) save_c() {
- self.xsave(_REG_ffi...) // SAVE $REG_ffi
-}
-
-func (self *_Assembler) call_c(pc obj.Addr) {
- self.call(pc) // CALL $pc
- self.xload(_REG_ffi...) // LOAD $REG_ffi
-}
-
-func (self *_Assembler) call_go(pc obj.Addr) {
- self.xsave(_REG_all...) // SAVE $REG_all
- self.call(pc) // CALL $pc
- self.xload(_REG_all...) // LOAD $REG_all
-}
-
-func (self *_Assembler) call_encoder(pc obj.Addr) {
- self.xsave(_REG_enc...) // SAVE $REG_enc
- self.call(pc) // CALL $pc
- self.xload(_REG_enc...) // LOAD $REG_enc
- self.load_buffer() // LOAD {buf}
-}
-
-func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) {
- switch vt.Kind() {
- case reflect.Interface : self.call_marshaler_i(fn, it)
- case reflect.Ptr, reflect.Map: self.call_marshaler_v(fn, it, vt, true)
- // a struct/array wrapping a single direct-interface type can itself be direct
- default : self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect())
- }
-}
-
-func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) {
- self.Emit("MOVQ" , jit.Gtype(it), _AX) // MOVQ $it, AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX) // MOVQ 8(SP.p), CX
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JZ" , "_null_{n}") // JZ _null_{n}
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
- self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 16)) // MOVQ CX, 16(SP)
- self.call_go(_F_assertI2I) // CALL_GO assertI2I
- self.prep_buffer() // MOVE {buf}, (SP)
- self.Emit("MOVOU", jit.Ptr(_SP, 24), _X0) // MOVOU 24(SP), X0
- self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP)
- self.Emit("MOVQ", _ARG_fv, _CX) // MOVQ ARG.fv, CX
- self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24)) // MOVQ CX, 24(SP)
- self.call_encoder(fn) // CALL $fn
- self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
- self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
- self.Sjmp("JMP" , "_done_{n}") // JMP _done_{n}
- self.Link("_null_{n}") // _null_{n}:
- self.check_size(4) // SIZE $4
- self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1)
- self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL
- self.Link("_done_{n}") // _done_{n}:
-}
-
-func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) {
- self.prep_buffer() // MOVE {buf}, (SP)
- self.Emit("MOVQ", jit.Itab(it, vt), _AX) // MOVQ $(itab(it, vt)), AX
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
-
- /* dereference the pointer if needed */
- if !deref {
- self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 16)) // MOVQ SP.p, 16(SP)
- } else {
- self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP)
- }
-
- /* call the encoder, and perform error checks */
- self.Emit("MOVQ", _ARG_fv, _CX) // MOVQ ARG.fv, CX
- self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24)) // MOVQ CX, 24(SP)
- self.call_encoder(fn) // CALL $fn
- self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
- self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
-}
-
-/** Builtin: _more_space **/
-
-var (
- _T_byte = jit.Type(byteType)
- _F_growslice = jit.Func(growslice)
-)
-
-func (self *_Assembler) more_space() {
- self.Link(_LB_more_space)
- self.Emit("MOVQ", _T_byte, _AX) // MOVQ $_T_byte, _AX
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ _AX, (SP)
- self.Emit("MOVQ", _RP, jit.Ptr(_SP, 8)) // MOVQ RP, 8(SP)
- self.Emit("MOVQ", _RL, jit.Ptr(_SP, 16)) // MOVQ RL, 16(SP)
- self.Emit("MOVQ", _RC, jit.Ptr(_SP, 24)) // MOVQ RC, 24(SP)
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP)
- self.xsave(_REG_jsr...) // SAVE $REG_jsr
- self.call(_F_growslice) // CALL $pc
- self.xload(_REG_jsr...) // LOAD $REG_jsr
- self.Emit("MOVQ", jit.Ptr(_SP, 40), _RP) // MOVQ 40(SP), RP
- self.Emit("MOVQ", jit.Ptr(_SP, 48), _RL) // MOVQ 48(SP), RL
- self.Emit("MOVQ", jit.Ptr(_SP, 56), _RC) // MOVQ 56(SP), RC
- self.save_buffer() // SAVE {buf}
- self.Rjmp("JMP" , _LR) // JMP LR
-}
-
-/** Builtin Errors **/
-
-var (
- _V_ERR_too_deep = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep))))
- _V_ERR_nan_or_infinite = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite))))
- _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType)
-)
-
-func (self *_Assembler) error_too_deep() {
- self.Link(_LB_error_too_deep)
- self.Emit("MOVQ", _V_ERR_too_deep, _EP) // MOVQ $_V_ERR_too_deep, EP
- self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET
- self.Sjmp("JMP" , _LB_error) // JMP _error
-}
-
-func (self *_Assembler) error_invalid_number() {
- self.Link(_LB_error_invalid_number)
- self.call_go(_F_error_number) // CALL_GO error_number
- self.Emit("MOVQ", jit.Ptr(_SP, 16), _ET) // MOVQ 16(SP), ET
- self.Emit("MOVQ", jit.Ptr(_SP, 24), _EP) // MOVQ 24(SP), EP
- self.Sjmp("JMP" , _LB_error) // JMP _error
-}
-
-func (self *_Assembler) error_nan_or_infinite() {
- self.Link(_LB_error_nan_or_infinite)
- self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP) // MOVQ $_V_ERR_nan_or_infinite, EP
- self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET
- self.Sjmp("JMP" , _LB_error) // JMP _error
-}
-
-/** String Encoding Routine **/
-
-var (
- _F_quote = jit.Imm(int64(native.S_quote))
- _F_panic = jit.Func(goPanic)
-)
-
-func (self *_Assembler) go_panic() {
- self.Link(_LB_panic)
- self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 8))
- self.call_go(_F_panic)
-}
-
-func (self *_Assembler) encode_string(doubleQuote bool) {
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JZ" , "_str_empty_{n}") // JZ _str_empty_{n}
- self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
- self.Sjmp("JNE" , "_str_next_{n}")
- self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0))
- self.Sjmp("JMP", _LB_panic)
- self.Link("_str_next_{n}")
-
- /* opening quote, check for double quote */
- if !doubleQuote {
- self.check_size_r(_AX, 2) // SIZE $2
- self.add_char('"') // CHAR $'"'
- } else {
- self.check_size_r(_AX, 6) // SIZE $6
- self.add_long(_IM_open, 3) // TEXT $`"\"`
- }
-
- /* quoting loop */
- self.Emit("XORL", _AX, _AX) // XORL AX, AX
- self.Emit("MOVQ", _AX, _VAR_sp) // MOVQ AX, sp
- self.Link("_str_loop_{n}") // _str_loop_{n}:
- self.save_c() // SAVE $REG_ffi
-
- /* load the output buffer first, then the input buffer,
- * because the parameter registers collide with RP / RL / RC */
- self.Emit("MOVQ", _RC, _CX) // MOVQ RC, CX
- self.Emit("SUBQ", _RL, _CX) // SUBQ RL, CX
- self.Emit("MOVQ", _CX, _VAR_dn) // MOVQ CX, dn
- self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX) // LEAQ (RP)(RL), DX
- self.Emit("LEAQ", _VAR_dn, _CX) // LEAQ dn, CX
- self.Emit("MOVQ", _VAR_sp, _AX) // MOVQ sp, AX
- self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI) // MOVQ (SP.p), DI
- self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI) // MOVQ 8(SP.p), SI
- self.Emit("ADDQ", _AX, _DI) // ADDQ AX, DI
- self.Emit("SUBQ", _AX, _SI) // SUBQ AX, SI
-
- /* set the flags based on `doubleQuote` */
- if !doubleQuote {
- self.Emit("XORL", _R8, _R8) // XORL R8, R8
- } else {
- self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8
- }
-
- /* call the native quoter */
- self.call_c(_F_quote) // CALL quote
- self.Emit("ADDQ" , _VAR_dn, _RL) // ADDQ dn, RL
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JS" , "_str_space_{n}") // JS _str_space_{n}
-
- /* close the string, check for double quote */
- if !doubleQuote {
- self.check_size(1) // SIZE $1
- self.add_char('"') // CHAR $'"'
- self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n}
- } else {
- self.check_size(3) // SIZE $3
- self.add_text("\\\"\"") // TEXT $'\""'
- self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n}
- }
-
- /* not enough space to contain the quoted string */
- self.Link("_str_space_{n}") // _str_space_{n}:
- self.Emit("NOTQ", _AX) // NOTQ AX
- self.Emit("ADDQ", _AX, _VAR_sp) // ADDQ AX, sp
- self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX) // LEAQ (RC)(RC), AX
- self.slice_grow_ax("_str_loop_{n}") // GROW _str_loop_{n}
-
- /* empty string, check for double quote */
- if !doubleQuote {
- self.Link("_str_empty_{n}") // _str_empty_{n}:
- self.check_size(2) // SIZE $2
- self.add_text("\"\"") // TEXT $'""'
- self.Link("_str_end_{n}") // _str_end_{n}:
- } else {
- self.Link("_str_empty_{n}") // _str_empty_{n}:
- self.check_size(6) // SIZE $6
- self.add_text("\"\\\"\\\"\"") // TEXT $'"\"\""'
- self.Link("_str_end_{n}") // _str_end_{n}:
- }
-}
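-
-/* The quoting loop above is a grow-and-retry loop around the native quoter:
- * quote as much as fits, and when the return value is negative (destination
- * full), turn it into the number of bytes consumed, double the buffer and
- * resume from that offset. In Go terms (a sketch):
- *
- *   sp := 0
- *   for {
- *       n := quote(dst, src[sp:], flags) // native.S_quote
- *       if n >= 0 {
- *           break                        // everything fit
- *       }
- *       sp += ^n                         // NOTQ AX: bytes consumed so far
- *       dst = grow(dst, 2*cap(dst))      // LEAQ (RC)(RC), AX
- *   }
- */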
-
-/** OpCode Assembler Functions **/
-
-var (
- _T_json_Marshaler = rt.UnpackType(jsonMarshalerType)
- _T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType)
-)
-
-var (
- _F_f64toa = jit.Imm(int64(native.S_f64toa))
- _F_f32toa = jit.Imm(int64(native.S_f32toa))
- _F_i64toa = jit.Imm(int64(native.S_i64toa))
- _F_u64toa = jit.Imm(int64(native.S_u64toa))
- _F_b64encode = jit.Imm(int64(_subr__b64encode))
-)
-
-var (
- _F_memmove = jit.Func(memmove)
- _F_error_number = jit.Func(error_number)
- _F_isValidNumber = jit.Func(isValidNumber)
-)
-
-var (
- _F_iteratorStop = jit.Func(iteratorStop)
- _F_iteratorNext = jit.Func(iteratorNext)
- _F_iteratorStart = jit.Func(iteratorStart)
-)
-
-var (
- _F_encodeTypedPointer obj.Addr
- _F_encodeJsonMarshaler obj.Addr
- _F_encodeTextMarshaler obj.Addr
-)
-
-const (
- _MODE_AVX2 = 1 << 2
-)
-
-func init() {
- _F_encodeTypedPointer = jit.Func(encodeTypedPointer)
- _F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler)
- _F_encodeTextMarshaler = jit.Func(encodeTextMarshaler)
-}
-
-func (self *_Assembler) _asm_OP_null(_ *_Instr) {
- self.check_size(4)
- self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1)
- self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL
-}
-
-func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) {
- self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
- self.Sjmp("JC", "_empty_arr_{n}")
- self._asm_OP_null(nil)
- self.Sjmp("JMP", "_empty_arr_end_{n}")
- self.Link("_empty_arr_{n}")
- self.check_size(2)
- self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0))
- self.Emit("ADDQ", jit.Imm(2), _RL)
- self.Link("_empty_arr_end_{n}")
-}
-
-func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) {
- self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
- self.Sjmp("JC", "_empty_obj_{n}")
- self._asm_OP_null(nil)
- self.Sjmp("JMP", "_empty_obj_end_{n}")
- self.Link("_empty_obj_{n}")
- self.check_size(2)
- self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0))
- self.Emit("ADDQ", jit.Imm(2), _RL)
- self.Link("_empty_obj_end_{n}")
-}
-
-func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
- self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0
- self.Sjmp("JE" , "_false_{n}") // JE _false_{n}
- self.check_size(4) // SIZE $4
- self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'true', (RP)(RL*1)
- self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL
- self.Sjmp("JMP" , "_end_{n}") // JMP _end_{n}
- self.Link("_false_{n}") // _false_{n}:
- self.check_size(5) // SIZE $5
- self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'fals', (RP)(RL*1)
- self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4)) // MOVB $'e', 4(RP)(RL*1)
- self.Emit("ADDQ", jit.Imm(5), _RL) // ADDQ $5, RL
- self.Link("_end_{n}") // _end_{n}:
-}
-
-func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
- self.store_int(4, _F_i64toa, "MOVBQSX")
-}
-
-func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
- self.store_int(6, _F_i64toa, "MOVWQSX")
-}
-
-func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
- self.store_int(17, _F_i64toa, "MOVLQSX")
-}
-
-func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
- self.store_int(21, _F_i64toa, "MOVQ")
-}
-
-func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
- self.store_int(3, _F_u64toa, "MOVBQZX")
-}
-
-func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
- self.store_int(5, _F_u64toa, "MOVWQZX")
-}
-
-func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
- self.store_int(16, _F_u64toa, "MOVLQZX")
-}
-
-func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
- self.store_int(20, _F_u64toa, "MOVQ")
-}
-
-func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
- self.check_size(32)
- self.Emit("MOVL" , jit.Ptr(_SP_p, 0), _AX) // MOVL (SP.p), AX
- self.Emit("ANDL" , jit.Imm(_FM_exp32), _AX) // ANDL $_FM_exp32, AX
- self.Emit("XORL" , jit.Imm(_FM_exp32), _AX) // XORL $_FM_exp32, AX
- self.Sjmp("JZ" , _LB_error_nan_or_infinite) // JZ _error_nan_or_infinite
- self.save_c() // SAVE $C_regs
- self.rbuf_di() // MOVQ RP, DI
- self.Emit("MOVSS" , jit.Ptr(_SP_p, 0), _X0) // MOVSS (SP.p), X0
- self.call_c(_F_f32toa) // CALL_C f32toa
- self.Emit("ADDQ" , _AX, _RL) // ADDQ AX, RL
-}
-
-func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
- self.check_size(32)
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
- self.Emit("MOVQ" , jit.Imm(_FM_exp64), _CX) // MOVQ $_FM_exp64, CX
- self.Emit("ANDQ" , _CX, _AX) // ANDQ CX, AX
- self.Emit("XORQ" , _CX, _AX) // XORQ CX, AX
- self.Sjmp("JZ" , _LB_error_nan_or_infinite) // JZ _error_nan_or_infinite
- self.save_c() // SAVE $C_regs
- self.rbuf_di() // MOVQ RP, DI
- self.Emit("MOVSD" , jit.Ptr(_SP_p, 0), _X0) // MOVSD (SP.p), X0
- self.call_c(_F_f64toa) // CALL_C f64toa
- self.Emit("ADDQ" , _AX, _RL) // ADDQ AX, RL
-}
-
-func (self *_Assembler) _asm_OP_str(_ *_Instr) {
- self.encode_string(false)
-}
-
-func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
- self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX
- self.Emit("ADDQ", jit.Imm(2), _AX) // ADDQ $2, AX
- self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX) // MOVQ $_MF_mulv, CX
- self.Emit("MOVQ", _DX, _R8) // MOVQ DX, R8
- self.From("MULQ", _CX) // MULQ CX
- self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX) // LEAQ 1(DX)(DX), AX
- self.Emit("ORQ" , jit.Imm(2), _AX) // ORQ $2, AX
- self.Emit("MOVQ", _R8, _DX) // MOVQ R8, DX
- self.check_size_r(_AX, 0) // SIZE AX
- self.add_char('"') // CHAR $'"'
- self.save_c() // SAVE $REG_ffi
- self.prep_buffer_c() // MOVE {buf}, DI
- self.Emit("MOVQ", _SP_p, _SI) // MOVQ SP.p, SI
-
- /* check for AVX2 support */
- if !cpu.HasAVX2 {
- self.Emit("XORL", _DX, _DX) // XORL DX, DX
- } else {
- self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX) // MOVL $_MODE_AVX2, DX
- }
-
- /* call the encoder */
- self.call_c(_F_b64encode) // CALL b64encode
- self.load_buffer() // LOAD {buf}
- self.add_char('"') // CHAR $'"'
-}
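-
-/* The MULQ against `_IM_mulv` is a multiply-by-reciprocal division by 3 used
- * to size the output: base64 needs 4*ceil(n/3) characters plus two quotes.
- * The bound in plain Go (a sketch):
- *
- *   need := (n+2)/3*4 + 2 // quoted base64 length for n input bytes
- */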
-
-func (self *_Assembler) _asm_OP_quote(_ *_Instr) {
- self.encode_string(true)
-}
-
-func (self *_Assembler) _asm_OP_number(_ *_Instr) {
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX) // MOVQ (SP.p), CX
- self.Emit("TESTQ", _CX, _CX) // TESTQ CX, CX
- self.Sjmp("JZ" , "_empty_{n}") // JZ _empty_{n}
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Sjmp("JNZ" , "_number_next_{n}")
- self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0))
- self.Sjmp("JMP", _LB_panic)
- self.Link("_number_next_{n}")
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
- self.call_go(_F_isValidNumber) // CALL_GO isValidNumber
- self.Emit("CMPB" , jit.Ptr(_SP, 16), jit.Imm(0)) // CMPB 16(SP), $0
- self.Sjmp("JE" , _LB_error_invalid_number) // JE _error_invalid_number
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX
- self.check_size_r(_AX, 0) // SIZE AX
- self.Emit("LEAQ" , jit.Sib(_RP, _RL, 1, 0), _AX) // LEAQ (RP)(RL), AX
- self.Emit("ADDQ" , jit.Ptr(_SP_p, 8), _RL) // ADDQ 8(SP.p), RL
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVOU", jit.Ptr(_SP_p, 0), _X0) // MOVOU (SP.p), X0
- self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP)
- self.call_go(_F_memmove) // CALL_GO memmove
- self.Sjmp("JMP" , "_done_{n}") // JMP _done_{n}
- self.Link("_empty_{n}") // _empty_{n}:
- self.check_size(1) // SIZE $1
- self.add_char('0') // CHAR $'0'
- self.Link("_done_{n}") // _done_{n}:
-}
-
-func (self *_Assembler) _asm_OP_eface(_ *_Instr) {
- self.prep_buffer() // MOVE {buf}, (SP)
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
- self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _AX) // LEAQ 8(SP.p), AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP)
- self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24)) // MOVQ ST, 24(SP)
- self.Emit("MOVQ" , _ARG_fv, _AX) // MOVQ fv, AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP)
- self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer
- self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET) // MOVQ 40(SP), ET
- self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP) // MOVQ 48(SP), EP
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
-}
-
-func (self *_Assembler) _asm_OP_iface(_ *_Instr) {
- self.prep_buffer() // MOVE {buf}, (SP)
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
- self.Emit("MOVQ" , jit.Ptr(_AX, 8), _AX) // MOVQ 8(AX), AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
- self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _AX) // LEAQ 8(SP.p), AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP)
- self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24)) // MOVQ ST, 24(SP)
- self.Emit("MOVQ" , _ARG_fv, _AX) // MOVQ fv, AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP)
- self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer
- self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET) // MOVQ 40(SP), ET
- self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP) // MOVQ 48(SP), EP
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
-}
-
-func (self *_Assembler) _asm_OP_byte(p *_Instr) {
- self.check_size(1)
- self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0)) // MOVL p.vi(), (RP)(RL*1)
- self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL
-}
-
-func (self *_Assembler) _asm_OP_text(p *_Instr) {
- self.check_size(len(p.vs())) // SIZE ${len(p.vs())}
- self.add_text(p.vs()) // TEXT ${p.vs()}
-}
-
-func (self *_Assembler) _asm_OP_deref(_ *_Instr) {
- self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p
-}
-
-func (self *_Assembler) _asm_OP_index(p *_Instr) {
- self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ $p.vi(), AX
- self.Emit("ADDQ", _AX, _SP_p) // ADDQ AX, SP.p
-}
-
-func (self *_Assembler) _asm_OP_load(_ *_Instr) {
- self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
- self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x) // MOVQ -24(ST)(AX), SP.x
- self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p) // MOVQ -8(ST)(AX), SP.p
- self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q) // MOVQ (ST)(AX), SP.q
-}
-
-func (self *_Assembler) _asm_OP_save(_ *_Instr) {
- self.save_state()
-}
-
-func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
- self.drop_state(_StateSize)
-}
-
-func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
- self.drop_state(_StateSize * 2) // DROP $(_StateSize * 2)
- self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56)) // MOVOU X0, 56(ST)(AX)
-}
-
-func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
- self.prep_buffer() // MOVE {buf}, (SP)
- vt, pv := p.vp()
- self.Emit("MOVQ", jit.Type(vt), _AX) // MOVQ $(type(p.vt())), AX
- self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
-
- /* check for indirection */
- if !rt.UnpackType(vt).Indirect() {
- self.Emit("MOVQ", _SP_p, _AX) // MOVQ SP.p, AX
- } else {
- self.Emit("MOVQ", _SP_p, _VAR_vp) // MOVQ SP.p, 48(SP)
- self.Emit("LEAQ", _VAR_vp, _AX) // LEAQ 48(SP), AX
- }
-
- /* call the encoder */
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP)
- self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24)) // MOVQ ST, 24(SP)
- self.Emit("MOVQ" , _ARG_fv, _AX) // MOVQ fv, AX
- if pv {
- self.Emit("BTCQ", jit.Imm(bitPointerValue), _AX) // BTCQ $1, AX
- }
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP)
- self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer
- self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET) // MOVQ 40(SP), ET
- self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP) // MOVQ 48(SP), EP
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
-}
-
-func (self *_Assembler) _asm_OP_is_nil(p *_Instr) {
- self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0
- self.Xjmp("JE" , p.vi()) // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) {
- self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0)) // CMPQ 8(SP.p), $0
- self.Xjmp("JE" , p.vi()) // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) {
- self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0
- self.Xjmp("JE" , p.vi()) // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) {
- self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPW (SP.p), $0
- self.Xjmp("JE" , p.vi()) // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) {
- self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPL (SP.p), $0
- self.Xjmp("JE" , p.vi()) // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) {
- self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0
- self.Xjmp("JE" , p.vi()) // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) {
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
- self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
- self.Xjmp("JZ" , p.vi()) // JZ p.vi()
- self.Emit("CMPQ" , jit.Ptr(_AX, 0), jit.Imm(0)) // CMPQ (AX), $0
- self.Xjmp("JE" , p.vi()) // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_goto(p *_Instr) {
- self.Xjmp("JMP", p.vi())
-}
-
-func (self *_Assembler) _asm_OP_map_iter(p *_Instr) {
- self.Emit("MOVQ" , jit.Type(p.vt()), _AX) // MOVQ $p.vt(), AX
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _CX) // MOVQ (SP.p), CX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
- self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
- self.Emit("MOVQ" , _ARG_fv, _AX) // MOVQ fv, AX
- self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP)
- self.call_go(_F_iteratorStart) // CALL_GO iteratorStart
- self.Emit("MOVQ" , jit.Ptr(_SP, 24), _SP_q) // MOVQ 24(SP), SP.q
- self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET
- self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP
- self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
- self.Sjmp("JNZ" , _LB_error) // JNZ _error
-}
-
-func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) {
- self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0)) // MOVQ SP.q, 0(SP)
- self.call_go(_F_iteratorStop) // CALL_GO iteratorStop
- self.Emit("XORL", _SP_q, _SP_q) // XORL SP.q, SP.q
-}
-
-func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) {
- self.Emit("MOVQ" , jit.Ptr(_SP_q, 0), _SP_p) // MOVQ (SP.q), SP.p
- self.Emit("TESTQ", _SP_p, _SP_p) // TESTQ SP.p, SP.p
- self.Xjmp("JZ" , p.vi()) // JNZ p.vi()
-}
-
-func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) {
- self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv) // BTQ ${SortMapKeys}, fv
- self.Sjmp("JNC", "_unordered_key_{n}") // JNC _unordered_key_{n}
- self.encode_string(false) // STR $false
- self.Xjmp("JMP", p.vi()) // JMP ${p.vi()}
- self.Link("_unordered_key_{n}") // _unordered_key_{n}:
-}
-
-func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) {
- self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p) // MOVQ 8(SP.q), SP.p
- self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0)) // MOVQ SP.q, (SP)
- self.call_go(_F_iteratorNext) // CALL_GO iteratorNext
-}
-
-func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) {
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _SP_x) // MOVQ 8(SP.p), SP.x
- self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p
- self.Emit("ORQ" , jit.Imm(1 << _S_init), _SP_f) // ORQ $(1<<_S_init), SP.f
-}
-
-func (self *_Assembler) _asm_OP_slice_next(p *_Instr) {
- self.Emit("TESTQ" , _SP_x, _SP_x) // TESTQ SP.x, SP.x
- self.Xjmp("JZ" , p.vi()) // JZ p.vi()
- self.Emit("SUBQ" , jit.Imm(1), _SP_x) // SUBQ $1, SP.x
- self.Emit("BTRQ" , jit.Imm(_S_init), _SP_f) // BTRQ $_S_init, SP.f
- self.Emit("LEAQ" , jit.Ptr(_SP_p, int64(p.vlen())), _AX) // LEAQ $(p.vlen())(SP.p), AX
- self.Emit("CMOVQCC", _AX, _SP_p) // CMOVQNC AX, SP.p
-}
-
-func (self *_Assembler) _asm_OP_marshal(p *_Instr) {
- self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt())
-}
-
-func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) {
- if p.vk() != reflect.Ptr {
- panic("marshal_p: invalid type")
- } else {
- self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false)
- }
-}
-
-func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) {
- self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt())
-}
-
-func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) {
- if p.vk() != reflect.Ptr {
- panic("marshal_text_p: invalid type")
- } else {
- self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false)
- }
-}
-
-func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) {
- self.Emit("ORQ", jit.Imm(1 << _S_cond), _SP_f) // ORQ $(1<<_S_cond), SP.f
-}
-
-func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) {
- self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f) // BTRQ $_S_cond, SP.f
- self.Xjmp("JC" , p.vi())
-}
-
-func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
- self.Emit("MOVQ", jit.Imm(int64(p2.op())), jit.Ptr(_SP, 16))// MOVQ $(p2.op()), 16(SP)
- self.Emit("MOVQ", jit.Imm(int64(p1.op())), jit.Ptr(_SP, 8)) // MOVQ $(p1.op()), 8(SP)
- self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0)) // MOVQ $(i), (SP)
- self.call_go(_F_println)
-}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/compiler.go b/vendor/github.com/bytedance/sonic/internal/encoder/compiler.go
index ca0be8f40..902fbc98b 100644
--- a/vendor/github.com/bytedance/sonic/internal/encoder/compiler.go
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/compiler.go
@@ -17,869 +17,660 @@
package encoder
import (
- `fmt`
- `reflect`
- `strconv`
- `strings`
- `unsafe`
-
- `github.com/bytedance/sonic/internal/resolver`
- `github.com/bytedance/sonic/internal/rt`
- `github.com/bytedance/sonic/option`
+ "reflect"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/encoder/ir"
+ "github.com/bytedance/sonic/internal/encoder/vars"
+ "github.com/bytedance/sonic/internal/encoder/vm"
+ "github.com/bytedance/sonic/internal/resolver"
+ "github.com/bytedance/sonic/internal/rt"
+ "github.com/bytedance/sonic/option"
)
-type _Op uint8
-
-const (
- _OP_null _Op = iota + 1
- _OP_empty_arr
- _OP_empty_obj
- _OP_bool
- _OP_i8
- _OP_i16
- _OP_i32
- _OP_i64
- _OP_u8
- _OP_u16
- _OP_u32
- _OP_u64
- _OP_f32
- _OP_f64
- _OP_str
- _OP_bin
- _OP_quote
- _OP_number
- _OP_eface
- _OP_iface
- _OP_byte
- _OP_text
- _OP_deref
- _OP_index
- _OP_load
- _OP_save
- _OP_drop
- _OP_drop_2
- _OP_recurse
- _OP_is_nil
- _OP_is_nil_p1
- _OP_is_zero_1
- _OP_is_zero_2
- _OP_is_zero_4
- _OP_is_zero_8
- _OP_is_zero_map
- _OP_goto
- _OP_map_iter
- _OP_map_stop
- _OP_map_check_key
- _OP_map_write_key
- _OP_map_value_next
- _OP_slice_len
- _OP_slice_next
- _OP_marshal
- _OP_marshal_p
- _OP_marshal_text
- _OP_marshal_text_p
- _OP_cond_set
- _OP_cond_testc
-)
-
-const (
- _INT_SIZE = 32 << (^uint(0) >> 63)
- _PTR_SIZE = 32 << (^uintptr(0) >> 63)
- _PTR_BYTE = unsafe.Sizeof(uintptr(0))
-)
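-
-/* The shifts pick the word size without build tags: ^uint(0)>>63 is 1 on a
- * 64-bit platform and 0 on a 32-bit one, so for example on amd64:
- *
- *   _INT_SIZE = 32 << 1 // = 64
- *   _PTR_SIZE = 32 << 1 // = 64
- */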
-
-const (
- _MAX_ILBUF = 100000 // cutoff at 100k of IL instructions
- _MAX_FIELDS = 50 // cutoff at 50 fields struct
-)
-
-var _OpNames = [256]string {
- _OP_null : "null",
- _OP_empty_arr : "empty_arr",
- _OP_empty_obj : "empty_obj",
- _OP_bool : "bool",
- _OP_i8 : "i8",
- _OP_i16 : "i16",
- _OP_i32 : "i32",
- _OP_i64 : "i64",
- _OP_u8 : "u8",
- _OP_u16 : "u16",
- _OP_u32 : "u32",
- _OP_u64 : "u64",
- _OP_f32 : "f32",
- _OP_f64 : "f64",
- _OP_str : "str",
- _OP_bin : "bin",
- _OP_quote : "quote",
- _OP_number : "number",
- _OP_eface : "eface",
- _OP_iface : "iface",
- _OP_byte : "byte",
- _OP_text : "text",
- _OP_deref : "deref",
- _OP_index : "index",
- _OP_load : "load",
- _OP_save : "save",
- _OP_drop : "drop",
- _OP_drop_2 : "drop_2",
- _OP_recurse : "recurse",
- _OP_is_nil : "is_nil",
- _OP_is_nil_p1 : "is_nil_p1",
- _OP_is_zero_1 : "is_zero_1",
- _OP_is_zero_2 : "is_zero_2",
- _OP_is_zero_4 : "is_zero_4",
- _OP_is_zero_8 : "is_zero_8",
- _OP_is_zero_map : "is_zero_map",
- _OP_goto : "goto",
- _OP_map_iter : "map_iter",
- _OP_map_stop : "map_stop",
- _OP_map_check_key : "map_check_key",
- _OP_map_write_key : "map_write_key",
- _OP_map_value_next : "map_value_next",
- _OP_slice_len : "slice_len",
- _OP_slice_next : "slice_next",
- _OP_marshal : "marshal",
- _OP_marshal_p : "marshal_p",
- _OP_marshal_text : "marshal_text",
- _OP_marshal_text_p : "marshal_text_p",
- _OP_cond_set : "cond_set",
- _OP_cond_testc : "cond_testc",
-}
-
-func (self _Op) String() string {
- if ret := _OpNames[self]; ret != "" {
- return ret
- } else {
- return "<invalid>"
- }
-}
-
-func _OP_int() _Op {
- switch _INT_SIZE {
- case 32: return _OP_i32
- case 64: return _OP_i64
- default: panic("unsupported int size")
- }
-}
-
-func _OP_uint() _Op {
- switch _INT_SIZE {
- case 32: return _OP_u32
- case 64: return _OP_u64
- default: panic("unsupported uint size")
- }
-}
-
-func _OP_uintptr() _Op {
- switch _PTR_SIZE {
- case 32: return _OP_u32
- case 64: return _OP_u64
- default: panic("unsupported pointer size")
- }
-}
-
-func _OP_is_zero_ints() _Op {
- switch _INT_SIZE {
- case 32: return _OP_is_zero_4
- case 64: return _OP_is_zero_8
- default: panic("unsupported integer size")
- }
-}
-
-type _Instr struct {
- u uint64 // union {op: 8, _: 8, vi: 48}; vi is either an int operand or len(str)
- p unsafe.Pointer // either GoString.Ptr or a *GoType
-}
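-
-/* The opcode lives in the top byte of `u` and the integer operand in the
- * low 48 bits, so encoding and decoding are plain shifts and masks (a
- * sketch of the round trip):
- *
- *   u := uint64(op)<<56 | uint64(vi)&(1<<48-1)
- *   op, vi := _Op(u>>56), int(u&(1<<48-1))
- */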
-
-func packOp(op _Op) uint64 {
- return uint64(op) << 56
-}
-
-func newInsOp(op _Op) _Instr {
- return _Instr{u: packOp(op)}
-}
-
-func newInsVi(op _Op, vi int) _Instr {
- return _Instr{u: packOp(op) | rt.PackInt(vi)}
-}
-
-func newInsVs(op _Op, vs string) _Instr {
- return _Instr {
- u: packOp(op) | rt.PackInt(len(vs)),
- p: (*rt.GoString)(unsafe.Pointer(&vs)).Ptr,
- }
-}
-
-func newInsVt(op _Op, vt reflect.Type) _Instr {
- return _Instr {
- u: packOp(op),
- p: unsafe.Pointer(rt.UnpackType(vt)),
- }
-}
-
-func newInsVp(op _Op, vt reflect.Type, pv bool) _Instr {
- i := 0
- if pv {
- i = 1
- }
- return _Instr {
- u: packOp(op) | rt.PackInt(i),
- p: unsafe.Pointer(rt.UnpackType(vt)),
- }
-}
-
-func (self _Instr) op() _Op {
- return _Op(self.u >> 56)
-}
-
-func (self _Instr) vi() int {
- return rt.UnpackInt(self.u)
-}
-
-func (self _Instr) vf() uint8 {
- return (*rt.GoType)(self.p).KindFlags
-}
-
-func (self _Instr) vs() (v string) {
- (*rt.GoString)(unsafe.Pointer(&v)).Ptr = self.p
- (*rt.GoString)(unsafe.Pointer(&v)).Len = self.vi()
- return
-}
-
-func (self _Instr) vk() reflect.Kind {
- return (*rt.GoType)(self.p).Kind()
-}
-
-func (self _Instr) vt() reflect.Type {
- return (*rt.GoType)(self.p).Pack()
-}
-
-func (self _Instr) vp() (vt reflect.Type, pv bool) {
- return (*rt.GoType)(self.p).Pack(), rt.UnpackInt(self.u) == 1
-}
-
-func (self _Instr) i64() int64 {
- return int64(self.vi())
-}
-
-func (self _Instr) vlen() int {
- return int((*rt.GoType)(self.p).Size)
-}
-
-func (self _Instr) isBranch() bool {
- switch self.op() {
- case _OP_goto : fallthrough
- case _OP_is_nil : fallthrough
- case _OP_is_nil_p1 : fallthrough
- case _OP_is_zero_1 : fallthrough
- case _OP_is_zero_2 : fallthrough
- case _OP_is_zero_4 : fallthrough
- case _OP_is_zero_8 : fallthrough
- case _OP_map_check_key : fallthrough
- case _OP_map_write_key : fallthrough
- case _OP_slice_next : fallthrough
- case _OP_cond_testc : return true
- default : return false
- }
-}
-
-func (self _Instr) disassemble() string {
- switch self.op() {
- case _OP_byte : return fmt.Sprintf("%-18s%s", self.op().String(), strconv.QuoteRune(rune(self.vi())))
- case _OP_text : return fmt.Sprintf("%-18s%s", self.op().String(), strconv.Quote(self.vs()))
- case _OP_index : return fmt.Sprintf("%-18s%d", self.op().String(), self.vi())
- case _OP_recurse : fallthrough
- case _OP_map_iter : fallthrough
- case _OP_marshal : fallthrough
- case _OP_marshal_p : fallthrough
- case _OP_marshal_text : fallthrough
- case _OP_marshal_text_p : return fmt.Sprintf("%-18s%s", self.op().String(), self.vt())
- case _OP_goto : fallthrough
- case _OP_is_nil : fallthrough
- case _OP_is_nil_p1 : fallthrough
- case _OP_is_zero_1 : fallthrough
- case _OP_is_zero_2 : fallthrough
- case _OP_is_zero_4 : fallthrough
- case _OP_is_zero_8 : fallthrough
- case _OP_is_zero_map : fallthrough
- case _OP_cond_testc : fallthrough
- case _OP_map_check_key : fallthrough
- case _OP_map_write_key : return fmt.Sprintf("%-18sL_%d", self.op().String(), self.vi())
- case _OP_slice_next : return fmt.Sprintf("%-18sL_%d, %s", self.op().String(), self.vi(), self.vt())
- default : return self.op().String()
- }
-}
-
-type (
- _Program []_Instr
-)
-
-func (self _Program) pc() int {
- return len(self)
-}
-
-func (self _Program) tag(n int) {
- if n >= _MaxStack {
- panic("type nesting too deep")
- }
-}
-
-func (self _Program) pin(i int) {
- v := &self[i]
- v.u &= 0xffff000000000000
- v.u |= rt.PackInt(self.pc())
-}
-
-func (self _Program) rel(v []int) {
- for _, i := range v {
- self.pin(i)
- }
-}
-
-func (self *_Program) add(op _Op) {
- *self = append(*self, newInsOp(op))
-}
-
-func (self *_Program) key(op _Op) {
- *self = append(*self,
- newInsVi(_OP_byte, '"'),
- newInsOp(op),
- newInsVi(_OP_byte, '"'),
- )
-}
-
-func (self *_Program) int(op _Op, vi int) {
- *self = append(*self, newInsVi(op, vi))
-}
-
-func (self *_Program) str(op _Op, vs string) {
- *self = append(*self, newInsVs(op, vs))
-}
-
-func (self *_Program) rtt(op _Op, vt reflect.Type) {
- *self = append(*self, newInsVt(op, vt))
-}
-
-func (self *_Program) vp(op _Op, vt reflect.Type, pv bool) {
- *self = append(*self, newInsVp(op, vt, pv))
-}
-
-func (self _Program) disassemble() string {
- nb := len(self)
- tab := make([]bool, nb + 1)
- ret := make([]string, 0, nb + 1)
-
- /* prescan to get all the labels */
- for _, ins := range self {
- if ins.isBranch() {
- tab[ins.vi()] = true
- }
- }
-
- /* disassemble each instruction */
- for i, ins := range self {
- if !tab[i] {
- ret = append(ret, "\t" + ins.disassemble())
- } else {
- ret = append(ret, fmt.Sprintf("L_%d:\n\t%s", i, ins.disassemble()))
- }
- }
-
- /* add the last label, if needed */
- if tab[nb] {
- ret = append(ret, fmt.Sprintf("L_%d:", nb))
- }
-
- /* add an "end" indicator, and join all the strings */
- return strings.Join(append(ret, "\tend"), "\n")
-}
-
-type _Compiler struct {
- opts option.CompileOptions
- pv bool
- tab map[reflect.Type]bool
- rec map[reflect.Type]uint8
-}
-
-func newCompiler() *_Compiler {
- return &_Compiler {
- opts: option.DefaultCompileOptions(),
- tab: map[reflect.Type]bool{},
- rec: map[reflect.Type]uint8{},
- }
-}
-
-func (self *_Compiler) apply(opts option.CompileOptions) *_Compiler {
- self.opts = opts
- if self.opts.RecursiveDepth > 0 {
- self.rec = map[reflect.Type]uint8{}
- }
- return self
-}
-
-func (self *_Compiler) rescue(ep *error) {
- if val := recover(); val != nil {
- if err, ok := val.(error); ok {
- *ep = err
- } else {
- panic(val)
- }
- }
-}
-
-func (self *_Compiler) compile(vt reflect.Type, pv bool) (ret _Program, err error) {
- defer self.rescue(&err)
- self.compileOne(&ret, 0, vt, pv)
- return
-}
-
-func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type, pv bool) {
- if self.tab[vt] {
- p.vp(_OP_recurse, vt, pv)
- } else {
- self.compileRec(p, sp, vt, pv)
- }
-}
-
-func (self *_Compiler) compileRec(p *_Program, sp int, vt reflect.Type, pv bool) {
- pr := self.pv
- pt := reflect.PtrTo(vt)
-
- /* check for addressable `json.Marshaler` with pointer receiver */
- if pv && pt.Implements(jsonMarshalerType) {
- p.rtt(_OP_marshal_p, pt)
- return
- }
-
- /* check for `json.Marshaler` */
- if vt.Implements(jsonMarshalerType) {
- self.compileMarshaler(p, _OP_marshal, vt, jsonMarshalerType)
- return
- }
-
- /* check for addressable `encoding.TextMarshaler` with pointer receiver */
- if pv && pt.Implements(encodingTextMarshalerType) {
- p.rtt(_OP_marshal_text_p, pt)
- return
- }
-
- /* check for `encoding.TextMarshaler` */
- if vt.Implements(encodingTextMarshalerType) {
- self.compileMarshaler(p, _OP_marshal_text, vt, encodingTextMarshalerType)
- return
- }
-
- /* enter the recursion, and compile the type */
- self.pv = pv
- self.tab[vt] = true
- self.compileOps(p, sp, vt)
-
- /* exit the recursion */
- self.pv = pr
- delete(self.tab, vt)
-}
-
-func (self *_Compiler) compileOps(p *_Program, sp int, vt reflect.Type) {
- switch vt.Kind() {
- case reflect.Bool : p.add(_OP_bool)
- case reflect.Int : p.add(_OP_int())
- case reflect.Int8 : p.add(_OP_i8)
- case reflect.Int16 : p.add(_OP_i16)
- case reflect.Int32 : p.add(_OP_i32)
- case reflect.Int64 : p.add(_OP_i64)
- case reflect.Uint : p.add(_OP_uint())
- case reflect.Uint8 : p.add(_OP_u8)
- case reflect.Uint16 : p.add(_OP_u16)
- case reflect.Uint32 : p.add(_OP_u32)
- case reflect.Uint64 : p.add(_OP_u64)
- case reflect.Uintptr : p.add(_OP_uintptr())
- case reflect.Float32 : p.add(_OP_f32)
- case reflect.Float64 : p.add(_OP_f64)
- case reflect.String : self.compileString (p, vt)
- case reflect.Array : self.compileArray (p, sp, vt.Elem(), vt.Len())
- case reflect.Interface : self.compileInterface (p, vt)
- case reflect.Map : self.compileMap (p, sp, vt)
- case reflect.Ptr : self.compilePtr (p, sp, vt.Elem())
- case reflect.Slice : self.compileSlice (p, sp, vt.Elem())
- case reflect.Struct : self.compileStruct (p, sp, vt)
- default : panic (error_type(vt))
- }
-}
-
-func (self *_Compiler) compileNil(p *_Program, sp int, vt reflect.Type, nil_op _Op, fn func(*_Program, int, reflect.Type)) {
- x := p.pc()
- p.add(_OP_is_nil)
- fn(p, sp, vt)
- e := p.pc()
- p.add(_OP_goto)
- p.pin(x)
- p.add(nil_op)
- p.pin(e)
-}
-
-func (self *_Compiler) compilePtr(p *_Program, sp int, vt reflect.Type) {
- self.compileNil(p, sp, vt, _OP_null, self.compilePtrBody)
-}
-
-func (self *_Compiler) compilePtrBody(p *_Program, sp int, vt reflect.Type) {
- p.tag(sp)
- p.add(_OP_save)
- p.add(_OP_deref)
- self.compileOne(p, sp + 1, vt, true)
- p.add(_OP_drop)
-}
-
-func (self *_Compiler) compileMap(p *_Program, sp int, vt reflect.Type) {
- self.compileNil(p, sp, vt, _OP_empty_obj, self.compileMapBody)
-}
-
-func (self *_Compiler) compileMapBody(p *_Program, sp int, vt reflect.Type) {
- p.tag(sp + 1)
- p.int(_OP_byte, '{')
- p.add(_OP_save)
- p.rtt(_OP_map_iter, vt)
- p.add(_OP_save)
- i := p.pc()
- p.add(_OP_map_check_key)
- u := p.pc()
- p.add(_OP_map_write_key)
- self.compileMapBodyKey(p, vt.Key())
- p.pin(u)
- p.int(_OP_byte, ':')
- p.add(_OP_map_value_next)
- self.compileOne(p, sp + 2, vt.Elem(), false)
- j := p.pc()
- p.add(_OP_map_check_key)
- p.int(_OP_byte, ',')
- v := p.pc()
- p.add(_OP_map_write_key)
- self.compileMapBodyKey(p, vt.Key())
- p.pin(v)
- p.int(_OP_byte, ':')
- p.add(_OP_map_value_next)
- self.compileOne(p, sp + 2, vt.Elem(), false)
- p.int(_OP_goto, j)
- p.pin(i)
- p.pin(j)
- p.add(_OP_map_stop)
- p.add(_OP_drop_2)
- p.int(_OP_byte, '}')
-}
-
-func (self *_Compiler) compileMapBodyKey(p *_Program, vk reflect.Type) {
- if !vk.Implements(encodingTextMarshalerType) {
- self.compileMapBodyTextKey(p, vk)
- } else {
- self.compileMapBodyUtextKey(p, vk)
- }
-}
-
-func (self *_Compiler) compileMapBodyTextKey(p *_Program, vk reflect.Type) {
- switch vk.Kind() {
- case reflect.Invalid : panic("map key is nil")
- case reflect.Bool : p.key(_OP_bool)
- case reflect.Int : p.key(_OP_int())
- case reflect.Int8 : p.key(_OP_i8)
- case reflect.Int16 : p.key(_OP_i16)
- case reflect.Int32 : p.key(_OP_i32)
- case reflect.Int64 : p.key(_OP_i64)
- case reflect.Uint : p.key(_OP_uint())
- case reflect.Uint8 : p.key(_OP_u8)
- case reflect.Uint16 : p.key(_OP_u16)
- case reflect.Uint32 : p.key(_OP_u32)
- case reflect.Uint64 : p.key(_OP_u64)
- case reflect.Uintptr : p.key(_OP_uintptr())
- case reflect.Float32 : p.key(_OP_f32)
- case reflect.Float64 : p.key(_OP_f64)
- case reflect.String : self.compileString(p, vk)
- default : panic(error_type(vk))
- }
-}
-
-func (self *_Compiler) compileMapBodyUtextKey(p *_Program, vk reflect.Type) {
- if vk.Kind() != reflect.Ptr {
- p.rtt(_OP_marshal_text, vk)
- } else {
- self.compileMapBodyUtextPtr(p, vk)
- }
-}
-
-func (self *_Compiler) compileMapBodyUtextPtr(p *_Program, vk reflect.Type) {
- i := p.pc()
- p.add(_OP_is_nil)
- p.rtt(_OP_marshal_text, vk)
- j := p.pc()
- p.add(_OP_goto)
- p.pin(i)
- p.str(_OP_text, "\"\"")
- p.pin(j)
-}
-
-func (self *_Compiler) compileSlice(p *_Program, sp int, vt reflect.Type) {
- self.compileNil(p, sp, vt, _OP_empty_arr, self.compileSliceBody)
-}
-
-func (self *_Compiler) compileSliceBody(p *_Program, sp int, vt reflect.Type) {
- if isSimpleByte(vt) {
- p.add(_OP_bin)
- } else {
- self.compileSliceArray(p, sp, vt)
- }
-}
-
-func (self *_Compiler) compileSliceArray(p *_Program, sp int, vt reflect.Type) {
- p.tag(sp)
- p.int(_OP_byte, '[')
- p.add(_OP_save)
- p.add(_OP_slice_len)
- i := p.pc()
- p.rtt(_OP_slice_next, vt)
- self.compileOne(p, sp + 1, vt, true)
- j := p.pc()
- p.rtt(_OP_slice_next, vt)
- p.int(_OP_byte, ',')
- self.compileOne(p, sp + 1, vt, true)
- p.int(_OP_goto, j)
- p.pin(i)
- p.pin(j)
- p.add(_OP_drop)
- p.int(_OP_byte, ']')
-}
-
-func (self *_Compiler) compileArray(p *_Program, sp int, vt reflect.Type, nb int) {
- p.tag(sp)
- p.int(_OP_byte, '[')
- p.add(_OP_save)
-
- /* first item */
- if nb != 0 {
- self.compileOne(p, sp + 1, vt, self.pv)
- p.add(_OP_load)
- }
-
- /* remaining items */
- for i := 1; i < nb; i++ {
- p.int(_OP_byte, ',')
- p.int(_OP_index, i * int(vt.Size()))
- self.compileOne(p, sp + 1, vt, self.pv)
- p.add(_OP_load)
- }
-
- /* end of array */
- p.add(_OP_drop)
- p.int(_OP_byte, ']')
-}
-
-func (self *_Compiler) compileString(p *_Program, vt reflect.Type) {
- if vt != jsonNumberType {
- p.add(_OP_str)
- } else {
- p.add(_OP_number)
- }
-}
-
-func (self *_Compiler) compileStruct(p *_Program, sp int, vt reflect.Type) {
- if sp >= self.opts.MaxInlineDepth || p.pc() >= _MAX_ILBUF || (sp > 0 && vt.NumField() >= _MAX_FIELDS) {
- p.vp(_OP_recurse, vt, self.pv)
- if self.opts.RecursiveDepth > 0 {
- if self.pv {
- self.rec[vt] = 1
- } else {
- self.rec[vt] = 0
- }
- }
- } else {
- self.compileStructBody(p, sp, vt)
- }
-}
-
-func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
- p.tag(sp)
- p.int(_OP_byte, '{')
- p.add(_OP_save)
- p.add(_OP_cond_set)
-
- /* compile each field */
- for _, fv := range resolver.ResolveStruct(vt) {
- var s []int
- var o resolver.Offset
-
- /* "omitempty" for arrays */
- if fv.Type.Kind() == reflect.Array {
- if fv.Type.Len() == 0 && (fv.Opts & resolver.F_omitempty) != 0 {
- continue
- }
- }
-
- /* index to the field */
- for _, o = range fv.Path {
- if p.int(_OP_index, int(o.Size)); o.Kind == resolver.F_deref {
- s = append(s, p.pc())
- p.add(_OP_is_nil)
- p.add(_OP_deref)
- }
- }
-
- /* check for "omitempty" option */
- if fv.Type.Kind() != reflect.Struct && fv.Type.Kind() != reflect.Array && (fv.Opts & resolver.F_omitempty) != 0 {
- s = append(s, p.pc())
- self.compileStructFieldZero(p, fv.Type)
- }
-
- /* add the comma if not the first element */
- i := p.pc()
- p.add(_OP_cond_testc)
- p.int(_OP_byte, ',')
- p.pin(i)
-
- /* compile the key and value */
- ft := fv.Type
- p.str(_OP_text, Quote(fv.Name) + ":")
-
- /* check for "stringnize" option */
- if (fv.Opts & resolver.F_stringize) == 0 {
- self.compileOne(p, sp + 1, ft, self.pv)
- } else {
- self.compileStructFieldStr(p, sp + 1, ft)
- }
-
- /* patch the skipping jumps and reload the struct pointer */
- p.rel(s)
- p.add(_OP_load)
- }
-
- /* end of object */
- p.add(_OP_drop)
- p.int(_OP_byte, '}')
-}
-
-func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) {
- pc := -1
- ft := vt
- sv := false
-
- /* dereference the pointer if needed */
- if ft.Kind() == reflect.Ptr {
- ft = ft.Elem()
- }
-
- /* check if it can be stringized */
- switch ft.Kind() {
- case reflect.Bool : sv = true
- case reflect.Int : sv = true
- case reflect.Int8 : sv = true
- case reflect.Int16 : sv = true
- case reflect.Int32 : sv = true
- case reflect.Int64 : sv = true
- case reflect.Uint : sv = true
- case reflect.Uint8 : sv = true
- case reflect.Uint16 : sv = true
- case reflect.Uint32 : sv = true
- case reflect.Uint64 : sv = true
- case reflect.Uintptr : sv = true
- case reflect.Float32 : sv = true
- case reflect.Float64 : sv = true
- case reflect.String : sv = true
- }
-
- /* if it's not, ignore the "string" and follow the regular path */
- if !sv {
- self.compileOne(p, sp, vt, self.pv)
- return
- }
-
- /* dereference the pointer */
- if vt.Kind() == reflect.Ptr {
- pc = p.pc()
- vt = vt.Elem()
- p.add(_OP_is_nil)
- p.add(_OP_deref)
- }
-
- /* special case of a double-quoted string */
- if ft != jsonNumberType && ft.Kind() == reflect.String {
- p.add(_OP_quote)
- } else {
- self.compileStructFieldQuoted(p, sp, vt)
- }
-
- /* the "null" case of the pointer */
- if pc != -1 {
- e := p.pc()
- p.add(_OP_goto)
- p.pin(pc)
- p.add(_OP_null)
- p.pin(e)
- }
-}
-
-func (self *_Compiler) compileStructFieldZero(p *_Program, vt reflect.Type) {
- switch vt.Kind() {
- case reflect.Bool : p.add(_OP_is_zero_1)
- case reflect.Int : p.add(_OP_is_zero_ints())
- case reflect.Int8 : p.add(_OP_is_zero_1)
- case reflect.Int16 : p.add(_OP_is_zero_2)
- case reflect.Int32 : p.add(_OP_is_zero_4)
- case reflect.Int64 : p.add(_OP_is_zero_8)
- case reflect.Uint : p.add(_OP_is_zero_ints())
- case reflect.Uint8 : p.add(_OP_is_zero_1)
- case reflect.Uint16 : p.add(_OP_is_zero_2)
- case reflect.Uint32 : p.add(_OP_is_zero_4)
- case reflect.Uint64 : p.add(_OP_is_zero_8)
- case reflect.Uintptr : p.add(_OP_is_nil)
- case reflect.Float32 : p.add(_OP_is_zero_4)
- case reflect.Float64 : p.add(_OP_is_zero_8)
- case reflect.String : p.add(_OP_is_nil_p1)
- case reflect.Interface : p.add(_OP_is_nil)
- case reflect.Map : p.add(_OP_is_zero_map)
- case reflect.Ptr : p.add(_OP_is_nil)
- case reflect.Slice : p.add(_OP_is_nil_p1)
- default : panic(error_type(vt))
- }
-}
-
-func (self *_Compiler) compileStructFieldQuoted(p *_Program, sp int, vt reflect.Type) {
- p.int(_OP_byte, '"')
- self.compileOne(p, sp, vt, self.pv)
- p.int(_OP_byte, '"')
-}
-
-func (self *_Compiler) compileInterface(p *_Program, vt reflect.Type) {
- x := p.pc()
- p.add(_OP_is_nil_p1)
-
- /* iface and efaces are different */
- if vt.NumMethod() == 0 {
- p.add(_OP_eface)
- } else {
- p.add(_OP_iface)
- }
-
- /* the "null" value */
- e := p.pc()
- p.add(_OP_goto)
- p.pin(x)
- p.add(_OP_null)
- p.pin(e)
-}
-
-func (self *_Compiler) compileMarshaler(p *_Program, op _Op, vt reflect.Type, mt reflect.Type) {
- pc := p.pc()
- vk := vt.Kind()
-
- /* direct receiver */
- if vk != reflect.Ptr {
- p.rtt(op, vt)
- return
- }
-
- /* value receiver with a pointer type, check for nil before calling the marshaler */
- p.add(_OP_is_nil)
- p.rtt(op, vt)
- i := p.pc()
- p.add(_OP_goto)
- p.pin(pc)
- p.add(_OP_null)
- p.pin(i)
+func ForceUseVM() {
+ vm.SetCompiler(makeEncoderVM)
+ pretouchType = pretouchTypeVM
+ encodeTypedPointer = vm.EncodeTypedPointer
+ vars.UseVM = true
+}
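
ForceUseVM switches every later Encode call onto the interpreted VM backend by swapping the compiler, the pretouch hook, and the encode entry point in one place. A minimal usage sketch (illustrative only: this is an internal package, so it is importable only from within the sonic module itself):

    package main

    import (
        "fmt"

        "github.com/bytedance/sonic/internal/encoder"
    )

    func main() {
        // Route all subsequent encoding through the pure-Go VM
        // instead of the JIT backend.
        encoder.ForceUseVM()

        out, err := encoder.Encode(map[string]int{"a": 1}, 0)
        fmt.Println(string(out), err)
    }
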
+
+var encodeTypedPointer func(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *vars.Stack, fv uint64) error
+
+func makeEncoderVM(vt *rt.GoType, ex ...interface{}) (interface{}, error) {
+ pp, err := NewCompiler().Compile(vt.Pack(), ex[0].(bool))
+ if err != nil {
+ return nil, err
+ }
+ return &pp, nil
+}
+
+var pretouchType func(_vt reflect.Type, opts option.CompileOptions, v uint8) (map[reflect.Type]uint8, error)
+
+func pretouchTypeVM(_vt reflect.Type, opts option.CompileOptions, v uint8) (map[reflect.Type]uint8, error) {
+	/* build a compiler to collect recursively referenced types */
+ compiler := NewCompiler().apply(opts)
+
+ /* find or compile */
+ vt := rt.UnpackType(_vt)
+ if val := vars.GetProgram(vt); val != nil {
+ return nil, nil
+ } else if _, err := vars.ComputeProgram(vt, makeEncoderVM, v == 1); err == nil {
+ return compiler.rec, nil
+ } else {
+ return nil, err
+ }
+}
+
+func pretouchRec(vtm map[reflect.Type]uint8, opts option.CompileOptions) error {
+ if opts.RecursiveDepth < 0 || len(vtm) == 0 {
+ return nil
+ }
+ next := make(map[reflect.Type]uint8)
+ for vt, v := range vtm {
+ sub, err := pretouchType(vt, opts, v)
+ if err != nil {
+ return err
+ }
+ for svt, v := range sub {
+ next[svt] = v
+ }
+ }
+ opts.RecursiveDepth -= 1
+ return pretouchRec(next, opts)
+}
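
pretouchRec drives one compile pass per recursion level: each call compiles the current frontier of types, collects the types they refer to, decrements RecursiveDepth, and recurses until the map is empty or the budget is spent. From the public API this is reached through Pretouch; a sketch using sonic's documented option helpers:

    package demo

    import (
        "reflect"

        "github.com/bytedance/sonic/encoder"
        "github.com/bytedance/sonic/option"
    )

    type Node struct {
        Next  *Node  `json:"next"`
        Value string `json:"value"`
    }

    func warmUp() error {
        // Pre-compile Node plus up to two levels of referenced types,
        // so the first Marshal call does not pay the compile cost.
        return encoder.Pretouch(reflect.TypeOf(Node{}),
            option.WithCompileRecursiveDepth(2))
    }
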
+
+type Compiler struct {
+ opts option.CompileOptions
+ pv bool
+ tab map[reflect.Type]bool
+ rec map[reflect.Type]uint8
+}
+
+func NewCompiler() *Compiler {
+ return &Compiler{
+ opts: option.DefaultCompileOptions(),
+ tab: map[reflect.Type]bool{},
+ rec: map[reflect.Type]uint8{},
+ }
+}
+
+func (self *Compiler) apply(opts option.CompileOptions) *Compiler {
+ self.opts = opts
+ if self.opts.RecursiveDepth > 0 {
+ self.rec = map[reflect.Type]uint8{}
+ }
+ return self
+}
+
+func (self *Compiler) rescue(ep *error) {
+ if val := recover(); val != nil {
+ if err, ok := val.(error); ok {
+ *ep = err
+ } else {
+ panic(val)
+ }
+ }
+}
+
+func (self *Compiler) Compile(vt reflect.Type, pv bool) (ret ir.Program, err error) {
+ defer self.rescue(&err)
+ self.compileOne(&ret, 0, vt, pv)
+ return
+}
+
+func (self *Compiler) compileOne(p *ir.Program, sp int, vt reflect.Type, pv bool) {
+ if self.tab[vt] {
+ p.Vp(ir.OP_recurse, vt, pv)
+ } else {
+ self.compileRec(p, sp, vt, pv)
+ }
+}
+
+func (self *Compiler) tryCompileMarshaler(p *ir.Program, vt reflect.Type, pv bool) bool {
+ pt := reflect.PtrTo(vt)
+
+ /* check for addressable `json.Marshaler` with pointer receiver */
+ if pv && pt.Implements(vars.JsonMarshalerType) {
+ addMarshalerOp(p, ir.OP_marshal_p, pt, vars.JsonMarshalerType)
+ return true
+ }
+
+ /* check for `json.Marshaler` */
+ if vt.Implements(vars.JsonMarshalerType) {
+ self.compileMarshaler(p, ir.OP_marshal, vt, vars.JsonMarshalerType)
+ return true
+ }
+
+ /* check for addressable `encoding.TextMarshaler` with pointer receiver */
+ if pv && pt.Implements(vars.EncodingTextMarshalerType) {
+ addMarshalerOp(p, ir.OP_marshal_text_p, pt, vars.EncodingTextMarshalerType)
+ return true
+ }
+
+ /* check for `encoding.TextMarshaler` */
+ if vt.Implements(vars.EncodingTextMarshalerType) {
+ self.compileMarshaler(p, ir.OP_marshal_text, vt, vars.EncodingTextMarshalerType)
+ return true
+ }
+
+ return false
+}
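
The pv flag here is what separates an addressable value, whose pointer-receiver marshaler may be called, from a non-addressable one. encoding/json draws the same line, which this check mirrors; a small self-contained illustration:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    type Celsius float64

    // Pointer receiver: only *Celsius implements json.Marshaler.
    func (c *Celsius) MarshalJSON() ([]byte, error) {
        return []byte(`"cold"`), nil
    }

    type Reading struct{ Temp Celsius }

    func main() {
        a, _ := json.Marshal(Reading{Temp: 1})
        fmt.Println(string(a)) // {"Temp":1} (value not addressable, marshaler skipped)

        b, _ := json.Marshal(&Reading{Temp: 1})
        fmt.Println(string(b)) // {"Temp":"cold"} (addressable, the OP_marshal_p path)
    }
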
+
+func (self *Compiler) compileRec(p *ir.Program, sp int, vt reflect.Type, pv bool) {
+ pr := self.pv
+
+ if self.tryCompileMarshaler(p, vt, pv) {
+ return
+ }
+
+ /* enter the recursion, and compile the type */
+ self.pv = pv
+ self.tab[vt] = true
+ self.compileOps(p, sp, vt)
+
+ /* exit the recursion */
+ self.pv = pr
+ delete(self.tab, vt)
+}
+
+func (self *Compiler) compileOps(p *ir.Program, sp int, vt reflect.Type) {
+ switch vt.Kind() {
+ case reflect.Bool:
+ p.Add(ir.OP_bool)
+ case reflect.Int:
+ p.Add(ir.OP_int())
+ case reflect.Int8:
+ p.Add(ir.OP_i8)
+ case reflect.Int16:
+ p.Add(ir.OP_i16)
+ case reflect.Int32:
+ p.Add(ir.OP_i32)
+ case reflect.Int64:
+ p.Add(ir.OP_i64)
+ case reflect.Uint:
+ p.Add(ir.OP_uint())
+ case reflect.Uint8:
+ p.Add(ir.OP_u8)
+ case reflect.Uint16:
+ p.Add(ir.OP_u16)
+ case reflect.Uint32:
+ p.Add(ir.OP_u32)
+ case reflect.Uint64:
+ p.Add(ir.OP_u64)
+ case reflect.Uintptr:
+ p.Add(ir.OP_uintptr())
+ case reflect.Float32:
+ p.Add(ir.OP_f32)
+ case reflect.Float64:
+ p.Add(ir.OP_f64)
+ case reflect.String:
+ self.compileString(p, vt)
+ case reflect.Array:
+ self.compileArray(p, sp, vt.Elem(), vt.Len())
+ case reflect.Interface:
+ self.compileInterface(p, vt)
+ case reflect.Map:
+ self.compileMap(p, sp, vt)
+ case reflect.Ptr:
+ self.compilePtr(p, sp, vt.Elem())
+ case reflect.Slice:
+ self.compileSlice(p, sp, vt.Elem())
+ case reflect.Struct:
+ self.compileStruct(p, sp, vt)
+ default:
+ panic(vars.Error_type(vt))
+ }
+}
+
+func (self *Compiler) compileNil(p *ir.Program, sp int, vt reflect.Type, nil_op ir.Op, fn func(*ir.Program, int, reflect.Type)) {
+ x := p.PC()
+ p.Add(ir.OP_is_nil)
+ fn(p, sp, vt)
+ e := p.PC()
+ p.Add(ir.OP_goto)
+ p.Pin(x)
+ p.Add(nil_op)
+ p.Pin(e)
+}
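
compileNil is the template for every nullable shape below: remember the index of a forward branch, emit the body, then Pin the branch once the target PC is known. A stripped-down sketch of the same one-pass patching idiom on a plain slice (hypothetical names, independent of the ir package):

    // instr is a toy instruction; branches carry a target patched later.
    type instr struct {
        op     string
        target int
    }

    type program []instr

    func (p program) pc() int   { return len(p) }
    func (p program) pin(i int) { p[i].target = p.pc() }

    func buildNilWrapped() program {
        var p program
        x := p.pc()
        p = append(p, instr{op: "is_nil"}) // forward branch, target unknown yet
        p = append(p, instr{op: "body"})   // stands in for fn(p, sp, vt)
        e := p.pc()
        p = append(p, instr{op: "goto"})   // skip the nil arm
        p.pin(x)                           // is_nil now jumps to the nil opcode
        p = append(p, instr{op: "null"})
        p.pin(e)                           // goto now jumps past the nil arm
        return p
    }
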
+
+func (self *Compiler) compilePtr(p *ir.Program, sp int, vt reflect.Type) {
+ self.compileNil(p, sp, vt, ir.OP_null, self.compilePtrBody)
+}
+
+func (self *Compiler) compilePtrBody(p *ir.Program, sp int, vt reflect.Type) {
+ p.Tag(sp)
+ p.Add(ir.OP_save)
+ p.Add(ir.OP_deref)
+ self.compileOne(p, sp+1, vt, true)
+ p.Add(ir.OP_drop)
+}
+
+func (self *Compiler) compileMap(p *ir.Program, sp int, vt reflect.Type) {
+ self.compileNil(p, sp, vt, ir.OP_empty_obj, self.compileMapBody)
+}
+
+func (self *Compiler) compileMapBody(p *ir.Program, sp int, vt reflect.Type) {
+ p.Tag(sp + 1)
+ p.Int(ir.OP_byte, '{')
+ e := p.PC()
+ p.Add(ir.OP_is_zero_map)
+ p.Add(ir.OP_save)
+ p.Rtt(ir.OP_map_iter, vt)
+ p.Add(ir.OP_save)
+ i := p.PC()
+ p.Add(ir.OP_map_check_key)
+ u := p.PC()
+ p.Add(ir.OP_map_write_key)
+ self.compileMapBodyKey(p, vt.Key())
+ p.Pin(u)
+ p.Int(ir.OP_byte, ':')
+ p.Add(ir.OP_map_value_next)
+ self.compileOne(p, sp+2, vt.Elem(), false)
+ j := p.PC()
+ p.Add(ir.OP_map_check_key)
+ p.Int(ir.OP_byte, ',')
+ v := p.PC()
+ p.Add(ir.OP_map_write_key)
+ self.compileMapBodyKey(p, vt.Key())
+ p.Pin(v)
+ p.Int(ir.OP_byte, ':')
+ p.Add(ir.OP_map_value_next)
+ self.compileOne(p, sp+2, vt.Elem(), false)
+ p.Int(ir.OP_goto, j)
+ p.Pin(i)
+ p.Pin(j)
+ p.Add(ir.OP_map_stop)
+ p.Add(ir.OP_drop_2)
+ p.Pin(e)
+ p.Int(ir.OP_byte, '}')
+}
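
The map body is emitted twice on purpose: a first-iteration path with no comma and a loop path that writes ',' before each further key, both converging on map_check_key. Since Compiler, Compile, and Program.Disassemble are all exported here, the generated IR can be inspected directly (a sketch; internal packages are only importable inside the sonic module):

    package main

    import (
        "fmt"
        "reflect"

        "github.com/bytedance/sonic/internal/encoder"
    )

    func main() {
        // Compile map[string]int and print the generated instruction stream.
        p, err := encoder.NewCompiler().Compile(reflect.TypeOf(map[string]int(nil)), false)
        if err != nil {
            panic(err)
        }
        fmt.Println(p.Disassemble())
    }
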
+
+func (self *Compiler) compileMapBodyKey(p *ir.Program, vk reflect.Type) {
+ if !vk.Implements(vars.EncodingTextMarshalerType) {
+ self.compileMapBodyTextKey(p, vk)
+ } else {
+ self.compileMapBodyUtextKey(p, vk)
+ }
+}
+
+func (self *Compiler) compileMapBodyTextKey(p *ir.Program, vk reflect.Type) {
+ switch vk.Kind() {
+ case reflect.Invalid:
+ panic("map key is nil")
+ case reflect.Bool:
+ p.Key(ir.OP_bool)
+ case reflect.Int:
+ p.Key(ir.OP_int())
+ case reflect.Int8:
+ p.Key(ir.OP_i8)
+ case reflect.Int16:
+ p.Key(ir.OP_i16)
+ case reflect.Int32:
+ p.Key(ir.OP_i32)
+ case reflect.Int64:
+ p.Key(ir.OP_i64)
+ case reflect.Uint:
+ p.Key(ir.OP_uint())
+ case reflect.Uint8:
+ p.Key(ir.OP_u8)
+ case reflect.Uint16:
+ p.Key(ir.OP_u16)
+ case reflect.Uint32:
+ p.Key(ir.OP_u32)
+ case reflect.Uint64:
+ p.Key(ir.OP_u64)
+ case reflect.Uintptr:
+ p.Key(ir.OP_uintptr())
+ case reflect.Float32:
+ p.Key(ir.OP_f32)
+ case reflect.Float64:
+ p.Key(ir.OP_f64)
+ case reflect.String:
+ self.compileString(p, vk)
+ default:
+ panic(vars.Error_type(vk))
+ }
+}
+
+func (self *Compiler) compileMapBodyUtextKey(p *ir.Program, vk reflect.Type) {
+ if vk.Kind() != reflect.Ptr {
+ addMarshalerOp(p, ir.OP_marshal_text, vk, vars.EncodingTextMarshalerType)
+ } else {
+ self.compileMapBodyUtextPtr(p, vk)
+ }
+}
+
+func (self *Compiler) compileMapBodyUtextPtr(p *ir.Program, vk reflect.Type) {
+ i := p.PC()
+ p.Add(ir.OP_is_nil)
+ addMarshalerOp(p, ir.OP_marshal_text, vk, vars.EncodingTextMarshalerType)
+ j := p.PC()
+ p.Add(ir.OP_goto)
+ p.Pin(i)
+ p.Str(ir.OP_text, "\"\"")
+ p.Pin(j)
+}
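
A nil pointer key cannot have its MarshalText called, so the compiler writes a literal "" key instead, matching what encoding/json does for nil TextMarshaler map keys:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    type Key struct{ s string }

    func (k *Key) MarshalText() ([]byte, error) { return []byte(k.s), nil }

    func main() {
        out, _ := json.Marshal(map[*Key]int{nil: 1})
        fmt.Println(string(out)) // {"":1} (nil key becomes the empty string)
    }
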
+
+func (self *Compiler) compileSlice(p *ir.Program, sp int, vt reflect.Type) {
+ self.compileNil(p, sp, vt, ir.OP_empty_arr, self.compileSliceBody)
+}
+
+func (self *Compiler) compileSliceBody(p *ir.Program, sp int, vt reflect.Type) {
+ if vars.IsSimpleByte(vt) {
+ p.Add(ir.OP_bin)
+ } else {
+ self.compileSliceArray(p, sp, vt)
+ }
+}
+
+func (self *Compiler) compileSliceArray(p *ir.Program, sp int, vt reflect.Type) {
+ p.Tag(sp)
+ p.Int(ir.OP_byte, '[')
+ e := p.PC()
+ p.Add(ir.OP_is_nil)
+ p.Add(ir.OP_save)
+ p.Add(ir.OP_slice_len)
+ i := p.PC()
+ p.Rtt(ir.OP_slice_next, vt)
+ self.compileOne(p, sp+1, vt, true)
+ j := p.PC()
+ p.Rtt(ir.OP_slice_next, vt)
+ p.Int(ir.OP_byte, ',')
+ self.compileOne(p, sp+1, vt, true)
+ p.Int(ir.OP_goto, j)
+ p.Pin(i)
+ p.Pin(j)
+ p.Add(ir.OP_drop)
+ p.Pin(e)
+ p.Int(ir.OP_byte, ']')
+}
+
+func (self *Compiler) compileArray(p *ir.Program, sp int, vt reflect.Type, nb int) {
+ p.Tag(sp)
+ p.Int(ir.OP_byte, '[')
+ p.Add(ir.OP_save)
+
+ /* first item */
+ if nb != 0 {
+ self.compileOne(p, sp+1, vt, self.pv)
+ p.Add(ir.OP_load)
+ }
+
+ /* remaining items */
+ for i := 1; i < nb; i++ {
+ p.Int(ir.OP_byte, ',')
+ p.Int(ir.OP_index, i*int(vt.Size()))
+ self.compileOne(p, sp+1, vt, self.pv)
+ p.Add(ir.OP_load)
+ }
+
+ /* end of array */
+ p.Add(ir.OP_drop)
+ p.Int(ir.OP_byte, ']')
+}
+
+func (self *Compiler) compileString(p *ir.Program, vt reflect.Type) {
+ if vt != vars.JsonNumberType {
+ p.Add(ir.OP_str)
+ } else {
+ p.Add(ir.OP_number)
+ }
+}
+
+func (self *Compiler) compileStruct(p *ir.Program, sp int, vt reflect.Type) {
+ if sp >= self.opts.MaxInlineDepth || p.PC() >= vars.MAX_ILBUF || (sp > 0 && vt.NumField() >= vars.MAX_FIELDS) {
+ p.Vp(ir.OP_recurse, vt, self.pv)
+ if self.opts.RecursiveDepth > 0 {
+ if self.pv {
+ self.rec[vt] = 1
+ } else {
+ self.rec[vt] = 0
+ }
+ }
+ } else {
+ self.compileStructBody(p, sp, vt)
+ }
+}
+
+func (self *Compiler) compileStructBody(p *ir.Program, sp int, vt reflect.Type) {
+ p.Tag(sp)
+ p.Int(ir.OP_byte, '{')
+ p.Add(ir.OP_save)
+ p.Add(ir.OP_cond_set)
+
+ /* compile each field */
+ for _, fv := range resolver.ResolveStruct(vt) {
+ var s []int
+ var o resolver.Offset
+
+ /* "omitempty" for arrays */
+ if fv.Type.Kind() == reflect.Array {
+ if fv.Type.Len() == 0 && (fv.Opts&resolver.F_omitempty) != 0 {
+ continue
+ }
+ }
+
+ /* index to the field */
+ for _, o = range fv.Path {
+ if p.Int(ir.OP_index, int(o.Size)); o.Kind == resolver.F_deref {
+ s = append(s, p.PC())
+ p.Add(ir.OP_is_nil)
+ p.Add(ir.OP_deref)
+ }
+ }
+
+ /* check for "omitempty" option */
+ if fv.Type.Kind() != reflect.Struct && fv.Type.Kind() != reflect.Array && (fv.Opts&resolver.F_omitempty) != 0 {
+ s = append(s, p.PC())
+ self.compileStructFieldZero(p, fv.Type)
+ }
+
+ /* add the comma if not the first element */
+ i := p.PC()
+ p.Add(ir.OP_cond_testc)
+ p.Int(ir.OP_byte, ',')
+ p.Pin(i)
+
+ /* compile the key and value */
+ ft := fv.Type
+ p.Str(ir.OP_text, Quote(fv.Name)+":")
+
+		/* check for the "stringize" option */
+ if (fv.Opts & resolver.F_stringize) == 0 {
+ self.compileOne(p, sp+1, ft, self.pv)
+ } else {
+ self.compileStructFieldStr(p, sp+1, ft)
+ }
+
+ /* patch the skipping jumps and reload the struct pointer */
+ p.Rel(s)
+ p.Add(ir.OP_load)
+ }
+
+ /* end of object */
+ p.Add(ir.OP_drop)
+ p.Int(ir.OP_byte, '}')
+}
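
cond_set/cond_testc implement first-field comma tracking, and the omitempty zero checks jump to the Rel-patched reload point so that both the key text and the value are skipped. The observable behavior is the familiar one from encoding/json:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    type User struct {
        Name  string `json:"name"`
        Email string `json:"email,omitempty"`
        Age   int    `json:"age,omitempty"`
    }

    func main() {
        out, _ := json.Marshal(User{Name: "ann"})
        fmt.Println(string(out)) // {"name":"ann"} (zero fields skipped, no stray comma)
    }
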
+
+func (self *Compiler) compileStructFieldStr(p *ir.Program, sp int, vt reflect.Type) {
+	// NOTICE: following encoding/json, a Marshaler implementation takes priority over the "string" option
+ // see issue:
+ if self.tryCompileMarshaler(p, vt, self.pv) {
+ return
+ }
+
+ pc := -1
+ ft := vt
+ sv := false
+
+ /* dereference the pointer if needed */
+ if ft.Kind() == reflect.Ptr {
+ ft = ft.Elem()
+ }
+
+ /* check if it can be stringized */
+ switch ft.Kind() {
+ case reflect.Bool:
+ sv = true
+ case reflect.Int:
+ sv = true
+ case reflect.Int8:
+ sv = true
+ case reflect.Int16:
+ sv = true
+ case reflect.Int32:
+ sv = true
+ case reflect.Int64:
+ sv = true
+ case reflect.Uint:
+ sv = true
+ case reflect.Uint8:
+ sv = true
+ case reflect.Uint16:
+ sv = true
+ case reflect.Uint32:
+ sv = true
+ case reflect.Uint64:
+ sv = true
+ case reflect.Uintptr:
+ sv = true
+ case reflect.Float32:
+ sv = true
+ case reflect.Float64:
+ sv = true
+ case reflect.String:
+ sv = true
+ }
+
+ /* if it's not, ignore the "string" and follow the regular path */
+ if !sv {
+ self.compileOne(p, sp, vt, self.pv)
+ return
+ }
+
+ /* dereference the pointer */
+ if vt.Kind() == reflect.Ptr {
+ pc = p.PC()
+ vt = vt.Elem()
+ p.Add(ir.OP_is_nil)
+ p.Add(ir.OP_deref)
+ }
+
+ /* special case of a double-quoted string */
+ if ft != vars.JsonNumberType && ft.Kind() == reflect.String {
+ p.Add(ir.OP_quote)
+ } else {
+ self.compileStructFieldQuoted(p, sp, vt)
+ }
+
+ /* the "null" case of the pointer */
+ if pc != -1 {
+ e := p.PC()
+ p.Add(ir.OP_goto)
+ p.Pin(pc)
+ p.Add(ir.OP_null)
+ p.Pin(e)
+ }
+}
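
The quoting logic above applies only to scalar kinds; anything else silently ignores the tag, and a nil pointer still takes the patched-in null arm. In encoding/json terms, which sonic mirrors:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    type Price struct {
        Cents *int `json:"cents,string"`
    }

    func main() {
        n := 199
        a, _ := json.Marshal(Price{Cents: &n})
        fmt.Println(string(a)) // {"cents":"199"} (value quoted by the string option)

        b, _ := json.Marshal(Price{})
        fmt.Println(string(b)) // {"cents":null} (nil pointer bypasses the quoting)
    }
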
+
+func (self *Compiler) compileStructFieldZero(p *ir.Program, vt reflect.Type) {
+ switch vt.Kind() {
+ case reflect.Bool:
+ p.Add(ir.OP_is_zero_1)
+ case reflect.Int:
+ p.Add(ir.OP_is_zero_ints())
+ case reflect.Int8:
+ p.Add(ir.OP_is_zero_1)
+ case reflect.Int16:
+ p.Add(ir.OP_is_zero_2)
+ case reflect.Int32:
+ p.Add(ir.OP_is_zero_4)
+ case reflect.Int64:
+ p.Add(ir.OP_is_zero_8)
+ case reflect.Uint:
+ p.Add(ir.OP_is_zero_ints())
+ case reflect.Uint8:
+ p.Add(ir.OP_is_zero_1)
+ case reflect.Uint16:
+ p.Add(ir.OP_is_zero_2)
+ case reflect.Uint32:
+ p.Add(ir.OP_is_zero_4)
+ case reflect.Uint64:
+ p.Add(ir.OP_is_zero_8)
+ case reflect.Uintptr:
+ p.Add(ir.OP_is_nil)
+ case reflect.Float32:
+ p.Add(ir.OP_is_zero_4)
+ case reflect.Float64:
+ p.Add(ir.OP_is_zero_8)
+ case reflect.String:
+ p.Add(ir.OP_is_nil_p1)
+ case reflect.Interface:
+ p.Add(ir.OP_is_nil)
+ case reflect.Map:
+ p.Add(ir.OP_is_zero_map)
+ case reflect.Ptr:
+ p.Add(ir.OP_is_nil)
+ case reflect.Slice:
+ p.Add(ir.OP_is_nil_p1)
+ default:
+ panic(vars.Error_type(vt))
+ }
+}
+
+func (self *Compiler) compileStructFieldQuoted(p *ir.Program, sp int, vt reflect.Type) {
+ p.Int(ir.OP_byte, '"')
+ self.compileOne(p, sp, vt, self.pv)
+ p.Int(ir.OP_byte, '"')
+}
+
+func (self *Compiler) compileInterface(p *ir.Program, vt reflect.Type) {
+ x := p.PC()
+ p.Add(ir.OP_is_nil_p1)
+
+	/* ifaces and efaces are encoded differently */
+ if vt.NumMethod() == 0 {
+ p.Add(ir.OP_eface)
+ } else {
+ p.Add(ir.OP_iface)
+ }
+
+ /* the "null" value */
+ e := p.PC()
+ p.Add(ir.OP_goto)
+ p.Pin(x)
+ p.Add(ir.OP_null)
+ p.Pin(e)
+}
+
+func (self *Compiler) compileMarshaler(p *ir.Program, op ir.Op, vt reflect.Type, mt reflect.Type) {
+ pc := p.PC()
+ vk := vt.Kind()
+
+ /* direct receiver */
+ if vk != reflect.Ptr {
+ addMarshalerOp(p, op, vt, mt)
+ return
+ }
+ /* value receiver with a pointer type, check for nil before calling the marshaler */
+ p.Add(ir.OP_is_nil)
+
+ addMarshalerOp(p, op, vt, mt)
+
+ i := p.PC()
+ p.Add(ir.OP_goto)
+ p.Pin(pc)
+ p.Add(ir.OP_null)
+ p.Pin(i)
+}
+
+func addMarshalerOp(p *ir.Program, op ir.Op, vt reflect.Type, mt reflect.Type) {
+ if vars.UseVM {
+ itab := rt.GetItab(rt.IfaceType(rt.UnpackType(mt)), rt.UnpackType(vt), true)
+ p.Vtab(op, vt, itab)
+ } else {
+ // OPT: get itab here
+ p.Rtt(op, vt)
+ }
}
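
In VM mode the itab for the (concrete type, marshaler interface) pair is resolved once at compile time and stored in the instruction, so the interpreter can assemble the interface value without a dynamic lookup per encoded value. Conceptually, what that pre-resolution removes from the hot loop is the ordinary interface conversion below (hypothetical helper, for illustration only):

    import (
        "encoding/json"
        "fmt"
    )

    // marshalField does a dynamic type assertion on every call; the
    // compile-time itab in the IR avoids repeating this work per value.
    func marshalField(v interface{}) ([]byte, error) {
        m, ok := v.(json.Marshaler) // itab lookup at run time
        if !ok {
            return nil, fmt.Errorf("not a marshaler")
        }
        return m.MarshalJSON()
    }
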
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/debug_go117.go b/vendor/github.com/bytedance/sonic/internal/encoder/debug_go117.go
deleted file mode 100644
index 37e6f7d4f..000000000
--- a/vendor/github.com/bytedance/sonic/internal/encoder/debug_go117.go
+++ /dev/null
@@ -1,205 +0,0 @@
-// +build go1.17,!go1.23
-
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package encoder
-
-import (
- `fmt`
- `os`
- `runtime`
- `strings`
- `unsafe`
-
- `github.com/bytedance/sonic/internal/jit`
- `github.com/twitchyliquid64/golang-asm/obj`
-)
-
-const _FP_debug = 128
-
-var (
- debugSyncGC = os.Getenv("SONIC_SYNC_GC") != ""
- debugAsyncGC = os.Getenv("SONIC_NO_ASYNC_GC") == ""
- debugCheckPtr = os.Getenv("SONIC_CHECK_POINTER") != ""
-)
-
-var (
- _Instr_End = newInsOp(_OP_is_nil)
-
- _F_gc = jit.Func(gc)
- _F_println = jit.Func(println_wrapper)
- _F_print = jit.Func(print)
-)
-
-func (self *_Assembler) dsave(r ...obj.Addr) {
- for i, v := range r {
- if i > _FP_debug / 8 - 1 {
- panic("too many registers to save")
- } else {
- self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + _FP_saves + _FP_locals + int64(i) * 8))
- }
- }
-}
-
-func (self *_Assembler) dload(r ...obj.Addr) {
- for i, v := range r {
- if i > _FP_debug / 8 - 1 {
- panic("too many registers to load")
- } else {
- self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + _FP_saves + _FP_locals + int64(i) * 8), v)
- }
- }
-}
-
-func println_wrapper(i int, op1 int, op2 int){
- println(i, " Intrs ", op1, _OpNames[op1], "next: ", op2, _OpNames[op2])
-}
-
-func print(i int){
- println(i)
-}
-
-func gc() {
- if !debugSyncGC {
- return
- }
- runtime.GC()
- // debug.FreeOSMemory()
-}
-
-func (self *_Assembler) dcall(fn obj.Addr) {
- self.Emit("MOVQ", fn, _R10) // MOVQ ${fn}, R10
- self.Rjmp("CALL", _R10) // CALL R10
-}
-
-func (self *_Assembler) debug_gc() {
- if !debugSyncGC {
- return
- }
- self.dsave(_REG_debug...)
- self.dcall(_F_gc)
- self.dload(_REG_debug...)
-}
-
-func (self *_Assembler) debug_instr(i int, v *_Instr) {
- if debugSyncGC {
- if i+1 == len(self.p) {
- self.print_gc(i, v, &_Instr_End)
- } else {
- next := &(self.p[i+1])
- self.print_gc(i, v, next)
- name := _OpNames[next.op()]
- if strings.Contains(name, "save") {
- return
- }
- }
- // self.debug_gc()
- }
-}
-
-//go:noescape
-//go:linkname checkptrBase runtime.checkptrBase
-func checkptrBase(p unsafe.Pointer) uintptr
-
-//go:noescape
-//go:linkname findObject runtime.findObject
-func findObject(p, refBase, refOff uintptr) (base uintptr, s unsafe.Pointer, objIndex uintptr)
-
-var (
- _F_checkptr = jit.Func(checkptr)
- _F_printptr = jit.Func(printptr)
-)
-
-var (
- _R10 = jit.Reg("R10")
-)
-var _REG_debug = []obj.Addr {
- jit.Reg("AX"),
- jit.Reg("BX"),
- jit.Reg("CX"),
- jit.Reg("DX"),
- jit.Reg("DI"),
- jit.Reg("SI"),
- jit.Reg("BP"),
- jit.Reg("SP"),
- jit.Reg("R8"),
- jit.Reg("R9"),
- jit.Reg("R10"),
- jit.Reg("R11"),
- jit.Reg("R12"),
- jit.Reg("R13"),
- jit.Reg("R14"),
- jit.Reg("R15"),
-}
-
-func checkptr(ptr uintptr) {
- if ptr == 0 {
- return
- }
- fmt.Printf("pointer: %x\n", ptr)
- f := checkptrBase(unsafe.Pointer(uintptr(ptr)))
- if f == 0 {
- fmt.Printf("! unknown-based pointer: %x\n", ptr)
- } else if f == 1 {
- fmt.Printf("! stack pointer: %x\n", ptr)
- } else {
- fmt.Printf("base: %x\n", f)
- }
- findobj(ptr)
-}
-
-func findobj(ptr uintptr) {
- base, s, objIndex := findObject(ptr, 0, 0)
- if s != nil && base == 0 {
- fmt.Printf("! invalid pointer: %x\n", ptr)
- }
- fmt.Printf("objIndex: %d\n", objIndex)
-}
-
-func (self *_Assembler) check_ptr(ptr obj.Addr, lea bool) {
- if !debugCheckPtr {
- return
- }
-
- self.dsave(_REG_debug...)
- if lea {
- self.Emit("LEAQ", ptr, _R10)
- } else {
- self.Emit("MOVQ", ptr, _R10)
- }
- self.Emit("MOVQ", _R10, jit.Ptr(_SP, 0))
- self.dcall(_F_checkptr)
- self.dload(_REG_debug...)
-}
-
-func printptr(i int, ptr uintptr) {
- fmt.Printf("[%d] ptr: %x\n", i, ptr)
-}
-
-func (self *_Assembler) print_ptr(i int, ptr obj.Addr, lea bool) {
- self.dsave(_REG_debug...)
- if lea {
- self.Emit("LEAQ", ptr, _R10)
- } else {
- self.Emit("MOVQ", ptr, _R10)
- }
-
- self.Emit("MOVQ", jit.Imm(int64(i)), _AX)
- self.Emit("MOVQ", _R10, _BX)
- self.dcall(_F_printptr)
- self.dload(_REG_debug...)
-}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/encode_norace.go b/vendor/github.com/bytedance/sonic/internal/encoder/encode_norace.go
new file mode 100644
index 000000000..c53206433
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/encode_norace.go
@@ -0,0 +1,24 @@
+//go:build !race
+// +build !race
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package encoder
+
+func encodeIntoCheckRace(buf *[]byte, val interface{}, opts Options) error {
+ return encodeInto(buf, val, opts)
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/encode_race.go b/vendor/github.com/bytedance/sonic/internal/encoder/encode_race.go
new file mode 100644
index 000000000..c373c55f9
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/encode_race.go
@@ -0,0 +1,54 @@
+//go:build race
+// +build race
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package encoder
+
+import (
+ `encoding/json`
+
+ `github.com/bytedance/sonic/internal/rt`
+)
+
+
+func helpDetectDataRace(val interface{}) {
+ var out []byte
+ defer func() {
+ if v := recover(); v != nil {
+            // NOTICE: helps the user locate where the panic occurs
+ println("panic when encoding on: ", truncate(out))
+ panic(v)
+ }
+ }()
+ out, _ = json.Marshal(val)
+}
+
+func encodeIntoCheckRace(buf *[]byte, val interface{}, opts Options) error {
+ err := encodeInto(buf, val, opts)
+    /* run this last so that a panic from sonic itself is always caught first */
+ helpDetectDataRace(val)
+ return err
+}
+
+func truncate(json []byte) string {
+ if len(json) <= 256 {
+ return rt.Mem2Str(json)
+ } else {
+ return rt.Mem2Str(json[len(json)-256:])
+ }
+}
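
The shadow json.Marshal pass exists because the encoder reads values through unsafe.Pointer, which the race detector cannot instrument; feeding the same value through encoding/json gives the detector instrumented reads to pair with any concurrent writes. A sketch of the kind of test this makes diagnosable (illustrative, run with go test -race):

    package demo

    import (
        "testing"

        "github.com/bytedance/sonic"
    )

    var shared = map[string]int{"a": 1}

    func TestEncodeRace(t *testing.T) {
        go func() { shared["a"] = 2 }() // unsynchronized write
        _, _ = sonic.Marshal(shared)    // race build also runs json.Marshal(shared)
    }
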
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go b/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go
index d285c2991..4cba1a168 100644
--- a/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go
@@ -17,72 +17,62 @@
package encoder
import (
- `bytes`
- `encoding/json`
- `reflect`
- `runtime`
- `unsafe`
-
- `github.com/bytedance/sonic/internal/native`
- `github.com/bytedance/sonic/internal/native/types`
- `github.com/bytedance/sonic/internal/rt`
- `github.com/bytedance/sonic/utf8`
- `github.com/bytedance/sonic/option`
+ "bytes"
+ "encoding/json"
+ "reflect"
+ "runtime"
+ "unsafe"
+
+ "github.com/bytedance/sonic/utf8"
+ "github.com/bytedance/sonic/internal/encoder/alg"
+ "github.com/bytedance/sonic/internal/encoder/vars"
+ "github.com/bytedance/sonic/internal/rt"
+ "github.com/bytedance/sonic/option"
)
// Options is a set of encoding options.
type Options uint64
const (
- bitSortMapKeys = iota
- bitEscapeHTML
- bitCompactMarshaler
- bitNoQuoteTextMarshaler
- bitNoNullSliceOrMap
- bitValidateString
- bitNoValidateJSONMarshaler
- bitNoEncoderNewline
-
- // used for recursive compile
- bitPointerValue = 63
-)
-
-const (
    // SortMapKeys indicates that the keys of a map need to be sorted
// before serializing into JSON.
// WARNING: This hurts performance A LOT, USE WITH CARE.
- SortMapKeys Options = 1 << bitSortMapKeys
+ SortMapKeys Options = 1 << alg.BitSortMapKeys
    // EscapeHTML indicates that the encoder should escape all HTML characters
// after serializing into JSON (see https://pkg.go.dev/encoding/json#HTMLEscape).
// WARNING: This hurts performance A LOT, USE WITH CARE.
- EscapeHTML Options = 1 << bitEscapeHTML
+ EscapeHTML Options = 1 << alg.BitEscapeHTML
// CompactMarshaler indicates that the output JSON from json.Marshaler
// is always compact and needs no validation
- CompactMarshaler Options = 1 << bitCompactMarshaler
+ CompactMarshaler Options = 1 << alg.BitCompactMarshaler
// NoQuoteTextMarshaler indicates that the output text from encoding.TextMarshaler
    // is always an escaped string and needs no quoting
- NoQuoteTextMarshaler Options = 1 << bitNoQuoteTextMarshaler
+ NoQuoteTextMarshaler Options = 1 << alg.BitNoQuoteTextMarshaler
// NoNullSliceOrMap indicates all empty Array or Object are encoded as '[]' or '{}',
- // instead of 'null'
- NoNullSliceOrMap Options = 1 << bitNoNullSliceOrMap
+ // instead of 'null'.
+    // NOTE: This option has lower priority than the json tag `omitempty`.
+ NoNullSliceOrMap Options = 1 << alg.BitNoNullSliceOrMap
    // ValidateString indicates that the encoder should validate the input string
// before encoding it into JSON.
- ValidateString Options = 1 << bitValidateString
+ ValidateString Options = 1 << alg.BitValidateString
// NoValidateJSONMarshaler indicates that the encoder should not validate the output string
// after encoding the JSONMarshaler to JSON.
- NoValidateJSONMarshaler Options = 1 << bitNoValidateJSONMarshaler
+ NoValidateJSONMarshaler Options = 1 << alg.BitNoValidateJSONMarshaler
// NoEncoderNewline indicates that the encoder should not add a newline after every message
- NoEncoderNewline Options = 1 << bitNoEncoderNewline
+ NoEncoderNewline Options = 1 << alg.BitNoEncoderNewline
// CompatibleWithStd is used to be compatible with std encoder.
CompatibleWithStd Options = SortMapKeys | EscapeHTML | CompactMarshaler
+
+    // EncodeNullForInfOrNan encodes Infinity or NaN floats as `null` instead of returning an error.
+ EncodeNullForInfOrNan Options = 1 << alg.BitEncodeNullForInfOrNan
)
// Encoder represents a specific set of encoder configurations.
@@ -171,53 +161,45 @@ func (enc *Encoder) SetIndent(prefix, indent string) {
// Quote returns the JSON-quoted version of s.
func Quote(s string) string {
- var n int
- var p []byte
-
- /* check for empty string */
- if s == "" {
- return `""`
- }
-
- /* allocate space for result */
- n = len(s) + 2
- p = make([]byte, 0, n)
-
- /* call the encoder */
- _ = encodeString(&p, s)
- return rt.Mem2Str(p)
+ buf := make([]byte, 0, len(s)+2)
+ buf = alg.Quote(buf, s, false)
+ return rt.Mem2Str(buf)
}
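
Quote now presizes the buffer and delegates to alg.Quote; the result is a standard JSON string literal. A sketch (internal package, shown for illustration):

    package main

    import (
        "fmt"

        "github.com/bytedance/sonic/internal/encoder"
    )

    func main() {
        fmt.Println(encoder.Quote(`he said "hi"`)) // "he said \"hi\""
        fmt.Println(encoder.Quote(""))             // ""
    }
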
// Encode returns the JSON encoding of val, encoded with opts.
func Encode(val interface{}, opts Options) ([]byte, error) {
var ret []byte
- buf := newBytes()
- err := encodeInto(&buf, val, opts)
+ buf := vars.NewBytes()
+ err := encodeIntoCheckRace(buf, val, opts)
/* check for errors */
if err != nil {
- freeBytes(buf)
+ vars.FreeBytes(buf)
return nil, err
}
/* htmlescape or correct UTF-8 if opts enable */
old := buf
- buf = encodeFinish(old, opts)
- pbuf := ((*rt.GoSlice)(unsafe.Pointer(&buf))).Ptr
- pold := ((*rt.GoSlice)(unsafe.Pointer(&old))).Ptr
+ *buf = encodeFinish(*old, opts)
+ pbuf := ((*rt.GoSlice)(unsafe.Pointer(buf))).Ptr
+ pold := ((*rt.GoSlice)(unsafe.Pointer(old))).Ptr
/* return when allocated a new buffer */
if pbuf != pold {
- freeBytes(old)
- return buf, nil
+ vars.FreeBytes(old)
+ return *buf, nil
}
/* make a copy of the result */
- ret = make([]byte, len(buf))
- copy(ret, buf)
-
- freeBytes(buf)
+ if rt.CanSizeResue(cap(*buf)) {
+ ret = make([]byte, len(*buf))
+ copy(ret, *buf)
+ vars.FreeBytes(buf)
+ } else {
+ ret = *buf
+ }
+
/* return the buffer into pool */
return ret, nil
}
@@ -225,7 +207,7 @@ func Encode(val interface{}, opts Options) ([]byte, error) {
// EncodeInto is like Encode but uses a user-supplied buffer instead of allocating
// a new one.
func EncodeInto(buf *[]byte, val interface{}, opts Options) error {
- err := encodeInto(buf, val, opts)
+ err := encodeIntoCheckRace(buf, val, opts)
if err != nil {
return err
}
@@ -234,15 +216,15 @@ func EncodeInto(buf *[]byte, val interface{}, opts Options) error {
}
func encodeInto(buf *[]byte, val interface{}, opts Options) error {
- stk := newStack()
+ stk := vars.NewStack()
efv := rt.UnpackEface(val)
err := encodeTypedPointer(buf, efv.Type, &efv.Value, stk, uint64(opts))
/* return the stack into pool */
if err != nil {
- resetStack(stk)
+ vars.ResetStack(stk)
}
- freeStack(stk)
+ vars.FreeStack(stk)
/* avoid GC ahead */
runtime.KeepAlive(buf)
@@ -254,13 +236,12 @@ func encodeFinish(buf []byte, opts Options) []byte {
if opts & EscapeHTML != 0 {
buf = HTMLEscape(nil, buf)
}
- if opts & ValidateString != 0 && !utf8.Validate(buf) {
+ if (opts & ValidateString != 0) && !utf8.Validate(buf) {
buf = utf8.CorrectWith(nil, buf, `\ufffd`)
}
return buf
}
-var typeByte = rt.UnpackType(reflect.TypeOf(byte(0)))
// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
@@ -269,7 +250,7 @@ var typeByte = rt.UnpackType(reflect.TypeOf(byte(0)))
// escaping within <script> tags, so an alternative JSON encoding must
// be used.
func HTMLEscape(dst []byte, src []byte) []byte {
- return htmlEscape(dst, src)
+ return alg.HtmlEscape(dst, src)
}
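
HTMLEscape operates on already-encoded JSON, rewriting only the unsafe characters inside string literals, for example:

    package main

    import (
        "fmt"

        "github.com/bytedance/sonic/internal/encoder"
    )

    func main() {
        src := []byte(`{"msg":"<b>hi</b> & bye"}`)
        dst := encoder.HTMLEscape(nil, src)
        fmt.Println(string(dst))
        // {"msg":"\u003cb\u003ehi\u003c/b\u003e \u0026 bye"}
    }
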
// EncodeIndented is like Encode but applies Indent to format the output.
@@ -277,37 +258,40 @@ func HTMLEscape(dst []byte, src []byte) []byte {
// followed by one or more copies of indent according to the indentation nesting.
func EncodeIndented(val interface{}, prefix string, indent string, opts Options) ([]byte, error) {
var err error
- var out []byte
var buf *bytes.Buffer
/* encode into the buffer */
- out = newBytes()
- err = EncodeInto(&out, val, opts)
+ out := vars.NewBytes()
+ err = EncodeInto(out, val, opts)
/* check for errors */
if err != nil {
- freeBytes(out)
+ vars.FreeBytes(out)
return nil, err
}
/* indent the JSON */
- buf = newBuffer()
- err = json.Indent(buf, out, prefix, indent)
+ buf = vars.NewBuffer()
+ err = json.Indent(buf, *out, prefix, indent)
+ vars.FreeBytes(out)
/* check for errors */
if err != nil {
- freeBytes(out)
- freeBuffer(buf)
+ vars.FreeBuffer(buf)
return nil, err
}
/* copy to the result buffer */
- ret := make([]byte, buf.Len())
- copy(ret, buf.Bytes())
-
- /* return the buffers into pool */
- freeBytes(out)
- freeBuffer(buf)
+ var ret []byte
+ if rt.CanSizeResue(cap(buf.Bytes())) {
+ ret = make([]byte, buf.Len())
+ copy(ret, buf.Bytes())
+ /* return the buffers into pool */
+ vars.FreeBuffer(buf)
+ } else {
+ ret = buf.Bytes()
+ }
+
return ret, nil
}
@@ -330,26 +314,5 @@ func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
//
// Note: it does not check for the invalid UTF-8 characters.
func Valid(data []byte) (ok bool, start int) {
- n := len(data)
- if n == 0 {
- return false, -1
- }
- s := rt.Mem2Str(data)
- p := 0
- m := types.NewStateMachine()
- ret := native.ValidateOne(&s, &p, m, types.F_VALIDATE_STRING)
- types.FreeStateMachine(m)
-
- if ret < 0 {
- return false, p-1
- }
-
- /* check for trailing spaces */
- for ;p < n; p++ {
- if (types.SPACE_MASK & (1 << data[p])) == 0 {
- return false, p
- }
- }
-
- return true, ret
+ return alg.Valid(data)
}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/ir/op.go b/vendor/github.com/bytedance/sonic/internal/encoder/ir/op.go
new file mode 100644
index 000000000..a0c693f00
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/ir/op.go
@@ -0,0 +1,473 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ir
+
+import (
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/encoder/vars"
+ "github.com/bytedance/sonic/internal/rt"
+)
+
+type Op uint8
+
+const (
+ OP_null Op = iota + 1
+ OP_empty_arr
+ OP_empty_obj
+ OP_bool
+ OP_i8
+ OP_i16
+ OP_i32
+ OP_i64
+ OP_u8
+ OP_u16
+ OP_u32
+ OP_u64
+ OP_f32
+ OP_f64
+ OP_str
+ OP_bin
+ OP_quote
+ OP_number
+ OP_eface
+ OP_iface
+ OP_byte
+ OP_text
+ OP_deref
+ OP_index
+ OP_load
+ OP_save
+ OP_drop
+ OP_drop_2
+ OP_recurse
+ OP_is_nil
+ OP_is_nil_p1
+ OP_is_zero_1
+ OP_is_zero_2
+ OP_is_zero_4
+ OP_is_zero_8
+ OP_is_zero_map
+ OP_goto
+ OP_map_iter
+ OP_map_stop
+ OP_map_check_key
+ OP_map_write_key
+ OP_map_value_next
+ OP_slice_len
+ OP_slice_next
+ OP_marshal
+ OP_marshal_p
+ OP_marshal_text
+ OP_marshal_text_p
+ OP_cond_set
+ OP_cond_testc
+)
+
+const (
+ _INT_SIZE = 32 << (^uint(0) >> 63)
+ _PTR_SIZE = 32 << (^uintptr(0) >> 63)
+ _PTR_BYTE = unsafe.Sizeof(uintptr(0))
+)
+
+const OpSize = unsafe.Sizeof(NewInsOp(0))
+
+var OpNames = [256]string{
+ OP_null: "null",
+ OP_empty_arr: "empty_arr",
+ OP_empty_obj: "empty_obj",
+ OP_bool: "bool",
+ OP_i8: "i8",
+ OP_i16: "i16",
+ OP_i32: "i32",
+ OP_i64: "i64",
+ OP_u8: "u8",
+ OP_u16: "u16",
+ OP_u32: "u32",
+ OP_u64: "u64",
+ OP_f32: "f32",
+ OP_f64: "f64",
+ OP_str: "str",
+ OP_bin: "bin",
+ OP_quote: "quote",
+ OP_number: "number",
+ OP_eface: "eface",
+ OP_iface: "iface",
+ OP_byte: "byte",
+ OP_text: "text",
+ OP_deref: "deref",
+ OP_index: "index",
+ OP_load: "load",
+ OP_save: "save",
+ OP_drop: "drop",
+ OP_drop_2: "drop_2",
+ OP_recurse: "recurse",
+ OP_is_nil: "is_nil",
+ OP_is_nil_p1: "is_nil_p1",
+ OP_is_zero_1: "is_zero_1",
+ OP_is_zero_2: "is_zero_2",
+ OP_is_zero_4: "is_zero_4",
+ OP_is_zero_8: "is_zero_8",
+ OP_is_zero_map: "is_zero_map",
+ OP_goto: "goto",
+ OP_map_iter: "map_iter",
+ OP_map_stop: "map_stop",
+ OP_map_check_key: "map_check_key",
+ OP_map_write_key: "map_write_key",
+ OP_map_value_next: "map_value_next",
+ OP_slice_len: "slice_len",
+ OP_slice_next: "slice_next",
+ OP_marshal: "marshal",
+ OP_marshal_p: "marshal_p",
+ OP_marshal_text: "marshal_text",
+ OP_marshal_text_p: "marshal_text_p",
+ OP_cond_set: "cond_set",
+ OP_cond_testc: "cond_testc",
+}
+
+func (self Op) String() string {
+ if ret := OpNames[self]; ret != "" {
+ return ret
+ } else {
+ return "<invalid>"
+ }
+}
+
+func OP_int() Op {
+ switch _INT_SIZE {
+ case 32:
+ return OP_i32
+ case 64:
+ return OP_i64
+ default:
+ panic("unsupported int size")
+ }
+}
+
+func OP_uint() Op {
+ switch _INT_SIZE {
+ case 32:
+ return OP_u32
+ case 64:
+ return OP_u64
+ default:
+ panic("unsupported uint size")
+ }
+}
+
+func OP_uintptr() Op {
+ switch _PTR_SIZE {
+ case 32:
+ return OP_u32
+ case 64:
+ return OP_u64
+ default:
+ panic("unsupported pointer size")
+ }
+}
+
+func OP_is_zero_ints() Op {
+ switch _INT_SIZE {
+ case 32:
+ return OP_is_zero_4
+ case 64:
+ return OP_is_zero_8
+ default:
+ panic("unsupported integer size")
+ }
+}
+
+type Instr struct {
+ o Op
+	u  int            // vi: may hold an int value or len(str)
+ p unsafe.Pointer // maybe GoString.Ptr, or *GoType
+}
+
+func NewInsOp(op Op) Instr {
+ return Instr{o: op}
+}
+
+func NewInsVi(op Op, vi int) Instr {
+ return Instr{o: op, u: vi}
+}
+
+func NewInsVs(op Op, vs string) Instr {
+ return Instr{
+ o: op,
+ u: len(vs),
+ p: (*rt.GoString)(unsafe.Pointer(&vs)).Ptr,
+ }
+}
+
+func NewInsVt(op Op, vt reflect.Type) Instr {
+ return Instr{
+ o: op,
+ p: unsafe.Pointer(rt.UnpackType(vt)),
+ }
+}
+
+type typAndTab struct {
+ vt *rt.GoType
+ itab *rt.GoItab
+}
+
+func NewInsVtab(op Op, vt reflect.Type, itab *rt.GoItab) Instr {
+ return Instr{
+ o: op,
+ p: unsafe.Pointer(&typAndTab{
+ vt: rt.UnpackType(vt),
+ itab: itab,
+ }),
+ }
+}
+
+func NewInsVp(op Op, vt reflect.Type, pv bool) Instr {
+ i := 0
+ if pv {
+ i = 1
+ }
+ return Instr{
+ o: op,
+ u: i,
+ p: unsafe.Pointer(rt.UnpackType(vt)),
+ }
+}
+
+func (self Instr) Op() Op {
+ return Op(self.o)
+}
+
+func (self Instr) Vi() int {
+ return self.u
+}
+
+func (self Instr) Vf() uint8 {
+ return (*rt.GoType)(self.p).KindFlags
+}
+
+func (self Instr) Vs() (v string) {
+ (*rt.GoString)(unsafe.Pointer(&v)).Ptr = self.p
+ (*rt.GoString)(unsafe.Pointer(&v)).Len = self.Vi()
+ return
+}
+
+func (self Instr) Vk() reflect.Kind {
+ return (*rt.GoType)(self.p).Kind()
+}
+
+func (self Instr) Vt() reflect.Type {
+ return (*rt.GoType)(self.p).Pack()
+}
+
+func (self Instr) Vr() *rt.GoType {
+ return (*rt.GoType)(self.p)
+}
+
+func (self Instr) Vp() (vt reflect.Type, pv bool) {
+ return (*rt.GoType)(self.p).Pack(), self.u == 1
+}
+
+func (self Instr) Vtab() (vt *rt.GoType, itab *rt.GoItab) {
+ tt := (*typAndTab)(self.p)
+ return tt.vt, tt.itab
+}
+
+func (self Instr) Vp2() (vt *rt.GoType, pv bool) {
+ return (*rt.GoType)(self.p), self.u == 1
+}
+
+func (self Instr) I64() int64 {
+ return int64(self.Vi())
+}
+
+func (self Instr) Byte() byte {
+ return byte(self.Vi())
+}
+
+func (self Instr) Vlen() int {
+ return int((*rt.GoType)(self.p).Size)
+}
+
+func (self Instr) isBranch() bool {
+ switch self.Op() {
+ case OP_goto:
+ fallthrough
+ case OP_is_nil:
+ fallthrough
+ case OP_is_nil_p1:
+ fallthrough
+ case OP_is_zero_1:
+ fallthrough
+ case OP_is_zero_2:
+ fallthrough
+ case OP_is_zero_4:
+ fallthrough
+ case OP_is_zero_8:
+ fallthrough
+ case OP_map_check_key:
+ fallthrough
+ case OP_map_write_key:
+ fallthrough
+ case OP_slice_next:
+ fallthrough
+ case OP_cond_testc:
+ return true
+ default:
+ return false
+ }
+}
+
+func (self Instr) Disassemble() string {
+ switch self.Op() {
+ case OP_byte:
+ return fmt.Sprintf("%-18s%s", self.Op().String(), strconv.QuoteRune(rune(self.Vi())))
+ case OP_text:
+ return fmt.Sprintf("%-18s%s", self.Op().String(), strconv.Quote(self.Vs()))
+ case OP_index:
+ return fmt.Sprintf("%-18s%d", self.Op().String(), self.Vi())
+ case OP_recurse:
+ fallthrough
+ case OP_map_iter:
+ return fmt.Sprintf("%-18s%s", self.Op().String(), self.Vt())
+ case OP_marshal:
+ fallthrough
+ case OP_marshal_p:
+ fallthrough
+ case OP_marshal_text:
+ fallthrough
+ case OP_marshal_text_p:
+ vt, _ := self.Vtab()
+ return fmt.Sprintf("%-18s%s", self.Op().String(), vt.Pack())
+ case OP_goto:
+ fallthrough
+ case OP_is_nil:
+ fallthrough
+ case OP_is_nil_p1:
+ fallthrough
+ case OP_is_zero_1:
+ fallthrough
+ case OP_is_zero_2:
+ fallthrough
+ case OP_is_zero_4:
+ fallthrough
+ case OP_is_zero_8:
+ fallthrough
+ case OP_is_zero_map:
+ fallthrough
+ case OP_cond_testc:
+ fallthrough
+ case OP_map_check_key:
+ fallthrough
+ case OP_map_write_key:
+ return fmt.Sprintf("%-18sL_%d", self.Op().String(), self.Vi())
+ case OP_slice_next:
+ return fmt.Sprintf("%-18sL_%d, %s", self.Op().String(), self.Vi(), self.Vt())
+ default:
+ return fmt.Sprintf("%#v", self)
+ }
+}
+
+type (
+ Program []Instr
+)
+
+func (self Program) PC() int {
+ return len(self)
+}
+
+func (self Program) Tag(n int) {
+ if n >= vars.MaxStack {
+ panic("type nesting too deep")
+ }
+}
+
+func (self Program) Pin(i int) {
+ v := &self[i]
+ v.u = self.PC()
+}
+
+func (self Program) Rel(v []int) {
+ for _, i := range v {
+ self.Pin(i)
+ }
+}
+
+func (self *Program) Add(op Op) {
+ *self = append(*self, NewInsOp(op))
+}
+
+func (self *Program) Key(op Op) {
+ *self = append(*self,
+ NewInsVi(OP_byte, '"'),
+ NewInsOp(op),
+ NewInsVi(OP_byte, '"'),
+ )
+}
+
+func (self *Program) Int(op Op, vi int) {
+ *self = append(*self, NewInsVi(op, vi))
+}
+
+func (self *Program) Str(op Op, vs string) {
+ *self = append(*self, NewInsVs(op, vs))
+}
+
+func (self *Program) Rtt(op Op, vt reflect.Type) {
+ *self = append(*self, NewInsVt(op, vt))
+}
+
+func (self *Program) Vp(op Op, vt reflect.Type, pv bool) {
+ *self = append(*self, NewInsVp(op, vt, pv))
+}
+
+func (self *Program) Vtab(op Op, vt reflect.Type, itab *rt.GoItab) {
+ *self = append(*self, NewInsVtab(op, vt, itab))
+}
+
+func (self Program) Disassemble() string {
+ nb := len(self)
+ tab := make([]bool, nb+1)
+ ret := make([]string, 0, nb+1)
+
+ /* prescan to get all the labels */
+ for _, ins := range self {
+ if ins.isBranch() {
+ tab[ins.Vi()] = true
+ }
+ }
+
+ /* disassemble each instruction */
+ for i, ins := range self {
+ if !tab[i] {
+ ret = append(ret, "\t"+ins.Disassemble())
+ } else {
+ ret = append(ret, fmt.Sprintf("L_%d:\n\t%s", i, ins.Disassemble()))
+ }
+ }
+
+ /* add the last label, if needed */
+ if tab[nb] {
+ ret = append(ret, fmt.Sprintf("L_%d:", nb))
+ }
+
+ /* add an "end" indicator, and join all the strings */
+ return strings.Join(append(ret, "\tend"), "\n")
+}
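
Disassemble labels every branch target found in the prescan, which makes small programs easy to eyeball. A sketch (internal package, illustrative only):

    package main

    import (
        "fmt"

        "github.com/bytedance/sonic/internal/encoder/ir"
    )

    func main() {
        var p ir.Program
        p.Int(ir.OP_byte, '[')
        x := p.PC()
        p.Add(ir.OP_is_nil) // forward branch, patched by Pin below
        p.Add(ir.OP_str)
        p.Pin(x)
        p.Int(ir.OP_byte, ']')
        fmt.Println(p.Disassemble())
        // Output resembles:
        //         byte              '['
        //         is_nil            L_3
        //         str
        // L_3:
        //         byte              ']'
        //         end
    }
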
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/pools.go b/vendor/github.com/bytedance/sonic/internal/encoder/pools.go
deleted file mode 100644
index a2800ddef..000000000
--- a/vendor/github.com/bytedance/sonic/internal/encoder/pools.go
+++ /dev/null
@@ -1,193 +0,0 @@
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package encoder
-
-import (
- `bytes`
- `sync`
- `unsafe`
- `errors`
- `reflect`
-
- `github.com/bytedance/sonic/internal/caching`
- `github.com/bytedance/sonic/option`
- `github.com/bytedance/sonic/internal/rt`
-)
-
-const (
- _MaxStack = 4096 // 4k states
-
- _StackSize = unsafe.Sizeof(_Stack{})
-)
-
-var (
- bytesPool = sync.Pool{}
- stackPool = sync.Pool{}
- bufferPool = sync.Pool{}
- programCache = caching.CreateProgramCache()
-)
-
-type _State struct {
- x int
- f uint64
- p unsafe.Pointer
- q unsafe.Pointer
-}
-
-type _Stack struct {
- sp uint64
- sb [_MaxStack]_State
-}
-
-type _Encoder func(
- rb *[]byte,
- vp unsafe.Pointer,
- sb *_Stack,
- fv uint64,
-) error
-
-var _KeepAlive struct {
- rb *[]byte
- vp unsafe.Pointer
- sb *_Stack
- fv uint64
- err error
- frame [_FP_offs]byte
-}
-
-var errCallShadow = errors.New("DON'T CALL THIS!")
-
-// Faker func of _Encoder, used to export its stackmap as _Encoder's
-func _Encoder_Shadow(rb *[]byte, vp unsafe.Pointer, sb *_Stack, fv uint64) (err error) {
- // align to assembler_amd64.go: _FP_offs
- var frame [_FP_offs]byte
-
- // must keep all args and frames noticeable to GC
- _KeepAlive.rb = rb
- _KeepAlive.vp = vp
- _KeepAlive.sb = sb
- _KeepAlive.fv = fv
- _KeepAlive.err = err
- _KeepAlive.frame = frame
-
- return errCallShadow
-}
-
-func newBytes() []byte {
- if ret := bytesPool.Get(); ret != nil {
- return ret.([]byte)
- } else {
- return make([]byte, 0, option.DefaultEncoderBufferSize)
- }
-}
-
-func newStack() *_Stack {
- if ret := stackPool.Get(); ret == nil {
- return new(_Stack)
- } else {
- return ret.(*_Stack)
- }
-}
-
-func resetStack(p *_Stack) {
- memclrNoHeapPointers(unsafe.Pointer(p), _StackSize)
-}
-
-func newBuffer() *bytes.Buffer {
- if ret := bufferPool.Get(); ret != nil {
- return ret.(*bytes.Buffer)
- } else {
- return bytes.NewBuffer(make([]byte, 0, option.DefaultEncoderBufferSize))
- }
-}
-
-func freeBytes(p []byte) {
- p = p[:0]
- bytesPool.Put(p)
-}
-
-func freeStack(p *_Stack) {
- p.sp = 0
- stackPool.Put(p)
-}
-
-func freeBuffer(p *bytes.Buffer) {
- p.Reset()
- bufferPool.Put(p)
-}
-
-func makeEncoder(vt *rt.GoType, ex ...interface{}) (interface{}, error) {
- if pp, err := newCompiler().compile(vt.Pack(), ex[0].(bool)); err != nil {
- return nil, err
- } else {
- as := newAssembler(pp)
- as.name = vt.String()
- return as.Load(), nil
- }
-}
-
-func findOrCompile(vt *rt.GoType, pv bool) (_Encoder, error) {
- if val := programCache.Get(vt); val != nil {
- return val.(_Encoder), nil
- } else if ret, err := programCache.Compute(vt, makeEncoder, pv); err == nil {
- return ret.(_Encoder), nil
- } else {
- return nil, err
- }
-}
-
-func pretouchType(_vt reflect.Type, opts option.CompileOptions, v uint8) (map[reflect.Type]uint8, error) {
- /* compile function */
- compiler := newCompiler().apply(opts)
- encoder := func(vt *rt.GoType, ex ...interface{}) (interface{}, error) {
- if pp, err := compiler.compile(_vt, ex[0].(bool)); err != nil {
- return nil, err
- } else {
- as := newAssembler(pp)
- as.name = vt.String()
- return as.Load(), nil
- }
- }
-
- /* find or compile */
- vt := rt.UnpackType(_vt)
- if val := programCache.Get(vt); val != nil {
- return nil, nil
- } else if _, err := programCache.Compute(vt, encoder, v == 1); err == nil {
- return compiler.rec, nil
- } else {
- return nil, err
- }
-}
-
-func pretouchRec(vtm map[reflect.Type]uint8, opts option.CompileOptions) error {
- if opts.RecursiveDepth < 0 || len(vtm) == 0 {
- return nil
- }
- next := make(map[reflect.Type]uint8)
- for vt, v := range vtm {
- sub, err := pretouchType(vt, opts, v)
- if err != nil {
- return err
- }
- for svt, v := range sub {
- next[svt] = v
- }
- }
- opts.RecursiveDepth -= 1
- return pretouchRec(next, opts)
-}
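
The deleted pool helpers above all follow one pattern: try the sync.Pool first, allocate with the default capacity on a miss, and truncate before returning an object to the pool. A self-contained sketch of that pattern (defaultBufferSize is a stand-in for option.DefaultEncoderBufferSize):

package main

import (
	"fmt"
	"sync"
)

const defaultBufferSize = 4096 // stand-in for option.DefaultEncoderBufferSize

var bytesPool sync.Pool

// getBytes mirrors newBytes above: reuse a pooled buffer when one is
// available, otherwise allocate a fresh one with the default capacity.
func getBytes() []byte {
	if ret := bytesPool.Get(); ret != nil {
		return ret.([]byte)
	}
	return make([]byte, 0, defaultBufferSize)
}

// putBytes mirrors freeBytes: truncate (keeping capacity) and return it.
func putBytes(p []byte) {
	bytesPool.Put(p[:0]) //nolint:staticcheck // SA6002: boxes the slice header, as the original did
}

func main() {
	b := getBytes()
	b = append(b, "hello"...)
	putBytes(b)
	fmt.Println(cap(getBytes()) > 0) // buffer likely reused from the pool
}

Note that the replacement vars package below pools *[]byte rather than []byte, which avoids boxing the slice header on every Put.
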
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/pools_amd64.go b/vendor/github.com/bytedance/sonic/internal/encoder/pools_amd64.go
new file mode 100644
index 000000000..43f026fbe
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/pools_amd64.go
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package encoder
+
+import (
+ "errors"
+ "reflect"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/encoder/vars"
+ "github.com/bytedance/sonic/internal/encoder/x86"
+ "github.com/bytedance/sonic/internal/rt"
+ "github.com/bytedance/sonic/option"
+)
+
+
+func ForceUseJit() {
+ x86.SetCompiler(makeEncoderX86)
+ pretouchType = pretouchTypeX86
+ encodeTypedPointer = x86.EncodeTypedPointer
+ vars.UseVM = false
+}
+
+func init() {
+ if vars.UseVM {
+ ForceUseVM()
+ } else {
+ ForceUseJit()
+ }
+}
+
+var _KeepAlive struct {
+ rb *[]byte
+ vp unsafe.Pointer
+ sb *vars.Stack
+ fv uint64
+ err error
+ frame [x86.FP_offs]byte
+}
+
+var errCallShadow = errors.New("DON'T CALL THIS!")
+
+// Fake version of _Encoder, used only to export its stack map as _Encoder's
+func _Encoder_Shadow(rb *[]byte, vp unsafe.Pointer, sb *vars.Stack, fv uint64) (err error) {
+ // align to assembler_amd64.go: x86.FP_offs
+ var frame [x86.FP_offs]byte
+
+ // must keep all args and frames visible to the GC
+ _KeepAlive.rb = rb
+ _KeepAlive.vp = vp
+ _KeepAlive.sb = sb
+ _KeepAlive.fv = fv
+ _KeepAlive.err = err
+ _KeepAlive.frame = frame
+
+ return errCallShadow
+}
+
+func makeEncoderX86(vt *rt.GoType, ex ...interface{}) (interface{}, error) {
+ pp, err := NewCompiler().Compile(vt.Pack(), ex[0].(bool))
+ if err != nil {
+ return nil, err
+ }
+ as := x86.NewAssembler(pp)
+ as.Name = vt.String()
+ return as.Load(), nil
+}
+
+func pretouchTypeX86(_vt reflect.Type, opts option.CompileOptions, v uint8) (map[reflect.Type]uint8, error) {
+ /* compile function */
+ compiler := NewCompiler().apply(opts)
+
+ /* find or compile */
+ vt := rt.UnpackType(_vt)
+ if val := vars.GetProgram(vt); val != nil {
+ return nil, nil
+ } else if _, err := vars.ComputeProgram(vt, makeEncoderX86, v == 1); err == nil {
+ return compiler.rec, nil
+ } else {
+ return nil, err
+ }
+}
+
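ForceUseJit and its ForceUseVM counterpart work by rebinding package-level function variables (pretouchType, encodeTypedPointer) once, rather than branching on the backend at every call site. A reduced, self-contained sketch of that dispatch pattern; all names here are illustrative:

package main

import "fmt"

type encodeFn func(dst *[]byte, v int) error

var encode encodeFn = encodeVM // default backend, rebound by force* helpers

func encodeVM(dst *[]byte, v int) error {
	*dst = append(*dst, fmt.Sprintf("vm:%d", v)...)
	return nil
}

func encodeJIT(dst *[]byte, v int) error {
	*dst = append(*dst, fmt.Sprintf("jit:%d", v)...)
	return nil
}

func forceUseJIT() { encode = encodeJIT }

func main() {
	var buf []byte
	_ = encode(&buf, 1)
	forceUseJIT()
	_ = encode(&buf, 2)
	fmt.Println(string(buf)) // vm:1jit:2
}
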
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/pools_compt.go b/vendor/github.com/bytedance/sonic/internal/encoder/pools_compt.go
new file mode 100644
index 000000000..ef46dc98e
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/pools_compt.go
@@ -0,0 +1,24 @@
+//go:build !amd64
+// +build !amd64
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package encoder
+
+func init() {
+ ForceUseVM()
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/primitives.go b/vendor/github.com/bytedance/sonic/internal/encoder/primitives.go
deleted file mode 100644
index 94f1c3dc6..000000000
--- a/vendor/github.com/bytedance/sonic/internal/encoder/primitives.go
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package encoder
-
-import (
- `encoding`
- `encoding/json`
- `unsafe`
-
- `github.com/bytedance/sonic/internal/jit`
- `github.com/bytedance/sonic/internal/native`
- `github.com/bytedance/sonic/internal/native/types`
- `github.com/bytedance/sonic/internal/rt`
-)
-
-/** Encoder Primitives **/
-
-func encodeNil(rb *[]byte) error {
- *rb = append(*rb, 'n', 'u', 'l', 'l')
- return nil
-}
-
-func encodeString(buf *[]byte, val string) error {
- var sidx int
- var pbuf *rt.GoSlice
- var pstr *rt.GoString
-
- /* opening quote */
- *buf = append(*buf, '"')
- pbuf = (*rt.GoSlice)(unsafe.Pointer(buf))
- pstr = (*rt.GoString)(unsafe.Pointer(&val))
-
- /* encode with native library */
- for sidx < pstr.Len {
- sn := pstr.Len - sidx
- dn := pbuf.Cap - pbuf.Len
- sp := padd(pstr.Ptr, sidx)
- dp := padd(pbuf.Ptr, pbuf.Len)
- nb := native.Quote(sp, sn, dp, &dn, 0)
-
- /* check for errors */
- if pbuf.Len += dn; nb >= 0 {
- break
- }
-
- /* not enough space, grow the slice and try again */
- sidx += ^nb
- *pbuf = growslice(rt.UnpackType(byteType), *pbuf, pbuf.Cap * 2)
- }
-
- /* closing quote */
- *buf = append(*buf, '"')
- return nil
-}
-
-func encodeTypedPointer(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *_Stack, fv uint64) error {
- if vt == nil {
- return encodeNil(buf)
- } else if fn, err := findOrCompile(vt, (fv&(1<<bitPointerValue)) != 0); err != nil {
- return err
- } else if vt.Indirect() {
- rt.MoreStack(_FP_size + native.MaxFrameSize)
- err := fn(buf, *vp, sb, fv)
- return err
- } else {
- rt.MoreStack(_FP_size + native.MaxFrameSize)
- err := fn(buf, unsafe.Pointer(vp), sb, fv)
- return err
- }
-}
-
-func encodeJsonMarshaler(buf *[]byte, val json.Marshaler, opt Options) error {
- if ret, err := val.MarshalJSON(); err != nil {
- return err
- } else {
- if opt & CompactMarshaler != 0 {
- return compact(buf, ret)
- }
- if opt & NoValidateJSONMarshaler == 0 {
- if ok, s := Valid(ret); !ok {
- return error_marshaler(ret, s)
- }
- }
- *buf = append(*buf, ret...)
- return nil
- }
-}
-
-func encodeTextMarshaler(buf *[]byte, val encoding.TextMarshaler, opt Options) error {
- if ret, err := val.MarshalText(); err != nil {
- return err
- } else {
- if opt & NoQuoteTextMarshaler != 0 {
- *buf = append(*buf, ret...)
- return nil
- }
- return encodeString(buf, rt.Mem2Str(ret) )
- }
-}
-
-func htmlEscape(dst []byte, src []byte) []byte {
- var sidx int
-
- dst = append(dst, src[:0]...) // avoid check nil dst
- sbuf := (*rt.GoSlice)(unsafe.Pointer(&src))
- dbuf := (*rt.GoSlice)(unsafe.Pointer(&dst))
-
- /* grow dst if it is shorter */
- if cap(dst) - len(dst) < len(src) + types.BufPaddingSize {
- cap := len(src) * 3 / 2 + types.BufPaddingSize
- *dbuf = growslice(typeByte, *dbuf, cap)
- }
-
- for sidx < sbuf.Len {
- sp := padd(sbuf.Ptr, sidx)
- dp := padd(dbuf.Ptr, dbuf.Len)
-
- sn := sbuf.Len - sidx
- dn := dbuf.Cap - dbuf.Len
- nb := native.HTMLEscape(sp, sn, dp, &dn)
-
- /* check for errors */
- if dbuf.Len += dn; nb >= 0 {
- break
- }
-
- /* not enough space, grow the slice and try again */
- sidx += ^nb
- *dbuf = growslice(typeByte, *dbuf, dbuf.Cap * 2)
- }
- return dst
-}
-
-var (
- argPtrs = []bool { true, true, true, false }
- localPtrs = []bool{}
-)
-
-var (
- _F_assertI2I = jit.Func(rt.AssertI2I2)
-)
-
-func asText(v unsafe.Pointer) (string, error) {
- text := rt.AssertI2I2(_T_encoding_TextMarshaler, *(*rt.GoIface)(v))
- r, e := (*(*encoding.TextMarshaler)(unsafe.Pointer(&text))).MarshalText()
- return rt.Mem2Str(r), e
-}
-
-func asJson(v unsafe.Pointer) (string, error) {
- text := rt.AssertI2I2(_T_json_Marshaler, *(*rt.GoIface)(v))
- r, e := (*(*json.Marshaler)(unsafe.Pointer(&text))).MarshalJSON()
- return rt.Mem2Str(r), e
-}
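
encodeString and htmlEscape above share a calling convention with the native library: a negative return value nb means the output buffer ran out of room after ^nb source bytes were consumed, so the caller grows the slice and resumes from that offset. A pure-Go sketch of that grow-and-retry loop; quoteChunk stands in for native.Quote and does no actual escaping:

package main

import "fmt"

// quoteChunk copies as much of src as fits into dst's spare capacity and
// reports how many bytes it consumed and whether it finished.
func quoteChunk(dst []byte, src string) ([]byte, int, bool) {
	free := cap(dst) - len(dst)
	if free >= len(src) {
		return append(dst, src...), len(src), true
	}
	return append(dst, src[:free]...), free, false
}

// appendQuoted shows the shape of encodeString above: call the chunk
// encoder, and when it runs out of room, double the capacity and resume
// from where it stopped.
func appendQuoted(buf []byte, s string) []byte {
	buf = append(buf, '"')
	for idx := 0; idx < len(s); {
		var n int
		var done bool
		buf, n, done = quoteChunk(buf, s[idx:])
		idx += n
		if done {
			break
		}
		grown := make([]byte, len(buf), 2*cap(buf)+1)
		copy(grown, buf)
		buf = grown
	}
	return append(buf, '"')
}

func main() {
	fmt.Println(string(appendQuoted(make([]byte, 0, 4), "hello, world")))
}
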
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/stream.go b/vendor/github.com/bytedance/sonic/internal/encoder/stream.go
index d498f68fc..c2d026a0c 100644
--- a/vendor/github.com/bytedance/sonic/internal/encoder/stream.go
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/stream.go
@@ -17,8 +17,10 @@
package encoder
import (
- `encoding/json`
- `io`
+ "encoding/json"
+ "io"
+
+ "github.com/bytedance/sonic/internal/encoder/vars"
)
// StreamEncoder uses io.Writer as input.
@@ -36,21 +38,20 @@ func NewStreamEncoder(w io.Writer) *StreamEncoder {
// Encode encodes interface{} as JSON to io.Writer
func (enc *StreamEncoder) Encode(val interface{}) (err error) {
- buf := newBytes()
- out := buf
+ out := vars.NewBytes()
/* encode into the buffer */
- err = EncodeInto(&out, val, enc.Opts)
+ err = EncodeInto(out, val, enc.Opts)
if err != nil {
goto free_bytes
}
if enc.indent != "" || enc.prefix != "" {
/* indent the JSON */
- buf := newBuffer()
- err = json.Indent(buf, out, enc.prefix, enc.indent)
+ buf := vars.NewBuffer()
+ err = json.Indent(buf, *out, enc.prefix, enc.indent)
if err != nil {
- freeBuffer(buf)
+ vars.FreeBuffer(buf)
goto free_bytes
}
@@ -62,16 +63,17 @@ func (enc *StreamEncoder) Encode(val interface{}) (err error) {
/* copy into io.Writer */
_, err = io.Copy(enc.w, buf)
if err != nil {
- freeBuffer(buf)
+ vars.FreeBuffer(buf)
goto free_bytes
}
} else {
/* copy into io.Writer */
var n int
- for len(out) > 0 {
- n, err = enc.w.Write(out)
- out = out[n:]
+ buf := *out
+ for len(buf) > 0 {
+ n, err = enc.w.Write(buf)
+ buf = buf[n:]
if err != nil {
goto free_bytes
}
@@ -84,6 +86,6 @@ func (enc *StreamEncoder) Encode(val interface{}) (err error) {
}
free_bytes:
- freeBytes(buf)
+ vars.FreeBytes(out)
return err
}
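
After this change StreamEncoder.Encode borrows its scratch buffer as a *[]byte from the vars pools, but the observable behaviour is unchanged. For reference, typical usage goes through the exported wrapper; the import path below assumes sonic's public encoder package, which re-exports this type:

package main

import (
	"os"

	"github.com/bytedance/sonic/encoder"
)

func main() {
	enc := encoder.NewStreamEncoder(os.Stdout) // wraps any io.Writer
	// Each Encode call marshals into a pooled buffer, then copies it to
	// the writer; the buffer is returned to the pool afterwards.
	_ = enc.Encode(map[string]int{"a": 1})
}
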
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/stubs_go116.go b/vendor/github.com/bytedance/sonic/internal/encoder/stubs_go116.go
deleted file mode 100644
index 84c186fcf..000000000
--- a/vendor/github.com/bytedance/sonic/internal/encoder/stubs_go116.go
+++ /dev/null
@@ -1,61 +0,0 @@
-// +build go1.16,!go1.17
-
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package encoder
-
-import (
- `unsafe`
-
- _ `github.com/cloudwego/base64x`
-
- `github.com/bytedance/sonic/internal/rt`
-)
-
-//go:linkname _subr__b64encode github.com/cloudwego/base64x._subr__b64encode
-var _subr__b64encode uintptr
-
-//go:noescape
-//go:linkname memmove runtime.memmove
-//goland:noinspection GoUnusedParameter
-func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)
-
-//go:linkname growslice runtime.growslice
-//goland:noinspection GoUnusedParameter
-func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
-
-//go:linkname mapiternext runtime.mapiternext
-//goland:noinspection GoUnusedParameter
-func mapiternext(it *rt.GoMapIterator)
-
-//go:linkname mapiterinit runtime.mapiterinit
-//goland:noinspection GoUnusedParameter
-func mapiterinit(t *rt.GoMapType, m *rt.GoMap, it *rt.GoMapIterator)
-
-//go:linkname isValidNumber encoding/json.isValidNumber
-//goland:noinspection GoUnusedParameter
-func isValidNumber(s string) bool
-
-//go:noescape
-//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
-//goland:noinspection GoUnusedParameter
-func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
-
-var _runtime_writeBarrier uintptr = rt.GcwbAddr()
-
-//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
-func gcWriteBarrierAX() \ No newline at end of file
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/stubs_go117.go b/vendor/github.com/bytedance/sonic/internal/encoder/stubs_go117.go
deleted file mode 100644
index 6e56aca00..000000000
--- a/vendor/github.com/bytedance/sonic/internal/encoder/stubs_go117.go
+++ /dev/null
@@ -1,62 +0,0 @@
-// +build go1.17,!go1.20
-
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package encoder
-
-import (
- `unsafe`
-
- _ `github.com/cloudwego/base64x`
-
- `github.com/bytedance/sonic/internal/rt`
-)
-
-//go:linkname _subr__b64encode github.com/cloudwego/base64x._subr__b64encode
-var _subr__b64encode uintptr
-
-//go:noescape
-//go:linkname memmove runtime.memmove
-//goland:noinspection GoUnusedParameter
-func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)
-
-//go:linkname growslice runtime.growslice
-//goland:noinspection GoUnusedParameter
-func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
-
-//go:linkname mapiternext runtime.mapiternext
-//goland:noinspection GoUnusedParameter
-func mapiternext(it *rt.GoMapIterator)
-
-//go:linkname mapiterinit runtime.mapiterinit
-//goland:noinspection GoUnusedParameter
-func mapiterinit(t *rt.GoMapType, m *rt.GoMap, it *rt.GoMapIterator)
-
-//go:linkname isValidNumber encoding/json.isValidNumber
-//goland:noinspection GoUnusedParameter
-func isValidNumber(s string) bool
-
-//go:noescape
-//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
-//goland:noinspection GoUnusedParameter
-func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
-
-//go:linkname _runtime_writeBarrier runtime.writeBarrier
-var _runtime_writeBarrier uintptr
-
-//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
-func gcWriteBarrierAX() \ No newline at end of file
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/stubs_go120.go b/vendor/github.com/bytedance/sonic/internal/encoder/stubs_go120.go
deleted file mode 100644
index 2b5e79bb6..000000000
--- a/vendor/github.com/bytedance/sonic/internal/encoder/stubs_go120.go
+++ /dev/null
@@ -1,62 +0,0 @@
-// +build go1.20,!go1.21
-
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package encoder
-
-import (
- `unsafe`
-
- _ `github.com/cloudwego/base64x`
-
- `github.com/bytedance/sonic/internal/rt`
-)
-
-//go:linkname _subr__b64encode github.com/cloudwego/base64x._subr__b64encode
-var _subr__b64encode uintptr
-
-//go:noescape
-//go:linkname memmove runtime.memmove
-//goland:noinspection GoUnusedParameter
-func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)
-
-//go:linkname growslice reflect.growslice
-//goland:noinspection GoUnusedParameter
-func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
-
-//go:linkname mapiternext runtime.mapiternext
-//goland:noinspection GoUnusedParameter
-func mapiternext(it *rt.GoMapIterator)
-
-//go:linkname mapiterinit runtime.mapiterinit
-//goland:noinspection GoUnusedParameter
-func mapiterinit(t *rt.GoMapType, m *rt.GoMap, it *rt.GoMapIterator)
-
-//go:linkname isValidNumber encoding/json.isValidNumber
-//goland:noinspection GoUnusedParameter
-func isValidNumber(s string) bool
-
-//go:noescape
-//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
-//goland:noinspection GoUnusedParameter
-func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
-
-//go:linkname _runtime_writeBarrier runtime.writeBarrier
-var _runtime_writeBarrier uintptr
-
-//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
-func gcWriteBarrierAX()
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/stubs_go121.go b/vendor/github.com/bytedance/sonic/internal/encoder/stubs_go121.go
deleted file mode 100644
index 89ae5d210..000000000
--- a/vendor/github.com/bytedance/sonic/internal/encoder/stubs_go121.go
+++ /dev/null
@@ -1,62 +0,0 @@
-// +build go1.21
-
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package encoder
-
-import (
- `unsafe`
-
- _ `github.com/cloudwego/base64x`
-
- `github.com/bytedance/sonic/internal/rt`
-)
-
-//go:linkname _subr__b64encode github.com/cloudwego/base64x._subr__b64encode
-var _subr__b64encode uintptr
-
-//go:noescape
-//go:linkname memmove runtime.memmove
-//goland:noinspection GoUnusedParameter
-func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)
-
-//go:linkname growslice reflect.growslice
-//goland:noinspection GoUnusedParameter
-func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
-
-//go:linkname mapiternext runtime.mapiternext
-//goland:noinspection GoUnusedParameter
-func mapiternext(it *rt.GoMapIterator)
-
-//go:linkname mapiterinit runtime.mapiterinit
-//goland:noinspection GoUnusedParameter
-func mapiterinit(t *rt.GoMapType, m *rt.GoMap, it *rt.GoMapIterator)
-
-//go:linkname isValidNumber encoding/json.isValidNumber
-//goland:noinspection GoUnusedParameter
-func isValidNumber(s string) bool
-
-//go:noescape
-//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
-//goland:noinspection GoUnusedParameter
-func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
-
-//go:linkname _runtime_writeBarrier runtime.writeBarrier
-var _runtime_writeBarrier uintptr
-
-//go:linkname gcWriteBarrier2 runtime.gcWriteBarrier2
-func gcWriteBarrier2()
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/utils.go b/vendor/github.com/bytedance/sonic/internal/encoder/utils.go
deleted file mode 100644
index 510596fda..000000000
--- a/vendor/github.com/bytedance/sonic/internal/encoder/utils.go
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package encoder
-
-import (
- `encoding/json`
- `unsafe`
-
- `github.com/bytedance/sonic/loader`
-)
-
-//go:nosplit
-func padd(p unsafe.Pointer, v int) unsafe.Pointer {
- return unsafe.Pointer(uintptr(p) + uintptr(v))
-}
-
-//go:nosplit
-func ptoenc(p loader.Function) _Encoder {
- return *(*_Encoder)(unsafe.Pointer(&p))
-}
-
-func compact(p *[]byte, v []byte) error {
- buf := newBuffer()
- err := json.Compact(buf, v)
-
- /* check for errors */
- if err != nil {
- return err
- }
-
- /* add to result */
- v = buf.Bytes()
- *p = append(*p, v...)
-
- /* return the buffer into pool */
- freeBuffer(buf)
- return nil
-}
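
The deleted compact helper routes Marshaler output through json.Compact with a pooled scratch buffer; note that it returned early on error without putting the buffer back, so the error path simply skipped reuse. A stdlib-only sketch of the same shape:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// compactAppend compacts src through a scratch buffer, then appends the
// result to dst. The real helper borrows the buffer from a pool.
func compactAppend(dst *[]byte, src []byte) error {
	var buf bytes.Buffer
	if err := json.Compact(&buf, src); err != nil {
		return err
	}
	*dst = append(*dst, buf.Bytes()...)
	return nil
}

func main() {
	var out []byte
	_ = compactAppend(&out, []byte(`{ "a" : 1 }`))
	fmt.Println(string(out)) // {"a":1}
}
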
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/vars/cache.go b/vendor/github.com/bytedance/sonic/internal/encoder/vars/cache.go
new file mode 100644
index 000000000..9cf2fb15e
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/vars/cache.go
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package vars
+
+import (
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/rt"
+)
+
+type Encoder func(
+ rb *[]byte,
+ vp unsafe.Pointer,
+ sb *Stack,
+ fv uint64,
+) error
+
+func FindOrCompile(vt *rt.GoType, pv bool, compiler func(*rt.GoType, ...interface{}) (interface{}, error)) (interface{}, error) {
+ if val := programCache.Get(vt); val != nil {
+ return val, nil
+ } else if ret, err := programCache.Compute(vt, compiler, pv); err == nil {
+ return ret, nil
+ } else {
+ return nil, err
+ }
+}
+
+func GetProgram(vt *rt.GoType) interface{} {
+ return programCache.Get(vt)
+}
+
+func ComputeProgram(vt *rt.GoType, compute func(*rt.GoType, ...interface{}) (interface{}, error), pv bool) (interface{}, error) {
+ return programCache.Compute(vt, compute, pv)
+} \ No newline at end of file
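
FindOrCompile, GetProgram, and ComputeProgram are thin wrappers over the shared program cache: a lookup, plus a compute-on-miss that stores the compiled result. A reduced sketch of that get-or-compute pattern, using sync.Map in place of the internal cache (the internal one also coordinates concurrent compilations of the same type):

package main

import (
	"fmt"
	"sync"
)

var cache sync.Map

// findOrCompile returns the cached program for key, compiling and storing
// it on a miss.
func findOrCompile(key string, compile func(string) (string, error)) (string, error) {
	if v, ok := cache.Load(key); ok {
		return v.(string), nil
	}
	v, err := compile(key)
	if err != nil {
		return "", err
	}
	actual, _ := cache.LoadOrStore(key, v) // keep the winner on a race
	return actual.(string), nil
}

func main() {
	prog, _ := findOrCompile("T", func(k string) (string, error) {
		return "program(" + k + ")", nil
	})
	fmt.Println(prog)
}
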
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/vars/const.go b/vendor/github.com/bytedance/sonic/internal/encoder/vars/const.go
new file mode 100644
index 000000000..88499e959
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/vars/const.go
@@ -0,0 +1,42 @@
+/**
+ * Copyright 2024 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package vars
+
+import (
+ "os"
+ "unsafe"
+)
+
+const (
+ MaxStack = 4096 // 4k states
+ StackSize = unsafe.Sizeof(Stack{})
+ StateSize = int64(unsafe.Sizeof(State{}))
+ StackLimit = MaxStack * StateSize
+)
+
+const (
+ MAX_ILBUF = 100000 // cutoff at 100k of IL instructions
+ MAX_FIELDS = 50 // cutoff at 50 fields per struct
+)
+
+var (
+ DebugSyncGC = os.Getenv("SONIC_SYNC_GC") != ""
+ DebugAsyncGC = os.Getenv("SONIC_NO_ASYNC_GC") == ""
+ DebugCheckPtr = os.Getenv("SONIC_CHECK_POINTER") != ""
+)
+
+var UseVM = os.Getenv("SONIC_ENCODER_USE_VM") != ""
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/errors.go b/vendor/github.com/bytedance/sonic/internal/encoder/vars/errors.go
index 6e9d0c783..77919c44a 100644
--- a/vendor/github.com/bytedance/sonic/internal/encoder/errors.go
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/vars/errors.go
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package encoder
+package vars
import (
`encoding/json`
@@ -26,39 +26,39 @@ import (
`github.com/bytedance/sonic/internal/rt`
)
-var _ERR_too_deep = &json.UnsupportedValueError {
+var ERR_too_deep = &json.UnsupportedValueError {
Str : "Value nesting too deep",
Value : reflect.ValueOf("..."),
}
-var _ERR_nan_or_infinite = &json.UnsupportedValueError {
+var ERR_nan_or_infinite = &json.UnsupportedValueError {
Str : "NaN or ±Infinite",
Value : reflect.ValueOf("NaN or ±Infinite"),
}
-func error_type(vtype reflect.Type) error {
+func Error_type(vtype reflect.Type) error {
return &json.UnsupportedTypeError{Type: vtype}
}
-func error_number(number json.Number) error {
+func Error_number(number json.Number) error {
return &json.UnsupportedValueError {
Str : "invalid number literal: " + strconv.Quote(string(number)),
Value : reflect.ValueOf(number),
}
}
-func error_marshaler(ret []byte, pos int) error {
+func Error_marshaler(ret []byte, pos int) error {
return fmt.Errorf("invalid Marshaler output json syntax at %d: %q", pos, ret)
}
const (
- panicNilPointerOfNonEmptyString int = 1 + iota
+ PanicNilPointerOfNonEmptyString int = 1 + iota
)
-func goPanic(code int, val unsafe.Pointer) {
+func GoPanic(code int, val unsafe.Pointer) {
switch(code){
- case panicNilPointerOfNonEmptyString:
- panic(fmt.Sprintf("val: %#v has nil pointer while its length is not zero!", (*rt.GoString)(val)))
+ case PanicNilPointerOfNonEmptyString:
+ panic(fmt.Sprintf("val: %#v has nil pointer while its length is not zero!\nThis is a nil pointer exception (NPE) problem. There might be a data race issue. It is recommended to execute the tests related to the code with the `-race` compile flag to detect the problem.", (*rt.GoString)(val)))
default:
panic("encoder error!")
}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/vars/stack.go b/vendor/github.com/bytedance/sonic/internal/encoder/vars/stack.go
new file mode 100644
index 000000000..28a630b40
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/vars/stack.go
@@ -0,0 +1,146 @@
+/**
+ * Copyright 2024 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package vars
+
+import (
+ "bytes"
+ "sync"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/caching"
+ "github.com/bytedance/sonic/internal/rt"
+ "github.com/bytedance/sonic/option"
+)
+
+type State struct {
+ x int
+ f uint64
+ p unsafe.Pointer
+ q unsafe.Pointer
+}
+
+type Stack struct {
+ sp uintptr
+ sb [MaxStack]State
+}
+
+var (
+ bytesPool = sync.Pool{}
+ stackPool = sync.Pool{
+ New: func() interface{} {
+ return &Stack{}
+ },
+ }
+ bufferPool = sync.Pool{}
+ programCache = caching.CreateProgramCache()
+)
+
+func NewBytes() *[]byte {
+ if ret := bytesPool.Get(); ret != nil {
+ return ret.(*[]byte)
+ } else {
+ ret := make([]byte, 0, option.DefaultEncoderBufferSize)
+ return &ret
+ }
+}
+
+func NewStack() *Stack {
+ ret := stackPool.Get().(*Stack)
+ ret.sp = 0
+ return ret
+}
+
+func ResetStack(p *Stack) {
+ rt.MemclrNoHeapPointers(unsafe.Pointer(p), StackSize)
+}
+
+func (s *Stack) Top() *State {
+ return (*State)(rt.Add(unsafe.Pointer(&s.sb[0]), s.sp))
+}
+
+func (s *Stack) Cur() *State {
+ return (*State)(rt.Add(unsafe.Pointer(&s.sb[0]), s.sp - uintptr(StateSize)))
+}
+
+const _MaxStackSP = uintptr(MaxStack * StateSize)
+
+func (s *Stack) Push(v State) bool {
+ if s.sp >= _MaxStackSP {
+ return false
+ }
+ st := s.Top()
+ *st = v
+ s.sp += uintptr(StateSize)
+ return true
+}
+
+func (s *Stack) Pop() State {
+ s.sp -= uintptr(StateSize)
+ st := s.Top()
+ ret := *st
+ *st = State{}
+ return ret
+}
+
+func (s *Stack) Load() (int, uint64, unsafe.Pointer, unsafe.Pointer) {
+ st := s.Cur()
+ return st.x, st.f, st.p, st.q
+}
+
+func (s *Stack) Save(x int, f uint64, p unsafe.Pointer, q unsafe.Pointer) bool {
+ return s.Push(State{x: x, f: f, p: p, q: q})
+}
+
+func (s *Stack) Drop() (int, uint64, unsafe.Pointer, unsafe.Pointer) {
+ st := s.Pop()
+ return st.x, st.f, st.p, st.q
+}
+
+func NewBuffer() *bytes.Buffer {
+ if ret := bufferPool.Get(); ret != nil {
+ return ret.(*bytes.Buffer)
+ } else {
+ return bytes.NewBuffer(make([]byte, 0, option.DefaultEncoderBufferSize))
+ }
+}
+
+func FreeBytes(p *[]byte) {
+ if rt.CanSizeResue(cap(*p)) {
+ (*p) = (*p)[:0]
+ bytesPool.Put(p)
+ }
+}
+
+func FreeStack(p *Stack) {
+ p.sp = 0
+ stackPool.Put(p)
+}
+
+func FreeBuffer(p *bytes.Buffer) {
+ if rt.CanSizeResue(cap(p.Bytes())) {
+ p.Reset()
+ bufferPool.Put(p)
+ }
+}
+
+var (
+ ArgPtrs = []bool{true, true, true, false}
+ LocalPtrs = []bool{}
+
+ ArgPtrs_generic = []bool{true}
+ LocalPtrs_generic = []bool{}
+) \ No newline at end of file
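
Stack keeps a fixed array of State frames plus a byte offset sp, so Push and Pop are a bounds check and a copy with no per-frame allocation, and each popped frame is zeroed so stale pointers do not pin garbage. A reduced, self-contained replica of the pattern, with the field set trimmed for brevity:

package main

import "fmt"

type state struct {
	x int
	f uint64
}

const maxStack = 4 // the real stack holds 4096 states

type stack struct {
	sp int
	sb [maxStack]state
}

func (s *stack) push(v state) bool {
	if s.sp >= maxStack {
		return false // caller reports "nesting too deep"
	}
	s.sb[s.sp] = v
	s.sp++
	return true
}

func (s *stack) pop() state {
	s.sp--
	ret := s.sb[s.sp]
	s.sb[s.sp] = state{} // zero the frame so it doesn't pin memory
	return ret
}

func main() {
	var s stack
	s.push(state{x: 1})
	fmt.Println(s.pop().x) // 1
}
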
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/types.go b/vendor/github.com/bytedance/sonic/internal/encoder/vars/types.go
index 3d4a00668..ef8497807 100644
--- a/vendor/github.com/bytedance/sonic/internal/encoder/types.go
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/vars/types.go
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package encoder
+package vars
import (
`encoding`
@@ -23,19 +23,19 @@ import (
)
var (
- byteType = reflect.TypeOf(byte(0))
- jsonNumberType = reflect.TypeOf(json.Number(""))
- jsonUnsupportedValueType = reflect.TypeOf(new(json.UnsupportedValueError))
+ ByteType = reflect.TypeOf(byte(0))
+ JsonNumberType = reflect.TypeOf(json.Number(""))
+ JsonUnsupportedValueType = reflect.TypeOf(new(json.UnsupportedValueError))
)
var (
- errorType = reflect.TypeOf((*error)(nil)).Elem()
- jsonMarshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem()
- encodingTextMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
+ ErrorType = reflect.TypeOf((*error)(nil)).Elem()
+ JsonMarshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem()
+ EncodingTextMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
)
-func isSimpleByte(vt reflect.Type) bool {
- if vt.Kind() != byteType.Kind() {
+func IsSimpleByte(vt reflect.Type) bool {
+ if vt.Kind() != ByteType.Kind() {
return false
} else {
return !isEitherMarshaler(vt) && !isEitherMarshaler(reflect.PtrTo(vt))
@@ -43,5 +43,5 @@ func isSimpleByte(vt reflect.Type) bool {
}
func isEitherMarshaler(vt reflect.Type) bool {
- return vt.Implements(jsonMarshalerType) || vt.Implements(encodingTextMarshalerType)
+ return vt.Implements(JsonMarshalerType) || vt.Implements(EncodingTextMarshalerType)
}
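
IsSimpleByte asks whether a byte-kinded type may take the raw byte-slice fast path: neither the type nor its pointer type may customize marshaling. The same interface probe spelled out with the stdlib only:

package main

import (
	"encoding"
	"encoding/json"
	"fmt"
	"reflect"
	"time"
)

var (
	jsonMarshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem()
	textMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
)

// customizesMarshaling reports whether vt overrides JSON or text encoding.
func customizesMarshaling(vt reflect.Type) bool {
	return vt.Implements(jsonMarshalerType) || vt.Implements(textMarshalerType)
}

func main() {
	fmt.Println(customizesMarshaling(reflect.TypeOf(byte(0))))     // false
	fmt.Println(customizesMarshaling(reflect.TypeOf(time.Time{}))) // true: MarshalJSON
}
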
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/vm/stbus.go b/vendor/github.com/bytedance/sonic/internal/encoder/vm/stbus.go
new file mode 100644
index 000000000..21b476c3c
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/vm/stbus.go
@@ -0,0 +1,45 @@
+/**
+ * Copyright 2024 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package vm
+
+import (
+ "unsafe"
+ _ "unsafe"
+
+ "github.com/bytedance/sonic/internal/encoder/alg"
+ "github.com/bytedance/sonic/internal/encoder/ir"
+ "github.com/bytedance/sonic/internal/encoder/vars"
+ "github.com/bytedance/sonic/internal/rt"
+)
+
+func EncodeTypedPointer(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *vars.Stack, fv uint64) error {
+ if vt == nil {
+ return alg.EncodeNil(buf)
+ } else if pp, err := vars.FindOrCompile(vt, (fv&(1<<alg.BitPointerValue)) != 0, compiler); err != nil {
+ return err
+ } else if vt.Indirect() {
+ return Execute(buf, *vp, sb, fv, pp.(*ir.Program))
+ } else {
+ return Execute(buf, unsafe.Pointer(vp), sb, fv, pp.(*ir.Program))
+ }
+}
+
+var compiler func(*rt.GoType, ...interface{}) (interface{}, error)
+
+func SetCompiler(c func(*rt.GoType, ...interface{}) (interface{}, error)) {
+ compiler = c
+}
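
SetCompiler exists because vm cannot import the package that owns the compiler without creating an import cycle; the encoder package injects the compile function at init time instead. A reduced sketch of that injection pattern; all names here are illustrative:

package main

import "fmt"

// compile is filled in by a higher-level package before run is called;
// calling it while still nil panics, matching the "must register" contract.
var compile func(key string) (interface{}, error)

func SetCompiler(c func(string) (interface{}, error)) { compile = c }

func run(key string) error {
	prog, err := compile(key)
	if err != nil {
		return err
	}
	fmt.Println("executing", prog)
	return nil
}

func main() {
	SetCompiler(func(k string) (interface{}, error) {
		return "program(" + k + ")", nil
	})
	_ = run("T")
}
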
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/vm/vm.go b/vendor/github.com/bytedance/sonic/internal/encoder/vm/vm.go
new file mode 100644
index 000000000..b75ba807a
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/vm/vm.go
@@ -0,0 +1,374 @@
+// Copyright 2024 CloudWeGo Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package vm
+
+import (
+ "encoding"
+ "encoding/json"
+ "fmt"
+ "math"
+ "reflect"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/encoder/alg"
+ "github.com/bytedance/sonic/internal/encoder/ir"
+ "github.com/bytedance/sonic/internal/encoder/vars"
+ "github.com/bytedance/sonic/internal/rt"
+ "github.com/bytedance/sonic/internal/base64"
+)
+
+const (
+ _S_cond = iota
+ _S_init
+)
+
+var (
+ _T_json_Marshaler = rt.UnpackType(vars.JsonMarshalerType)
+ _T_encoding_TextMarshaler = rt.UnpackType(vars.EncodingTextMarshalerType)
+)
+
+func print_instr(buf []byte, pc int, op ir.Op, ins *ir.Instr, p unsafe.Pointer) {
+ if len(buf) > 20 {
+ fmt.Println(string(buf[len(buf)-20:]))
+ } else {
+ fmt.Println(string(buf))
+ }
+ fmt.Printf("pc %04d, op %v, ins %#v, ptr: %x\n", pc, op, ins.Disassemble(), p)
+}
+
+func Execute(b *[]byte, p unsafe.Pointer, s *vars.Stack, flags uint64, prog *ir.Program) error {
+ pl := len(*prog)
+ if pl <= 0 {
+ return nil
+ }
+
+ var buf = *b
+ var x int
+ var q unsafe.Pointer
+ var f uint64
+
+ var pro = &(*prog)[0]
+ for pc := 0; pc < pl; {
+ ins := (*ir.Instr)(rt.Add(unsafe.Pointer(pro), ir.OpSize*uintptr(pc)))
+ pc++
+ op := ins.Op()
+
+ switch op {
+ case ir.OP_goto:
+ pc = ins.Vi()
+ continue
+ case ir.OP_byte:
+ v := ins.Byte()
+ buf = append(buf, v)
+ case ir.OP_text:
+ v := ins.Vs()
+ buf = append(buf, v...)
+ case ir.OP_deref:
+ p = *(*unsafe.Pointer)(p)
+ case ir.OP_index:
+ p = rt.Add(p, uintptr(ins.I64()))
+ case ir.OP_load:
+ // NOTICE: load CANNOT change f!
+ x, _, p, q = s.Load()
+ case ir.OP_save:
+ if !s.Save(x, f, p, q) {
+ return vars.ERR_too_deep
+ }
+ case ir.OP_drop:
+ x, f, p, q = s.Drop()
+ case ir.OP_drop_2:
+ s.Drop()
+ x, f, p, q = s.Drop()
+ case ir.OP_recurse:
+ vt, pv := ins.Vp2()
+ f := flags
+ if pv {
+ f |= (1 << alg.BitPointerValue)
+ }
+ *b = buf
+ if vt.Indirect() {
+ if err := EncodeTypedPointer(b, vt, (*unsafe.Pointer)(rt.NoEscape(unsafe.Pointer(&p))), s, f); err != nil {
+ return err
+ }
+ } else {
+ vp := (*unsafe.Pointer)(p)
+ if err := EncodeTypedPointer(b, vt, vp, s, f); err != nil {
+ return err
+ }
+ }
+ buf = *b
+ case ir.OP_is_nil:
+ if is_nil(p) {
+ pc = ins.Vi()
+ continue
+ }
+ case ir.OP_is_nil_p1:
+ if (*rt.GoEface)(p).Value == nil {
+ pc = ins.Vi()
+ continue
+ }
+ case ir.OP_null:
+ buf = append(buf, 'n', 'u', 'l', 'l')
+ case ir.OP_str:
+ v := *(*string)(p)
+ buf = alg.Quote(buf, v, false)
+ case ir.OP_bool:
+ if *(*bool)(p) {
+ buf = append(buf, 't', 'r', 'u', 'e')
+ } else {
+ buf = append(buf, 'f', 'a', 'l', 's', 'e')
+ }
+ case ir.OP_i8:
+ v := *(*int8)(p)
+ buf = alg.I64toa(buf, int64(v))
+ case ir.OP_i16:
+ v := *(*int16)(p)
+ buf = alg.I64toa(buf, int64(v))
+ case ir.OP_i32:
+ v := *(*int32)(p)
+ buf = alg.I64toa(buf, int64(v))
+ case ir.OP_i64:
+ v := *(*int64)(p)
+ buf = alg.I64toa(buf, int64(v))
+ case ir.OP_u8:
+ v := *(*uint8)(p)
+ buf = alg.U64toa(buf, uint64(v))
+ case ir.OP_u16:
+ v := *(*uint16)(p)
+ buf = alg.U64toa(buf, uint64(v))
+ case ir.OP_u32:
+ v := *(*uint32)(p)
+ buf = alg.U64toa(buf, uint64(v))
+ case ir.OP_u64:
+ v := *(*uint64)(p)
+ buf = alg.U64toa(buf, uint64(v))
+ case ir.OP_f32:
+ v := *(*float32)(p)
+ if math.IsNaN(float64(v)) || math.IsInf(float64(v), 0) {
+ if flags&(1<<alg.BitEncodeNullForInfOrNan) != 0 {
+ buf = append(buf, 'n', 'u', 'l', 'l')
+ continue
+ }
+ return vars.ERR_nan_or_infinite
+ }
+ buf = alg.F32toa(buf, v)
+ case ir.OP_f64:
+ v := *(*float64)(p)
+ if math.IsNaN(v) || math.IsInf(v, 0) {
+ if flags&(1<<alg.BitEncodeNullForInfOrNan) != 0 {
+ buf = append(buf, 'n', 'u', 'l', 'l')
+ continue
+ }
+ return vars.ERR_nan_or_infinite
+ }
+ buf = alg.F64toa(buf, v)
+ case ir.OP_bin:
+ v := *(*[]byte)(p)
+ buf = base64.EncodeBase64(buf, v)
+ case ir.OP_quote:
+ v := *(*string)(p)
+ buf = alg.Quote(buf, v, true)
+ case ir.OP_number:
+ v := *(*json.Number)(p)
+ if v == "" {
+ buf = append(buf, '0')
+ } else if !rt.IsValidNumber(string(v)) {
+ return vars.Error_number(v)
+ } else {
+ buf = append(buf, v...)
+ }
+ case ir.OP_eface:
+ *b = buf
+ if err := EncodeTypedPointer(b, *(**rt.GoType)(p), (*unsafe.Pointer)(rt.Add(p, 8)), s, flags); err != nil {
+ return err
+ }
+ buf = *b
+ case ir.OP_iface:
+ *b = buf
+ if err := EncodeTypedPointer(b, (*(**rt.GoItab)(p)).Vt, (*unsafe.Pointer)(rt.Add(p, 8)), s, flags); err != nil {
+ return err
+ }
+ buf = *b
+ case ir.OP_is_zero_map:
+ v := *(**rt.GoMap)(p)
+ if v == nil || v.Count == 0 {
+ pc = ins.Vi()
+ continue
+ }
+ case ir.OP_map_iter:
+ v := *(**rt.GoMap)(p)
+ vt := ins.Vr()
+ it, err := alg.IteratorStart(rt.MapType(vt), v, flags)
+ if err != nil {
+ return err
+ }
+ q = unsafe.Pointer(it)
+ case ir.OP_map_stop:
+ it := (*alg.MapIterator)(q)
+ alg.IteratorStop(it)
+ q = nil
+ case ir.OP_map_value_next:
+ it := (*alg.MapIterator)(q)
+ p = it.It.V
+ alg.IteratorNext(it)
+ case ir.OP_map_check_key:
+ it := (*alg.MapIterator)(q)
+ if it.It.K == nil {
+ pc = ins.Vi()
+ continue
+ }
+ p = it.It.K
+ case ir.OP_marshal_text:
+ vt, itab := ins.Vtab()
+ var it rt.GoIface
+ switch vt.Kind() {
+ case reflect.Interface :
+ if is_nil(p) {
+ buf = append(buf, 'n', 'u', 'l', 'l')
+ continue
+ }
+ it = rt.AssertI2I(_T_encoding_TextMarshaler, *(*rt.GoIface)(p))
+ case reflect.Ptr, reflect.Map : it = convT2I(p, true, itab)
+ default : it = convT2I(p, !vt.Indirect(), itab)
+ }
+ if err := alg.EncodeTextMarshaler(&buf, *(*encoding.TextMarshaler)(unsafe.Pointer(&it)), (flags)); err != nil {
+ return err
+ }
+ case ir.OP_marshal_text_p:
+ _, itab := ins.Vtab()
+ it := convT2I(p, false, itab)
+ if err := alg.EncodeTextMarshaler(&buf, *(*encoding.TextMarshaler)(unsafe.Pointer(&it)), (flags)); err != nil {
+ return err
+ }
+ case ir.OP_map_write_key:
+ if has_opts(flags, alg.BitSortMapKeys) {
+ v := *(*string)(p)
+ buf = alg.Quote(buf, v, false)
+ pc = ins.Vi()
+ continue
+ }
+ case ir.OP_slice_len:
+ v := (*rt.GoSlice)(p)
+ x = v.Len
+ p = v.Ptr
+ // mark iteration start: p already points at element 0, so the first OP_slice_next must not advance it
+ f |= 1<<_S_init
+ case ir.OP_slice_next:
+ if x == 0 {
+ pc = ins.Vi()
+ continue
+ }
+ x--
+ if has_opts(f, _S_init) {
+ f &= ^uint64(1 << _S_init)
+ } else {
+ p = rt.Add(p, uintptr(ins.Vlen()))
+ }
+ case ir.OP_cond_set:
+ f |= 1<<_S_cond
+ case ir.OP_cond_testc:
+ if has_opts(f, _S_cond) {
+ f &= ^uint64(1 << _S_cond)
+ pc = ins.Vi()
+ continue
+ }
+ case ir.OP_is_zero_1:
+ if *(*uint8)(p) == 0 {
+ pc = ins.Vi()
+ continue
+ }
+ case ir.OP_is_zero_2:
+ if *(*uint16)(p) == 0 {
+ pc = ins.Vi()
+ continue
+ }
+ case ir.OP_is_zero_4:
+ if *(*uint32)(p) == 0 {
+ pc = ins.Vi()
+ continue
+ }
+ case ir.OP_is_zero_8:
+ if *(*uint64)(p) == 0 {
+ pc = ins.Vi()
+ continue
+ }
+ case ir.OP_empty_arr:
+ if has_opts(flags, alg.BitNoNullSliceOrMap) {
+ buf = append(buf, '[', ']')
+ } else {
+ buf = append(buf, 'n', 'u', 'l', 'l')
+ }
+ case ir.OP_empty_obj:
+ if has_opts(flags, alg.BitNoNullSliceOrMap) {
+ buf = append(buf, '{', '}')
+ } else {
+ buf = append(buf, 'n', 'u', 'l', 'l')
+ }
+ case ir.OP_marshal:
+ vt, itab := ins.Vtab()
+ var it rt.GoIface
+ switch vt.Kind() {
+ case reflect.Interface :
+ if is_nil(p) {
+ buf = append(buf, 'n', 'u', 'l', 'l')
+ continue
+ }
+ it = rt.AssertI2I(_T_json_Marshaler, *(*rt.GoIface)(p))
+ case reflect.Ptr, reflect.Map : it = convT2I(p, true, itab)
+ default : it = convT2I(p, !vt.Indirect(), itab)
+ }
+ if err := alg.EncodeJsonMarshaler(&buf, *(*json.Marshaler)(unsafe.Pointer(&it)), (flags)); err != nil {
+ return err
+ }
+ case ir.OP_marshal_p:
+ _, itab := ins.Vtab()
+ it := convT2I(p, false, itab)
+ if err := alg.EncodeJsonMarshaler(&buf, *(*json.Marshaler)(unsafe.Pointer(&it)), (flags)); err != nil {
+ return err
+ }
+ default:
+ panic(fmt.Sprintf("not implement %s at %d", ins.Op().String(), pc))
+ }
+ }
+
+ *b = buf
+ return nil
+}
+
+// func to_buf(w unsafe.Pointer, l int, c int) []byte {
+// return rt.BytesFrom(unsafe.Pointer(uintptr(w)-uintptr(l)), l, c)
+// }
+
+// func from_buf(buf []byte) (unsafe.Pointer, int, int) {
+// return rt.IndexByte(buf, len(buf)), len(buf), cap(buf)
+// }
+
+func has_opts(opts uint64, bit int) bool {
+ return opts & (1<<bit) != 0
+}
+
+func is_nil(p unsafe.Pointer) bool {
+ return *(*unsafe.Pointer)(p) == nil
+}
+
+func convT2I(ptr unsafe.Pointer, deref bool, itab *rt.GoItab) (rt.GoIface) {
+ if deref {
+ ptr = *(*unsafe.Pointer)(ptr)
+ }
+ return rt.GoIface{
+ Itab: itab,
+ Value: ptr,
+ }
+}
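
Execute is a classic switch-threaded interpreter: pc is incremented before dispatch, and branch opcodes (OP_goto and the OP_is_* family) overwrite pc and continue instead of falling through. A toy, self-contained version of that loop shape:

package main

import "fmt"

type op int

const (
	opByte op = iota // append one byte to the output
	opGoto           // jump to instruction vi
	opEnd            // stop execution
)

type instr struct {
	op op
	vi int  // branch target for opGoto
	b  byte // payload for opByte
}

func execute(prog []instr) []byte {
	var buf []byte
	for pc := 0; pc < len(prog); {
		ins := prog[pc]
		pc++ // advance first, as in Execute above
		switch ins.op {
		case opByte:
			buf = append(buf, ins.b)
		case opGoto:
			pc = ins.vi // branches overwrite pc
		case opEnd:
			return buf
		}
	}
	return buf
}

func main() {
	prog := []instr{
		{op: opByte, b: '['},
		{op: opGoto, vi: 3}, // skip the next instruction
		{op: opByte, b: 'x'},
		{op: opByte, b: ']'},
		{op: opEnd},
	}
	fmt.Println(string(execute(prog))) // []
}
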
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go117.go b/vendor/github.com/bytedance/sonic/internal/encoder/x86/asm_stubs_amd64_go117.go
index 5a3cf1a60..eec9f6c58 100644
--- a/vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go117.go
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/x86/asm_stubs_amd64_go117.go
@@ -1,3 +1,4 @@
+//go:build go1.17 && !go1.21
// +build go1.17,!go1.21
// Copyright 2023 CloudWeGo Authors
@@ -14,24 +15,25 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package encoder
+package x86
import (
- `strconv`
- `unsafe`
+ "strconv"
+ "unsafe"
- `github.com/bytedance/sonic/internal/jit`
- `github.com/twitchyliquid64/golang-asm/obj`
- `github.com/twitchyliquid64/golang-asm/obj/x86`
+ "github.com/bytedance/sonic/internal/jit"
+ "github.com/bytedance/sonic/internal/rt"
+ "github.com/twitchyliquid64/golang-asm/obj"
+ "github.com/twitchyliquid64/golang-asm/obj/x86"
)
var (
- _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))
+ _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&rt.RuntimeWriteBarrier))))
- _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
+ _F_gcWriteBarrierAX = jit.Func(rt.GcWriteBarrierAX)
)
-func (self *_Assembler) WritePtr(i int, ptr obj.Addr, rec obj.Addr) {
+func (self *Assembler) WritePtr(i int, ptr obj.Addr, rec obj.Addr) {
if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
panic("rec contains AX!")
}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go121.go b/vendor/github.com/bytedance/sonic/internal/encoder/x86/asm_stubs_amd64_go121.go
index 9f7ff65e6..3d70021e4 100644
--- a/vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go121.go
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/x86/asm_stubs_amd64_go121.go
@@ -1,4 +1,5 @@
-// +build go1.21,!go1.23
+//go:build go1.21 && !go1.24
+// +build go1.21,!go1.24
// Copyright 2023 CloudWeGo Authors
//
@@ -14,24 +15,25 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package encoder
+package x86
import (
- `strconv`
- `unsafe`
+ "strconv"
+ "unsafe"
- `github.com/bytedance/sonic/internal/jit`
- `github.com/twitchyliquid64/golang-asm/obj`
- `github.com/twitchyliquid64/golang-asm/obj/x86`
+ "github.com/bytedance/sonic/internal/jit"
+ "github.com/bytedance/sonic/internal/rt"
+ "github.com/twitchyliquid64/golang-asm/obj"
+ "github.com/twitchyliquid64/golang-asm/obj/x86"
)
var (
- _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))
+ _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&rt.RuntimeWriteBarrier))))
- _F_gcWriteBarrier2 = jit.Func(gcWriteBarrier2)
+ _F_gcWriteBarrier2 = jit.Func(rt.GcWriteBarrier2)
)
-func (self *_Assembler) WritePtr(i int, ptr obj.Addr, old obj.Addr) {
+func (self *Assembler) WritePtr(i int, ptr obj.Addr, old obj.Addr) {
if old.Reg == x86.REG_AX || old.Index == x86.REG_AX {
panic("rec contains AX!")
}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/x86/assembler_regabi_amd64.go b/vendor/github.com/bytedance/sonic/internal/encoder/x86/assembler_regabi_amd64.go
new file mode 100644
index 000000000..95f452a02
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/x86/assembler_regabi_amd64.go
@@ -0,0 +1,1195 @@
+//go:build go1.17 && !go1.24
+// +build go1.17,!go1.24
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package x86
+
+import (
+ "fmt"
+ "reflect"
+ "strconv"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/cpu"
+ "github.com/bytedance/sonic/internal/encoder/alg"
+ "github.com/bytedance/sonic/internal/encoder/ir"
+ "github.com/bytedance/sonic/internal/encoder/vars"
+ "github.com/bytedance/sonic/internal/jit"
+ "github.com/bytedance/sonic/internal/native/types"
+ "github.com/twitchyliquid64/golang-asm/obj"
+ "github.com/twitchyliquid64/golang-asm/obj/x86"
+
+ "github.com/bytedance/sonic/internal/native"
+ "github.com/bytedance/sonic/internal/rt"
+)
+
+/** Register Allocations
+ *
+ * State Registers:
+ *
+ * %rbx : stack base
+ * %rdi : result pointer
+ * %rsi : result length
+ * %rdx : result capacity
+ * %r12 : sp->p
+ * %r13 : sp->q
+ * %r14 : sp->x
+ * %r15 : sp->f
+ *
+ * Error Registers:
+ *
+ * %r10 : error type register
+ * %r11 : error pointer register
+ */
+
+/** Function Prototype & Stack Map
+ *
+ * func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error)
+ *
+ * buf : (FP)
+ * p : 8(FP)
+ * sb : 16(FP)
+ * fv : 24(FP)
+ * err.vt : 32(FP)
+ * err.vp : 40(FP)
+ */
+
+const (
+ _S_cond = iota
+ _S_init
+)
+
+const (
+ _FP_args = 32 // 32 bytes for spill registers of arguments
+ _FP_fargs = 40 // 40 bytes for passing arguments to other Go functions
+ _FP_saves = 64 // 64 bytes for saving the registers before CALL instructions
+ _FP_locals = 24 // 24 bytes for local variables
+)
+
+const (
+ _FP_loffs = _FP_fargs + _FP_saves
+ FP_offs = _FP_loffs + _FP_locals
+ // _FP_offs = _FP_loffs + _FP_locals + _FP_debug
+ _FP_size = FP_offs + 8 // 8 bytes for the parent frame pointer
+ _FP_base = _FP_size + 8 // 8 bytes for the return address
+)
+
+const (
+ _FM_exp32 = 0x7f800000
+ _FM_exp64 = 0x7ff0000000000000
+)
+
+const (
+ _IM_null = 0x6c6c756e // 'null'
+ _IM_true = 0x65757274 // 'true'
+ _IM_fals = 0x736c6166 // 'fals' ('false' without the 'e')
+ _IM_open = 0x00225c22 // '"\"∅'
+ _IM_array = 0x5d5b // '[]'
+ _IM_object = 0x7d7b // '{}'
+ _IM_mulv = -0x5555555555555555
+)
+
+const (
+ _LB_more_space = "_more_space"
+ _LB_more_space_return = "_more_space_return_"
+)
+
+const (
+ _LB_error = "_error"
+ _LB_error_too_deep = "_error_too_deep"
+ _LB_error_invalid_number = "_error_invalid_number"
+ _LB_error_nan_or_infinite = "_error_nan_or_infinite"
+ _LB_panic = "_panic"
+)
+
+var (
+ _AX = jit.Reg("AX")
+ _BX = jit.Reg("BX")
+ _CX = jit.Reg("CX")
+ _DX = jit.Reg("DX")
+ _DI = jit.Reg("DI")
+ _SI = jit.Reg("SI")
+ _BP = jit.Reg("BP")
+ _SP = jit.Reg("SP")
+ _R8 = jit.Reg("R8")
+ _R9 = jit.Reg("R9")
+)
+
+var (
+ _X0 = jit.Reg("X0")
+ _Y0 = jit.Reg("Y0")
+)
+
+var (
+ _ST = jit.Reg("R15") // can't use R14 since it's always scratched by Go...
+ _RP = jit.Reg("DI")
+ _RL = jit.Reg("SI")
+ _RC = jit.Reg("DX")
+)
+
+var (
+ _LR = jit.Reg("R9")
+ _ET = jit.Reg("AX")
+ _EP = jit.Reg("BX")
+)
+
+var (
+ _SP_p = jit.Reg("R10") // saved on BX when call_c
+ _SP_q = jit.Reg("R11") // saved on BP when call_c
+ _SP_x = jit.Reg("R12")
+ _SP_f = jit.Reg("R13")
+)
+
+var (
+ _ARG_rb = jit.Ptr(_SP, _FP_base)
+ _ARG_vp = jit.Ptr(_SP, _FP_base+8)
+ _ARG_sb = jit.Ptr(_SP, _FP_base+16)
+ _ARG_fv = jit.Ptr(_SP, _FP_base+24)
+)
+
+var (
+ _RET_et = _ET
+ _RET_ep = _EP
+)
+
+var (
+ _VAR_sp = jit.Ptr(_SP, _FP_fargs+_FP_saves)
+ _VAR_dn = jit.Ptr(_SP, _FP_fargs+_FP_saves+8)
+ _VAR_vp = jit.Ptr(_SP, _FP_fargs+_FP_saves+16)
+)
+
+var (
+ _REG_ffi = []obj.Addr{_RP, _RL, _RC, _SP_q}
+ _REG_b64 = []obj.Addr{_SP_p, _SP_q}
+
+ _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC}
+ _REG_ms = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR}
+ _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL}
+)
+
+type Assembler struct {
+ Name string
+ jit.BaseAssembler
+ p ir.Program
+ x int
+}
+
+func NewAssembler(p ir.Program) *Assembler {
+ return new(Assembler).Init(p)
+}
+
+/** Assembler Interface **/
+
+func (self *Assembler) Load() vars.Encoder {
+ return ptoenc(self.BaseAssembler.Load("encode_"+self.Name, _FP_size, _FP_args, vars.ArgPtrs, vars.LocalPtrs))
+}
+
+func (self *Assembler) Init(p ir.Program) *Assembler {
+ self.p = p
+ self.BaseAssembler.Init(self.compile)
+ return self
+}
+
+func (self *Assembler) compile() {
+ self.prologue()
+ self.instrs()
+ self.epilogue()
+ self.builtins()
+}
+
+/** Assembler Stages **/
+
+var _OpFuncTab = [256]func(*Assembler, *ir.Instr){
+ ir.OP_null: (*Assembler)._asm_OP_null,
+ ir.OP_empty_arr: (*Assembler)._asm_OP_empty_arr,
+ ir.OP_empty_obj: (*Assembler)._asm_OP_empty_obj,
+ ir.OP_bool: (*Assembler)._asm_OP_bool,
+ ir.OP_i8: (*Assembler)._asm_OP_i8,
+ ir.OP_i16: (*Assembler)._asm_OP_i16,
+ ir.OP_i32: (*Assembler)._asm_OP_i32,
+ ir.OP_i64: (*Assembler)._asm_OP_i64,
+ ir.OP_u8: (*Assembler)._asm_OP_u8,
+ ir.OP_u16: (*Assembler)._asm_OP_u16,
+ ir.OP_u32: (*Assembler)._asm_OP_u32,
+ ir.OP_u64: (*Assembler)._asm_OP_u64,
+ ir.OP_f32: (*Assembler)._asm_OP_f32,
+ ir.OP_f64: (*Assembler)._asm_OP_f64,
+ ir.OP_str: (*Assembler)._asm_OP_str,
+ ir.OP_bin: (*Assembler)._asm_OP_bin,
+ ir.OP_quote: (*Assembler)._asm_OP_quote,
+ ir.OP_number: (*Assembler)._asm_OP_number,
+ ir.OP_eface: (*Assembler)._asm_OP_eface,
+ ir.OP_iface: (*Assembler)._asm_OP_iface,
+ ir.OP_byte: (*Assembler)._asm_OP_byte,
+ ir.OP_text: (*Assembler)._asm_OP_text,
+ ir.OP_deref: (*Assembler)._asm_OP_deref,
+ ir.OP_index: (*Assembler)._asm_OP_index,
+ ir.OP_load: (*Assembler)._asm_OP_load,
+ ir.OP_save: (*Assembler)._asm_OP_save,
+ ir.OP_drop: (*Assembler)._asm_OP_drop,
+ ir.OP_drop_2: (*Assembler)._asm_OP_drop_2,
+ ir.OP_recurse: (*Assembler)._asm_OP_recurse,
+ ir.OP_is_nil: (*Assembler)._asm_OP_is_nil,
+ ir.OP_is_nil_p1: (*Assembler)._asm_OP_is_nil_p1,
+ ir.OP_is_zero_1: (*Assembler)._asm_OP_is_zero_1,
+ ir.OP_is_zero_2: (*Assembler)._asm_OP_is_zero_2,
+ ir.OP_is_zero_4: (*Assembler)._asm_OP_is_zero_4,
+ ir.OP_is_zero_8: (*Assembler)._asm_OP_is_zero_8,
+ ir.OP_is_zero_map: (*Assembler)._asm_OP_is_zero_map,
+ ir.OP_goto: (*Assembler)._asm_OP_goto,
+ ir.OP_map_iter: (*Assembler)._asm_OP_map_iter,
+ ir.OP_map_stop: (*Assembler)._asm_OP_map_stop,
+ ir.OP_map_check_key: (*Assembler)._asm_OP_map_check_key,
+ ir.OP_map_write_key: (*Assembler)._asm_OP_map_write_key,
+ ir.OP_map_value_next: (*Assembler)._asm_OP_map_value_next,
+ ir.OP_slice_len: (*Assembler)._asm_OP_slice_len,
+ ir.OP_slice_next: (*Assembler)._asm_OP_slice_next,
+ ir.OP_marshal: (*Assembler)._asm_OP_marshal,
+ ir.OP_marshal_p: (*Assembler)._asm_OP_marshal_p,
+ ir.OP_marshal_text: (*Assembler)._asm_OP_marshal_text,
+ ir.OP_marshal_text_p: (*Assembler)._asm_OP_marshal_text_p,
+ ir.OP_cond_set: (*Assembler)._asm_OP_cond_set,
+ ir.OP_cond_testc: (*Assembler)._asm_OP_cond_testc,
+}
+
+func (self *Assembler) instr(v *ir.Instr) {
+ if fn := _OpFuncTab[v.Op()]; fn != nil {
+ fn(self, v)
+ } else {
+ panic(fmt.Sprintf("invalid opcode: %d", v.Op()))
+ }
+}
+
+func (self *Assembler) instrs() {
+ for i, v := range self.p {
+ self.Mark(i)
+ self.instr(&v)
+ self.debug_instr(i, &v)
+ }
+}
+
+func (self *Assembler) builtins() {
+ self.more_space()
+ self.error_too_deep()
+ self.error_invalid_number()
+ self.error_nan_or_infinite()
+ self.go_panic()
+}
+
+func (self *Assembler) epilogue() {
+ self.Mark(len(self.p))
+ self.Emit("XORL", _ET, _ET)
+ self.Emit("XORL", _EP, _EP)
+ self.Link(_LB_error)
+ self.Emit("MOVQ", _ARG_rb, _CX) // MOVQ rb<>+0(FP), CX
+ self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8)) // MOVQ RL, 8(CX)
+ self.Emit("MOVQ", jit.Imm(0), _ARG_rb) // MOVQ AX, rb<>+0(FP)
+ self.Emit("MOVQ", jit.Imm(0), _ARG_vp) // MOVQ BX, vp<>+8(FP)
+ self.Emit("MOVQ", jit.Imm(0), _ARG_sb) // MOVQ CX, sb<>+16(FP)
+ self.Emit("MOVQ", jit.Ptr(_SP, FP_offs), _BP) // MOVQ _FP_offs(SP), BP
+ self.Emit("ADDQ", jit.Imm(_FP_size), _SP) // ADDQ $_FP_size, SP
+ self.Emit("RET") // RET
+}
+
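+// The prologue below spills the four arguments (rb, vp, sb, fv), which arrive
+// in AX/BX/CX/DI under the Go 1.17+ register ABI, into the frame so helpers
+// that clobber those registers can reload them; it then unpacks the output
+// buffer header (ptr/len/cap) into RP/RL/RC and zeroes the scratch state.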
+func (self *Assembler) prologue() {
+ self.Emit("SUBQ", jit.Imm(_FP_size), _SP) // SUBQ $_FP_size, SP
+ self.Emit("MOVQ", _BP, jit.Ptr(_SP, FP_offs)) // MOVQ BP, _FP_offs(SP)
+ self.Emit("LEAQ", jit.Ptr(_SP, FP_offs), _BP) // LEAQ _FP_offs(SP), BP
+ self.Emit("MOVQ", _AX, _ARG_rb) // MOVQ AX, rb<>+0(FP)
+ self.Emit("MOVQ", _BX, _ARG_vp) // MOVQ BX, vp<>+8(FP)
+ self.Emit("MOVQ", _CX, _ARG_sb) // MOVQ CX, sb<>+16(FP)
+ self.Emit("MOVQ", _DI, _ARG_fv) // MOVQ DI, rb<>+24(FP)
+ self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP) // MOVQ (AX) , DI
+ self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL) // MOVQ 8(AX) , SI
+ self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), DX
+ self.Emit("MOVQ", _BX, _SP_p) // MOVQ BX, R10
+ self.Emit("MOVQ", _CX, _ST) // MOVQ CX, R8
+ self.Emit("XORL", _SP_x, _SP_x) // XORL R10, R12
+ self.Emit("XORL", _SP_f, _SP_f) // XORL R11, R13
+ self.Emit("XORL", _SP_q, _SP_q) // XORL R13, R11
+}
+
+/** Assembler Inline Functions **/
+
+func (self *Assembler) xsave(reg ...obj.Addr) {
+ for i, v := range reg {
+ if i > _FP_saves/8-1 {
+ panic("too many registers to save")
+ } else {
+ self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs+int64(i)*8))
+ }
+ }
+}
+
+func (self *Assembler) xload(reg ...obj.Addr) {
+ for i, v := range reg {
+ if i > _FP_saves/8-1 {
+ panic("too many registers to load")
+ } else {
+ self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs+int64(i)*8), v)
+ }
+ }
+}
+
+func (self *Assembler) rbuf_di() {
+ if _RP.Reg != x86.REG_DI {
+ panic("register allocation messed up: RP != DI")
+ } else {
+ self.Emit("ADDQ", _RL, _RP)
+ }
+}
+
+func (self *Assembler) store_int(nd int, fn obj.Addr, ins string) {
+ self.check_size(nd)
+ self.save_c() // SAVE $C_regs
+ self.rbuf_di() // MOVQ RP, DI
+ self.Emit(ins, jit.Ptr(_SP_p, 0), _SI) // $ins (SP.p), SI
+ self.call_c(fn) // CALL_C $fn
+ self.Emit("ADDQ", _AX, _RL) // ADDQ AX, RL
+}
+
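+// store_str emits the literal bytes of s using the widest stores available:
+// a 4-byte literal such as "true" becomes a single MOVL, while a 7-byte one
+// becomes MOVL+MOVW+MOVB. The caller is expected to have reserved the space
+// already and to advance RL afterwards (see add_text below).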
+func (self *Assembler) store_str(s string) {
+ i := 0
+ m := rt.Str2Mem(s)
+
+ /* 8-byte stores */
+ for i <= len(m)-8 {
+ self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX) // MOVQ $s[i:], AX
+ self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i))) // MOVQ AX, i(RP)(RL)
+ i += 8
+ }
+
+ /* 4-byte stores */
+ if i <= len(m)-4 {
+ self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVL $s[i:], i(RP)(RL)
+ i += 4
+ }
+
+ /* 2-byte stores */
+ if i <= len(m)-2 {
+ self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVW $s[i:], i(RP)(RL)
+ i += 2
+ }
+
+ /* last byte */
+ if i < len(m) {
+ self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i))) // MOVB $s[i:], i(RP)(RL)
+ }
+}
+
+func (self *Assembler) check_size(n int) {
+ self.check_size_rl(jit.Ptr(_RL, int64(n)))
+}
+
+func (self *Assembler) check_size_r(r obj.Addr, d int) {
+ self.check_size_rl(jit.Sib(_RL, r, 1, int64(d)))
+}
+
+func (self *Assembler) check_size_rl(v obj.Addr) {
+ idx := self.x
+ key := _LB_more_space_return + strconv.Itoa(idx)
+
+ /* the following code relies on LR == R9 to work */
+ if _LR.Reg != x86.REG_R9 {
+ panic("register allocation messed up: LR != R9")
+ }
+
+ /* check for buffer capacity */
+ self.x++
+ self.Emit("LEAQ", v, _AX) // LEAQ $v, AX
+ self.Emit("CMPQ", _AX, _RC) // CMPQ AX, RC
+ self.Sjmp("JBE", key) // JBE _more_space_return_{n}
+ self.slice_grow_ax(key) // GROW $key
+ self.Link(key) // _more_space_return_{n}:
+}
+
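+// slice_grow_ax implements a resumable grow: it loads the address of the
+// label `ret` into R9 (the LR register checked above) and jumps to the shared
+// _more_space builtin, which grows the buffer and jumps back through LR,
+// landing on the _more_space_return_{n} link emitted by check_size_rl.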
+func (self *Assembler) slice_grow_ax(ret string) {
+ self.Byte(0x4c, 0x8d, 0x0d) // LEAQ ?(PC), R9
+ self.Sref(ret, 4) // .... &ret
+ self.Sjmp("JMP", _LB_more_space) // JMP _more_space
+}
+
+/** State Stack Helpers **/
+
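+// The state stack lives at ST: offset 0 holds the byte offset of the current
+// top, and each vars.StateSize frame stores the working set {SP.x, SP.f,
+// SP.p, SP.q} at +8/+16/+24/+32. WritePtr is used for the two pointer slots,
+// presumably so the saved pointers stay visible to the garbage collector.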
+func (self *Assembler) save_state() {
+ self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX) // MOVQ (ST), CX
+ self.Emit("LEAQ", jit.Ptr(_CX, vars.StateSize), _R9) // LEAQ vars.StateSize(CX), R9
+ self.Emit("CMPQ", _R9, jit.Imm(vars.StackLimit)) // CMPQ R9, $vars.StackLimit
+ self.Sjmp("JAE", _LB_error_too_deep) // JA _error_too_deep
+ self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8)) // MOVQ SP.x, 8(ST)(CX)
+ self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16)) // MOVQ SP.f, 16(ST)(CX)
+ self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24)) // MOVQ SP.p, 24(ST)(CX)
+ self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32)) // MOVQ SP.q, 32(ST)(CX)
+ self.Emit("MOVQ", _R9, jit.Ptr(_ST, 0)) // MOVQ R9, (ST)
+}
+
+func (self *Assembler) drop_state(decr int64) {
+ self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
+ self.Emit("SUBQ", jit.Imm(decr), _AX) // SUBQ $decr, AX
+ self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST)
+ self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _SP_x) // MOVQ 8(ST)(AX), SP.x
+ self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 16), _SP_f) // MOVQ 16(ST)(AX), SP.f
+ self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 24), _SP_p) // MOVQ 24(ST)(AX), SP.p
+ self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 32), _SP_q) // MOVQ 32(ST)(AX), SP.q
+ self.Emit("PXOR", _X0, _X0) // PXOR X0, X0
+ self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX)
+ self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24)) // MOVOU X0, 24(ST)(AX)
+}
+
+/** Buffer Helpers **/
+
+func (self *Assembler) add_char(ch byte) {
+ self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVB $ch, (RP)(RL)
+ self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL
+}
+
+func (self *Assembler) add_long(ch uint32, n int64) {
+ self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVL $ch, (RP)(RL)
+ self.Emit("ADDQ", jit.Imm(n), _RL) // ADDQ $n, RL
+}
+
+func (self *Assembler) add_text(ss string) {
+ self.store_str(ss) // TEXT $ss
+ self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL) // ADDQ ${len(ss)}, RL
+}
+
+// get *buf at AX
+func (self *Assembler) prep_buffer_AX() {
+ self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX
+ self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX)
+}
+
+func (self *Assembler) save_buffer() {
+ self.Emit("MOVQ", _ARG_rb, _CX) // MOVQ rb<>+0(FP), CX
+ self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0)) // MOVQ RP, (CX)
+ self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8)) // MOVQ RL, 8(CX)
+ self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16)) // MOVQ RC, 16(CX)
+}
+
+// get *buf at AX
+func (self *Assembler) load_buffer_AX() {
+ self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX
+ self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP) // MOVQ (AX), RP
+ self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL) // MOVQ 8(AX), RL
+ self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), RC
+}
+
+/** Function Interface Helpers **/
+
+func (self *Assembler) call(pc obj.Addr) {
+ self.Emit("MOVQ", pc, _LR) // MOVQ $pc, AX
+ self.Rjmp("CALL", _LR) // CALL AX
+}
+
+func (self *Assembler) save_c() {
+ self.xsave(_REG_ffi...) // SAVE $REG_ffi
+}
+
+func (self *Assembler) call_b64(pc obj.Addr) {
+ self.xsave(_REG_b64...) // SAVE $REG_b64
+ self.call(pc) // CALL $pc
+ self.xload(_REG_b64...) // LOAD $REG_b64
+}
+
+func (self *Assembler) call_c(pc obj.Addr) {
+ self.Emit("XCHGQ", _SP_p, _BX)
+ self.call(pc) // CALL $pc
+ self.xload(_REG_ffi...) // LOAD $REG_ffi
+ self.Emit("XCHGQ", _SP_p, _BX)
+}
+
+func (self *Assembler) call_go(pc obj.Addr) {
+ self.xsave(_REG_all...) // SAVE $REG_all
+ self.call(pc) // CALL $pc
+ self.xload(_REG_all...) // LOAD $REG_all
+}
+
+func (self *Assembler) call_more_space(pc obj.Addr) {
+ self.xsave(_REG_ms...) // SAVE $REG_ms
+ self.call(pc) // CALL $pc
+ self.xload(_REG_ms...) // LOAD $REG_ms
+}
+
+func (self *Assembler) call_encoder(pc obj.Addr) {
+ self.xsave(_REG_enc...) // SAVE $REG_enc
+ self.call(pc) // CALL $pc
+ self.xload(_REG_enc...) // LOAD $REG_enc
+}
+
+func (self *Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) {
+ switch vt.Kind() {
+ case reflect.Interface:
+ self.call_marshaler_i(fn, it)
+ case reflect.Ptr, reflect.Map:
+ self.call_marshaler_v(fn, it, vt, true)
+ // a struct/array wrapping a single direct-interface type can itself be direct
+ default:
+ self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect())
+ }
+}
+
+var (
+ _F_assertI2I = jit.Func(rt.AssertI2I)
+)
+
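+// call_marshaler_i handles interface receivers: it asserts the stored eface
+// to the marshaler interface via assertI2I, writes a literal `null` when the
+// value is nil or the assertion fails, and otherwise calls fn with the buffer
+// in AX and the asserted itab/value pair in BX/CX.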
+func (self *Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) {
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
+ self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
+ self.Sjmp("JZ", "_null_{n}") // JZ _null_{n}
+ self.Emit("MOVQ", _AX, _BX) // MOVQ AX, BX
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX) // MOVQ 8(SP.p), CX
+ self.Emit("MOVQ", jit.Gtype(it), _AX) // MOVQ $it, AX
+ self.call_go(_F_assertI2I) // CALL_GO assertI2I
+ self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
+ self.Sjmp("JZ", "_null_{n}") // JZ _null_{n}
+ self.Emit("MOVQ", _BX, _CX) // MOVQ BX, CX
+ self.Emit("MOVQ", _AX, _BX) // MOVQ AX, BX
+ self.prep_buffer_AX()
+ self.Emit("MOVQ", _ARG_fv, _DI) // MOVQ ARG.fv, DI
+ self.call_go(fn) // CALL $fn
+ self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
+ self.Sjmp("JNZ", _LB_error) // JNZ _error
+ self.load_buffer_AX()
+ self.Sjmp("JMP", "_done_{n}") // JMP _done_{n}
+ self.Link("_null_{n}") // _null_{n}:
+ self.check_size(4) // SIZE $4
+ self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1)
+ self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL
+ self.Link("_done_{n}") // _done_{n}:
+}
+
+func (self *Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) {
+ self.prep_buffer_AX() // MOVE {buf}, (SP)
+ self.Emit("MOVQ", jit.Itab(it, vt), _BX) // MOVQ $(itab(it, vt)), BX
+
+ /* dereference the pointer if needed */
+ if !deref {
+ self.Emit("MOVQ", _SP_p, _CX) // MOVQ SP.p, CX
+ } else {
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX) // MOVQ 0(SP.p), CX
+ }
+
+ /* call the encoder, and perform error checks */
+ self.Emit("MOVQ", _ARG_fv, _DI) // MOVQ ARG.fv, DI
+ self.call_go(fn) // CALL $fn
+ self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
+ self.Sjmp("JNZ", _LB_error) // JNZ _error
+ self.load_buffer_AX()
+}
+
+/** Builtin: _more_space **/
+
+var (
+ _T_byte = jit.Type(vars.ByteType)
+ _F_growslice = jit.Func(rt.GrowSlice)
+
+ _T_json_Marshaler = rt.UnpackType(vars.JsonMarshalerType)
+ _T_encoding_TextMarshaler = rt.UnpackType(vars.EncodingTextMarshalerType)
+)
+
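+// _more_space marshals the current buffer into the rt.GrowSlice call frame:
+// under the register ABI the element type goes in AX and the old ptr/len/cap
+// in BX/CX/DI, with the requested length in SI; the grown slice appears to
+// come back in AX/BX/CX and is stored back into RP/RL/RC before returning
+// through LR.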
+// on entry, AX must hold the required buffer length n
+func (self *Assembler) more_space() {
+ self.Link(_LB_more_space)
+ self.Emit("MOVQ", _RP, _BX) // MOVQ DI, BX
+ self.Emit("MOVQ", _RL, _CX) // MOVQ SI, CX
+ self.Emit("MOVQ", _RC, _DI) // MOVQ DX, DI
+ self.Emit("MOVQ", _AX, _SI) // MOVQ AX, SI
+ self.Emit("MOVQ", _T_byte, _AX) // MOVQ $_T_byte, AX
+ self.call_more_space(_F_growslice) // CALL $pc
+ self.Emit("MOVQ", _AX, _RP) // MOVQ AX, DI
+ self.Emit("MOVQ", _BX, _RL) // MOVQ BX, SI
+ self.Emit("MOVQ", _CX, _RC) // MOVQ CX, DX
+ self.save_buffer() // SAVE {buf}
+ self.Rjmp("JMP", _LR) // JMP LR
+}
+
+/** Builtin Errors **/
+
+var (
+ _V_ERR_too_deep = jit.Imm(int64(uintptr(unsafe.Pointer(vars.ERR_too_deep))))
+ _V_ERR_nan_or_infinite = jit.Imm(int64(uintptr(unsafe.Pointer(vars.ERR_nan_or_infinite))))
+ _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(vars.ErrorType), vars.JsonUnsupportedValueType)
+)
+
+func (self *Assembler) error_too_deep() {
+ self.Link(_LB_error_too_deep)
+ self.Emit("MOVQ", _V_ERR_too_deep, _EP) // MOVQ $_V_ERR_too_deep, EP
+ self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET
+ self.Sjmp("JMP", _LB_error) // JMP _error
+}
+
+func (self *Assembler) error_invalid_number() {
+ self.Link(_LB_error_invalid_number)
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ 0(SP), AX
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX) // MOVQ 8(SP), BX
+ self.call_go(_F_error_number) // CALL_GO error_number
+ self.Sjmp("JMP", _LB_error) // JMP _error
+}
+
+func (self *Assembler) error_nan_or_infinite() {
+ self.Link(_LB_error_nan_or_infinite)
+ self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP) // MOVQ $_V_ERR_nan_or_infinite, EP
+ self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET
+ self.Sjmp("JMP", _LB_error) // JMP _error
+}
+
+/** String Encoding Routine **/
+
+var (
+ _F_quote = jit.Imm(int64(native.S_quote))
+ _F_panic = jit.Func(vars.GoPanic)
+)
+
+func (self *Assembler) go_panic() {
+ self.Link(_LB_panic)
+ self.Emit("MOVQ", _SP_p, _BX)
+ self.call_go(_F_panic)
+}
+
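+// encode_string quotes the string at SP.p in a loop: VAR.sp tracks how much
+// input has been consumed and VAR.dn how much output space remains. Judging
+// by the NOTQ fix-up below, the native quoter signals an exhausted output
+// buffer by returning the one's complement of the bytes it consumed, after
+// which the buffer is roughly doubled and the loop resumes where it left off.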
+func (self *Assembler) encode_string(doubleQuote bool) {
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX
+ self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
+ self.Sjmp("JZ", "_str_empty_{n}") // JZ _str_empty_{n}
+ self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
+ self.Sjmp("JNE", "_str_next_{n}")
+ self.Emit("MOVQ", jit.Imm(int64(vars.PanicNilPointerOfNonEmptyString)), _AX)
+ self.Sjmp("JMP", _LB_panic)
+ self.Link("_str_next_{n}")
+
+ /* opening quote, check for double quote */
+ if !doubleQuote {
+ self.check_size_r(_AX, 2) // SIZE $2
+ self.add_char('"') // CHAR $'"'
+ } else {
+ self.check_size_r(_AX, 6) // SIZE $6
+ self.add_long(_IM_open, 3) // TEXT $`"\"`
+ }
+
+ /* quoting loop */
+ self.Emit("XORL", _AX, _AX) // XORL AX, AX
+ self.Emit("MOVQ", _AX, _VAR_sp) // MOVQ AX, sp
+ self.Link("_str_loop_{n}") // _str_loop_{n}:
+ self.save_c() // SAVE $REG_ffi
+
+ /* load the output buffer first, and then input buffer,
+ * because the parameter registers collide with RP / RL / RC */
+ self.Emit("MOVQ", _RC, _CX) // MOVQ RC, CX
+ self.Emit("SUBQ", _RL, _CX) // SUBQ RL, CX
+ self.Emit("MOVQ", _CX, _VAR_dn) // MOVQ CX, dn
+ self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX) // LEAQ (RP)(RL), DX
+ self.Emit("LEAQ", _VAR_dn, _CX) // LEAQ dn, CX
+ self.Emit("MOVQ", _VAR_sp, _AX) // MOVQ sp, AX
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI) // MOVQ (SP.p), DI
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI) // MOVQ 8(SP.p), SI
+ self.Emit("ADDQ", _AX, _DI) // ADDQ AX, DI
+ self.Emit("SUBQ", _AX, _SI) // SUBQ AX, SI
+
+ /* set the flags based on `doubleQuote` */
+ if !doubleQuote {
+ self.Emit("XORL", _R8, _R8) // XORL R8, R8
+ } else {
+ self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8
+ }
+
+ /* call the native quoter */
+ self.call_c(_F_quote) // CALL quote
+ self.Emit("ADDQ", _VAR_dn, _RL) // ADDQ dn, RL
+
+ self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
+ self.Sjmp("JS", "_str_space_{n}") // JS _str_space_{n}
+
+ /* close the string, check for double quote */
+ if !doubleQuote {
+ self.check_size(1) // SIZE $1
+ self.add_char('"') // CHAR $'"'
+ self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n}
+ } else {
+ self.check_size(3) // SIZE $3
+ self.add_text("\\\"\"") // TEXT $'\""'
+ self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n}
+ }
+
+ /* not enough space to contain the quoted string */
+ self.Link("_str_space_{n}") // _str_space_{n}:
+ self.Emit("NOTQ", _AX) // NOTQ AX
+ self.Emit("ADDQ", _AX, _VAR_sp) // ADDQ AX, sp
+ self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX) // LEAQ (RC)(RC), AX
+ self.slice_grow_ax("_str_loop_{n}") // GROW _str_loop_{n}
+
+ /* empty string, check for double quote */
+ if !doubleQuote {
+ self.Link("_str_empty_{n}") // _str_empty_{n}:
+ self.check_size(2) // SIZE $2
+ self.add_text("\"\"") // TEXT $'""'
+ self.Link("_str_end_{n}") // _str_end_{n}:
+ } else {
+ self.Link("_str_empty_{n}") // _str_empty_{n}:
+ self.check_size(6) // SIZE $6
+ self.add_text("\"\\\"\\\"\"") // TEXT $'"\"\""'
+ self.Link("_str_end_{n}") // _str_end_{n}:
+ }
+}
+
+/** OpCode Assembler Functions **/
+
+var (
+ _F_f64toa = jit.Imm(int64(native.S_f64toa))
+ _F_f32toa = jit.Imm(int64(native.S_f32toa))
+ _F_i64toa = jit.Imm(int64(native.S_i64toa))
+ _F_u64toa = jit.Imm(int64(native.S_u64toa))
+ _F_b64encode = jit.Imm(int64(_subr__b64encode))
+)
+
+var (
+ _F_memmove = jit.Func(rt.Memmove)
+ _F_error_number = jit.Func(vars.Error_number)
+ _F_isValidNumber = jit.Func(rt.IsValidNumber)
+)
+
+var (
+ _F_iteratorStop = jit.Func(alg.IteratorStop)
+ _F_iteratorNext = jit.Func(alg.IteratorNext)
+ _F_iteratorStart = jit.Func(alg.IteratorStart)
+)
+
+var (
+ _F_encodeTypedPointer obj.Addr
+ _F_encodeJsonMarshaler obj.Addr
+ _F_encodeTextMarshaler obj.Addr
+)
+
+const (
+ _MODE_AVX2 = 1 << 2
+)
+
+func init() {
+ _F_encodeJsonMarshaler = jit.Func(alg.EncodeJsonMarshaler)
+ _F_encodeTextMarshaler = jit.Func(alg.EncodeTextMarshaler)
+ _F_encodeTypedPointer = jit.Func(EncodeTypedPointer)
+}
+
+func (self *Assembler) _asm_OP_null(_ *ir.Instr) {
+ self.check_size(4)
+ self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1)
+ self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL
+}
+
+func (self *Assembler) _asm_OP_empty_arr(_ *ir.Instr) {
+ self.Emit("BTQ", jit.Imm(int64(alg.BitNoNullSliceOrMap)), _ARG_fv)
+ self.Sjmp("JC", "_empty_arr_{n}")
+ self._asm_OP_null(nil)
+ self.Sjmp("JMP", "_empty_arr_end_{n}")
+ self.Link("_empty_arr_{n}")
+ self.check_size(2)
+ self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0))
+ self.Emit("ADDQ", jit.Imm(2), _RL)
+ self.Link("_empty_arr_end_{n}")
+}
+
+func (self *Assembler) _asm_OP_empty_obj(_ *ir.Instr) {
+ self.Emit("BTQ", jit.Imm(int64(alg.BitNoNullSliceOrMap)), _ARG_fv)
+ self.Sjmp("JC", "_empty_obj_{n}")
+ self._asm_OP_null(nil)
+ self.Sjmp("JMP", "_empty_obj_end_{n}")
+ self.Link("_empty_obj_{n}")
+ self.check_size(2)
+ self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0))
+ self.Emit("ADDQ", jit.Imm(2), _RL)
+ self.Link("_empty_obj_end_{n}")
+}
+
+func (self *Assembler) _asm_OP_bool(_ *ir.Instr) {
+ self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0
+ self.Sjmp("JE", "_false_{n}") // JE _false_{n}
+ self.check_size(4) // SIZE $4
+ self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'true', (RP)(RL*1)
+ self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL
+ self.Sjmp("JMP", "_end_{n}") // JMP _end_{n}
+ self.Link("_false_{n}") // _false_{n}:
+ self.check_size(5) // SIZE $5
+ self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'fals', (RP)(RL*1)
+ self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4)) // MOVB $'e', 4(RP)(RL*1)
+ self.Emit("ADDQ", jit.Imm(5), _RL) // ADDQ $5, RL
+ self.Link("_end_{n}") // _end_{n}:
+}
+
+func (self *Assembler) _asm_OP_i8(_ *ir.Instr) {
+ self.store_int(4, _F_i64toa, "MOVBQSX")
+}
+
+func (self *Assembler) _asm_OP_i16(_ *ir.Instr) {
+ self.store_int(6, _F_i64toa, "MOVWQSX")
+}
+
+func (self *Assembler) _asm_OP_i32(_ *ir.Instr) {
+ self.store_int(17, _F_i64toa, "MOVLQSX")
+}
+
+func (self *Assembler) _asm_OP_i64(_ *ir.Instr) {
+ self.store_int(21, _F_i64toa, "MOVQ")
+}
+
+func (self *Assembler) _asm_OP_u8(_ *ir.Instr) {
+ self.store_int(3, _F_u64toa, "MOVBQZX")
+}
+
+func (self *Assembler) _asm_OP_u16(_ *ir.Instr) {
+ self.store_int(5, _F_u64toa, "MOVWQZX")
+}
+
+func (self *Assembler) _asm_OP_u32(_ *ir.Instr) {
+ self.store_int(16, _F_u64toa, "MOVLQZX")
+}
+
+func (self *Assembler) _asm_OP_u64(_ *ir.Instr) {
+ self.store_int(20, _F_u64toa, "MOVQ")
+}
+
+func (self *Assembler) _asm_OP_f32(_ *ir.Instr) {
+ self.check_size(32)
+ self.Emit("MOVL", jit.Ptr(_SP_p, 0), _AX) // MOVL (SP.p), AX
+ self.Emit("ANDL", jit.Imm(_FM_exp32), _AX) // ANDL $_FM_exp32, AX
+ self.Emit("XORL", jit.Imm(_FM_exp32), _AX) // XORL $_FM_exp32, AX
+ self.Sjmp("JNZ", "_encode_normal_f32_{n}")// JNZ _encode_normal_f32_{n}
+ self.Emit("BTQ", jit.Imm(alg.BitEncodeNullForInfOrNan), _ARG_fv) // BTQ ${BitEncodeNullForInfOrNan}, fv
+ self.Sjmp("JNC", _LB_error_nan_or_infinite) // JNC _error_nan_or_infinite
+ self._asm_OP_null(nil)
+ self.Sjmp("JMP", "_encode_f32_end_{n}") // JMP _encode_f32_end_{n}
+ self.Link("_encode_normal_f32_{n}")
+ self.save_c() // SAVE $C_regs
+ self.rbuf_di() // MOVQ RP, DI
+ self.Emit("MOVSS", jit.Ptr(_SP_p, 0), _X0) // MOVSS (SP.p), X0
+ self.call_c(_F_f32toa) // CALL_C f32toa
+ self.Emit("ADDQ", _AX, _RL) // ADDQ AX, RL
+ self.Link("_encode_f32_end_{n}")
+}
+
+func (self *Assembler) _asm_OP_f64(_ *ir.Instr) {
+ self.check_size(32)
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
+ self.Emit("MOVQ", jit.Imm(_FM_exp64), _CX) // MOVQ $_FM_exp64, CX
+ self.Emit("ANDQ", _CX, _AX) // ANDQ CX, AX
+ self.Emit("XORQ", _CX, _AX) // XORQ CX, AX
+ self.Sjmp("JNZ", "_encode_normal_f64_{n}")// JNZ _encode_normal_f64_{n}
+ self.Emit("BTQ", jit.Imm(alg.BitEncodeNullForInfOrNan), _ARG_fv) // BTQ ${BitEncodeNullForInfOrNan}, fv
+ self.Sjmp("JNC", _LB_error_nan_or_infinite)// JNC _error_nan_or_infinite
+ self._asm_OP_null(nil)
+ self.Sjmp("JMP", "_encode_f64_end_{n}") // JMP _encode_f64_end_{n}
+ self.Link("_encode_normal_f64_{n}")
+ self.save_c() // SAVE $C_regs
+ self.rbuf_di() // MOVQ RP, DI
+ self.Emit("MOVSD", jit.Ptr(_SP_p, 0), _X0) // MOVSD (SP.p), X0
+ self.call_c(_F_f64toa) // CALL_C f64toa
+ self.Emit("ADDQ", _AX, _RL) // ADDQ AX, RL
+ self.Link("_encode_f64_end_{n}")
+}
+
+func (self *Assembler) _asm_OP_str(_ *ir.Instr) {
+ self.encode_string(false)
+}
+
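+// _asm_OP_bin emits a base64-encoded []byte. The MULQ by the magic constant
+// _IM_mulv looks like a reciprocal-multiplication trick to bound the encoded
+// length (on the order of 4/3 of the input, plus padding and quotes) without
+// a division; the bound only needs to be an over-estimate, since check_size_r
+// merely reserves that many bytes.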
+func (self *Assembler) _asm_OP_bin(_ *ir.Instr) {
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX
+ self.Emit("ADDQ", jit.Imm(2), _AX) // ADDQ $2, AX
+ self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX) // MOVQ $_MF_mulv, CX
+ self.Emit("MOVQ", _DX, _BX) // MOVQ DX, BX
+ self.From("MULQ", _CX) // MULQ CX
+ self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX) // LEAQ 1(DX)(DX), AX
+ self.Emit("ORQ", jit.Imm(2), _AX) // ORQ $2, AX
+ self.Emit("MOVQ", _BX, _DX) // MOVQ BX, DX
+ self.check_size_r(_AX, 0) // SIZE AX
+ self.add_char('"') // CHAR $'"'
+ self.Emit("MOVQ", _ARG_rb, _DI) // MOVQ rb<>+0(FP), DI
+ self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8)) // MOVQ SI, 8(DI)
+ self.Emit("MOVQ", _SP_p, _SI) // MOVQ SP.p, SI
+
+ /* check for AVX2 support */
+ if !cpu.HasAVX2 {
+ self.Emit("XORL", _DX, _DX) // XORL DX, DX
+ } else {
+ self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX) // MOVL $_MODE_AVX2, DX
+ }
+
+ /* call the encoder */
+ self.call_b64(_F_b64encode) // CALL b64encode
+ self.load_buffer_AX() // LOAD {buf}
+ self.add_char('"') // CHAR $'"'
+}
+
+func (self *Assembler) _asm_OP_quote(_ *ir.Instr) {
+ self.encode_string(true)
+}
+
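+// _asm_OP_number validates the json.Number string with isValidNumber and
+// then copies it into the buffer verbatim via memmove; a nil pointer with a
+// non-zero length panics, and an empty value is encoded as a bare `0`.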
+func (self *Assembler) _asm_OP_number(_ *ir.Instr) {
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX) // MOVQ (SP.p), BX
+ self.Emit("TESTQ", _BX, _BX) // TESTQ BX, BX
+ self.Sjmp("JZ", "_empty_{n}")
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
+ self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
+ self.Sjmp("JNZ", "_number_next_{n}")
+ self.Emit("MOVQ", jit.Imm(int64(vars.PanicNilPointerOfNonEmptyString)), _AX)
+ self.Sjmp("JMP", _LB_panic)
+ self.Link("_number_next_{n}")
+ self.call_go(_F_isValidNumber) // CALL_GO isValidNumber
+ self.Emit("CMPB", _AX, jit.Imm(0)) // CMPB AX, $0
+ self.Sjmp("JE", _LB_error_invalid_number) // JE _error_invalid_number
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX) // MOVQ (SP.p), BX
+ self.check_size_r(_BX, 0) // SIZE BX
+ self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _AX) // LEAQ (RP)(RL), AX
+ self.Emit("ADDQ", jit.Ptr(_SP_p, 8), _RL) // ADDQ 8(SP.p), RL
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _BX) // MOVOU (SP.p), BX
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX) // MOVOU X0, 8(SP)
+ self.call_go(_F_memmove) // CALL_GO memmove
+ self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX
+ self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX)
+ self.Sjmp("JMP", "_done_{n}") // JMP _done_{n}
+ self.Link("_empty_{n}") // _empty_{n}
+ self.check_size(1) // SIZE $1
+ self.add_char('0') // CHAR $'0'
+ self.Link("_done_{n}") // _done_{n}:
+}
+
+func (self *Assembler) _asm_OP_eface(_ *ir.Instr) {
+ self.prep_buffer_AX() // MOVE {buf}, AX
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _BX) // MOVQ (SP.p), BX
+ self.Emit("LEAQ", jit.Ptr(_SP_p, 8), _CX) // LEAQ 8(SP.p), CX
+ self.Emit("MOVQ", _ST, _DI) // MOVQ ST, DI
+ self.Emit("MOVQ", _ARG_fv, _SI) // MOVQ fv, AX
+ self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer
+ self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
+ self.Sjmp("JNZ", _LB_error) // JNZ _error
+ self.load_buffer_AX()
+}
+
+func (self *Assembler) _asm_OP_iface(_ *ir.Instr) {
+ self.prep_buffer_AX() // MOVE {buf}, AX
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX) // MOVQ (SP.p), CX
+ self.Emit("MOVQ", jit.Ptr(_CX, 8), _BX) // MOVQ 8(CX), BX
+ self.Emit("LEAQ", jit.Ptr(_SP_p, 8), _CX) // LEAQ 8(SP.p), CX
+ self.Emit("MOVQ", _ST, _DI) // MOVQ ST, DI
+ self.Emit("MOVQ", _ARG_fv, _SI) // MOVQ fv, AX
+ self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer
+ self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
+ self.Sjmp("JNZ", _LB_error) // JNZ _error
+ self.load_buffer_AX()
+}
+
+func (self *Assembler) _asm_OP_byte(p *ir.Instr) {
+ self.check_size(1)
+ self.Emit("MOVB", jit.Imm(p.I64()), jit.Sib(_RP, _RL, 1, 0)) // MOVL p.Vi(), (RP)(RL*1)
+ self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL
+}
+
+func (self *Assembler) _asm_OP_text(p *ir.Instr) {
+ self.check_size(len(p.Vs())) // SIZE ${len(p.Vs())}
+ self.add_text(p.Vs()) // TEXT ${p.Vs()}
+}
+
+func (self *Assembler) _asm_OP_deref(_ *ir.Instr) {
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p
+}
+
+func (self *Assembler) _asm_OP_index(p *ir.Instr) {
+ self.Emit("MOVQ", jit.Imm(p.I64()), _AX) // MOVQ $p.Vi(), AX
+ self.Emit("ADDQ", _AX, _SP_p) // ADDQ AX, SP.p
+}
+
+func (self *Assembler) _asm_OP_load(_ *ir.Instr) {
+ self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
+ self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x) // MOVQ -24(ST)(AX), SP.x
+ self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p) // MOVQ -8(ST)(AX), SP.p
+ self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q) // MOVQ (ST)(AX), SP.q
+}
+
+func (self *Assembler) _asm_OP_save(_ *ir.Instr) {
+ self.save_state()
+}
+
+func (self *Assembler) _asm_OP_drop(_ *ir.Instr) {
+ self.drop_state(vars.StateSize)
+}
+
+func (self *Assembler) _asm_OP_drop_2(_ *ir.Instr) {
+ self.drop_state(vars.StateSize * 2) // DROP $(vars.StateSize * 2)
+ self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56)) // MOVOU X0, 56(ST)(AX)
+}
+
+func (self *Assembler) _asm_OP_recurse(p *ir.Instr) {
+ self.prep_buffer_AX() // MOVE {buf}, (SP)
+ vt, pv := p.Vp()
+ self.Emit("MOVQ", jit.Type(vt), _BX) // MOVQ $(type(p.Vt())), BX
+
+ /* check for indirection */
+ if !rt.UnpackType(vt).Indirect() {
+ self.Emit("MOVQ", _SP_p, _CX) // MOVQ SP.p, CX
+ } else {
+ self.Emit("MOVQ", _SP_p, _VAR_vp) // MOVQ SP.p, VAR.vp
+ self.Emit("LEAQ", _VAR_vp, _CX) // LEAQ VAR.vp, CX
+ }
+
+ /* call the encoder */
+ self.Emit("MOVQ", _ST, _DI) // MOVQ ST, DI
+ self.Emit("MOVQ", _ARG_fv, _SI) // MOVQ $fv, SI
+ if pv {
+ self.Emit("BTSQ", jit.Imm(alg.BitPointerValue), _SI) // BTSQ $1, SI
+ }
+
+ self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer
+ self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
+ self.Sjmp("JNZ", _LB_error) // JNZ _error
+ self.load_buffer_AX()
+}
+
+func (self *Assembler) _asm_OP_is_nil(p *ir.Instr) {
+ self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0
+ self.Xjmp("JE", p.Vi()) // JE p.Vi()
+}
+
+func (self *Assembler) _asm_OP_is_nil_p1(p *ir.Instr) {
+ self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0)) // CMPQ 8(SP.p), $0
+ self.Xjmp("JE", p.Vi()) // JE p.Vi()
+}
+
+func (self *Assembler) _asm_OP_is_zero_1(p *ir.Instr) {
+ self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0
+ self.Xjmp("JE", p.Vi()) // JE p.Vi()
+}
+
+func (self *Assembler) _asm_OP_is_zero_2(p *ir.Instr) {
+ self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPW (SP.p), $0
+ self.Xjmp("JE", p.Vi()) // JE p.Vi()
+}
+
+func (self *Assembler) _asm_OP_is_zero_4(p *ir.Instr) {
+ self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPL (SP.p), $0
+ self.Xjmp("JE", p.Vi()) // JE p.Vi()
+}
+
+func (self *Assembler) _asm_OP_is_zero_8(p *ir.Instr) {
+ self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0
+ self.Xjmp("JE", p.Vi()) // JE p.Vi()
+}
+
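+// _asm_OP_is_zero_map treats both a nil map and an empty one as zero: after
+// the nil check it compares the first word of the map header against zero,
+// which holds the element count in the runtime's hmap layout.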
+func (self *Assembler) _asm_OP_is_zero_map(p *ir.Instr) {
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX
+ self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
+ self.Xjmp("JZ", p.Vi()) // JZ p.Vi()
+ self.Emit("CMPQ", jit.Ptr(_AX, 0), jit.Imm(0)) // CMPQ (AX), $0
+ self.Xjmp("JE", p.Vi()) // JE p.Vi()
+}
+
+func (self *Assembler) _asm_OP_goto(p *ir.Instr) {
+ self.Xjmp("JMP", p.Vi())
+}
+
+func (self *Assembler) _asm_OP_map_iter(p *ir.Instr) {
+ self.Emit("MOVQ", jit.Type(p.Vt()), _AX) // MOVQ $p.Vt(), AX
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _BX) // MOVQ (SP.p), BX
+ self.Emit("MOVQ", _ARG_fv, _CX) // MOVQ fv, CX
+ self.call_go(_F_iteratorStart) // CALL_GO iteratorStart
+ self.Emit("MOVQ", _AX, _SP_q) // MOVQ AX, SP.q
+ self.Emit("MOVQ", _BX, _ET) // MOVQ 32(SP), ET
+ self.Emit("MOVQ", _CX, _EP) // MOVQ 40(SP), EP
+ self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
+ self.Sjmp("JNZ", _LB_error) // JNZ _error
+}
+
+func (self *Assembler) _asm_OP_map_stop(_ *ir.Instr) {
+ self.Emit("MOVQ", _SP_q, _AX) // MOVQ SP.q, AX
+ self.call_go(_F_iteratorStop) // CALL_GO iteratorStop
+ self.Emit("XORL", _SP_q, _SP_q) // XORL SP.q, SP.q
+}
+
+func (self *Assembler) _asm_OP_map_check_key(p *ir.Instr) {
+ self.Emit("MOVQ", jit.Ptr(_SP_q, 0), _SP_p) // MOVQ (SP.q), SP.p
+ self.Emit("TESTQ", _SP_p, _SP_p) // TESTQ SP.p, SP.p
+ self.Xjmp("JZ", p.Vi()) // JNZ p.Vi()
+}
+
+func (self *Assembler) _asm_OP_map_write_key(p *ir.Instr) {
+ self.Emit("BTQ", jit.Imm(alg.BitSortMapKeys), _ARG_fv) // BTQ ${SortMapKeys}, fv
+ self.Sjmp("JNC", "_unordered_key_{n}") // JNC _unordered_key_{n}
+ self.encode_string(false) // STR $false
+ self.Xjmp("JMP", p.Vi()) // JMP ${p.Vi()}
+ self.Link("_unordered_key_{n}") // _unordered_key_{n}:
+}
+
+func (self *Assembler) _asm_OP_map_value_next(_ *ir.Instr) {
+ self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p) // MOVQ 8(SP.q), SP.p
+ self.Emit("MOVQ", _SP_q, _AX) // MOVQ SP.q, AX
+ self.call_go(_F_iteratorNext) // CALL_GO iteratorNext
+}
+
+func (self *Assembler) _asm_OP_slice_len(_ *ir.Instr) {
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SP_x) // MOVQ 8(SP.p), SP.x
+ self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p
+ self.Emit("ORQ", jit.Imm(1<<_S_init), _SP_f) // ORQ $(1<<_S_init), SP.f
+}
+
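+// _asm_OP_slice_next uses a branch-free advance: BTRQ pops the _S_init flag
+// into CF, and CMOVQCC only commits the advanced pointer when the flag was
+// already clear, so the first iteration visits element 0 in place and later
+// iterations step by the element size p.Vlen().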
+func (self *Assembler) _asm_OP_slice_next(p *ir.Instr) {
+ self.Emit("TESTQ", _SP_x, _SP_x) // TESTQ SP.x, SP.x
+ self.Xjmp("JZ", p.Vi()) // JZ p.Vi()
+ self.Emit("SUBQ", jit.Imm(1), _SP_x) // SUBQ $1, SP.x
+ self.Emit("BTRQ", jit.Imm(_S_init), _SP_f) // BTRQ $_S_init, SP.f
+ self.Emit("LEAQ", jit.Ptr(_SP_p, int64(p.Vlen())), _AX) // LEAQ $(p.vlen())(SP.p), AX
+ self.Emit("CMOVQCC", _AX, _SP_p) // CMOVQNC AX, SP.p
+}
+
+func (self *Assembler) _asm_OP_marshal(p *ir.Instr) {
+ self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.Vt())
+}
+
+func (self *Assembler) _asm_OP_marshal_p(p *ir.Instr) {
+ if p.Vk() != reflect.Ptr {
+ panic("marshal_p: invalid type")
+ } else {
+ self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.Vt(), false)
+ }
+}
+
+func (self *Assembler) _asm_OP_marshal_text(p *ir.Instr) {
+ self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.Vt())
+}
+
+func (self *Assembler) _asm_OP_marshal_text_p(p *ir.Instr) {
+ if p.Vk() != reflect.Ptr {
+ panic("marshal_text_p: invalid type")
+ } else {
+ self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.Vt(), false)
+ }
+}
+
+func (self *Assembler) _asm_OP_cond_set(_ *ir.Instr) {
+ self.Emit("ORQ", jit.Imm(1<<_S_cond), _SP_f) // ORQ $(1<<_S_cond), SP.f
+}
+
+func (self *Assembler) _asm_OP_cond_testc(p *ir.Instr) {
+ self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f) // BTRQ $_S_cond, SP.f
+ self.Xjmp("JC", p.Vi())
+}
+
+func (self *Assembler) print_gc(i int, p1 *ir.Instr, p2 *ir.Instr) {
+ self.Emit("MOVQ", jit.Imm(int64(p2.Op())), _CX) // MOVQ $(p2.Op()), AX
+ self.Emit("MOVQ", jit.Imm(int64(p1.Op())), _BX) // MOVQ $(p1.Op()), BX
+ self.Emit("MOVQ", jit.Imm(int64(i)), _AX) // MOVQ $(i), CX
+ self.call_go(_F_println)
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/debug_go116.go b/vendor/github.com/bytedance/sonic/internal/encoder/x86/debug_go116.go
index d8d0c45cc..c292e88a1 100644
--- a/vendor/github.com/bytedance/sonic/internal/encoder/debug_go116.go
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/x86/debug_go116.go
@@ -1,4 +1,4 @@
-// +build go1.16,!go1.17
+// +build go1.17,!go1.17
/*
* Copyright 2021 ByteDance Inc.
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package encoder
+package x86
import (
`os`
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/x86/debug_go117.go b/vendor/github.com/bytedance/sonic/internal/encoder/x86/debug_go117.go
new file mode 100644
index 000000000..0aca3f4c5
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/x86/debug_go117.go
@@ -0,0 +1,201 @@
+//go:build go1.17 && !go1.24
+// +build go1.17,!go1.24
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package x86
+
+import (
+ "fmt"
+ "runtime"
+ "strings"
+ "unsafe"
+
+ "github.com/bytedance/sonic/internal/encoder/ir"
+ "github.com/bytedance/sonic/internal/encoder/vars"
+ "github.com/bytedance/sonic/internal/jit"
+ "github.com/twitchyliquid64/golang-asm/obj"
+)
+
+const _FP_debug = 128
+
+var (
+ _Instr_End = ir.NewInsOp(ir.OP_is_nil)
+
+ _F_gc = jit.Func(gc)
+ _F_println = jit.Func(println_wrapper)
+ _F_print = jit.Func(print)
+)
+
+func (self *Assembler) dsave(r ...obj.Addr) {
+ for i, v := range r {
+ if i > _FP_debug/8-1 {
+ panic("too many registers to save")
+ } else {
+ self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs+_FP_saves+_FP_locals+int64(i)*8))
+ }
+ }
+}
+
+func (self *Assembler) dload(r ...obj.Addr) {
+ for i, v := range r {
+ if i > _FP_debug/8-1 {
+ panic("too many registers to load")
+ } else {
+ self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs+_FP_saves+_FP_locals+int64(i)*8), v)
+ }
+ }
+}
+
+func println_wrapper(i int, op1 int, op2 int) {
+ println(i, " Intrs ", op1, ir.OpNames[op1], "next: ", op2, ir.OpNames[op2])
+}
+
+func print(i int) {
+ println(i)
+}
+
+func gc() {
+ if !vars.DebugSyncGC {
+ return
+ }
+ runtime.GC()
+ // debug.FreeOSMemory()
+}
+
+func (self *Assembler) dcall(fn obj.Addr) {
+ self.Emit("MOVQ", fn, _R10) // MOVQ ${fn}, R10
+ self.Rjmp("CALL", _R10) // CALL R10
+}
+
+func (self *Assembler) debug_gc() {
+ if !vars.DebugSyncGC {
+ return
+ }
+ self.dsave(_REG_debug...)
+ self.dcall(_F_gc)
+ self.dload(_REG_debug...)
+}
+
+func (self *Assembler) debug_instr(i int, v *ir.Instr) {
+ if vars.DebugSyncGC {
+ if i+1 == len(self.p) {
+ self.print_gc(i, v, &_Instr_End)
+ } else {
+ next := &(self.p[i+1])
+ self.print_gc(i, v, next)
+ name := ir.OpNames[next.Op()]
+ if strings.Contains(name, "save") {
+ return
+ }
+ }
+ // self.debug_gc()
+ }
+}
+
+//go:noescape
+//go:linkname checkptrBase runtime.checkptrBase
+func checkptrBase(p unsafe.Pointer) uintptr
+
+//go:noescape
+//go:linkname findObject runtime.findObject
+func findObject(p, refBase, refOff uintptr) (base uintptr, s unsafe.Pointer, objIndex uintptr)
+
+var (
+ _F_checkptr = jit.Func(checkptr)
+ _F_printptr = jit.Func(printptr)
+)
+
+var (
+ _R10 = jit.Reg("R10")
+)
+var _REG_debug = []obj.Addr{
+ jit.Reg("AX"),
+ jit.Reg("BX"),
+ jit.Reg("CX"),
+ jit.Reg("DX"),
+ jit.Reg("DI"),
+ jit.Reg("SI"),
+ jit.Reg("BP"),
+ jit.Reg("SP"),
+ jit.Reg("R8"),
+ jit.Reg("R9"),
+ jit.Reg("R10"),
+ jit.Reg("R11"),
+ jit.Reg("R12"),
+ jit.Reg("R13"),
+ jit.Reg("R14"),
+ jit.Reg("R15"),
+}
+
+func checkptr(ptr uintptr) {
+ if ptr == 0 {
+ return
+ }
+ fmt.Printf("pointer: %x\n", ptr)
+ f := checkptrBase(unsafe.Pointer(uintptr(ptr)))
+ if f == 0 {
+ fmt.Printf("! unknown-based pointer: %x\n", ptr)
+ } else if f == 1 {
+ fmt.Printf("! stack pointer: %x\n", ptr)
+ } else {
+ fmt.Printf("base: %x\n", f)
+ }
+ findobj(ptr)
+}
+
+func findobj(ptr uintptr) {
+ base, s, objIndex := findObject(ptr, 0, 0)
+ if s != nil && base == 0 {
+ fmt.Printf("! invalid pointer: %x\n", ptr)
+ }
+ fmt.Printf("objIndex: %d\n", objIndex)
+}
+
+func (self *Assembler) check_ptr(ptr obj.Addr, lea bool) {
+ if !vars.DebugCheckPtr {
+ return
+ }
+
+ self.dsave(_REG_debug...)
+ if lea {
+ self.Emit("LEAQ", ptr, _R10)
+ } else {
+ self.Emit("MOVQ", ptr, _R10)
+ }
+ self.Emit("MOVQ", _R10, jit.Ptr(_SP, 0))
+ self.dcall(_F_checkptr)
+ self.dload(_REG_debug...)
+}
+
+func printptr(i int, ptr uintptr) {
+ fmt.Printf("[%d] ptr: %x\n", i, ptr)
+}
+
+func (self *Assembler) print_ptr(i int, ptr obj.Addr, lea bool) {
+ self.dsave(_REG_debug...)
+ if lea {
+ self.Emit("LEAQ", ptr, _R10)
+ } else {
+ self.Emit("MOVQ", ptr, _R10)
+ }
+
+ self.Emit("MOVQ", jit.Imm(int64(i)), _AX)
+ self.Emit("MOVQ", _R10, _BX)
+ self.dcall(_F_printptr)
+ self.dload(_REG_debug...)
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/x86/stbus.go b/vendor/github.com/bytedance/sonic/internal/encoder/x86/stbus.go
new file mode 100644
index 000000000..b9fa473f5
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/x86/stbus.go
@@ -0,0 +1,54 @@
+/**
+ * Copyright 2024 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package x86
+
+import (
+ "unsafe"
+ _ "unsafe"
+
+ "github.com/bytedance/sonic/internal/encoder/alg"
+ "github.com/bytedance/sonic/internal/encoder/vars"
+ "github.com/bytedance/sonic/internal/rt"
+ "github.com/bytedance/sonic/loader"
+ _ "github.com/cloudwego/base64x"
+)
+
+//go:linkname _subr__b64encode github.com/cloudwego/base64x._subr__b64encode
+var _subr__b64encode uintptr
+
+var compiler func(*rt.GoType, ...interface{}) (interface{}, error)
+
+func SetCompiler(c func(*rt.GoType, ...interface{}) (interface{}, error)) {
+ compiler = c
+}
+
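+// ptoenc reinterprets the JIT-ed code handle wrapped in loader.Function as a
+// vars.Encoder func value; the unsafe cast relies on the assumption that both
+// are represented as a single pointer word.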
+func ptoenc(p loader.Function) vars.Encoder {
+ return *(*vars.Encoder)(unsafe.Pointer(&p))
+}
+
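+// EncodeTypedPointer is the entry used by OP_recurse/eface/iface: it resolves
+// (or lazily compiles, via the injected compiler) the encoder for vt, then
+// passes *vp for indirect types and the address of the interface word itself
+// for direct types, mirroring how Go stores values in interfaces.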
+func EncodeTypedPointer(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *vars.Stack, fv uint64) error {
+ if vt == nil {
+ return alg.EncodeNil(buf)
+ } else if fn, err := vars.FindOrCompile(vt, (fv&(1<<alg.BitPointerValue)) != 0, compiler); err != nil {
+ return err
+ } else if vt.Indirect() {
+ return fn.(vars.Encoder)(buf, *vp, sb, fv)
+ } else {
+ return fn.(vars.Encoder)(buf, unsafe.Pointer(vp), sb, fv)
+ }
+}
+