Diffstat (limited to 'vendor/github.com/bytedance/sonic/internal/encoder')
27 files changed, 0 insertions, 5029 deletions
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/mapiter.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/mapiter.go deleted file mode 100644 index 5d9956a90..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/alg/mapiter.go +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package alg - -import ( - "encoding" - "reflect" - "strconv" - "sync" - "unsafe" - - "github.com/bytedance/sonic/internal/encoder/vars" - "github.com/bytedance/sonic/internal/rt" -) - -type _MapPair struct { - k string // when the map key is integer, k is pointed to m - v unsafe.Pointer - m [32]byte -} - -type MapIterator struct { - It rt.GoMapIterator // must be the first field - kv rt.GoSlice // slice of _MapPair - ki int -} - -var ( - iteratorPool = sync.Pool{} - iteratorPair = rt.UnpackType(reflect.TypeOf(_MapPair{})) -) - -func init() { - if unsafe.Offsetof(MapIterator{}.It) != 0 { - panic("_MapIterator.it is not the first field") - } -} - - -func newIterator() *MapIterator { - if v := iteratorPool.Get(); v == nil { - return new(MapIterator) - } else { - return resetIterator(v.(*MapIterator)) - } -} - -func resetIterator(p *MapIterator) *MapIterator { - p.ki = 0 - p.It = rt.GoMapIterator{} - p.kv.Len = 0 - return p -} - -func (self *MapIterator) at(i int) *_MapPair { - return (*_MapPair)(unsafe.Pointer(uintptr(self.kv.Ptr) + uintptr(i) * unsafe.Sizeof(_MapPair{}))) -} - -func (self *MapIterator) add() (p *_MapPair) { - p = self.at(self.kv.Len) - self.kv.Len++ - return -} - -func (self *MapIterator) data() (p []_MapPair) { - *(*rt.GoSlice)(unsafe.Pointer(&p)) = self.kv - return -} - -func (self *MapIterator) append(t *rt.GoType, k unsafe.Pointer, v unsafe.Pointer) (err error) { - p := self.add() - p.v = v - - /* check for strings */ - if tk := t.Kind(); tk != reflect.String { - return self.appendGeneric(p, t, tk, k) - } - - /* fast path for strings */ - p.k = *(*string)(k) - return nil -} - -func (self *MapIterator) appendGeneric(p *_MapPair, t *rt.GoType, v reflect.Kind, k unsafe.Pointer) error { - switch v { - case reflect.Int : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int)(k)), 10)) ; return nil - case reflect.Int8 : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int8)(k)), 10)) ; return nil - case reflect.Int16 : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int16)(k)), 10)) ; return nil - case reflect.Int32 : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int32)(k)), 10)) ; return nil - case reflect.Int64 : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int64)(k)), 10)) ; return nil - case reflect.Uint : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint)(k)), 10)) ; return nil - case reflect.Uint8 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint8)(k)), 10)) ; return nil - case reflect.Uint16 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint16)(k)), 10)) ; return nil - case reflect.Uint32 : p.k = 
rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint32)(k)), 10)) ; return nil - case reflect.Uint64 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint64)(k)), 10)) ; return nil - case reflect.Uintptr : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uintptr)(k)), 10)) ; return nil - case reflect.Interface : return self.appendInterface(p, t, k) - case reflect.Struct, reflect.Ptr : return self.appendConcrete(p, t, k) - default : panic("unexpected map key type") - } -} - -func (self *MapIterator) appendConcrete(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) { - // compiler has already checked that the type implements the encoding.MarshalText interface - if !t.Indirect() { - k = *(*unsafe.Pointer)(k) - } - eface := rt.GoEface{Value: k, Type: t}.Pack() - out, err := eface.(encoding.TextMarshaler).MarshalText() - if err != nil { - return err - } - p.k = rt.Mem2Str(out) - return -} - -func (self *MapIterator) appendInterface(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) { - if len(rt.IfaceType(t).Methods) == 0 { - panic("unexpected map key type") - } else if p.k, err = asText(k); err == nil { - return nil - } else { - return - } -} - -func IteratorStop(p *MapIterator) { - iteratorPool.Put(p) -} - -func IteratorNext(p *MapIterator) { - i := p.ki - t := &p.It - - /* check for unordered iteration */ - if i < 0 { - rt.Mapiternext(t) - return - } - - /* check for end of iteration */ - if p.ki >= p.kv.Len { - t.K = nil - t.V = nil - return - } - - /* update the key-value pair, and increase the pointer */ - t.K = unsafe.Pointer(&p.at(p.ki).k) - t.V = p.at(p.ki).v - p.ki++ -} - -func IteratorStart(t *rt.GoMapType, m *rt.GoMap, fv uint64) (*MapIterator, error) { - it := newIterator() - rt.Mapiterinit(t, m, &it.It) - - /* check for key-sorting, empty map don't need sorting */ - if m.Count == 0 || (fv & (1<<BitSortMapKeys)) == 0 { - it.ki = -1 - return it, nil - } - - /* pre-allocate space if needed */ - if m.Count > it.kv.Cap { - it.kv = rt.GrowSlice(iteratorPair, it.kv, m.Count) - } - - /* dump all the key-value pairs */ - for ; it.It.K != nil; rt.Mapiternext(&it.It) { - if err := it.append(t.Key, it.It.K, it.It.V); err != nil { - IteratorStop(it) - return nil, err - } - } - - /* sort the keys, map with only 1 item don't need sorting */ - if it.ki = 1; m.Count > 1 { - radixQsort(it.data(), 0, maxDepth(it.kv.Len)) - } - - /* load the first pair into iterator */ - it.It.V = it.at(0).v - it.It.K = unsafe.Pointer(&it.at(0).k) - return it, nil -} - -func asText(v unsafe.Pointer) (string, error) { - text := rt.AssertI2I(rt.UnpackType(vars.EncodingTextMarshalerType), *(*rt.GoIface)(v)) - r, e := (*(*encoding.TextMarshaler)(unsafe.Pointer(&text))).MarshalText() - return rt.Mem2Str(r), e -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/opts.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/opts.go deleted file mode 100644 index c19e2de4e..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/alg/opts.go +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Copyright 2024 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package alg - -const ( - BitSortMapKeys = iota - BitEscapeHTML - BitCompactMarshaler - BitNoQuoteTextMarshaler - BitNoNullSliceOrMap - BitValidateString - BitNoValidateJSONMarshaler - BitNoEncoderNewline - BitEncodeNullForInfOrNan - - BitPointerValue = 63 -) diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/primitives.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/primitives.go deleted file mode 100644 index 63fa01890..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/alg/primitives.go +++ /dev/null @@ -1,95 +0,0 @@ -/** - * Copyright 2024 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package alg - -import ( - "encoding" - "encoding/json" - - "github.com/bytedance/sonic/internal/encoder/vars" - "github.com/bytedance/sonic/internal/rt" -) - -func Compact(p *[]byte, v []byte) error { - buf := vars.NewBuffer() - err := json.Compact(buf, v) - - /* check for errors */ - if err != nil { - return err - } - - /* add to result */ - v = buf.Bytes() - *p = append(*p, v...) - - /* return the buffer into pool */ - vars.FreeBuffer(buf) - return nil -} - -func EncodeNil(rb *[]byte) error { - *rb = append(*rb, 'n', 'u', 'l', 'l') - return nil -} - -// func Make_EncodeTypedPointer(computor func(*rt.GoType, ...interface{}) (interface{}, error)) func(*[]byte, *rt.GoType, *unsafe.Pointer, *vars.Stack, uint64) error { -// return func(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *vars.Stack, fv uint64) error { -// if vt == nil { -// return EncodeNil(buf) -// } else if fn, err := vars.FindOrCompile(vt, (fv&(1<<BitPointerValue)) != 0, computor); err != nil { -// return err -// } else if vt.Indirect() { -// err := fn(buf, *vp, sb, fv) -// return err -// } else { -// err := fn(buf, unsafe.Pointer(vp), sb, fv) -// return err -// } -// } -// } - -func EncodeJsonMarshaler(buf *[]byte, val json.Marshaler, opt uint64) error { - if ret, err := val.MarshalJSON(); err != nil { - return err - } else { - if opt&(1<<BitCompactMarshaler) != 0 { - return Compact(buf, ret) - } - if opt&(1<<BitNoValidateJSONMarshaler) == 0 { - if ok, s := Valid(ret); !ok { - return vars.Error_marshaler(ret, s) - } - } - *buf = append(*buf, ret...) - return nil - } -} - -func EncodeTextMarshaler(buf *[]byte, val encoding.TextMarshaler, opt uint64) error { - if ret, err := val.MarshalText(); err != nil { - return err - } else { - if opt&(1<<BitNoQuoteTextMarshaler) != 0 { - *buf = append(*buf, ret...) - return nil - } - *buf = Quote(*buf, rt.Mem2Str(ret), false) - return nil - } -} -
\ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/sort.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/sort.go deleted file mode 100644 index 5bb0f9011..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/alg/sort.go +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package alg - -// Algorithm 3-way Radix Quicksort, d means the radix. -// Reference: https://algs4.cs.princeton.edu/51radix/Quick3string.java.html -func radixQsort(kvs []_MapPair, d, maxDepth int) { - for len(kvs) > 11 { - // To avoid the worst case of quickSort (time: O(n^2)), use introsort here. - // Reference: https://en.wikipedia.org/wiki/Introsort and - // https://github.com/golang/go/issues/467 - if maxDepth == 0 { - heapSort(kvs, 0, len(kvs)) - return - } - maxDepth-- - - p := pivot(kvs, d) - lt, i, gt := 0, 0, len(kvs) - for i < gt { - c := byteAt(kvs[i].k, d) - if c < p { - swap(kvs, lt, i) - i++ - lt++ - } else if c > p { - gt-- - swap(kvs, i, gt) - } else { - i++ - } - } - - // kvs[0:lt] < v = kvs[lt:gt] < kvs[gt:len(kvs)] - // Native implementation: - // radixQsort(kvs[:lt], d, maxDepth) - // if p > -1 { - // radixQsort(kvs[lt:gt], d+1, maxDepth) - // } - // radixQsort(kvs[gt:], d, maxDepth) - // Optimize as follows: make recursive calls only for the smaller parts. - // Reference: https://www.geeksforgeeks.org/quicksort-tail-call-optimization-reducing-worst-case-space-log-n/ - if p == -1 { - if lt > len(kvs) - gt { - radixQsort(kvs[gt:], d, maxDepth) - kvs = kvs[:lt] - } else { - radixQsort(kvs[:lt], d, maxDepth) - kvs = kvs[gt:] - } - } else { - ml := maxThree(lt, gt-lt, len(kvs)-gt) - if ml == lt { - radixQsort(kvs[lt:gt], d+1, maxDepth) - radixQsort(kvs[gt:], d, maxDepth) - kvs = kvs[:lt] - } else if ml == gt-lt { - radixQsort(kvs[:lt], d, maxDepth) - radixQsort(kvs[gt:], d, maxDepth) - kvs = kvs[lt:gt] - d += 1 - } else { - radixQsort(kvs[:lt], d, maxDepth) - radixQsort(kvs[lt:gt], d+1, maxDepth) - kvs = kvs[gt:] - } - } - } - insertRadixSort(kvs, d) -} - -func insertRadixSort(kvs []_MapPair, d int) { - for i := 1; i < len(kvs); i++ { - for j := i; j > 0 && lessFrom(kvs[j].k, kvs[j-1].k, d); j-- { - swap(kvs, j, j-1) - } - } -} - -func pivot(kvs []_MapPair, d int) int { - m := len(kvs) >> 1 - if len(kvs) > 40 { - // Tukey's ``Ninther,'' median of three mediankvs of three. 
- t := len(kvs) / 8 - return medianThree( - medianThree(byteAt(kvs[0].k, d), byteAt(kvs[t].k, d), byteAt(kvs[2*t].k, d)), - medianThree(byteAt(kvs[m].k, d), byteAt(kvs[m-t].k, d), byteAt(kvs[m+t].k, d)), - medianThree(byteAt(kvs[len(kvs)-1].k, d), - byteAt(kvs[len(kvs)-1-t].k, d), - byteAt(kvs[len(kvs)-1-2*t].k, d))) - } - return medianThree(byteAt(kvs[0].k, d), byteAt(kvs[m].k, d), byteAt(kvs[len(kvs)-1].k, d)) -} - -func medianThree(i, j, k int) int { - if i > j { - i, j = j, i - } // i < j - if k < i { - return i - } - if k > j { - return j - } - return k -} - -func maxThree(i, j, k int) int { - max := i - if max < j { - max = j - } - if max < k { - max = k - } - return max -} - -// maxDepth returns a threshold at which quicksort should switch -// to heapsort. It returnkvs 2*ceil(lg(n+1)). -func maxDepth(n int) int { - var depth int - for i := n; i > 0; i >>= 1 { - depth++ - } - return depth * 2 -} - -// siftDown implements the heap property on kvs[lo:hi]. -// first is an offset into the array where the root of the heap lies. -func siftDown(kvs []_MapPair, lo, hi, first int) { - root := lo - for { - child := 2*root + 1 - if child >= hi { - break - } - if child+1 < hi && kvs[first+child].k < kvs[first+child+1].k { - child++ - } - if kvs[first+root].k >= kvs[first+child].k { - return - } - swap(kvs, first+root, first+child) - root = child - } -} - -func heapSort(kvs []_MapPair, a, b int) { - first := a - lo := 0 - hi := b - a - - // Build heap with the greatest element at top. - for i := (hi - 1) / 2; i >= 0; i-- { - siftDown(kvs, i, hi, first) - } - - // Pop elements, the largest first, into end of kvs. - for i := hi - 1; i >= 0; i-- { - swap(kvs, first, first+i) - siftDown(kvs, lo, i, first) - } -} - -// Note that _MapPair.k is NOT pointed to _MapPair.m when map key is integer after swap -func swap(kvs []_MapPair, a, b int) { - kvs[a].k, kvs[b].k = kvs[b].k, kvs[a].k - kvs[a].v, kvs[b].v = kvs[b].v, kvs[a].v -} - -// Compare two strings from the pos d. -func lessFrom(a, b string, d int) bool { - l := len(a) - if l > len(b) { - l = len(b) - } - for i := d; i < l; i++ { - if a[i] == b[i] { - continue - } - return a[i] < b[i] - } - return len(a) < len(b) -} - -func byteAt(b string, p int) int { - if p < len(b) { - return int(b[p]) - } - return -1 -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec.go deleted file mode 100644 index bff943626..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec.go +++ /dev/null @@ -1,198 +0,0 @@ -//go:build (amd64 && go1.16 && !go1.24) || (arm64 && go1.20 && !go1.24) -// +build amd64,go1.16,!go1.24 arm64,go1.20,!go1.24 - -/** - * Copyright 2024 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package alg - -import ( - "runtime" - "unsafe" - - "github.com/bytedance/sonic/internal/native" - "github.com/bytedance/sonic/internal/native/types" - "github.com/bytedance/sonic/internal/rt" -) - -// Valid validates json and returns first non-blank character position, -// if it is only one valid json value. -// Otherwise returns invalid character position using start. -// -// Note: it does not check for the invalid UTF-8 characters. -func Valid(data []byte) (ok bool, start int) { - n := len(data) - if n == 0 { - return false, -1 - } - s := rt.Mem2Str(data) - p := 0 - m := types.NewStateMachine() - ret := native.ValidateOne(&s, &p, m, 0) - types.FreeStateMachine(m) - - if ret < 0 { - return false, p-1 - } - - /* check for trailing spaces */ - for ;p < n; p++ { - if (types.SPACE_MASK & (1 << data[p])) == 0 { - return false, p - } - } - - return true, ret -} - -var typeByte = rt.UnpackEface(byte(0)).Type - -//go:nocheckptr -func Quote(buf []byte, val string, double bool) []byte { - if len(val) == 0 { - if double { - return append(buf, `"\"\""`...) - } - return append(buf, `""`...) - } - - if double { - buf = append(buf, `"\"`...) - } else { - buf = append(buf, `"`...) - } - sp := rt.IndexChar(val, 0) - nb := len(val) - b := (*rt.GoSlice)(unsafe.Pointer(&buf)) - - // input buffer - for nb > 0 { - // output buffer - dp := unsafe.Pointer(uintptr(b.Ptr) + uintptr(b.Len)) - dn := b.Cap - b.Len - // call native.Quote, dn is byte count it outputs - opts := uint64(0) - if double { - opts = types.F_DOUBLE_UNQUOTE - } - ret := native.Quote(sp, nb, dp, &dn, opts) - // update *buf length - b.Len += dn - - // no need more output - if ret >= 0 { - break - } - - // double buf size - *b = rt.GrowSlice(typeByte, *b, b.Cap*2) - // ret is the complement of consumed input - ret = ^ret - // update input buffer - nb -= ret - sp = unsafe.Pointer(uintptr(sp) + uintptr(ret)) - } - - runtime.KeepAlive(buf) - runtime.KeepAlive(sp) - if double { - buf = append(buf, `\""`...) - } else { - buf = append(buf, `"`...) - } - - return buf -} - -func HtmlEscape(dst []byte, src []byte) []byte { - var sidx int - - dst = append(dst, src[:0]...) 
// avoid check nil dst - sbuf := (*rt.GoSlice)(unsafe.Pointer(&src)) - dbuf := (*rt.GoSlice)(unsafe.Pointer(&dst)) - - /* grow dst if it is shorter */ - if cap(dst)-len(dst) < len(src)+types.BufPaddingSize { - cap := len(src)*3/2 + types.BufPaddingSize - *dbuf = rt.GrowSlice(typeByte, *dbuf, cap) - } - - for sidx < sbuf.Len { - sp := rt.Add(sbuf.Ptr, uintptr(sidx)) - dp := rt.Add(dbuf.Ptr, uintptr(dbuf.Len)) - - sn := sbuf.Len - sidx - dn := dbuf.Cap - dbuf.Len - nb := native.HTMLEscape(sp, sn, dp, &dn) - - /* check for errors */ - if dbuf.Len += dn; nb >= 0 { - break - } - - /* not enough space, grow the slice and try again */ - sidx += ^nb - *dbuf = rt.GrowSlice(typeByte, *dbuf, dbuf.Cap*2) - } - return dst -} - -func F64toa(buf []byte, v float64) ([]byte) { - if v == 0 { - return append(buf, '0') - } - buf = rt.GuardSlice2(buf, 64) - ret := native.F64toa((*byte)(rt.IndexByte(buf, len(buf))), v) - if ret > 0 { - return buf[:len(buf)+ret] - } else { - return buf - } -} - -func F32toa(buf []byte, v float32) ([]byte) { - if v == 0 { - return append(buf, '0') - } - buf = rt.GuardSlice2(buf, 64) - ret := native.F32toa((*byte)(rt.IndexByte(buf, len(buf))), v) - if ret > 0 { - return buf[:len(buf)+ret] - } else { - return buf - } -} - -func I64toa(buf []byte, v int64) ([]byte) { - buf = rt.GuardSlice2(buf, 32) - ret := native.I64toa((*byte)(rt.IndexByte(buf, len(buf))), v) - if ret > 0 { - return buf[:len(buf)+ret] - } else { - return buf - } -} - -func U64toa(buf []byte, v uint64) ([]byte) { - buf = rt.GuardSlice2(buf, 32) - ret := native.U64toa((*byte)(rt.IndexByte(buf, len(buf))), v) - if ret > 0 { - return buf[:len(buf)+ret] - } else { - return buf - } -} - diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec_compat.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec_compat.go deleted file mode 100644 index c15cbf7d8..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec_compat.go +++ /dev/null @@ -1,148 +0,0 @@ -// +build !amd64,!arm64 go1.24 !go1.16 arm64,!go1.20 - -/** - * Copyright 2024 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package alg - -import ( - _ "unsafe" - "unicode/utf8" - "strconv" - "bytes" - "encoding/json" - - "github.com/bytedance/sonic/internal/rt" -) - -// Valid validates json and returns first non-blank character position, -// if it is only one valid json value. -// Otherwise returns invalid character position using start. -// -// Note: it does not check for the invalid UTF-8 characters. -func Valid(data []byte) (ok bool, start int) { - ok = json.Valid(data) - return ok, 0 -} - -var typeByte = rt.UnpackEface(byte(0)).Type - -func Quote(e []byte, s string, double bool) []byte { - if len(s) == 0 { - if double { - return append(e, `"\"\""`...) - } - return append(e, `""`...) 
- } - - b := e - ss := len(e) - e = append(e, '"') - start := 0 - - for i := 0; i < len(s); { - if b := s[i]; b < utf8.RuneSelf { - if rt.SafeSet[b] { - i++ - continue - } - if start < i { - e = append(e, s[start:i]...) - } - e = append(e, '\\') - switch b { - case '\\', '"': - e = append(e, b) - case '\n': - e = append(e, 'n') - case '\r': - e = append(e, 'r') - case '\t': - e = append(e, 't') - default: - // This encodes bytes < 0x20 except for \t, \n and \r. - // If escapeHTML is set, it also escapes <, >, and & - // because they can lead to security holes when - // user-controlled strings are rendered into JSON - // and served to some browsers. - e = append(e, `u00`...) - e = append(e, rt.Hex[b>>4]) - e = append(e, rt.Hex[b&0xF]) - } - i++ - start = i - continue - } - c, size := utf8.DecodeRuneInString(s[i:]) - // if correct && c == utf8.RuneError && size == 1 { - // if start < i { - // e = append(e, s[start:i]...) - // } - // e = append(e, `\ufffd`...) - // i += size - // start = i - // continue - // } - if c == '\u2028' || c == '\u2029' { - if start < i { - e = append(e, s[start:i]...) - } - e = append(e, `\u202`...) - e = append(e, rt.Hex[c&0xF]) - i += size - start = i - continue - } - i += size - } - - if start < len(s) { - e = append(e, s[start:]...) - } - e = append(e, '"') - - if double { - return strconv.AppendQuote(b, string(e[ss:])) - } else { - return e - } -} - -func HtmlEscape(dst []byte, src []byte) []byte { - buf := bytes.NewBuffer(dst) - json.HTMLEscape(buf, src) - return buf.Bytes() -} - -func F64toa(buf []byte, v float64) ([]byte) { - bs := bytes.NewBuffer(buf) - _ = json.NewEncoder(bs).Encode(v) - return bs.Bytes() -} - -func F32toa(buf []byte, v float32) ([]byte) { - bs := bytes.NewBuffer(buf) - _ = json.NewEncoder(bs).Encode(v) - return bs.Bytes() -} - -func I64toa(buf []byte, v int64) ([]byte) { - return strconv.AppendInt(buf, int64(v), 10) -} - -func U64toa(buf []byte, v uint64) ([]byte) { - return strconv.AppendUint(buf, v, 10) -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/compiler.go b/vendor/github.com/bytedance/sonic/internal/encoder/compiler.go deleted file mode 100644 index 902fbc98b..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/compiler.go +++ /dev/null @@ -1,676 +0,0 @@ -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package encoder - -import ( - "reflect" - "unsafe" - - "github.com/bytedance/sonic/internal/encoder/ir" - "github.com/bytedance/sonic/internal/encoder/vars" - "github.com/bytedance/sonic/internal/encoder/vm" - "github.com/bytedance/sonic/internal/resolver" - "github.com/bytedance/sonic/internal/rt" - "github.com/bytedance/sonic/option" -) - -func ForceUseVM() { - vm.SetCompiler(makeEncoderVM) - pretouchType = pretouchTypeVM - encodeTypedPointer = vm.EncodeTypedPointer - vars.UseVM = true -} - -var encodeTypedPointer func(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *vars.Stack, fv uint64) error - -func makeEncoderVM(vt *rt.GoType, ex ...interface{}) (interface{}, error) { - pp, err := NewCompiler().Compile(vt.Pack(), ex[0].(bool)) - if err != nil { - return nil, err - } - return &pp, nil -} - -var pretouchType func(_vt reflect.Type, opts option.CompileOptions, v uint8) (map[reflect.Type]uint8, error) - -func pretouchTypeVM(_vt reflect.Type, opts option.CompileOptions, v uint8) (map[reflect.Type]uint8, error) { - /* compile function */ - compiler := NewCompiler().apply(opts) - - /* find or compile */ - vt := rt.UnpackType(_vt) - if val := vars.GetProgram(vt); val != nil { - return nil, nil - } else if _, err := vars.ComputeProgram(vt, makeEncoderVM, v == 1); err == nil { - return compiler.rec, nil - } else { - return nil, err - } -} - -func pretouchRec(vtm map[reflect.Type]uint8, opts option.CompileOptions) error { - if opts.RecursiveDepth < 0 || len(vtm) == 0 { - return nil - } - next := make(map[reflect.Type]uint8) - for vt, v := range vtm { - sub, err := pretouchType(vt, opts, v) - if err != nil { - return err - } - for svt, v := range sub { - next[svt] = v - } - } - opts.RecursiveDepth -= 1 - return pretouchRec(next, opts) -} - -type Compiler struct { - opts option.CompileOptions - pv bool - tab map[reflect.Type]bool - rec map[reflect.Type]uint8 -} - -func NewCompiler() *Compiler { - return &Compiler{ - opts: option.DefaultCompileOptions(), - tab: map[reflect.Type]bool{}, - rec: map[reflect.Type]uint8{}, - } -} - -func (self *Compiler) apply(opts option.CompileOptions) *Compiler { - self.opts = opts - if self.opts.RecursiveDepth > 0 { - self.rec = map[reflect.Type]uint8{} - } - return self -} - -func (self *Compiler) rescue(ep *error) { - if val := recover(); val != nil { - if err, ok := val.(error); ok { - *ep = err - } else { - panic(val) - } - } -} - -func (self *Compiler) Compile(vt reflect.Type, pv bool) (ret ir.Program, err error) { - defer self.rescue(&err) - self.compileOne(&ret, 0, vt, pv) - return -} - -func (self *Compiler) compileOne(p *ir.Program, sp int, vt reflect.Type, pv bool) { - if self.tab[vt] { - p.Vp(ir.OP_recurse, vt, pv) - } else { - self.compileRec(p, sp, vt, pv) - } -} - -func (self *Compiler) tryCompileMarshaler(p *ir.Program, vt reflect.Type, pv bool) bool { - pt := reflect.PtrTo(vt) - - /* check for addressable `json.Marshaler` with pointer receiver */ - if pv && pt.Implements(vars.JsonMarshalerType) { - addMarshalerOp(p, ir.OP_marshal_p, pt, vars.JsonMarshalerType) - return true - } - - /* check for `json.Marshaler` */ - if vt.Implements(vars.JsonMarshalerType) { - self.compileMarshaler(p, ir.OP_marshal, vt, vars.JsonMarshalerType) - return true - } - - /* check for addressable `encoding.TextMarshaler` with pointer receiver */ - if pv && pt.Implements(vars.EncodingTextMarshalerType) { - addMarshalerOp(p, ir.OP_marshal_text_p, pt, vars.EncodingTextMarshalerType) - return true - } - - /* check for `encoding.TextMarshaler` */ - if 
vt.Implements(vars.EncodingTextMarshalerType) { - self.compileMarshaler(p, ir.OP_marshal_text, vt, vars.EncodingTextMarshalerType) - return true - } - - return false -} - -func (self *Compiler) compileRec(p *ir.Program, sp int, vt reflect.Type, pv bool) { - pr := self.pv - - if self.tryCompileMarshaler(p, vt, pv) { - return - } - - /* enter the recursion, and compile the type */ - self.pv = pv - self.tab[vt] = true - self.compileOps(p, sp, vt) - - /* exit the recursion */ - self.pv = pr - delete(self.tab, vt) -} - -func (self *Compiler) compileOps(p *ir.Program, sp int, vt reflect.Type) { - switch vt.Kind() { - case reflect.Bool: - p.Add(ir.OP_bool) - case reflect.Int: - p.Add(ir.OP_int()) - case reflect.Int8: - p.Add(ir.OP_i8) - case reflect.Int16: - p.Add(ir.OP_i16) - case reflect.Int32: - p.Add(ir.OP_i32) - case reflect.Int64: - p.Add(ir.OP_i64) - case reflect.Uint: - p.Add(ir.OP_uint()) - case reflect.Uint8: - p.Add(ir.OP_u8) - case reflect.Uint16: - p.Add(ir.OP_u16) - case reflect.Uint32: - p.Add(ir.OP_u32) - case reflect.Uint64: - p.Add(ir.OP_u64) - case reflect.Uintptr: - p.Add(ir.OP_uintptr()) - case reflect.Float32: - p.Add(ir.OP_f32) - case reflect.Float64: - p.Add(ir.OP_f64) - case reflect.String: - self.compileString(p, vt) - case reflect.Array: - self.compileArray(p, sp, vt.Elem(), vt.Len()) - case reflect.Interface: - self.compileInterface(p, vt) - case reflect.Map: - self.compileMap(p, sp, vt) - case reflect.Ptr: - self.compilePtr(p, sp, vt.Elem()) - case reflect.Slice: - self.compileSlice(p, sp, vt.Elem()) - case reflect.Struct: - self.compileStruct(p, sp, vt) - default: - panic(vars.Error_type(vt)) - } -} - -func (self *Compiler) compileNil(p *ir.Program, sp int, vt reflect.Type, nil_op ir.Op, fn func(*ir.Program, int, reflect.Type)) { - x := p.PC() - p.Add(ir.OP_is_nil) - fn(p, sp, vt) - e := p.PC() - p.Add(ir.OP_goto) - p.Pin(x) - p.Add(nil_op) - p.Pin(e) -} - -func (self *Compiler) compilePtr(p *ir.Program, sp int, vt reflect.Type) { - self.compileNil(p, sp, vt, ir.OP_null, self.compilePtrBody) -} - -func (self *Compiler) compilePtrBody(p *ir.Program, sp int, vt reflect.Type) { - p.Tag(sp) - p.Add(ir.OP_save) - p.Add(ir.OP_deref) - self.compileOne(p, sp+1, vt, true) - p.Add(ir.OP_drop) -} - -func (self *Compiler) compileMap(p *ir.Program, sp int, vt reflect.Type) { - self.compileNil(p, sp, vt, ir.OP_empty_obj, self.compileMapBody) -} - -func (self *Compiler) compileMapBody(p *ir.Program, sp int, vt reflect.Type) { - p.Tag(sp + 1) - p.Int(ir.OP_byte, '{') - e := p.PC() - p.Add(ir.OP_is_zero_map) - p.Add(ir.OP_save) - p.Rtt(ir.OP_map_iter, vt) - p.Add(ir.OP_save) - i := p.PC() - p.Add(ir.OP_map_check_key) - u := p.PC() - p.Add(ir.OP_map_write_key) - self.compileMapBodyKey(p, vt.Key()) - p.Pin(u) - p.Int(ir.OP_byte, ':') - p.Add(ir.OP_map_value_next) - self.compileOne(p, sp+2, vt.Elem(), false) - j := p.PC() - p.Add(ir.OP_map_check_key) - p.Int(ir.OP_byte, ',') - v := p.PC() - p.Add(ir.OP_map_write_key) - self.compileMapBodyKey(p, vt.Key()) - p.Pin(v) - p.Int(ir.OP_byte, ':') - p.Add(ir.OP_map_value_next) - self.compileOne(p, sp+2, vt.Elem(), false) - p.Int(ir.OP_goto, j) - p.Pin(i) - p.Pin(j) - p.Add(ir.OP_map_stop) - p.Add(ir.OP_drop_2) - p.Pin(e) - p.Int(ir.OP_byte, '}') -} - -func (self *Compiler) compileMapBodyKey(p *ir.Program, vk reflect.Type) { - if !vk.Implements(vars.EncodingTextMarshalerType) { - self.compileMapBodyTextKey(p, vk) - } else { - self.compileMapBodyUtextKey(p, vk) - } -} - -func (self *Compiler) compileMapBodyTextKey(p *ir.Program, vk reflect.Type) 
{ - switch vk.Kind() { - case reflect.Invalid: - panic("map key is nil") - case reflect.Bool: - p.Key(ir.OP_bool) - case reflect.Int: - p.Key(ir.OP_int()) - case reflect.Int8: - p.Key(ir.OP_i8) - case reflect.Int16: - p.Key(ir.OP_i16) - case reflect.Int32: - p.Key(ir.OP_i32) - case reflect.Int64: - p.Key(ir.OP_i64) - case reflect.Uint: - p.Key(ir.OP_uint()) - case reflect.Uint8: - p.Key(ir.OP_u8) - case reflect.Uint16: - p.Key(ir.OP_u16) - case reflect.Uint32: - p.Key(ir.OP_u32) - case reflect.Uint64: - p.Key(ir.OP_u64) - case reflect.Uintptr: - p.Key(ir.OP_uintptr()) - case reflect.Float32: - p.Key(ir.OP_f32) - case reflect.Float64: - p.Key(ir.OP_f64) - case reflect.String: - self.compileString(p, vk) - default: - panic(vars.Error_type(vk)) - } -} - -func (self *Compiler) compileMapBodyUtextKey(p *ir.Program, vk reflect.Type) { - if vk.Kind() != reflect.Ptr { - addMarshalerOp(p, ir.OP_marshal_text, vk, vars.EncodingTextMarshalerType) - } else { - self.compileMapBodyUtextPtr(p, vk) - } -} - -func (self *Compiler) compileMapBodyUtextPtr(p *ir.Program, vk reflect.Type) { - i := p.PC() - p.Add(ir.OP_is_nil) - addMarshalerOp(p, ir.OP_marshal_text, vk, vars.EncodingTextMarshalerType) - j := p.PC() - p.Add(ir.OP_goto) - p.Pin(i) - p.Str(ir.OP_text, "\"\"") - p.Pin(j) -} - -func (self *Compiler) compileSlice(p *ir.Program, sp int, vt reflect.Type) { - self.compileNil(p, sp, vt, ir.OP_empty_arr, self.compileSliceBody) -} - -func (self *Compiler) compileSliceBody(p *ir.Program, sp int, vt reflect.Type) { - if vars.IsSimpleByte(vt) { - p.Add(ir.OP_bin) - } else { - self.compileSliceArray(p, sp, vt) - } -} - -func (self *Compiler) compileSliceArray(p *ir.Program, sp int, vt reflect.Type) { - p.Tag(sp) - p.Int(ir.OP_byte, '[') - e := p.PC() - p.Add(ir.OP_is_nil) - p.Add(ir.OP_save) - p.Add(ir.OP_slice_len) - i := p.PC() - p.Rtt(ir.OP_slice_next, vt) - self.compileOne(p, sp+1, vt, true) - j := p.PC() - p.Rtt(ir.OP_slice_next, vt) - p.Int(ir.OP_byte, ',') - self.compileOne(p, sp+1, vt, true) - p.Int(ir.OP_goto, j) - p.Pin(i) - p.Pin(j) - p.Add(ir.OP_drop) - p.Pin(e) - p.Int(ir.OP_byte, ']') -} - -func (self *Compiler) compileArray(p *ir.Program, sp int, vt reflect.Type, nb int) { - p.Tag(sp) - p.Int(ir.OP_byte, '[') - p.Add(ir.OP_save) - - /* first item */ - if nb != 0 { - self.compileOne(p, sp+1, vt, self.pv) - p.Add(ir.OP_load) - } - - /* remaining items */ - for i := 1; i < nb; i++ { - p.Int(ir.OP_byte, ',') - p.Int(ir.OP_index, i*int(vt.Size())) - self.compileOne(p, sp+1, vt, self.pv) - p.Add(ir.OP_load) - } - - /* end of array */ - p.Add(ir.OP_drop) - p.Int(ir.OP_byte, ']') -} - -func (self *Compiler) compileString(p *ir.Program, vt reflect.Type) { - if vt != vars.JsonNumberType { - p.Add(ir.OP_str) - } else { - p.Add(ir.OP_number) - } -} - -func (self *Compiler) compileStruct(p *ir.Program, sp int, vt reflect.Type) { - if sp >= self.opts.MaxInlineDepth || p.PC() >= vars.MAX_ILBUF || (sp > 0 && vt.NumField() >= vars.MAX_FIELDS) { - p.Vp(ir.OP_recurse, vt, self.pv) - if self.opts.RecursiveDepth > 0 { - if self.pv { - self.rec[vt] = 1 - } else { - self.rec[vt] = 0 - } - } - } else { - self.compileStructBody(p, sp, vt) - } -} - -func (self *Compiler) compileStructBody(p *ir.Program, sp int, vt reflect.Type) { - p.Tag(sp) - p.Int(ir.OP_byte, '{') - p.Add(ir.OP_save) - p.Add(ir.OP_cond_set) - - /* compile each field */ - for _, fv := range resolver.ResolveStruct(vt) { - var s []int - var o resolver.Offset - - /* "omitempty" for arrays */ - if fv.Type.Kind() == reflect.Array { - if fv.Type.Len() == 0 && 
(fv.Opts&resolver.F_omitempty) != 0 { - continue - } - } - - /* index to the field */ - for _, o = range fv.Path { - if p.Int(ir.OP_index, int(o.Size)); o.Kind == resolver.F_deref { - s = append(s, p.PC()) - p.Add(ir.OP_is_nil) - p.Add(ir.OP_deref) - } - } - - /* check for "omitempty" option */ - if fv.Type.Kind() != reflect.Struct && fv.Type.Kind() != reflect.Array && (fv.Opts&resolver.F_omitempty) != 0 { - s = append(s, p.PC()) - self.compileStructFieldZero(p, fv.Type) - } - - /* add the comma if not the first element */ - i := p.PC() - p.Add(ir.OP_cond_testc) - p.Int(ir.OP_byte, ',') - p.Pin(i) - - /* compile the key and value */ - ft := fv.Type - p.Str(ir.OP_text, Quote(fv.Name)+":") - - /* check for "stringnize" option */ - if (fv.Opts & resolver.F_stringize) == 0 { - self.compileOne(p, sp+1, ft, self.pv) - } else { - self.compileStructFieldStr(p, sp+1, ft) - } - - /* patch the skipping jumps and reload the struct pointer */ - p.Rel(s) - p.Add(ir.OP_load) - } - - /* end of object */ - p.Add(ir.OP_drop) - p.Int(ir.OP_byte, '}') -} - -func (self *Compiler) compileStructFieldStr(p *ir.Program, sp int, vt reflect.Type) { - // NOTICE: according to encoding/json, Marshaler type has higher priority than string option - // see issue: - if self.tryCompileMarshaler(p, vt, self.pv) { - return - } - - pc := -1 - ft := vt - sv := false - - /* dereference the pointer if needed */ - if ft.Kind() == reflect.Ptr { - ft = ft.Elem() - } - - /* check if it can be stringized */ - switch ft.Kind() { - case reflect.Bool: - sv = true - case reflect.Int: - sv = true - case reflect.Int8: - sv = true - case reflect.Int16: - sv = true - case reflect.Int32: - sv = true - case reflect.Int64: - sv = true - case reflect.Uint: - sv = true - case reflect.Uint8: - sv = true - case reflect.Uint16: - sv = true - case reflect.Uint32: - sv = true - case reflect.Uint64: - sv = true - case reflect.Uintptr: - sv = true - case reflect.Float32: - sv = true - case reflect.Float64: - sv = true - case reflect.String: - sv = true - } - - /* if it's not, ignore the "string" and follow the regular path */ - if !sv { - self.compileOne(p, sp, vt, self.pv) - return - } - - /* dereference the pointer */ - if vt.Kind() == reflect.Ptr { - pc = p.PC() - vt = vt.Elem() - p.Add(ir.OP_is_nil) - p.Add(ir.OP_deref) - } - - /* special case of a double-quoted string */ - if ft != vars.JsonNumberType && ft.Kind() == reflect.String { - p.Add(ir.OP_quote) - } else { - self.compileStructFieldQuoted(p, sp, vt) - } - - /* the "null" case of the pointer */ - if pc != -1 { - e := p.PC() - p.Add(ir.OP_goto) - p.Pin(pc) - p.Add(ir.OP_null) - p.Pin(e) - } -} - -func (self *Compiler) compileStructFieldZero(p *ir.Program, vt reflect.Type) { - switch vt.Kind() { - case reflect.Bool: - p.Add(ir.OP_is_zero_1) - case reflect.Int: - p.Add(ir.OP_is_zero_ints()) - case reflect.Int8: - p.Add(ir.OP_is_zero_1) - case reflect.Int16: - p.Add(ir.OP_is_zero_2) - case reflect.Int32: - p.Add(ir.OP_is_zero_4) - case reflect.Int64: - p.Add(ir.OP_is_zero_8) - case reflect.Uint: - p.Add(ir.OP_is_zero_ints()) - case reflect.Uint8: - p.Add(ir.OP_is_zero_1) - case reflect.Uint16: - p.Add(ir.OP_is_zero_2) - case reflect.Uint32: - p.Add(ir.OP_is_zero_4) - case reflect.Uint64: - p.Add(ir.OP_is_zero_8) - case reflect.Uintptr: - p.Add(ir.OP_is_nil) - case reflect.Float32: - p.Add(ir.OP_is_zero_4) - case reflect.Float64: - p.Add(ir.OP_is_zero_8) - case reflect.String: - p.Add(ir.OP_is_nil_p1) - case reflect.Interface: - p.Add(ir.OP_is_nil) - case reflect.Map: - p.Add(ir.OP_is_zero_map) - 
case reflect.Ptr: - p.Add(ir.OP_is_nil) - case reflect.Slice: - p.Add(ir.OP_is_nil_p1) - default: - panic(vars.Error_type(vt)) - } -} - -func (self *Compiler) compileStructFieldQuoted(p *ir.Program, sp int, vt reflect.Type) { - p.Int(ir.OP_byte, '"') - self.compileOne(p, sp, vt, self.pv) - p.Int(ir.OP_byte, '"') -} - -func (self *Compiler) compileInterface(p *ir.Program, vt reflect.Type) { - x := p.PC() - p.Add(ir.OP_is_nil_p1) - - /* iface and efaces are different */ - if vt.NumMethod() == 0 { - p.Add(ir.OP_eface) - } else { - p.Add(ir.OP_iface) - } - - /* the "null" value */ - e := p.PC() - p.Add(ir.OP_goto) - p.Pin(x) - p.Add(ir.OP_null) - p.Pin(e) -} - -func (self *Compiler) compileMarshaler(p *ir.Program, op ir.Op, vt reflect.Type, mt reflect.Type) { - pc := p.PC() - vk := vt.Kind() - - /* direct receiver */ - if vk != reflect.Ptr { - addMarshalerOp(p, op, vt, mt) - return - } - /* value receiver with a pointer type, check for nil before calling the marshaler */ - p.Add(ir.OP_is_nil) - - addMarshalerOp(p, op, vt, mt) - - i := p.PC() - p.Add(ir.OP_goto) - p.Pin(pc) - p.Add(ir.OP_null) - p.Pin(i) -} - -func addMarshalerOp(p *ir.Program, op ir.Op, vt reflect.Type, mt reflect.Type) { - if vars.UseVM { - itab := rt.GetItab(rt.IfaceType(rt.UnpackType(mt)), rt.UnpackType(vt), true) - p.Vtab(op, vt, itab) - } else { - // OPT: get itab here - p.Rtt(op, vt) - } -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/encode_norace.go b/vendor/github.com/bytedance/sonic/internal/encoder/encode_norace.go deleted file mode 100644 index c53206433..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/encode_norace.go +++ /dev/null @@ -1,24 +0,0 @@ -//go:build !race -// +build !race - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package encoder - -func encodeIntoCheckRace(buf *[]byte, val interface{}, opts Options) error { - return encodeInto(buf, val, opts) -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/encode_race.go b/vendor/github.com/bytedance/sonic/internal/encoder/encode_race.go deleted file mode 100644 index c373c55f9..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/encode_race.go +++ /dev/null @@ -1,54 +0,0 @@ -//go:build race -// +build race - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package encoder - -import ( - `encoding/json` - - `github.com/bytedance/sonic/internal/rt` -) - - -func helpDetectDataRace(val interface{}) { - var out []byte - defer func() { - if v := recover(); v != nil { - // NOTICE: help user to locate where panic occurs - println("panic when encoding on: ", truncate(out)) - panic(v) - } - }() - out, _ = json.Marshal(val) -} - -func encodeIntoCheckRace(buf *[]byte, val interface{}, opts Options) error { - err := encodeInto(buf, val, opts) - /* put last to make the panic from sonic will always be caught at first */ - helpDetectDataRace(val) - return err -} - -func truncate(json []byte) string { - if len(json) <= 256 { - return rt.Mem2Str(json) - } else { - return rt.Mem2Str(json[len(json)-256:]) - } -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go b/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go deleted file mode 100644 index 4cba1a168..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go +++ /dev/null @@ -1,318 +0,0 @@ -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package encoder - -import ( - "bytes" - "encoding/json" - "reflect" - "runtime" - "unsafe" - - "github.com/bytedance/sonic/utf8" - "github.com/bytedance/sonic/internal/encoder/alg" - "github.com/bytedance/sonic/internal/encoder/vars" - "github.com/bytedance/sonic/internal/rt" - "github.com/bytedance/sonic/option" -) - -// Options is a set of encoding options. -type Options uint64 - -const ( - // SortMapKeys indicates that the keys of a map needs to be sorted - // before serializing into JSON. - // WARNING: This hurts performance A LOT, USE WITH CARE. - SortMapKeys Options = 1 << alg.BitSortMapKeys - - // EscapeHTML indicates encoder to escape all HTML characters - // after serializing into JSON (see https://pkg.go.dev/encoding/json#HTMLEscape). - // WARNING: This hurts performance A LOT, USE WITH CARE. - EscapeHTML Options = 1 << alg.BitEscapeHTML - - // CompactMarshaler indicates that the output JSON from json.Marshaler - // is always compact and needs no validation - CompactMarshaler Options = 1 << alg.BitCompactMarshaler - - // NoQuoteTextMarshaler indicates that the output text from encoding.TextMarshaler - // is always escaped string and needs no quoting - NoQuoteTextMarshaler Options = 1 << alg.BitNoQuoteTextMarshaler - - // NoNullSliceOrMap indicates all empty Array or Object are encoded as '[]' or '{}', - // instead of 'null'. - // NOTE: The priority of this option is lower than json tag `omitempty`. - NoNullSliceOrMap Options = 1 << alg.BitNoNullSliceOrMap - - // ValidateString indicates that encoder should validate the input string - // before encoding it into JSON. - ValidateString Options = 1 << alg.BitValidateString - - // NoValidateJSONMarshaler indicates that the encoder should not validate the output string - // after encoding the JSONMarshaler to JSON. 
- NoValidateJSONMarshaler Options = 1 << alg.BitNoValidateJSONMarshaler - - // NoEncoderNewline indicates that the encoder should not add a newline after every message - NoEncoderNewline Options = 1 << alg.BitNoEncoderNewline - - // CompatibleWithStd is used to be compatible with std encoder. - CompatibleWithStd Options = SortMapKeys | EscapeHTML | CompactMarshaler - - // Encode Infinity or Nan float into `null`, instead of returning an error. - EncodeNullForInfOrNan Options = 1 << alg.BitEncodeNullForInfOrNan -) - -// Encoder represents a specific set of encoder configurations. -type Encoder struct { - Opts Options - prefix string - indent string -} - -// Encode returns the JSON encoding of v. -func (self *Encoder) Encode(v interface{}) ([]byte, error) { - if self.indent != "" || self.prefix != "" { - return EncodeIndented(v, self.prefix, self.indent, self.Opts) - } - return Encode(v, self.Opts) -} - -// SortKeys enables the SortMapKeys option. -func (self *Encoder) SortKeys() *Encoder { - self.Opts |= SortMapKeys - return self -} - -// SetEscapeHTML specifies if option EscapeHTML opens -func (self *Encoder) SetEscapeHTML(f bool) { - if f { - self.Opts |= EscapeHTML - } else { - self.Opts &= ^EscapeHTML - } -} - -// SetValidateString specifies if option ValidateString opens -func (self *Encoder) SetValidateString(f bool) { - if f { - self.Opts |= ValidateString - } else { - self.Opts &= ^ValidateString - } -} - -// SetNoValidateJSONMarshaler specifies if option NoValidateJSONMarshaler opens -func (self *Encoder) SetNoValidateJSONMarshaler(f bool) { - if f { - self.Opts |= NoValidateJSONMarshaler - } else { - self.Opts &= ^NoValidateJSONMarshaler - } -} - -// SetNoEncoderNewline specifies if option NoEncoderNewline opens -func (self *Encoder) SetNoEncoderNewline(f bool) { - if f { - self.Opts |= NoEncoderNewline - } else { - self.Opts &= ^NoEncoderNewline - } -} - - -// SetCompactMarshaler specifies if option CompactMarshaler opens -func (self *Encoder) SetCompactMarshaler(f bool) { - if f { - self.Opts |= CompactMarshaler - } else { - self.Opts &= ^CompactMarshaler - } -} - -// SetNoQuoteTextMarshaler specifies if option NoQuoteTextMarshaler opens -func (self *Encoder) SetNoQuoteTextMarshaler(f bool) { - if f { - self.Opts |= NoQuoteTextMarshaler - } else { - self.Opts &= ^NoQuoteTextMarshaler - } -} - -// SetIndent instructs the encoder to format each subsequent encoded -// value as if indented by the package-level function EncodeIndent(). -// Calling SetIndent("", "") disables indentation. -func (enc *Encoder) SetIndent(prefix, indent string) { - enc.prefix = prefix - enc.indent = indent -} - -// Quote returns the JSON-quoted version of s. -func Quote(s string) string { - buf := make([]byte, 0, len(s)+2) - buf = alg.Quote(buf, s, false) - return rt.Mem2Str(buf) -} - -// Encode returns the JSON encoding of val, encoded with opts. 
-func Encode(val interface{}, opts Options) ([]byte, error) { - var ret []byte - - buf := vars.NewBytes() - err := encodeIntoCheckRace(buf, val, opts) - - /* check for errors */ - if err != nil { - vars.FreeBytes(buf) - return nil, err - } - - /* htmlescape or correct UTF-8 if opts enable */ - old := buf - *buf = encodeFinish(*old, opts) - pbuf := ((*rt.GoSlice)(unsafe.Pointer(buf))).Ptr - pold := ((*rt.GoSlice)(unsafe.Pointer(old))).Ptr - - /* return when allocated a new buffer */ - if pbuf != pold { - vars.FreeBytes(old) - return *buf, nil - } - - /* make a copy of the result */ - if rt.CanSizeResue(cap(*buf)) { - ret = make([]byte, len(*buf)) - copy(ret, *buf) - vars.FreeBytes(buf) - } else { - ret = *buf - } - - /* return the buffer into pool */ - return ret, nil -} - -// EncodeInto is like Encode but uses a user-supplied buffer instead of allocating -// a new one. -func EncodeInto(buf *[]byte, val interface{}, opts Options) error { - err := encodeIntoCheckRace(buf, val, opts) - if err != nil { - return err - } - *buf = encodeFinish(*buf, opts) - return err -} - -func encodeInto(buf *[]byte, val interface{}, opts Options) error { - stk := vars.NewStack() - efv := rt.UnpackEface(val) - err := encodeTypedPointer(buf, efv.Type, &efv.Value, stk, uint64(opts)) - - /* return the stack into pool */ - if err != nil { - vars.ResetStack(stk) - } - vars.FreeStack(stk) - - /* avoid GC ahead */ - runtime.KeepAlive(buf) - runtime.KeepAlive(efv) - return err -} - -func encodeFinish(buf []byte, opts Options) []byte { - if opts & EscapeHTML != 0 { - buf = HTMLEscape(nil, buf) - } - if (opts & ValidateString != 0) && !utf8.Validate(buf) { - buf = utf8.CorrectWith(nil, buf, `\ufffd`) - } - return buf -} - - -// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029 -// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029 -// so that the JSON will be safe to embed inside HTML <script> tags. -// For historical reasons, web browsers don't honor standard HTML -// escaping within <script> tags, so an alternative JSON encoding must -// be used. -func HTMLEscape(dst []byte, src []byte) []byte { - return alg.HtmlEscape(dst, src) -} - -// EncodeIndented is like Encode but applies Indent to format the output. -// Each JSON element in the output will begin on a new line beginning with prefix -// followed by one or more copies of indent according to the indentation nesting. -func EncodeIndented(val interface{}, prefix string, indent string, opts Options) ([]byte, error) { - var err error - var buf *bytes.Buffer - - /* encode into the buffer */ - out := vars.NewBytes() - err = EncodeInto(out, val, opts) - - /* check for errors */ - if err != nil { - vars.FreeBytes(out) - return nil, err - } - - /* indent the JSON */ - buf = vars.NewBuffer() - err = json.Indent(buf, *out, prefix, indent) - vars.FreeBytes(out) - - /* check for errors */ - if err != nil { - vars.FreeBuffer(buf) - return nil, err - } - - /* copy to the result buffer */ - var ret []byte - if rt.CanSizeResue(cap(buf.Bytes())) { - ret = make([]byte, buf.Len()) - copy(ret, buf.Bytes()) - /* return the buffers into pool */ - vars.FreeBuffer(buf) - } else { - ret = buf.Bytes() - } - - return ret, nil -} - -// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in -// order to reduce the first-hit latency. -// -// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is -// a compile option to set the depth of recursive compile for the nested struct type. 
-func Pretouch(vt reflect.Type, opts ...option.CompileOption) error { - cfg := option.DefaultCompileOptions() - for _, opt := range opts { - opt(&cfg) - } - return pretouchRec(map[reflect.Type]uint8{vt: 0}, cfg) -} - -// Valid validates json and returns first non-blank character position, -// if it is only one valid json value. -// Otherwise returns invalid character position using start. -// -// Note: it does not check for the invalid UTF-8 characters. -func Valid(data []byte) (ok bool, start int) { - return alg.Valid(data) -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/ir/op.go b/vendor/github.com/bytedance/sonic/internal/encoder/ir/op.go deleted file mode 100644 index a0c693f00..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/ir/op.go +++ /dev/null @@ -1,473 +0,0 @@ -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package ir - -import ( - "fmt" - "reflect" - "strconv" - "strings" - "unsafe" - - "github.com/bytedance/sonic/internal/encoder/vars" - "github.com/bytedance/sonic/internal/rt" -) - -type Op uint8 - -const ( - OP_null Op = iota + 1 - OP_empty_arr - OP_empty_obj - OP_bool - OP_i8 - OP_i16 - OP_i32 - OP_i64 - OP_u8 - OP_u16 - OP_u32 - OP_u64 - OP_f32 - OP_f64 - OP_str - OP_bin - OP_quote - OP_number - OP_eface - OP_iface - OP_byte - OP_text - OP_deref - OP_index - OP_load - OP_save - OP_drop - OP_drop_2 - OP_recurse - OP_is_nil - OP_is_nil_p1 - OP_is_zero_1 - OP_is_zero_2 - OP_is_zero_4 - OP_is_zero_8 - OP_is_zero_map - OP_goto - OP_map_iter - OP_map_stop - OP_map_check_key - OP_map_write_key - OP_map_value_next - OP_slice_len - OP_slice_next - OP_marshal - OP_marshal_p - OP_marshal_text - OP_marshal_text_p - OP_cond_set - OP_cond_testc -) - -const ( - _INT_SIZE = 32 << (^uint(0) >> 63) - _PTR_SIZE = 32 << (^uintptr(0) >> 63) - _PTR_BYTE = unsafe.Sizeof(uintptr(0)) -) - -const OpSize = unsafe.Sizeof(NewInsOp(0)) - -var OpNames = [256]string{ - OP_null: "null", - OP_empty_arr: "empty_arr", - OP_empty_obj: "empty_obj", - OP_bool: "bool", - OP_i8: "i8", - OP_i16: "i16", - OP_i32: "i32", - OP_i64: "i64", - OP_u8: "u8", - OP_u16: "u16", - OP_u32: "u32", - OP_u64: "u64", - OP_f32: "f32", - OP_f64: "f64", - OP_str: "str", - OP_bin: "bin", - OP_quote: "quote", - OP_number: "number", - OP_eface: "eface", - OP_iface: "iface", - OP_byte: "byte", - OP_text: "text", - OP_deref: "deref", - OP_index: "index", - OP_load: "load", - OP_save: "save", - OP_drop: "drop", - OP_drop_2: "drop_2", - OP_recurse: "recurse", - OP_is_nil: "is_nil", - OP_is_nil_p1: "is_nil_p1", - OP_is_zero_1: "is_zero_1", - OP_is_zero_2: "is_zero_2", - OP_is_zero_4: "is_zero_4", - OP_is_zero_8: "is_zero_8", - OP_is_zero_map: "is_zero_map", - OP_goto: "goto", - OP_map_iter: "map_iter", - OP_map_stop: "map_stop", - OP_map_check_key: "map_check_key", - OP_map_write_key: "map_write_key", - OP_map_value_next: "map_value_next", - OP_slice_len: "slice_len", - OP_slice_next: "slice_next", - OP_marshal: "marshal", - OP_marshal_p: "marshal_p", 
- OP_marshal_text: "marshal_text", - OP_marshal_text_p: "marshal_text_p", - OP_cond_set: "cond_set", - OP_cond_testc: "cond_testc", -} - -func (self Op) String() string { - if ret := OpNames[self]; ret != "" { - return ret - } else { - return "<invalid>" - } -} - -func OP_int() Op { - switch _INT_SIZE { - case 32: - return OP_i32 - case 64: - return OP_i64 - default: - panic("unsupported int size") - } -} - -func OP_uint() Op { - switch _INT_SIZE { - case 32: - return OP_u32 - case 64: - return OP_u64 - default: - panic("unsupported uint size") - } -} - -func OP_uintptr() Op { - switch _PTR_SIZE { - case 32: - return OP_u32 - case 64: - return OP_u64 - default: - panic("unsupported pointer size") - } -} - -func OP_is_zero_ints() Op { - switch _INT_SIZE { - case 32: - return OP_is_zero_4 - case 64: - return OP_is_zero_8 - default: - panic("unsupported integer size") - } -} - -type Instr struct { - o Op - u int // union {op: 8, _: 8, vi: 48}, vi maybe int or len(str) - p unsafe.Pointer // maybe GoString.Ptr, or *GoType -} - -func NewInsOp(op Op) Instr { - return Instr{o: op} -} - -func NewInsVi(op Op, vi int) Instr { - return Instr{o: op, u: vi} -} - -func NewInsVs(op Op, vs string) Instr { - return Instr{ - o: op, - u: len(vs), - p: (*rt.GoString)(unsafe.Pointer(&vs)).Ptr, - } -} - -func NewInsVt(op Op, vt reflect.Type) Instr { - return Instr{ - o: op, - p: unsafe.Pointer(rt.UnpackType(vt)), - } -} - -type typAndTab struct { - vt *rt.GoType - itab *rt.GoItab -} - -func NewInsVtab(op Op, vt reflect.Type, itab *rt.GoItab) Instr { - return Instr{ - o: op, - p: unsafe.Pointer(&typAndTab{ - vt: rt.UnpackType(vt), - itab: itab, - }), - } -} - -func NewInsVp(op Op, vt reflect.Type, pv bool) Instr { - i := 0 - if pv { - i = 1 - } - return Instr{ - o: op, - u: i, - p: unsafe.Pointer(rt.UnpackType(vt)), - } -} - -func (self Instr) Op() Op { - return Op(self.o) -} - -func (self Instr) Vi() int { - return self.u -} - -func (self Instr) Vf() uint8 { - return (*rt.GoType)(self.p).KindFlags -} - -func (self Instr) Vs() (v string) { - (*rt.GoString)(unsafe.Pointer(&v)).Ptr = self.p - (*rt.GoString)(unsafe.Pointer(&v)).Len = self.Vi() - return -} - -func (self Instr) Vk() reflect.Kind { - return (*rt.GoType)(self.p).Kind() -} - -func (self Instr) Vt() reflect.Type { - return (*rt.GoType)(self.p).Pack() -} - -func (self Instr) Vr() *rt.GoType { - return (*rt.GoType)(self.p) -} - -func (self Instr) Vp() (vt reflect.Type, pv bool) { - return (*rt.GoType)(self.p).Pack(), self.u == 1 -} - -func (self Instr) Vtab() (vt *rt.GoType, itab *rt.GoItab) { - tt := (*typAndTab)(self.p) - return tt.vt, tt.itab -} - -func (self Instr) Vp2() (vt *rt.GoType, pv bool) { - return (*rt.GoType)(self.p), self.u == 1 -} - -func (self Instr) I64() int64 { - return int64(self.Vi()) -} - -func (self Instr) Byte() byte { - return byte(self.Vi()) -} - -func (self Instr) Vlen() int { - return int((*rt.GoType)(self.p).Size) -} - -func (self Instr) isBranch() bool { - switch self.Op() { - case OP_goto: - fallthrough - case OP_is_nil: - fallthrough - case OP_is_nil_p1: - fallthrough - case OP_is_zero_1: - fallthrough - case OP_is_zero_2: - fallthrough - case OP_is_zero_4: - fallthrough - case OP_is_zero_8: - fallthrough - case OP_map_check_key: - fallthrough - case OP_map_write_key: - fallthrough - case OP_slice_next: - fallthrough - case OP_cond_testc: - return true - default: - return false - } -} - -func (self Instr) Disassemble() string { - switch self.Op() { - case OP_byte: - return fmt.Sprintf("%-18s%s", self.Op().String(), 
strconv.QuoteRune(rune(self.Vi()))) - case OP_text: - return fmt.Sprintf("%-18s%s", self.Op().String(), strconv.Quote(self.Vs())) - case OP_index: - return fmt.Sprintf("%-18s%d", self.Op().String(), self.Vi()) - case OP_recurse: - fallthrough - case OP_map_iter: - return fmt.Sprintf("%-18s%s", self.Op().String(), self.Vt()) - case OP_marshal: - fallthrough - case OP_marshal_p: - fallthrough - case OP_marshal_text: - fallthrough - case OP_marshal_text_p: - vt, _ := self.Vtab() - return fmt.Sprintf("%-18s%s", self.Op().String(), vt.Pack()) - case OP_goto: - fallthrough - case OP_is_nil: - fallthrough - case OP_is_nil_p1: - fallthrough - case OP_is_zero_1: - fallthrough - case OP_is_zero_2: - fallthrough - case OP_is_zero_4: - fallthrough - case OP_is_zero_8: - fallthrough - case OP_is_zero_map: - fallthrough - case OP_cond_testc: - fallthrough - case OP_map_check_key: - fallthrough - case OP_map_write_key: - return fmt.Sprintf("%-18sL_%d", self.Op().String(), self.Vi()) - case OP_slice_next: - return fmt.Sprintf("%-18sL_%d, %s", self.Op().String(), self.Vi(), self.Vt()) - default: - return fmt.Sprintf("%#v", self) - } -} - -type ( - Program []Instr -) - -func (self Program) PC() int { - return len(self) -} - -func (self Program) Tag(n int) { - if n >= vars.MaxStack { - panic("type nesting too deep") - } -} - -func (self Program) Pin(i int) { - v := &self[i] - v.u = self.PC() -} - -func (self Program) Rel(v []int) { - for _, i := range v { - self.Pin(i) - } -} - -func (self *Program) Add(op Op) { - *self = append(*self, NewInsOp(op)) -} - -func (self *Program) Key(op Op) { - *self = append(*self, - NewInsVi(OP_byte, '"'), - NewInsOp(op), - NewInsVi(OP_byte, '"'), - ) -} - -func (self *Program) Int(op Op, vi int) { - *self = append(*self, NewInsVi(op, vi)) -} - -func (self *Program) Str(op Op, vs string) { - *self = append(*self, NewInsVs(op, vs)) -} - -func (self *Program) Rtt(op Op, vt reflect.Type) { - *self = append(*self, NewInsVt(op, vt)) -} - -func (self *Program) Vp(op Op, vt reflect.Type, pv bool) { - *self = append(*self, NewInsVp(op, vt, pv)) -} - -func (self *Program) Vtab(op Op, vt reflect.Type, itab *rt.GoItab) { - *self = append(*self, NewInsVtab(op, vt, itab)) -} - -func (self Program) Disassemble() string { - nb := len(self) - tab := make([]bool, nb+1) - ret := make([]string, 0, nb+1) - - /* prescan to get all the labels */ - for _, ins := range self { - if ins.isBranch() { - tab[ins.Vi()] = true - } - } - - /* disassemble each instruction */ - for i, ins := range self { - if !tab[i] { - ret = append(ret, "\t"+ins.Disassemble()) - } else { - ret = append(ret, fmt.Sprintf("L_%d:\n\t%s", i, ins.Disassemble())) - } - } - - /* add the last label, if needed */ - if tab[nb] { - ret = append(ret, fmt.Sprintf("L_%d:", nb)) - } - - /* add an "end" indicator, and join all the strings */ - return strings.Join(append(ret, "\tend"), "\n") -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/pools_amd64.go b/vendor/github.com/bytedance/sonic/internal/encoder/pools_amd64.go deleted file mode 100644 index 43f026fbe..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/pools_amd64.go +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package encoder - -import ( - "errors" - "reflect" - "unsafe" - - "github.com/bytedance/sonic/internal/encoder/vars" - "github.com/bytedance/sonic/internal/encoder/x86" - "github.com/bytedance/sonic/internal/rt" - "github.com/bytedance/sonic/option" -) - - -func ForceUseJit() { - x86.SetCompiler(makeEncoderX86) - pretouchType = pretouchTypeX86 - encodeTypedPointer = x86.EncodeTypedPointer - vars.UseVM = false -} - -func init() { - if vars.UseVM { - ForceUseVM() - } else { - ForceUseJit() - } -} - -var _KeepAlive struct { - rb *[]byte - vp unsafe.Pointer - sb *vars.Stack - fv uint64 - err error - frame [x86.FP_offs]byte -} - -var errCallShadow = errors.New("DON'T CALL THIS!") - -// Faker func of _Encoder, used to export its stackmap as _Encoder's -func _Encoder_Shadow(rb *[]byte, vp unsafe.Pointer, sb *vars.Stack, fv uint64) (err error) { - // align to assembler_amd64.go: x86.FP_offs - var frame [x86.FP_offs]byte - - // must keep all args and frames noticeable to GC - _KeepAlive.rb = rb - _KeepAlive.vp = vp - _KeepAlive.sb = sb - _KeepAlive.fv = fv - _KeepAlive.err = err - _KeepAlive.frame = frame - - return errCallShadow -} - -func makeEncoderX86(vt *rt.GoType, ex ...interface{}) (interface{}, error) { - pp, err := NewCompiler().Compile(vt.Pack(), ex[0].(bool)) - if err != nil { - return nil, err - } - as := x86.NewAssembler(pp) - as.Name = vt.String() - return as.Load(), nil -} - -func pretouchTypeX86(_vt reflect.Type, opts option.CompileOptions, v uint8) (map[reflect.Type]uint8, error) { - /* compile function */ - compiler := NewCompiler().apply(opts) - - /* find or compile */ - vt := rt.UnpackType(_vt) - if val := vars.GetProgram(vt); val != nil { - return nil, nil - } else if _, err := vars.ComputeProgram(vt, makeEncoderX86, v == 1); err == nil { - return compiler.rec, nil - } else { - return nil, err - } -} - diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/pools_compt.go b/vendor/github.com/bytedance/sonic/internal/encoder/pools_compt.go deleted file mode 100644 index ef46dc98e..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/pools_compt.go +++ /dev/null @@ -1,24 +0,0 @@ -//go:build !amd64 -// +build !amd64 - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package encoder - -func init() { - ForceUseVM() -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/stream.go b/vendor/github.com/bytedance/sonic/internal/encoder/stream.go deleted file mode 100644 index c2d026a0c..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/stream.go +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package encoder - -import ( - "encoding/json" - "io" - - "github.com/bytedance/sonic/internal/encoder/vars" -) - -// StreamEncoder uses io.Writer as input. -type StreamEncoder struct { - w io.Writer - Encoder -} - -// NewStreamEncoder adapts to encoding/json.NewDecoder API. -// -// NewStreamEncoder returns a new encoder that write to w. -func NewStreamEncoder(w io.Writer) *StreamEncoder { - return &StreamEncoder{w: w} -} - -// Encode encodes interface{} as JSON to io.Writer -func (enc *StreamEncoder) Encode(val interface{}) (err error) { - out := vars.NewBytes() - - /* encode into the buffer */ - err = EncodeInto(out, val, enc.Opts) - if err != nil { - goto free_bytes - } - - if enc.indent != "" || enc.prefix != "" { - /* indent the JSON */ - buf := vars.NewBuffer() - err = json.Indent(buf, *out, enc.prefix, enc.indent) - if err != nil { - vars.FreeBuffer(buf) - goto free_bytes - } - - // according to standard library, terminate each value with a newline... - if enc.Opts & NoEncoderNewline == 0 { - buf.WriteByte('\n') - } - - /* copy into io.Writer */ - _, err = io.Copy(enc.w, buf) - if err != nil { - vars.FreeBuffer(buf) - goto free_bytes - } - - } else { - /* copy into io.Writer */ - var n int - buf := *out - for len(buf) > 0 { - n, err = enc.w.Write(buf) - buf = buf[n:] - if err != nil { - goto free_bytes - } - } - - // according to standard library, terminate each value with a newline... - if enc.Opts & NoEncoderNewline == 0 { - enc.w.Write([]byte{'\n'}) - } - } - -free_bytes: - vars.FreeBytes(out) - return err -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/vars/cache.go b/vendor/github.com/bytedance/sonic/internal/encoder/vars/cache.go deleted file mode 100644 index 9cf2fb15e..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/vars/cache.go +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package vars - -import ( - "unsafe" - - "github.com/bytedance/sonic/internal/rt" -) - -type Encoder func( - rb *[]byte, - vp unsafe.Pointer, - sb *Stack, - fv uint64, -) error - -func FindOrCompile(vt *rt.GoType, pv bool, compiler func(*rt.GoType, ... interface{}) (interface{}, error)) (interface{}, error) { - if val := programCache.Get(vt); val != nil { - return val, nil - } else if ret, err := programCache.Compute(vt, compiler, pv); err == nil { - return ret, nil - } else { - return nil, err - } -} - -func GetProgram(vt *rt.GoType) (interface{}) { - return programCache.Get(vt) -} - -func ComputeProgram(vt *rt.GoType, compute func(*rt.GoType, ... interface{}) (interface{}, error), pv bool) (interface{}, error) { - return programCache.Compute(vt, compute, pv) -}
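For context on the cache helpers above: FindOrCompile first probes the program cache for an already-compiled encoder program for the given *rt.GoType and only falls back to Compute, which runs the supplied compiler and stores the result, on a miss. A minimal sketch of that check-then-compute pattern in plain Go; the sync.Map keyed by an opaque type key and the program type are illustrative stand-ins, not sonic's caching.ProgramCache:

package main

import (
	"fmt"
	"sync"
)

// program is a hypothetical stand-in for a compiled encoder program.
type program struct{ name string }

// cache maps a type key to its compiled program (illustrative only).
var cache sync.Map

// findOrCompile returns the cached program for key, compiling on a miss.
// Concurrent misses may compile more than once, but LoadOrStore keeps a
// single canonical entry, which mirrors the find-or-compile shape above.
func findOrCompile(key uintptr, compile func() (*program, error)) (*program, error) {
	if v, ok := cache.Load(key); ok {
		return v.(*program), nil // fast path: already compiled
	}
	p, err := compile()
	if err != nil {
		return nil, err
	}
	actual, _ := cache.LoadOrStore(key, p)
	return actual.(*program), nil
}

func main() {
	p, _ := findOrCompile(1, func() (*program, error) {
		return &program{name: "encoder[int]"}, nil
	})
	fmt.Println(p.name)
}

The real cache additionally threads the pv (pointer-value) flag through to the compiler, as the signatures above show.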
\ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/vars/const.go b/vendor/github.com/bytedance/sonic/internal/encoder/vars/const.go deleted file mode 100644 index 88499e959..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/vars/const.go +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Copyright 2024 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package vars - -import ( - "os" - "unsafe" -) - -const ( - MaxStack = 4096 // 4k states - StackSize = unsafe.Sizeof(Stack{}) - StateSize = int64(unsafe.Sizeof(State{})) - StackLimit = MaxStack * StateSize -) - -const ( - MAX_ILBUF = 100000 // cutoff at 100k of IL instructions - MAX_FIELDS = 50 // cutoff at 50 fields struct -) - -var ( - DebugSyncGC = os.Getenv("SONIC_SYNC_GC") != "" - DebugAsyncGC = os.Getenv("SONIC_NO_ASYNC_GC") == "" - DebugCheckPtr = os.Getenv("SONIC_CHECK_POINTER") != "" -) - -var UseVM = os.Getenv("SONIC_ENCODER_USE_VM") != "" diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/vars/errors.go b/vendor/github.com/bytedance/sonic/internal/encoder/vars/errors.go deleted file mode 100644 index 77919c44a..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/vars/errors.go +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package vars - -import ( - `encoding/json` - `fmt` - `reflect` - `strconv` - `unsafe` - - `github.com/bytedance/sonic/internal/rt` -) - -var ERR_too_deep = &json.UnsupportedValueError { - Str : "Value nesting too deep", - Value : reflect.ValueOf("..."), -} - -var ERR_nan_or_infinite = &json.UnsupportedValueError { - Str : "NaN or ±Infinite", - Value : reflect.ValueOf("NaN or ±Infinite"), -} - -func Error_type(vtype reflect.Type) error { - return &json.UnsupportedTypeError{Type: vtype} -} - -func Error_number(number json.Number) error { - return &json.UnsupportedValueError { - Str : "invalid number literal: " + strconv.Quote(string(number)), - Value : reflect.ValueOf(number), - } -} - -func Error_marshaler(ret []byte, pos int) error { - return fmt.Errorf("invalid Marshaler output json syntax at %d: %q", pos, ret) -} - -const ( - PanicNilPointerOfNonEmptyString int = 1 + iota -) - -func GoPanic(code int, val unsafe.Pointer) { - switch(code){ - case PanicNilPointerOfNonEmptyString: - panic(fmt.Sprintf("val: %#v has nil pointer while its length is not zero!\nThis is a nil pointer exception (NPE) problem. 
There might be a data race issue. It is recommended to execute the tests related to the code with the `-race` compile flag to detect the problem.", (*rt.GoString)(val))) - default: - panic("encoder error!") - } -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/vars/stack.go b/vendor/github.com/bytedance/sonic/internal/encoder/vars/stack.go deleted file mode 100644 index 28a630b40..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/vars/stack.go +++ /dev/null @@ -1,146 +0,0 @@ -/** - * Copyright 2024 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package vars - -import ( - "bytes" - "sync" - "unsafe" - - "github.com/bytedance/sonic/internal/caching" - "github.com/bytedance/sonic/internal/rt" - "github.com/bytedance/sonic/option" -) - -type State struct { - x int - f uint64 - p unsafe.Pointer - q unsafe.Pointer -} - -type Stack struct { - sp uintptr - sb [MaxStack]State -} - -var ( - bytesPool = sync.Pool{} - stackPool = sync.Pool{ - New: func() interface{} { - return &Stack{} - }, - } - bufferPool = sync.Pool{} - programCache = caching.CreateProgramCache() -) - -func NewBytes() *[]byte { - if ret := bytesPool.Get(); ret != nil { - return ret.(*[]byte) - } else { - ret := make([]byte, 0, option.DefaultEncoderBufferSize) - return &ret - } -} - -func NewStack() *Stack { - ret := stackPool.Get().(*Stack) - ret.sp = 0 - return ret -} - -func ResetStack(p *Stack) { - rt.MemclrNoHeapPointers(unsafe.Pointer(p), StackSize) -} - -func (s *Stack) Top() *State { - return (*State)(rt.Add(unsafe.Pointer(&s.sb[0]), s.sp)) -} - -func (s *Stack) Cur() *State { - return (*State)(rt.Add(unsafe.Pointer(&s.sb[0]), s.sp - uintptr(StateSize))) -} - -const _MaxStackSP = uintptr(MaxStack * StateSize) - -func (s *Stack) Push(v State) bool { - if uintptr(s.sp) >= _MaxStackSP { - return false - } - st := s.Top() - *st = v - s.sp += uintptr(StateSize) - return true -} - -func (s *Stack) Pop() State { - s.sp -= uintptr(StateSize) - st := s.Top() - ret := *st - *st = State{} - return ret -} - -func (s *Stack) Load() (int, uint64, unsafe.Pointer, unsafe.Pointer) { - st := s.Cur() - return st.x, st.f, st.p, st.q -} - -func (s *Stack) Save(x int, f uint64, p unsafe.Pointer, q unsafe.Pointer) bool { - return s.Push(State{x: x, f:f, p: p, q: q}) -} - -func (s *Stack) Drop() (int, uint64, unsafe.Pointer, unsafe.Pointer) { - st := s.Pop() - return st.x, st.f, st.p, st.q -} - -func NewBuffer() *bytes.Buffer { - if ret := bufferPool.Get(); ret != nil { - return ret.(*bytes.Buffer) - } else { - return bytes.NewBuffer(make([]byte, 0, option.DefaultEncoderBufferSize)) - } -} - -func FreeBytes(p *[]byte) { - if rt.CanSizeResue(cap(*p)) { - (*p) = (*p)[:0] - bytesPool.Put(p) - } -} - -func FreeStack(p *Stack) { - p.sp = 0 - stackPool.Put(p) -} - -func FreeBuffer(p *bytes.Buffer) { - if rt.CanSizeResue(cap(p.Bytes())) { - p.Reset() - bufferPool.Put(p) - } -} - -var ( - ArgPtrs = []bool{true, true, true, false} - LocalPtrs = []bool{} - - ArgPtrs_generic = []bool{true} - 
LocalPtrs_generic = []bool{} -)
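The Stack deleted above is how the encoder saves and restores its traversal state (x, f, p, q): Save pushes a State and refuses once sp would exceed the MaxStack bound, which callers surface as the "value nesting too deep" error, and Drop pops and zeroes the slot so stale pointers do not pin memory. A minimal sketch of the same bounded push/pop idea in plain Go; the names and the fixed-size array are illustrative, not sonic's exact layout:

package main

import (
	"errors"
	"fmt"
	"unsafe"
)

// state mirrors the shape of vars.State: a counter, a flag word and two pointers.
type state struct {
	x int
	f uint64
	p unsafe.Pointer
	q unsafe.Pointer
}

const maxStack = 4096 // same bound as vars.MaxStack

type stack struct {
	sp int
	sb [maxStack]state
}

var errTooDeep = errors.New("value nesting too deep")

// push appends a frame, refusing to grow past the fixed bound.
func (s *stack) push(v state) error {
	if s.sp >= maxStack {
		return errTooDeep
	}
	s.sb[s.sp] = v
	s.sp++
	return nil
}

// pop removes the top frame and clears the slot so the GC is not kept
// from reclaiming whatever p and q pointed at.
func (s *stack) pop() state {
	s.sp--
	v := s.sb[s.sp]
	s.sb[s.sp] = state{}
	return v
}

func main() {
	var s stack
	_ = s.push(state{x: 1})
	fmt.Println(s.pop().x) // prints 1
}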
\ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/vars/types.go b/vendor/github.com/bytedance/sonic/internal/encoder/vars/types.go deleted file mode 100644 index ef8497807..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/vars/types.go +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package vars - -import ( - `encoding` - `encoding/json` - `reflect` -) - -var ( - ByteType = reflect.TypeOf(byte(0)) - JsonNumberType = reflect.TypeOf(json.Number("")) - JsonUnsupportedValueType = reflect.TypeOf(new(json.UnsupportedValueError)) -) - -var ( - ErrorType = reflect.TypeOf((*error)(nil)).Elem() - JsonMarshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem() - EncodingTextMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem() -) - -func IsSimpleByte(vt reflect.Type) bool { - if vt.Kind() != ByteType.Kind() { - return false - } else { - return !isEitherMarshaler(vt) && !isEitherMarshaler(reflect.PtrTo(vt)) - } -} - -func isEitherMarshaler(vt reflect.Type) bool { - return vt.Implements(JsonMarshalerType) || vt.Implements(EncodingTextMarshalerType) -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/vm/stbus.go b/vendor/github.com/bytedance/sonic/internal/encoder/vm/stbus.go deleted file mode 100644 index 21b476c3c..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/vm/stbus.go +++ /dev/null @@ -1,45 +0,0 @@ -/** - * Copyright 2024 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package vm - -import ( - "unsafe" - _ "unsafe" - - "github.com/bytedance/sonic/internal/encoder/alg" - "github.com/bytedance/sonic/internal/encoder/ir" - "github.com/bytedance/sonic/internal/encoder/vars" - "github.com/bytedance/sonic/internal/rt" -) - -func EncodeTypedPointer(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *vars.Stack, fv uint64) error { - if vt == nil { - return alg.EncodeNil(buf) - } else if pp, err := vars.FindOrCompile(vt, (fv&(1<<alg.BitPointerValue)) != 0, compiler); err != nil { - return err - } else if vt.Indirect() { - return Execute(buf, *vp, sb, fv, pp.(*ir.Program)) - } else { - return Execute(buf, unsafe.Pointer(vp), sb, fv, pp.(*ir.Program)) - } -} - -var compiler func(*rt.GoType, ... interface{}) (interface{}, error) - -func SetCompiler(c func(*rt.GoType, ... 
interface{}) (interface{}, error)) { - compiler = c -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/vm/vm.go b/vendor/github.com/bytedance/sonic/internal/encoder/vm/vm.go deleted file mode 100644 index b75ba807a..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/vm/vm.go +++ /dev/null @@ -1,374 +0,0 @@ -// Copyright 2024 CloudWeGo Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package vm - -import ( - "encoding" - "encoding/json" - "fmt" - "math" - "reflect" - "unsafe" - - "github.com/bytedance/sonic/internal/encoder/alg" - "github.com/bytedance/sonic/internal/encoder/ir" - "github.com/bytedance/sonic/internal/encoder/vars" - "github.com/bytedance/sonic/internal/rt" - "github.com/bytedance/sonic/internal/base64" -) - -const ( - _S_cond = iota - _S_init -) - -var ( - _T_json_Marshaler = rt.UnpackType(vars.JsonMarshalerType) - _T_encoding_TextMarshaler = rt.UnpackType(vars.EncodingTextMarshalerType) -) - -func print_instr(buf []byte, pc int, op ir.Op, ins *ir.Instr, p unsafe.Pointer) { - if len(buf) > 20 { - fmt.Println(string(buf[len(buf)-20:])) - } else { - fmt.Println(string(buf)) - } - fmt.Printf("pc %04d, op %v, ins %#v, ptr: %x\n", pc, op, ins.Disassemble(), p) -} - -func Execute(b *[]byte, p unsafe.Pointer, s *vars.Stack, flags uint64, prog *ir.Program) (error) { - pl := len(*prog) - if pl <= 0 { - return nil - } - - var buf = *b - var x int - var q unsafe.Pointer - var f uint64 - - var pro = &(*prog)[0] - for pc := 0; pc < pl; { - ins := (*ir.Instr)(rt.Add(unsafe.Pointer(pro), ir.OpSize*uintptr(pc))) - pc++ - op := ins.Op() - - switch op { - case ir.OP_goto: - pc = ins.Vi() - continue - case ir.OP_byte: - v := ins.Byte() - buf = append(buf, v) - case ir.OP_text: - v := ins.Vs() - buf = append(buf, v...) - case ir.OP_deref: - p = *(*unsafe.Pointer)(p) - case ir.OP_index: - p = rt.Add(p, uintptr(ins.I64())) - case ir.OP_load: - // NOTICE: load CANNOT change f! 
- x, _, p, q = s.Load() - case ir.OP_save: - if !s.Save(x, f, p, q) { - return vars.ERR_too_deep - } - case ir.OP_drop: - x, f, p, q = s.Drop() - case ir.OP_drop_2: - s.Drop() - x, f, p, q = s.Drop() - case ir.OP_recurse: - vt, pv := ins.Vp2() - f := flags - if pv { - f |= (1 << alg.BitPointerValue) - } - *b = buf - if vt.Indirect() { - if err := EncodeTypedPointer(b, vt, (*unsafe.Pointer)(rt.NoEscape(unsafe.Pointer(&p))), s, f); err != nil { - return err - } - } else { - vp := (*unsafe.Pointer)(p) - if err := EncodeTypedPointer(b, vt, vp, s, f); err != nil { - return err - } - } - buf = *b - case ir.OP_is_nil: - if is_nil(p) { - pc = ins.Vi() - continue - } - case ir.OP_is_nil_p1: - if (*rt.GoEface)(p).Value == nil { - pc = ins.Vi() - continue - } - case ir.OP_null: - buf = append(buf, 'n', 'u', 'l', 'l') - case ir.OP_str: - v := *(*string)(p) - buf = alg.Quote(buf, v, false) - case ir.OP_bool: - if *(*bool)(p) { - buf = append(buf, 't', 'r', 'u', 'e') - } else { - buf = append(buf, 'f', 'a', 'l', 's', 'e') - } - case ir.OP_i8: - v := *(*int8)(p) - buf = alg.I64toa(buf, int64(v)) - case ir.OP_i16: - v := *(*int16)(p) - buf = alg.I64toa(buf, int64(v)) - case ir.OP_i32: - v := *(*int32)(p) - buf = alg.I64toa(buf, int64(v)) - case ir.OP_i64: - v := *(*int64)(p) - buf = alg.I64toa(buf, int64(v)) - case ir.OP_u8: - v := *(*uint8)(p) - buf = alg.U64toa(buf, uint64(v)) - case ir.OP_u16: - v := *(*uint16)(p) - buf = alg.U64toa(buf, uint64(v)) - case ir.OP_u32: - v := *(*uint32)(p) - buf = alg.U64toa(buf, uint64(v)) - case ir.OP_u64: - v := *(*uint64)(p) - buf = alg.U64toa(buf, uint64(v)) - case ir.OP_f32: - v := *(*float32)(p) - if math.IsNaN(float64(v)) || math.IsInf(float64(v), 0) { - if flags&(1<<alg.BitEncodeNullForInfOrNan) != 0 { - buf = append(buf, 'n', 'u', 'l', 'l') - continue - } - return vars.ERR_nan_or_infinite - } - buf = alg.F32toa(buf, v) - case ir.OP_f64: - v := *(*float64)(p) - if math.IsNaN(v) || math.IsInf(v, 0) { - if flags&(1<<alg.BitEncodeNullForInfOrNan) != 0 { - buf = append(buf, 'n', 'u', 'l', 'l') - continue - } - return vars.ERR_nan_or_infinite - } - buf = alg.F64toa(buf, v) - case ir.OP_bin: - v := *(*[]byte)(p) - buf = base64.EncodeBase64(buf, v) - case ir.OP_quote: - v := *(*string)(p) - buf = alg.Quote(buf, v, true) - case ir.OP_number: - v := *(*json.Number)(p) - if v == "" { - buf = append(buf, '0') - } else if !rt.IsValidNumber(string(v)) { - return vars.Error_number(v) - } else { - buf = append(buf, v...) 
- } - case ir.OP_eface: - *b = buf - if err := EncodeTypedPointer(b, *(**rt.GoType)(p), (*unsafe.Pointer)(rt.Add(p, 8)), s, flags); err != nil { - return err - } - buf = *b - case ir.OP_iface: - *b = buf - if err := EncodeTypedPointer(b, (*(**rt.GoItab)(p)).Vt, (*unsafe.Pointer)(rt.Add(p, 8)), s, flags); err != nil { - return err - } - buf = *b - case ir.OP_is_zero_map: - v := *(**rt.GoMap)(p) - if v == nil || v.Count == 0 { - pc = ins.Vi() - continue - } - case ir.OP_map_iter: - v := *(**rt.GoMap)(p) - vt := ins.Vr() - it, err := alg.IteratorStart(rt.MapType(vt), v, flags) - if err != nil { - return err - } - q = unsafe.Pointer(it) - case ir.OP_map_stop: - it := (*alg.MapIterator)(q) - alg.IteratorStop(it) - q = nil - case ir.OP_map_value_next: - it := (*alg.MapIterator)(q) - p = it.It.V - alg.IteratorNext(it) - case ir.OP_map_check_key: - it := (*alg.MapIterator)(q) - if it.It.K == nil { - pc = ins.Vi() - continue - } - p = it.It.K - case ir.OP_marshal_text: - vt, itab := ins.Vtab() - var it rt.GoIface - switch vt.Kind() { - case reflect.Interface : - if is_nil(p) { - buf = append(buf, 'n', 'u', 'l', 'l') - continue - } - it = rt.AssertI2I(_T_encoding_TextMarshaler, *(*rt.GoIface)(p)) - case reflect.Ptr, reflect.Map : it = convT2I(p, true, itab) - default : it = convT2I(p, !vt.Indirect(), itab) - } - if err := alg.EncodeTextMarshaler(&buf, *(*encoding.TextMarshaler)(unsafe.Pointer(&it)), (flags)); err != nil { - return err - } - case ir.OP_marshal_text_p: - _, itab := ins.Vtab() - it := convT2I(p, false, itab) - if err := alg.EncodeTextMarshaler(&buf, *(*encoding.TextMarshaler)(unsafe.Pointer(&it)), (flags)); err != nil { - return err - } - case ir.OP_map_write_key: - if has_opts(flags, alg.BitSortMapKeys) { - v := *(*string)(p) - buf = alg.Quote(buf, v, false) - pc = ins.Vi() - continue - } - case ir.OP_slice_len: - v := (*rt.GoSlice)(p) - x = v.Len - p = v.Ptr - //TODO: why? 
- f |= 1<<_S_init - case ir.OP_slice_next: - if x == 0 { - pc = ins.Vi() - continue - } - x-- - if has_opts(f, _S_init) { - f &= ^uint64(1 << _S_init) - } else { - p = rt.Add(p, uintptr(ins.Vlen())) - } - case ir.OP_cond_set: - f |= 1<<_S_cond - case ir.OP_cond_testc: - if has_opts(f, _S_cond) { - f &= ^uint64(1 << _S_cond) - pc = ins.Vi() - continue - } - case ir.OP_is_zero_1: - if *(*uint8)(p) == 0 { - pc = ins.Vi() - continue - } - case ir.OP_is_zero_2: - if *(*uint16)(p) == 0 { - pc = ins.Vi() - continue - } - case ir.OP_is_zero_4: - if *(*uint32)(p) == 0 { - pc = ins.Vi() - continue - } - case ir.OP_is_zero_8: - if *(*uint64)(p) == 0 { - pc = ins.Vi() - continue - } - case ir.OP_empty_arr: - if has_opts(flags, alg.BitNoNullSliceOrMap) { - buf = append(buf, '[', ']') - } else { - buf = append(buf, 'n', 'u', 'l', 'l') - } - case ir.OP_empty_obj: - if has_opts(flags, alg.BitNoNullSliceOrMap) { - buf = append(buf, '{', '}') - } else { - buf = append(buf, 'n', 'u', 'l', 'l') - } - case ir.OP_marshal: - vt, itab := ins.Vtab() - var it rt.GoIface - switch vt.Kind() { - case reflect.Interface : - if is_nil(p) { - buf = append(buf, 'n', 'u', 'l', 'l') - continue - } - it = rt.AssertI2I(_T_json_Marshaler, *(*rt.GoIface)(p)) - case reflect.Ptr, reflect.Map : it = convT2I(p, true, itab) - default : it = convT2I(p, !vt.Indirect(), itab) - } - if err := alg.EncodeJsonMarshaler(&buf, *(*json.Marshaler)(unsafe.Pointer(&it)), (flags)); err != nil { - return err - } - case ir.OP_marshal_p: - _, itab := ins.Vtab() - it := convT2I(p, false, itab) - if err := alg.EncodeJsonMarshaler(&buf, *(*json.Marshaler)(unsafe.Pointer(&it)), (flags)); err != nil { - return err - } - default: - panic(fmt.Sprintf("not implement %s at %d", ins.Op().String(), pc)) - } - } - - *b = buf - return nil -} - -// func to_buf(w unsafe.Pointer, l int, c int) []byte { -// return rt.BytesFrom(unsafe.Pointer(uintptr(w)-uintptr(l)), l, c) -// } - -// func from_buf(buf []byte) (unsafe.Pointer, int, int) { -// return rt.IndexByte(buf, len(buf)), len(buf), cap(buf) -// } - -func has_opts(opts uint64, bit int) bool { - return opts & (1<<bit) != 0 -} - -func is_nil(p unsafe.Pointer) bool { - return *(*unsafe.Pointer)(p) == nil -} - -func convT2I(ptr unsafe.Pointer, deref bool, itab *rt.GoItab) (rt.GoIface) { - if deref { - ptr = *(*unsafe.Pointer)(ptr) - } - return rt.GoIface{ - Itab: itab, - Value: ptr, - } -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/x86/asm_stubs_amd64_go117.go b/vendor/github.com/bytedance/sonic/internal/encoder/x86/asm_stubs_amd64_go117.go deleted file mode 100644 index eec9f6c58..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/x86/asm_stubs_amd64_go117.go +++ /dev/null @@ -1,53 +0,0 @@ -//go:build go1.17 && !go1.21 -// +build go1.17,!go1.21 - -// Copyright 2023 CloudWeGo Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package x86 - -import ( - "strconv" - "unsafe" - - "github.com/bytedance/sonic/internal/jit" - "github.com/bytedance/sonic/internal/rt" - "github.com/twitchyliquid64/golang-asm/obj" - "github.com/twitchyliquid64/golang-asm/obj/x86" -) - -var ( - _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&rt.RuntimeWriteBarrier)))) - - _F_gcWriteBarrierAX = jit.Func(rt.GcWriteBarrierAX) -) - -func (self *Assembler) WritePtr(i int, ptr obj.Addr, rec obj.Addr) { - if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX { - panic("rec contains AX!") - } - self.Emit("MOVQ", _V_writeBarrier, _BX) - self.Emit("CMPL", jit.Ptr(_BX, 0), jit.Imm(0)) - self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.xsave(_DI) - self.Emit("MOVQ", ptr, _AX) - self.Emit("LEAQ", rec, _DI) - self.Emit("MOVQ", _F_gcWriteBarrierAX, _BX) // MOVQ ${fn}, AX - self.Rjmp("CALL", _BX) - self.xload(_DI) - self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Emit("MOVQ", ptr, rec) - self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}") -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/x86/asm_stubs_amd64_go121.go b/vendor/github.com/bytedance/sonic/internal/encoder/x86/asm_stubs_amd64_go121.go deleted file mode 100644 index 3d70021e4..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/x86/asm_stubs_amd64_go121.go +++ /dev/null @@ -1,52 +0,0 @@ -//go:build go1.21 && !go1.24 -// +build go1.21,!go1.24 - -// Copyright 2023 CloudWeGo Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package x86 - -import ( - "strconv" - "unsafe" - - "github.com/bytedance/sonic/internal/jit" - "github.com/bytedance/sonic/internal/rt" - "github.com/twitchyliquid64/golang-asm/obj" - "github.com/twitchyliquid64/golang-asm/obj/x86" -) - -var ( - _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&rt.RuntimeWriteBarrier)))) - - _F_gcWriteBarrier2 = jit.Func(rt.GcWriteBarrier2) -) - -func (self *Assembler) WritePtr(i int, ptr obj.Addr, old obj.Addr) { - if old.Reg == x86.REG_AX || old.Index == x86.REG_AX { - panic("rec contains AX!") - } - self.Emit("MOVQ", _V_writeBarrier, _BX) - self.Emit("CMPL", jit.Ptr(_BX, 0), jit.Imm(0)) - self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.xsave(_SP_q) - self.Emit("MOVQ", _F_gcWriteBarrier2, _BX) // MOVQ ${fn}, AX - self.Rjmp("CALL", _BX) - self.Emit("MOVQ", ptr, jit.Ptr(_SP_q, 0)) - self.Emit("MOVQ", old, _AX) - self.Emit("MOVQ", _AX, jit.Ptr(_SP_q, 8)) - self.xload(_SP_q) - self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Emit("MOVQ", ptr, old) -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/x86/assembler_regabi_amd64.go b/vendor/github.com/bytedance/sonic/internal/encoder/x86/assembler_regabi_amd64.go deleted file mode 100644 index c0912fb81..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/x86/assembler_regabi_amd64.go +++ /dev/null @@ -1,1195 +0,0 @@ -//go:build go1.17 && !go1.24 -// +build go1.17,!go1.24 - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package x86 - -import ( - "fmt" - "reflect" - "strconv" - "unsafe" - - "github.com/bytedance/sonic/internal/cpu" - "github.com/bytedance/sonic/internal/encoder/alg" - "github.com/bytedance/sonic/internal/encoder/ir" - "github.com/bytedance/sonic/internal/encoder/vars" - "github.com/bytedance/sonic/internal/jit" - "github.com/bytedance/sonic/internal/native/types" - "github.com/twitchyliquid64/golang-asm/obj" - "github.com/twitchyliquid64/golang-asm/obj/x86" - - "github.com/bytedance/sonic/internal/native" - "github.com/bytedance/sonic/internal/rt" -) - -/** Register Allocations - * - * State Registers: - * - * %rbx : stack base - * %rdi : result pointer - * %rsi : result length - * %rdx : result capacity - * %r12 : sp->p - * %r13 : sp->q - * %r14 : sp->x - * %r15 : sp->f - * - * Error Registers: - * - * %r10 : error type register - * %r11 : error pointer register - */ - -/** Function Prototype & Stack Map - * - * func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error) - * - * buf : (FP) - * p : 8(FP) - * sb : 16(FP) - * fv : 24(FP) - * err.vt : 32(FP) - * err.vp : 40(FP) - */ - -const ( - _S_cond = iota - _S_init -) - -const ( - _FP_args = 32 // 32 bytes for spill registers of arguments - _FP_fargs = 40 // 40 bytes for passing arguments to other Go functions - _FP_saves = 64 // 64 bytes for saving the registers before CALL instructions - _FP_locals = 24 // 24 bytes for local variables -) - -const ( - _FP_loffs = _FP_fargs + _FP_saves - FP_offs = _FP_loffs + _FP_locals - // _FP_offs = _FP_loffs + _FP_locals + _FP_debug - _FP_size = FP_offs + 8 // 8 bytes for the parent frame pointer - _FP_base = _FP_size + 8 // 8 bytes for the return address -) - -const ( - _FM_exp32 = 0x7f800000 - _FM_exp64 = 0x7ff0000000000000 -) - -const ( - _IM_null = 0x6c6c756e // 'null' - _IM_true = 0x65757274 // 'true' - _IM_fals = 0x736c6166 // 'fals' ('false' without the 'e') - _IM_open = 0x00225c22 // '"\"∅' - _IM_array = 0x5d5b // '[]' - _IM_object = 0x7d7b // '{}' - _IM_mulv = -0x5555555555555555 -) - -const ( - _LB_more_space = "_more_space" - _LB_more_space_return = "_more_space_return_" -) - -const ( - _LB_error = "_error" - _LB_error_too_deep = "_error_too_deep" - _LB_error_invalid_number = "_error_invalid_number" - _LB_error_nan_or_infinite = "_error_nan_or_infinite" - _LB_panic = "_panic" -) - -var ( - _AX = jit.Reg("AX") - _BX = jit.Reg("BX") - _CX = jit.Reg("CX") - _DX = jit.Reg("DX") - _DI = jit.Reg("DI") - _SI = jit.Reg("SI") - _BP = jit.Reg("BP") - _SP = jit.Reg("SP") - _R8 = jit.Reg("R8") - _R9 = jit.Reg("R9") -) - -var ( - _X0 = jit.Reg("X0") - _Y0 = jit.Reg("Y0") -) - -var ( - _ST = jit.Reg("R15") // can't use R14 since it's always scratched by Go... 
- _RP = jit.Reg("DI") - _RL = jit.Reg("SI") - _RC = jit.Reg("DX") -) - -var ( - _LR = jit.Reg("R9") - _ET = jit.Reg("AX") - _EP = jit.Reg("BX") -) - -var ( - _SP_p = jit.Reg("R10") // saved on BX when call_c - _SP_q = jit.Reg("R11") // saved on BP when call_c - _SP_x = jit.Reg("R12") - _SP_f = jit.Reg("R13") -) - -var ( - _ARG_rb = jit.Ptr(_SP, _FP_base) - _ARG_vp = jit.Ptr(_SP, _FP_base+8) - _ARG_sb = jit.Ptr(_SP, _FP_base+16) - _ARG_fv = jit.Ptr(_SP, _FP_base+24) -) - -var ( - _RET_et = _ET - _RET_ep = _EP -) - -var ( - _VAR_sp = jit.Ptr(_SP, _FP_fargs+_FP_saves) - _VAR_dn = jit.Ptr(_SP, _FP_fargs+_FP_saves+8) - _VAR_vp = jit.Ptr(_SP, _FP_fargs+_FP_saves+16) -) - -var ( - _REG_ffi = []obj.Addr{_RP, _RL, _RC, _SP_q} - _REG_b64 = []obj.Addr{_SP_p, _SP_q} - - _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC} - _REG_ms = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR} - _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL} -) - -type Assembler struct { - Name string - jit.BaseAssembler - p ir.Program - x int -} - -func NewAssembler(p ir.Program) *Assembler { - return new(Assembler).Init(p) -} - -/** Assembler Interface **/ - -func (self *Assembler) Load() vars.Encoder { - return ptoenc(self.BaseAssembler.Load("encode_"+self.Name, _FP_size, _FP_args, vars.ArgPtrs, vars.LocalPtrs)) -} - -func (self *Assembler) Init(p ir.Program) *Assembler { - self.p = p - self.BaseAssembler.Init(self.compile) - return self -} - -func (self *Assembler) compile() { - self.prologue() - self.instrs() - self.epilogue() - self.builtins() -} - -/** Assembler Stages **/ - -var _OpFuncTab = [256]func(*Assembler, *ir.Instr){ - ir.OP_null: (*Assembler)._asm_OP_null, - ir.OP_empty_arr: (*Assembler)._asm_OP_empty_arr, - ir.OP_empty_obj: (*Assembler)._asm_OP_empty_obj, - ir.OP_bool: (*Assembler)._asm_OP_bool, - ir.OP_i8: (*Assembler)._asm_OP_i8, - ir.OP_i16: (*Assembler)._asm_OP_i16, - ir.OP_i32: (*Assembler)._asm_OP_i32, - ir.OP_i64: (*Assembler)._asm_OP_i64, - ir.OP_u8: (*Assembler)._asm_OP_u8, - ir.OP_u16: (*Assembler)._asm_OP_u16, - ir.OP_u32: (*Assembler)._asm_OP_u32, - ir.OP_u64: (*Assembler)._asm_OP_u64, - ir.OP_f32: (*Assembler)._asm_OP_f32, - ir.OP_f64: (*Assembler)._asm_OP_f64, - ir.OP_str: (*Assembler)._asm_OP_str, - ir.OP_bin: (*Assembler)._asm_OP_bin, - ir.OP_quote: (*Assembler)._asm_OP_quote, - ir.OP_number: (*Assembler)._asm_OP_number, - ir.OP_eface: (*Assembler)._asm_OP_eface, - ir.OP_iface: (*Assembler)._asm_OP_iface, - ir.OP_byte: (*Assembler)._asm_OP_byte, - ir.OP_text: (*Assembler)._asm_OP_text, - ir.OP_deref: (*Assembler)._asm_OP_deref, - ir.OP_index: (*Assembler)._asm_OP_index, - ir.OP_load: (*Assembler)._asm_OP_load, - ir.OP_save: (*Assembler)._asm_OP_save, - ir.OP_drop: (*Assembler)._asm_OP_drop, - ir.OP_drop_2: (*Assembler)._asm_OP_drop_2, - ir.OP_recurse: (*Assembler)._asm_OP_recurse, - ir.OP_is_nil: (*Assembler)._asm_OP_is_nil, - ir.OP_is_nil_p1: (*Assembler)._asm_OP_is_nil_p1, - ir.OP_is_zero_1: (*Assembler)._asm_OP_is_zero_1, - ir.OP_is_zero_2: (*Assembler)._asm_OP_is_zero_2, - ir.OP_is_zero_4: (*Assembler)._asm_OP_is_zero_4, - ir.OP_is_zero_8: (*Assembler)._asm_OP_is_zero_8, - ir.OP_is_zero_map: (*Assembler)._asm_OP_is_zero_map, - ir.OP_goto: (*Assembler)._asm_OP_goto, - ir.OP_map_iter: (*Assembler)._asm_OP_map_iter, - ir.OP_map_stop: (*Assembler)._asm_OP_map_stop, - ir.OP_map_check_key: (*Assembler)._asm_OP_map_check_key, - ir.OP_map_write_key: (*Assembler)._asm_OP_map_write_key, - ir.OP_map_value_next: (*Assembler)._asm_OP_map_value_next, - ir.OP_slice_len: 
(*Assembler)._asm_OP_slice_len, - ir.OP_slice_next: (*Assembler)._asm_OP_slice_next, - ir.OP_marshal: (*Assembler)._asm_OP_marshal, - ir.OP_marshal_p: (*Assembler)._asm_OP_marshal_p, - ir.OP_marshal_text: (*Assembler)._asm_OP_marshal_text, - ir.OP_marshal_text_p: (*Assembler)._asm_OP_marshal_text_p, - ir.OP_cond_set: (*Assembler)._asm_OP_cond_set, - ir.OP_cond_testc: (*Assembler)._asm_OP_cond_testc, -} - -func (self *Assembler) instr(v *ir.Instr) { - if fn := _OpFuncTab[v.Op()]; fn != nil { - fn(self, v) - } else { - panic(fmt.Sprintf("invalid opcode: %d", v.Op())) - } -} - -func (self *Assembler) instrs() { - for i, v := range self.p { - self.Mark(i) - self.instr(&v) - self.debug_instr(i, &v) - } -} - -func (self *Assembler) builtins() { - self.more_space() - self.error_too_deep() - self.error_invalid_number() - self.error_nan_or_infinite() - self.go_panic() -} - -func (self *Assembler) epilogue() { - self.Mark(len(self.p)) - self.Emit("XORL", _ET, _ET) - self.Emit("XORL", _EP, _EP) - self.Link(_LB_error) - self.Emit("MOVQ", _ARG_rb, _CX) // MOVQ rb<>+0(FP), CX - self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8)) // MOVQ RL, 8(CX) - self.Emit("MOVQ", jit.Imm(0), _ARG_rb) // MOVQ AX, rb<>+0(FP) - self.Emit("MOVQ", jit.Imm(0), _ARG_vp) // MOVQ BX, vp<>+8(FP) - self.Emit("MOVQ", jit.Imm(0), _ARG_sb) // MOVQ CX, sb<>+16(FP) - self.Emit("MOVQ", jit.Ptr(_SP, FP_offs), _BP) // MOVQ _FP_offs(SP), BP - self.Emit("ADDQ", jit.Imm(_FP_size), _SP) // ADDQ $_FP_size, SP - self.Emit("RET") // RET -} - -func (self *Assembler) prologue() { - self.Emit("SUBQ", jit.Imm(_FP_size), _SP) // SUBQ $_FP_size, SP - self.Emit("MOVQ", _BP, jit.Ptr(_SP, FP_offs)) // MOVQ BP, _FP_offs(SP) - self.Emit("LEAQ", jit.Ptr(_SP, FP_offs), _BP) // LEAQ _FP_offs(SP), BP - self.Emit("MOVQ", _AX, _ARG_rb) // MOVQ AX, rb<>+0(FP) - self.Emit("MOVQ", _BX, _ARG_vp) // MOVQ BX, vp<>+8(FP) - self.Emit("MOVQ", _CX, _ARG_sb) // MOVQ CX, sb<>+16(FP) - self.Emit("MOVQ", _DI, _ARG_fv) // MOVQ DI, rb<>+24(FP) - self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP) // MOVQ (AX) , DI - self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL) // MOVQ 8(AX) , SI - self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), DX - self.Emit("MOVQ", _BX, _SP_p) // MOVQ BX, R10 - self.Emit("MOVQ", _CX, _ST) // MOVQ CX, R8 - self.Emit("XORL", _SP_x, _SP_x) // XORL R10, R12 - self.Emit("XORL", _SP_f, _SP_f) // XORL R11, R13 - self.Emit("XORL", _SP_q, _SP_q) // XORL R13, R11 -} - -/** Assembler Inline Functions **/ - -func (self *Assembler) xsave(reg ...obj.Addr) { - for i, v := range reg { - if i > _FP_saves/8-1 { - panic("too many registers to save") - } else { - self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs+int64(i)*8)) - } - } -} - -func (self *Assembler) xload(reg ...obj.Addr) { - for i, v := range reg { - if i > _FP_saves/8-1 { - panic("too many registers to load") - } else { - self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs+int64(i)*8), v) - } - } -} - -func (self *Assembler) rbuf_di() { - if _RP.Reg != x86.REG_DI { - panic("register allocation messed up: RP != DI") - } else { - self.Emit("ADDQ", _RL, _RP) - } -} - -func (self *Assembler) store_int(nd int, fn obj.Addr, ins string) { - self.check_size(nd) - self.save_c() // SAVE $C_regs - self.rbuf_di() // MOVQ RP, DI - self.Emit(ins, jit.Ptr(_SP_p, 0), _SI) // $ins (SP.p), SI - self.call_c(fn) // CALL_C $fn - self.Emit("ADDQ", _AX, _RL) // ADDQ AX, RL -} - -func (self *Assembler) store_str(s string) { - i := 0 - m := rt.Str2Mem(s) - - /* 8-byte stores */ - for i <= len(m)-8 { - self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX) // MOVQ $s[i:], AX - 
self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i))) // MOVQ AX, i(RP)(RL) - i += 8 - } - - /* 4-byte stores */ - if i <= len(m)-4 { - self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVL $s[i:], i(RP)(RL) - i += 4 - } - - /* 2-byte stores */ - if i <= len(m)-2 { - self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVW $s[i:], i(RP)(RL) - i += 2 - } - - /* last byte */ - if i < len(m) { - self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i))) // MOVB $s[i:], i(RP)(RL) - } -} - -func (self *Assembler) check_size(n int) { - self.check_size_rl(jit.Ptr(_RL, int64(n))) -} - -func (self *Assembler) check_size_r(r obj.Addr, d int) { - self.check_size_rl(jit.Sib(_RL, r, 1, int64(d))) -} - -func (self *Assembler) check_size_rl(v obj.Addr) { - idx := self.x - key := _LB_more_space_return + strconv.Itoa(idx) - - /* the following code relies on LR == R9 to work */ - if _LR.Reg != x86.REG_R9 { - panic("register allocation messed up: LR != R9") - } - - /* check for buffer capacity */ - self.x++ - self.Emit("LEAQ", v, _AX) // LEAQ $v, AX - self.Emit("CMPQ", _AX, _RC) // CMPQ AX, RC - self.Sjmp("JBE", key) // JBE _more_space_return_{n} - self.slice_grow_ax(key) // GROW $key - self.Link(key) // _more_space_return_{n}: -} - -func (self *Assembler) slice_grow_ax(ret string) { - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ ?(PC), R9 - self.Sref(ret, 4) // .... &ret - self.Sjmp("JMP", _LB_more_space) // JMP _more_space -} - -/** State Stack Helpers **/ - - - -func (self *Assembler) save_state() { - self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX) // MOVQ (ST), CX - self.Emit("LEAQ", jit.Ptr(_CX, vars.StateSize), _R9) // LEAQ vars.StateSize(CX), R9 - self.Emit("CMPQ", _R9, jit.Imm(vars.StackLimit)) // CMPQ R9, $vars.StackLimit - self.Sjmp("JAE", _LB_error_too_deep) // JA _error_too_deep - self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8)) // MOVQ SP.x, 8(ST)(CX) - self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16)) // MOVQ SP.f, 16(ST)(CX) - self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24)) // MOVQ SP.p, 24(ST)(CX) - self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32)) // MOVQ SP.q, 32(ST)(CX) - self.Emit("MOVQ", _R9, jit.Ptr(_ST, 0)) // MOVQ R9, (ST) -} - -func (self *Assembler) drop_state(decr int64) { - self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX - self.Emit("SUBQ", jit.Imm(decr), _AX) // SUBQ $decr, AX - self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST) - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _SP_x) // MOVQ 8(ST)(AX), SP.x - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 16), _SP_f) // MOVQ 16(ST)(AX), SP.f - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 24), _SP_p) // MOVQ 24(ST)(AX), SP.p - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 32), _SP_q) // MOVQ 32(ST)(AX), SP.q - self.Emit("PXOR", _X0, _X0) // PXOR X0, X0 - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX) - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24)) // MOVOU X0, 24(ST)(AX) -} - -/** Buffer Helpers **/ - -func (self *Assembler) add_char(ch byte) { - self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVB $ch, (RP)(RL) - self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL -} - -func (self *Assembler) add_long(ch uint32, n int64) { - self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVL $ch, (RP)(RL) - self.Emit("ADDQ", jit.Imm(n), _RL) // ADDQ $n, RL -} - -func (self *Assembler) add_text(ss string) { - self.store_str(ss) // TEXT $ss - self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL) // ADDQ ${len(ss)}, RL -} - -// 
get *buf at AX -func (self *Assembler) prep_buffer_AX() { - self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX - self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX) -} - -func (self *Assembler) save_buffer() { - self.Emit("MOVQ", _ARG_rb, _CX) // MOVQ rb<>+0(FP), CX - self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0)) // MOVQ RP, (CX) - self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8)) // MOVQ RL, 8(CX) - self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16)) // MOVQ RC, 16(CX) -} - -// get *buf at AX -func (self *Assembler) load_buffer_AX() { - self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX - self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP) // MOVQ (AX), RP - self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL) // MOVQ 8(AX), RL - self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), RC -} - -/** Function Interface Helpers **/ - -func (self *Assembler) call(pc obj.Addr) { - self.Emit("MOVQ", pc, _LR) // MOVQ $pc, AX - self.Rjmp("CALL", _LR) // CALL AX -} - -func (self *Assembler) save_c() { - self.xsave(_REG_ffi...) // SAVE $REG_ffi -} - -func (self *Assembler) call_b64(pc obj.Addr) { - self.xsave(_REG_b64...) // SAVE $REG_all - self.call(pc) // CALL $pc - self.xload(_REG_b64...) // LOAD $REG_ffi -} - -func (self *Assembler) call_c(pc obj.Addr) { - self.Emit("XCHGQ", _SP_p, _BX) - self.call(pc) // CALL $pc - self.xload(_REG_ffi...) // LOAD $REG_ffi - self.Emit("XCHGQ", _SP_p, _BX) -} - -func (self *Assembler) call_go(pc obj.Addr) { - self.xsave(_REG_all...) // SAVE $REG_all - self.call(pc) // CALL $pc - self.xload(_REG_all...) // LOAD $REG_all -} - -func (self *Assembler) call_more_space(pc obj.Addr) { - self.xsave(_REG_ms...) // SAVE $REG_all - self.call(pc) // CALL $pc - self.xload(_REG_ms...) // LOAD $REG_all -} - -func (self *Assembler) call_encoder(pc obj.Addr) { - self.xsave(_REG_enc...) // SAVE $REG_all - self.call(pc) // CALL $pc - self.xload(_REG_enc...) 
// LOAD $REG_all -} - -func (self *Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) { - switch vt.Kind() { - case reflect.Interface: - self.call_marshaler_i(fn, it) - case reflect.Ptr, reflect.Map: - self.call_marshaler_v(fn, it, vt, true) - // struct/array of 1 direct iface type can be direct - default: - self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect()) - } -} - -var ( - _F_assertI2I = jit.Func(rt.AssertI2I) -) - -func (self *Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) { - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JZ", "_null_{n}") // JZ _null_{n} - self.Emit("MOVQ", _AX, _BX) // MOVQ AX, BX - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX) // MOVQ 8(SP.p), CX - self.Emit("MOVQ", jit.Gtype(it), _AX) // MOVQ $it, AX - self.call_go(_F_assertI2I) // CALL_GO assertI2I - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JZ", "_null_{n}") // JZ _null_{n} - self.Emit("MOVQ", _BX, _CX) // MOVQ BX, CX - self.Emit("MOVQ", _AX, _BX) // MOVQ AX, BX - self.prep_buffer_AX() - self.Emit("MOVQ", _ARG_fv, _DI) // MOVQ ARG.fv, DI - self.call_go(fn) // CALL $fn - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ", _LB_error) // JNZ _error - self.load_buffer_AX() - self.Sjmp("JMP", "_done_{n}") // JMP _done_{n} - self.Link("_null_{n}") // _null_{n}: - self.check_size(4) // SIZE $4 - self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL - self.Link("_done_{n}") // _done_{n}: -} - -func (self *Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) { - self.prep_buffer_AX() // MOVE {buf}, (SP) - self.Emit("MOVQ", jit.Itab(it, vt), _BX) // MOVQ $(itab(it, vt)), BX - - /* dereference the pointer if needed */ - if !deref { - self.Emit("MOVQ", _SP_p, _CX) // MOVQ SP.p, CX - } else { - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX) // MOVQ 0(SP.p), CX - } - - /* call the encoder, and perform error checks */ - self.Emit("MOVQ", _ARG_fv, _DI) // MOVQ ARG.fv, DI - self.call_go(fn) // CALL $fn - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ", _LB_error) // JNZ _error - self.load_buffer_AX() -} - -/** Builtin: _more_space **/ - -var ( - _T_byte = jit.Type(vars.ByteType) - _F_growslice = jit.Func(rt.GrowSlice) - - _T_json_Marshaler = rt.UnpackType(vars.JsonMarshalerType) - _T_encoding_TextMarshaler = rt.UnpackType(vars.EncodingTextMarshalerType) -) - -// AX must saving n -func (self *Assembler) more_space() { - self.Link(_LB_more_space) - self.Emit("MOVQ", _RP, _BX) // MOVQ DI, BX - self.Emit("MOVQ", _RL, _CX) // MOVQ SI, CX - self.Emit("MOVQ", _RC, _DI) // MOVQ DX, DI - self.Emit("MOVQ", _AX, _SI) // MOVQ AX, SI - self.Emit("MOVQ", _T_byte, _AX) // MOVQ $_T_byte, AX - self.call_more_space(_F_growslice) // CALL $pc - self.Emit("MOVQ", _AX, _RP) // MOVQ AX, DI - self.Emit("MOVQ", _BX, _RL) // MOVQ BX, SI - self.Emit("MOVQ", _CX, _RC) // MOVQ CX, DX - self.save_buffer() // SAVE {buf} - self.Rjmp("JMP", _LR) // JMP LR -} - -/** Builtin Errors **/ - -var ( - _V_ERR_too_deep = jit.Imm(int64(uintptr(unsafe.Pointer(vars.ERR_too_deep)))) - _V_ERR_nan_or_infinite = jit.Imm(int64(uintptr(unsafe.Pointer(vars.ERR_nan_or_infinite)))) - _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(vars.ErrorType), vars.JsonUnsupportedValueType) -) - -func (self *Assembler) error_too_deep() { - self.Link(_LB_error_too_deep) - self.Emit("MOVQ", _V_ERR_too_deep, _EP) // MOVQ 
$_V_ERR_too_deep, EP - self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET - self.Sjmp("JMP", _LB_error) // JMP _error -} - -func (self *Assembler) error_invalid_number() { - self.Link(_LB_error_invalid_number) - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ 0(SP), AX - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX) // MOVQ 8(SP), BX - self.call_go(_F_error_number) // CALL_GO error_number - self.Sjmp("JMP", _LB_error) // JMP _error -} - -func (self *Assembler) error_nan_or_infinite() { - self.Link(_LB_error_nan_or_infinite) - self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP) // MOVQ $_V_ERR_nan_or_infinite, EP - self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET - self.Sjmp("JMP", _LB_error) // JMP _error -} - -/** String Encoding Routine **/ - -var ( - _F_quote = jit.Imm(int64(native.S_quote)) - _F_panic = jit.Func(vars.GoPanic) -) - -func (self *Assembler) go_panic() { - self.Link(_LB_panic) - self.Emit("MOVQ", _SP_p, _BX) - self.call_go(_F_panic) -} - -func (self *Assembler) encode_string(doubleQuote bool) { - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JZ", "_str_empty_{n}") // JZ _str_empty_{n} - self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) - self.Sjmp("JNE", "_str_next_{n}") - self.Emit("MOVQ", jit.Imm(int64(vars.PanicNilPointerOfNonEmptyString)), _AX) - self.Sjmp("JMP", _LB_panic) - self.Link("_str_next_{n}") - - /* opening quote, check for double quote */ - if !doubleQuote { - self.check_size_r(_AX, 2) // SIZE $2 - self.add_char('"') // CHAR $'"' - } else { - self.check_size_r(_AX, 6) // SIZE $6 - self.add_long(_IM_open, 3) // TEXT $`"\"` - } - - /* quoting loop */ - self.Emit("XORL", _AX, _AX) // XORL AX, AX - self.Emit("MOVQ", _AX, _VAR_sp) // MOVQ AX, sp - self.Link("_str_loop_{n}") // _str_loop_{n}: - self.save_c() // SAVE $REG_ffi - - /* load the output buffer first, and then input buffer, - * because the parameter registers collide with RP / RL / RC */ - self.Emit("MOVQ", _RC, _CX) // MOVQ RC, CX - self.Emit("SUBQ", _RL, _CX) // SUBQ RL, CX - self.Emit("MOVQ", _CX, _VAR_dn) // MOVQ CX, dn - self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX) // LEAQ (RP)(RL), DX - self.Emit("LEAQ", _VAR_dn, _CX) // LEAQ dn, CX - self.Emit("MOVQ", _VAR_sp, _AX) // MOVQ sp, AX - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI) // MOVQ (SP.p), DI - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI) // MOVQ 8(SP.p), SI - self.Emit("ADDQ", _AX, _DI) // ADDQ AX, DI - self.Emit("SUBQ", _AX, _SI) // SUBQ AX, SI - - /* set the flags based on `doubleQuote` */ - if !doubleQuote { - self.Emit("XORL", _R8, _R8) // XORL R8, R8 - } else { - self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8 - } - - /* call the native quoter */ - self.call_c(_F_quote) // CALL quote - self.Emit("ADDQ", _VAR_dn, _RL) // ADDQ dn, RL - - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS", "_str_space_{n}") // JS _str_space_{n} - - /* close the string, check for double quote */ - if !doubleQuote { - self.check_size(1) // SIZE $1 - self.add_char('"') // CHAR $'"' - self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n} - } else { - self.check_size(3) // SIZE $3 - self.add_text("\\\"\"") // TEXT $'\""' - self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n} - } - - /* not enough space to contain the quoted string */ - self.Link("_str_space_{n}") // _str_space_{n}: - self.Emit("NOTQ", _AX) // NOTQ AX - self.Emit("ADDQ", _AX, _VAR_sp) // 
ADDQ AX, sp - self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX) // LEAQ (RC)(RC), AX - self.slice_grow_ax("_str_loop_{n}") // GROW _str_loop_{n} - - /* empty string, check for double quote */ - if !doubleQuote { - self.Link("_str_empty_{n}") // _str_empty_{n}: - self.check_size(2) // SIZE $2 - self.add_text("\"\"") // TEXT $'""' - self.Link("_str_end_{n}") // _str_end_{n}: - } else { - self.Link("_str_empty_{n}") // _str_empty_{n}: - self.check_size(6) // SIZE $6 - self.add_text("\"\\\"\\\"\"") // TEXT $'"\"\""' - self.Link("_str_end_{n}") // _str_end_{n}: - } -} - -/** OpCode Assembler Functions **/ - - - -var ( - _F_f64toa = jit.Imm(int64(native.S_f64toa)) - _F_f32toa = jit.Imm(int64(native.S_f32toa)) - _F_i64toa = jit.Imm(int64(native.S_i64toa)) - _F_u64toa = jit.Imm(int64(native.S_u64toa)) - _F_b64encode = jit.Imm(int64(_subr__b64encode)) -) - -var ( - _F_memmove = jit.Func(rt.Memmove) - _F_error_number = jit.Func(vars.Error_number) - _F_isValidNumber = jit.Func(rt.IsValidNumber) -) - -var ( - _F_iteratorStop = jit.Func(alg.IteratorStop) - _F_iteratorNext = jit.Func(alg.IteratorNext) - _F_iteratorStart = jit.Func(alg.IteratorStart) -) - -var ( - _F_encodeTypedPointer obj.Addr - _F_encodeJsonMarshaler obj.Addr - _F_encodeTextMarshaler obj.Addr -) - -const ( - _MODE_AVX2 = 1 << 2 -) - -func init() { - _F_encodeJsonMarshaler = jit.Func(alg.EncodeJsonMarshaler) - _F_encodeTextMarshaler = jit.Func(alg.EncodeTextMarshaler) - _F_encodeTypedPointer = jit.Func(EncodeTypedPointer) -} - -func (self *Assembler) _asm_OP_null(_ *ir.Instr) { - self.check_size(4) - self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL -} - -func (self *Assembler) _asm_OP_empty_arr(_ *ir.Instr) { - self.Emit("BTQ", jit.Imm(int64(alg.BitNoNullSliceOrMap)), _ARG_fv) - self.Sjmp("JC", "_empty_arr_{n}") - self._asm_OP_null(nil) - self.Sjmp("JMP", "_empty_arr_end_{n}") - self.Link("_empty_arr_{n}") - self.check_size(2) - self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0)) - self.Emit("ADDQ", jit.Imm(2), _RL) - self.Link("_empty_arr_end_{n}") -} - -func (self *Assembler) _asm_OP_empty_obj(_ *ir.Instr) { - self.Emit("BTQ", jit.Imm(int64(alg.BitNoNullSliceOrMap)), _ARG_fv) - self.Sjmp("JC", "_empty_obj_{n}") - self._asm_OP_null(nil) - self.Sjmp("JMP", "_empty_obj_end_{n}") - self.Link("_empty_obj_{n}") - self.check_size(2) - self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0)) - self.Emit("ADDQ", jit.Imm(2), _RL) - self.Link("_empty_obj_end_{n}") -} - -func (self *Assembler) _asm_OP_bool(_ *ir.Instr) { - self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0 - self.Sjmp("JE", "_false_{n}") // JE _false_{n} - self.check_size(4) // SIZE $4 - self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'true', (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL - self.Sjmp("JMP", "_end_{n}") // JMP _end_{n} - self.Link("_false_{n}") // _false_{n}: - self.check_size(5) // SIZE $5 - self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'fals', (RP)(RL*1) - self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4)) // MOVB $'e', 4(RP)(RL*1) - self.Emit("ADDQ", jit.Imm(5), _RL) // ADDQ $5, RL - self.Link("_end_{n}") // _end_{n}: -} - -func (self *Assembler) _asm_OP_i8(_ *ir.Instr) { - self.store_int(4, _F_i64toa, "MOVBQSX") -} - -func (self *Assembler) _asm_OP_i16(_ *ir.Instr) { - self.store_int(6, _F_i64toa, "MOVWQSX") -} - -func (self *Assembler) _asm_OP_i32(_ *ir.Instr) { - 
self.store_int(17, _F_i64toa, "MOVLQSX") -} - -func (self *Assembler) _asm_OP_i64(_ *ir.Instr) { - self.store_int(21, _F_i64toa, "MOVQ") -} - -func (self *Assembler) _asm_OP_u8(_ *ir.Instr) { - self.store_int(3, _F_u64toa, "MOVBQZX") -} - -func (self *Assembler) _asm_OP_u16(_ *ir.Instr) { - self.store_int(5, _F_u64toa, "MOVWQZX") -} - -func (self *Assembler) _asm_OP_u32(_ *ir.Instr) { - self.store_int(16, _F_u64toa, "MOVLQZX") -} - -func (self *Assembler) _asm_OP_u64(_ *ir.Instr) { - self.store_int(20, _F_u64toa, "MOVQ") -} - -func (self *Assembler) _asm_OP_f32(_ *ir.Instr) { - self.check_size(32) - self.Emit("MOVL", jit.Ptr(_SP_p, 0), _AX) // MOVL (SP.p), AX - self.Emit("ANDL", jit.Imm(_FM_exp32), _AX) // ANDL $_FM_exp32, AX - self.Emit("XORL", jit.Imm(_FM_exp32), _AX) // XORL $_FM_exp32, AX - self.Sjmp("JNZ", "_encode_normal_f32_{n}")// JNZ _encode_normal_f32_{n} - self.Emit("BTQ", jit.Imm(alg.BitEncodeNullForInfOrNan), _ARG_fv) // BTQ ${BitEncodeNullForInfOrNan}, fv - self.Sjmp("JNC", _LB_error_nan_or_infinite) // JNC _error_nan_or_infinite - self._asm_OP_null(nil) - self.Sjmp("JMP", "_encode_f32_end_{n}") // JMP _encode_f32_end_{n} - self.Link("_encode_normal_f32_{n}") - self.save_c() // SAVE $C_regs - self.rbuf_di() // MOVQ RP, DI - self.Emit("MOVSS", jit.Ptr(_SP_p, 0), _X0) // MOVSS (SP.p), X0 - self.call_c(_F_f32toa) // CALL_C f32toa - self.Emit("ADDQ", _AX, _RL) // ADDQ AX, RL - self.Link("_encode_f32_end_{n}") -} - -func (self *Assembler) _asm_OP_f64(_ *ir.Instr) { - self.check_size(32) - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("MOVQ", jit.Imm(_FM_exp64), _CX) // MOVQ $_FM_exp64, CX - self.Emit("ANDQ", _CX, _AX) // ANDQ CX, AX - self.Emit("XORQ", _CX, _AX) // XORQ CX, AX - self.Sjmp("JNZ", "_encode_normal_f64_{n}")// JNZ _encode_normal_f64_{n} - self.Emit("BTQ", jit.Imm(alg.BitEncodeNullForInfOrNan), _ARG_fv) // BTQ ${BitEncodeNullForInfOrNan}, fv - self.Sjmp("JNC", _LB_error_nan_or_infinite)// JNC _error_nan_or_infinite - self._asm_OP_null(nil) - self.Sjmp("JMP", "_encode_f64_end_{n}") // JMP _encode_f64_end_{n} - self.Link("_encode_normal_f64_{n}") - self.save_c() // SAVE $C_regs - self.rbuf_di() // MOVQ RP, DI - self.Emit("MOVSD", jit.Ptr(_SP_p, 0), _X0) // MOVSD (SP.p), X0 - self.call_c(_F_f64toa) // CALL_C f64toa - self.Emit("ADDQ", _AX, _RL) // ADDQ AX, RL - self.Link("_encode_f64_end_{n}") -} - -func (self *Assembler) _asm_OP_str(_ *ir.Instr) { - self.encode_string(false) -} - -func (self *Assembler) _asm_OP_bin(_ *ir.Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX - self.Emit("ADDQ", jit.Imm(2), _AX) // ADDQ $2, AX - self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX) // MOVQ $_MF_mulv, CX - self.Emit("MOVQ", _DX, _BX) // MOVQ DX, BX - self.From("MULQ", _CX) // MULQ CX - self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX) // LEAQ 1(DX)(DX), AX - self.Emit("ORQ", jit.Imm(2), _AX) // ORQ $2, AX - self.Emit("MOVQ", _BX, _DX) // MOVQ BX, DX - self.check_size_r(_AX, 0) // SIZE AX - self.add_char('"') // CHAR $'"' - self.Emit("MOVQ", _ARG_rb, _DI) // MOVQ rb<>+0(FP), DI - self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8)) // MOVQ SI, 8(DI) - self.Emit("MOVQ", _SP_p, _SI) // MOVQ SP.p, SI - - /* check for AVX2 support */ - if !cpu.HasAVX2 { - self.Emit("XORL", _DX, _DX) // XORL DX, DX - } else { - self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX) // MOVL $_MODE_AVX2, DX - } - - /* call the encoder */ - self.call_b64(_F_b64encode) // CALL b64encode - self.load_buffer_AX() // LOAD {buf} - self.add_char('"') // CHAR $'"' -} - -func (self *Assembler) 
_asm_OP_quote(_ *ir.Instr) { - self.encode_string(true) -} - -func (self *Assembler) _asm_OP_number(_ *ir.Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX) // MOVQ (SP.p), BX - self.Emit("TESTQ", _BX, _BX) // TESTQ BX, BX - self.Sjmp("JZ", "_empty_{n}") - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNZ", "_number_next_{n}") - self.Emit("MOVQ", jit.Imm(int64(vars.PanicNilPointerOfNonEmptyString)), _AX) - self.Sjmp("JMP", _LB_panic) - self.Link("_number_next_{n}") - self.call_go(_F_isValidNumber) // CALL_GO isValidNumber - self.Emit("CMPB", _AX, jit.Imm(0)) // CMPB AX, $0 - self.Sjmp("JE", _LB_error_invalid_number) // JE _error_invalid_number - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX) // MOVQ (SP.p), BX - self.check_size_r(_BX, 0) // SIZE BX - self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _AX) // LEAQ (RP)(RL), AX - self.Emit("ADDQ", jit.Ptr(_SP_p, 8), _RL) // ADDQ 8(SP.p), RL - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _BX) // MOVOU (SP.p), BX - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX) // MOVOU X0, 8(SP) - self.call_go(_F_memmove) // CALL_GO memmove - self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX - self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX) - self.Sjmp("JMP", "_done_{n}") // JMP _done_{n} - self.Link("_empty_{n}") // _empty_{n} - self.check_size(1) // SIZE $1 - self.add_char('0') // CHAR $'0' - self.Link("_done_{n}") // _done_{n}: -} - -func (self *Assembler) _asm_OP_eface(_ *ir.Instr) { - self.prep_buffer_AX() // MOVE {buf}, AX - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _BX) // MOVQ (SP.p), BX - self.Emit("LEAQ", jit.Ptr(_SP_p, 8), _CX) // LEAQ 8(SP.p), CX - self.Emit("MOVQ", _ST, _DI) // MOVQ ST, DI - self.Emit("MOVQ", _ARG_fv, _SI) // MOVQ fv, AX - self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ", _LB_error) // JNZ _error - self.load_buffer_AX() -} - -func (self *Assembler) _asm_OP_iface(_ *ir.Instr) { - self.prep_buffer_AX() // MOVE {buf}, AX - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX) // MOVQ (SP.p), CX - self.Emit("MOVQ", jit.Ptr(_CX, 8), _BX) // MOVQ 8(CX), BX - self.Emit("LEAQ", jit.Ptr(_SP_p, 8), _CX) // LEAQ 8(SP.p), CX - self.Emit("MOVQ", _ST, _DI) // MOVQ ST, DI - self.Emit("MOVQ", _ARG_fv, _SI) // MOVQ fv, AX - self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ", _LB_error) // JNZ _error - self.load_buffer_AX() -} - -func (self *Assembler) _asm_OP_byte(p *ir.Instr) { - self.check_size(1) - self.Emit("MOVB", jit.Imm(p.I64()), jit.Sib(_RP, _RL, 1, 0)) // MOVL p.Vi(), (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL -} - -func (self *Assembler) _asm_OP_text(p *ir.Instr) { - self.check_size(len(p.Vs())) // SIZE ${len(p.Vs())} - self.add_text(p.Vs()) // TEXT ${p.Vs()} -} - -func (self *Assembler) _asm_OP_deref(_ *ir.Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p -} - -func (self *Assembler) _asm_OP_index(p *ir.Instr) { - self.Emit("MOVQ", jit.Imm(p.I64()), _AX) // MOVQ $p.Vi(), AX - self.Emit("ADDQ", _AX, _SP_p) // ADDQ AX, SP.p -} - -func (self *Assembler) _asm_OP_load(_ *ir.Instr) { - self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x) // MOVQ -24(ST)(AX), SP.x - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p) // MOVQ -8(ST)(AX), SP.p - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q) // MOVQ (ST)(AX), SP.q -} - -func 
(self *Assembler) _asm_OP_save(_ *ir.Instr) { - self.save_state() -} - -func (self *Assembler) _asm_OP_drop(_ *ir.Instr) { - self.drop_state(vars.StateSize) -} - -func (self *Assembler) _asm_OP_drop_2(_ *ir.Instr) { - self.drop_state(vars.StateSize * 2) // DROP $(vars.StateSize * 2) - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56)) // MOVOU X0, 56(ST)(AX) -} - -func (self *Assembler) _asm_OP_recurse(p *ir.Instr) { - self.prep_buffer_AX() // MOVE {buf}, (SP) - vt, pv := p.Vp() - self.Emit("MOVQ", jit.Type(vt), _BX) // MOVQ $(type(p.Vt())), BX - - /* check for indirection */ - if !rt.UnpackType(vt).Indirect() { - self.Emit("MOVQ", _SP_p, _CX) // MOVQ SP.p, CX - } else { - self.Emit("MOVQ", _SP_p, _VAR_vp) // MOVQ SP.p, VAR.vp - self.Emit("LEAQ", _VAR_vp, _CX) // LEAQ VAR.vp, CX - } - - /* call the encoder */ - self.Emit("MOVQ", _ST, _DI) // MOVQ ST, DI - self.Emit("MOVQ", _ARG_fv, _SI) // MOVQ $fv, SI - if pv { - self.Emit("BTSQ", jit.Imm(alg.BitPointerValue), _SI) // BTSQ $1, SI - } - - self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ", _LB_error) // JNZ _error - self.load_buffer_AX() -} - -func (self *Assembler) _asm_OP_is_nil(p *ir.Instr) { - self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0 - self.Xjmp("JE", p.Vi()) // JE p.Vi() -} - -func (self *Assembler) _asm_OP_is_nil_p1(p *ir.Instr) { - self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0)) // CMPQ 8(SP.p), $0 - self.Xjmp("JE", p.Vi()) // JE p.Vi() -} - -func (self *Assembler) _asm_OP_is_zero_1(p *ir.Instr) { - self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0 - self.Xjmp("JE", p.Vi()) // JE p.Vi() -} - -func (self *Assembler) _asm_OP_is_zero_2(p *ir.Instr) { - self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPW (SP.p), $0 - self.Xjmp("JE", p.Vi()) // JE p.Vi() -} - -func (self *Assembler) _asm_OP_is_zero_4(p *ir.Instr) { - self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPL (SP.p), $0 - self.Xjmp("JE", p.Vi()) // JE p.Vi() -} - -func (self *Assembler) _asm_OP_is_zero_8(p *ir.Instr) { - self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0 - self.Xjmp("JE", p.Vi()) // JE p.Vi() -} - -func (self *Assembler) _asm_OP_is_zero_map(p *ir.Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Xjmp("JZ", p.Vi()) // JZ p.Vi() - self.Emit("CMPQ", jit.Ptr(_AX, 0), jit.Imm(0)) // CMPQ (AX), $0 - self.Xjmp("JE", p.Vi()) // JE p.Vi() -} - -func (self *Assembler) _asm_OP_goto(p *ir.Instr) { - self.Xjmp("JMP", p.Vi()) -} - -func (self *Assembler) _asm_OP_map_iter(p *ir.Instr) { - self.Emit("MOVQ", jit.Type(p.Vt()), _AX) // MOVQ $p.Vt(), AX - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _BX) // MOVQ (SP.p), BX - self.Emit("MOVQ", _ARG_fv, _CX) // MOVQ fv, CX - self.call_go(_F_iteratorStart) // CALL_GO iteratorStart - self.Emit("MOVQ", _AX, _SP_q) // MOVQ AX, SP.q - self.Emit("MOVQ", _BX, _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ", _CX, _EP) // MOVQ 40(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ", _LB_error) // JNZ _error -} - -func (self *Assembler) _asm_OP_map_stop(_ *ir.Instr) { - self.Emit("MOVQ", _SP_q, _AX) // MOVQ SP.q, AX - self.call_go(_F_iteratorStop) // CALL_GO iteratorStop - self.Emit("XORL", _SP_q, _SP_q) // XORL SP.q, SP.q -} - -func (self *Assembler) _asm_OP_map_check_key(p *ir.Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_q, 0), _SP_p) // MOVQ (SP.q), SP.p - self.Emit("TESTQ", _SP_p, _SP_p) // TESTQ SP.p, SP.p - 
self.Xjmp("JZ", p.Vi()) // JNZ p.Vi() -} - -func (self *Assembler) _asm_OP_map_write_key(p *ir.Instr) { - self.Emit("BTQ", jit.Imm(alg.BitSortMapKeys), _ARG_fv) // BTQ ${SortMapKeys}, fv - self.Sjmp("JNC", "_unordered_key_{n}") // JNC _unordered_key_{n} - self.encode_string(false) // STR $false - self.Xjmp("JMP", p.Vi()) // JMP ${p.Vi()} - self.Link("_unordered_key_{n}") // _unordered_key_{n}: -} - -func (self *Assembler) _asm_OP_map_value_next(_ *ir.Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p) // MOVQ 8(SP.q), SP.p - self.Emit("MOVQ", _SP_q, _AX) // MOVQ SP.q, AX - self.call_go(_F_iteratorNext) // CALL_GO iteratorNext -} - -func (self *Assembler) _asm_OP_slice_len(_ *ir.Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SP_x) // MOVQ 8(SP.p), SP.x - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p - self.Emit("ORQ", jit.Imm(1<<_S_init), _SP_f) // ORQ $(1<<_S_init), SP.f -} - -func (self *Assembler) _asm_OP_slice_next(p *ir.Instr) { - self.Emit("TESTQ", _SP_x, _SP_x) // TESTQ SP.x, SP.x - self.Xjmp("JZ", p.Vi()) // JZ p.Vi() - self.Emit("SUBQ", jit.Imm(1), _SP_x) // SUBQ $1, SP.x - self.Emit("BTRQ", jit.Imm(_S_init), _SP_f) // BTRQ $_S_init, SP.f - self.Emit("LEAQ", jit.Ptr(_SP_p, int64(p.Vlen())), _AX) // LEAQ $(p.vlen())(SP.p), AX - self.Emit("CMOVQCC", _AX, _SP_p) // CMOVQNC AX, SP.p -} - -func (self *Assembler) _asm_OP_marshal(p *ir.Instr) { - self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.Vt()) -} - -func (self *Assembler) _asm_OP_marshal_p(p *ir.Instr) { - if p.Vk() != reflect.Ptr { - panic("marshal_p: invalid type") - } else { - self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.Vt(), false) - } -} - -func (self *Assembler) _asm_OP_marshal_text(p *ir.Instr) { - self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.Vt()) -} - -func (self *Assembler) _asm_OP_marshal_text_p(p *ir.Instr) { - if p.Vk() != reflect.Ptr { - panic("marshal_text_p: invalid type") - } else { - self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.Vt(), false) - } -} - -func (self *Assembler) _asm_OP_cond_set(_ *ir.Instr) { - self.Emit("ORQ", jit.Imm(1<<_S_cond), _SP_f) // ORQ $(1<<_S_cond), SP.f -} - -func (self *Assembler) _asm_OP_cond_testc(p *ir.Instr) { - self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f) // BTRQ $_S_cond, SP.f - self.Xjmp("JC", p.Vi()) -} - -func (self *Assembler) print_gc(i int, p1 *ir.Instr, p2 *ir.Instr) { - self.Emit("MOVQ", jit.Imm(int64(p2.Op())), _CX) // MOVQ $(p2.Op()), AX - self.Emit("MOVQ", jit.Imm(int64(p1.Op())), _BX) // MOVQ $(p1.Op()), BX - self.Emit("MOVQ", jit.Imm(int64(i)), _AX) // MOVQ $(i), CX - self.call_go(_F_println) -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/x86/debug_go116.go b/vendor/github.com/bytedance/sonic/internal/encoder/x86/debug_go116.go deleted file mode 100644 index c292e88a1..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/x86/debug_go116.go +++ /dev/null @@ -1,66 +0,0 @@ -// +build go1.17,!go1.17 - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package x86 - -import ( - `os` - `strings` - `runtime` - `runtime/debug` - - `github.com/bytedance/sonic/internal/jit` -) - -var ( - debugSyncGC = os.Getenv("SONIC_SYNC_GC") != "" - debugAsyncGC = os.Getenv("SONIC_NO_ASYNC_GC") == "" -) - -var ( - _Instr_End _Instr = newInsOp(_OP_null) - - _F_gc = jit.Func(runtime.GC) - _F_force_gc = jit.Func(debug.FreeOSMemory) - _F_println = jit.Func(println_wrapper) -) - -func println_wrapper(i int, op1 int, op2 int){ - println(i, " Intrs ", op1, _OpNames[op1], "next: ", op2, _OpNames[op2]) -} - -func (self *_Assembler) force_gc() { - self.call_go(_F_gc) - self.call_go(_F_force_gc) -} - -func (self *_Assembler) debug_instr(i int, v *_Instr) { - if debugSyncGC { - if (i+1 == len(self.p)) { - self.print_gc(i, v, &_Instr_End) - } else { - next := &(self.p[i+1]) - self.print_gc(i, v, next) - name := _OpNames[next.op()] - if strings.Contains(name, "save") { - return - } - } - self.force_gc() - } -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/x86/debug_go117.go b/vendor/github.com/bytedance/sonic/internal/encoder/x86/debug_go117.go deleted file mode 100644 index 0aca3f4c5..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/x86/debug_go117.go +++ /dev/null @@ -1,201 +0,0 @@ -//go:build go1.17 && !go1.24 -// +build go1.17,!go1.24 - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package x86 - -import ( - "fmt" - "runtime" - "strings" - "unsafe" - - "github.com/bytedance/sonic/internal/encoder/ir" - "github.com/bytedance/sonic/internal/encoder/vars" - "github.com/bytedance/sonic/internal/jit" - "github.com/twitchyliquid64/golang-asm/obj" -) - -const _FP_debug = 128 - -var ( - _Instr_End = ir.NewInsOp(ir.OP_is_nil) - - _F_gc = jit.Func(gc) - _F_println = jit.Func(println_wrapper) - _F_print = jit.Func(print) -) - -func (self *Assembler) dsave(r ...obj.Addr) { - for i, v := range r { - if i > _FP_debug/8-1 { - panic("too many registers to save") - } else { - self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs+_FP_saves+_FP_locals+int64(i)*8)) - } - } -} - -func (self *Assembler) dload(r ...obj.Addr) { - for i, v := range r { - if i > _FP_debug/8-1 { - panic("too many registers to load") - } else { - self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs+_FP_saves+_FP_locals+int64(i)*8), v) - } - } -} - -func println_wrapper(i int, op1 int, op2 int) { - println(i, " Intrs ", op1, ir.OpNames[op1], "next: ", op2, ir.OpNames[op2]) -} - -func print(i int) { - println(i) -} - -func gc() { - if !vars.DebugSyncGC { - return - } - runtime.GC() - // debug.FreeOSMemory() -} - -func (self *Assembler) dcall(fn obj.Addr) { - self.Emit("MOVQ", fn, _R10) // MOVQ ${fn}, R10 - self.Rjmp("CALL", _R10) // CALL R10 -} - -func (self *Assembler) debug_gc() { - if !vars.DebugSyncGC { - return - } - self.dsave(_REG_debug...) - self.dcall(_F_gc) - self.dload(_REG_debug...) 
-} - -func (self *Assembler) debug_instr(i int, v *ir.Instr) { - if vars.DebugSyncGC { - if i+1 == len(self.p) { - self.print_gc(i, v, &_Instr_End) - } else { - next := &(self.p[i+1]) - self.print_gc(i, v, next) - name := ir.OpNames[next.Op()] - if strings.Contains(name, "save") { - return - } - } - // self.debug_gc() - } -} - -//go:noescape -//go:linkname checkptrBase runtime.checkptrBase -func checkptrBase(p unsafe.Pointer) uintptr - -//go:noescape -//go:linkname findObject runtime.findObject -func findObject(p, refBase, refOff uintptr) (base uintptr, s unsafe.Pointer, objIndex uintptr) - -var ( - _F_checkptr = jit.Func(checkptr) - _F_printptr = jit.Func(printptr) -) - -var ( - _R10 = jit.Reg("R10") -) -var _REG_debug = []obj.Addr{ - jit.Reg("AX"), - jit.Reg("BX"), - jit.Reg("CX"), - jit.Reg("DX"), - jit.Reg("DI"), - jit.Reg("SI"), - jit.Reg("BP"), - jit.Reg("SP"), - jit.Reg("R8"), - jit.Reg("R9"), - jit.Reg("R10"), - jit.Reg("R11"), - jit.Reg("R12"), - jit.Reg("R13"), - jit.Reg("R14"), - jit.Reg("R15"), -} - -func checkptr(ptr uintptr) { - if ptr == 0 { - return - } - fmt.Printf("pointer: %x\n", ptr) - f := checkptrBase(unsafe.Pointer(uintptr(ptr))) - if f == 0 { - fmt.Printf("! unknown-based pointer: %x\n", ptr) - } else if f == 1 { - fmt.Printf("! stack pointer: %x\n", ptr) - } else { - fmt.Printf("base: %x\n", f) - } - findobj(ptr) -} - -func findobj(ptr uintptr) { - base, s, objIndex := findObject(ptr, 0, 0) - if s != nil && base == 0 { - fmt.Printf("! invalid pointer: %x\n", ptr) - } - fmt.Printf("objIndex: %d\n", objIndex) -} - -func (self *Assembler) check_ptr(ptr obj.Addr, lea bool) { - if !vars.DebugCheckPtr { - return - } - - self.dsave(_REG_debug...) - if lea { - self.Emit("LEAQ", ptr, _R10) - } else { - self.Emit("MOVQ", ptr, _R10) - } - self.Emit("MOVQ", _R10, jit.Ptr(_SP, 0)) - self.dcall(_F_checkptr) - self.dload(_REG_debug...) -} - -func printptr(i int, ptr uintptr) { - fmt.Printf("[%d] ptr: %x\n", i, ptr) -} - -func (self *Assembler) print_ptr(i int, ptr obj.Addr, lea bool) { - self.dsave(_REG_debug...) - if lea { - self.Emit("LEAQ", ptr, _R10) - } else { - self.Emit("MOVQ", ptr, _R10) - } - - self.Emit("MOVQ", jit.Imm(int64(i)), _AX) - self.Emit("MOVQ", _R10, _BX) - self.dcall(_F_printptr) - self.dload(_REG_debug...) -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/x86/stbus.go b/vendor/github.com/bytedance/sonic/internal/encoder/x86/stbus.go deleted file mode 100644 index b9fa473f5..000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/x86/stbus.go +++ /dev/null @@ -1,54 +0,0 @@ -/** - * Copyright 2024 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package x86 - -import ( - "unsafe" - _ "unsafe" - - "github.com/bytedance/sonic/internal/encoder/alg" - "github.com/bytedance/sonic/internal/encoder/vars" - "github.com/bytedance/sonic/internal/rt" - "github.com/bytedance/sonic/loader" - _ "github.com/cloudwego/base64x" -) - -//go:linkname _subr__b64encode github.com/cloudwego/base64x._subr__b64encode -var _subr__b64encode uintptr - -var compiler func(*rt.GoType, ... interface{}) (interface{}, error) - -func SetCompiler(c func(*rt.GoType, ... interface{}) (interface{}, error)) { - compiler = c -} - -func ptoenc(p loader.Function) vars.Encoder { - return *(*vars.Encoder)(unsafe.Pointer(&p)) -} - -func EncodeTypedPointer(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *vars.Stack, fv uint64) error { - if vt == nil { - return alg.EncodeNil(buf) - } else if fn, err := vars.FindOrCompile(vt, (fv&(1<<alg.BitPointerValue)) != 0, compiler); err != nil { - return err - } else if vt.Indirect() { - return fn.(vars.Encoder)(buf, *vp, sb, fv) - } else { - return fn.(vars.Encoder)(buf, unsafe.Pointer(vp), sb, fv) - } -} - |