Diffstat (limited to 'vendor/github.com/bytedance/sonic/loader')
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/LICENSE                            | 201
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/funcdata_compat.go                 |   2
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/funcdata_go116.go                  |   2
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/funcdata_go118.go                  |   2
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/funcdata_go120.go                  |   2
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/funcdata_go121.go                  |   2
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/funcdata_latest.go                 |   2
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/abi/abi.go                | 197
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/abi/abi_amd64.go          | 282
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/abi/abi_legacy_amd64.go   | 182
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/abi/abi_regabi_amd64.go   | 316
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/abi/stubs.go              |  35
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/rt/fastmem.go             |  62
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/rt/fastvalue.go           | 183
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/rt/stackmap.go            | 181
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/loader_latest.go                   |   2
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/wrapper.go                         |   4
17 files changed, 1648 insertions(+), 9 deletions(-)
diff --git a/vendor/github.com/bytedance/sonic/loader/LICENSE b/vendor/github.com/bytedance/sonic/loader/LICENSE
new file mode 100644
index 000000000..261eeb9e9
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/bytedance/sonic/loader/funcdata_compat.go b/vendor/github.com/bytedance/sonic/loader/funcdata_compat.go
index 1af575aaa..10a14e349 100644
--- a/vendor/github.com/bytedance/sonic/loader/funcdata_compat.go
+++ b/vendor/github.com/bytedance/sonic/loader/funcdata_compat.go
@@ -24,7 +24,7 @@ import (
`unsafe`
`sort`
- `github.com/bytedance/sonic/internal/rt`
+ `github.com/bytedance/sonic/loader/internal/rt`
)
const (
diff --git a/vendor/github.com/bytedance/sonic/loader/funcdata_go116.go b/vendor/github.com/bytedance/sonic/loader/funcdata_go116.go
index 1dfe2297d..c3195b4c3 100644
--- a/vendor/github.com/bytedance/sonic/loader/funcdata_go116.go
+++ b/vendor/github.com/bytedance/sonic/loader/funcdata_go116.go
@@ -24,7 +24,7 @@ import (
`unsafe`
`sort`
- `github.com/bytedance/sonic/internal/rt`
+ `github.com/bytedance/sonic/loader/internal/rt`
)
const (
diff --git a/vendor/github.com/bytedance/sonic/loader/funcdata_go118.go b/vendor/github.com/bytedance/sonic/loader/funcdata_go118.go
index 84178ab85..54ec4d688 100644
--- a/vendor/github.com/bytedance/sonic/loader/funcdata_go118.go
+++ b/vendor/github.com/bytedance/sonic/loader/funcdata_go118.go
@@ -21,7 +21,7 @@
package loader
import (
- `github.com/bytedance/sonic/internal/rt`
+ `github.com/bytedance/sonic/loader/internal/rt`
)
const (
diff --git a/vendor/github.com/bytedance/sonic/loader/funcdata_go120.go b/vendor/github.com/bytedance/sonic/loader/funcdata_go120.go
index ed3e7d1a4..803312fd7 100644
--- a/vendor/github.com/bytedance/sonic/loader/funcdata_go120.go
+++ b/vendor/github.com/bytedance/sonic/loader/funcdata_go120.go
@@ -20,7 +20,7 @@
package loader
import (
- `github.com/bytedance/sonic/internal/rt`
+ `github.com/bytedance/sonic/loader/internal/rt`
)
const (
diff --git a/vendor/github.com/bytedance/sonic/loader/funcdata_go121.go b/vendor/github.com/bytedance/sonic/loader/funcdata_go121.go
index ebeaca5a7..8130ffc1a 100644
--- a/vendor/github.com/bytedance/sonic/loader/funcdata_go121.go
+++ b/vendor/github.com/bytedance/sonic/loader/funcdata_go121.go
@@ -21,7 +21,7 @@ package loader
import (
`unsafe`
- `github.com/bytedance/sonic/internal/rt`
+ `github.com/bytedance/sonic/loader/internal/rt`
)
const (
diff --git a/vendor/github.com/bytedance/sonic/loader/funcdata_latest.go b/vendor/github.com/bytedance/sonic/loader/funcdata_latest.go
index 08618dca4..6029f204d 100644
--- a/vendor/github.com/bytedance/sonic/loader/funcdata_latest.go
+++ b/vendor/github.com/bytedance/sonic/loader/funcdata_latest.go
@@ -24,7 +24,7 @@ import (
`sort`
`unsafe`
- `github.com/bytedance/sonic/internal/rt`
+ `github.com/bytedance/sonic/loader/internal/rt`
)
type funcTab struct {
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/abi/abi.go b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi.go
new file mode 100644
index 000000000..524ab5330
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi.go
@@ -0,0 +1,197 @@
+/*
+ * Copyright 2022 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package abi
+
+import (
+ `fmt`
+ `reflect`
+ `sort`
+ `strings`
+
+ `github.com/bytedance/sonic/loader/internal/rt`
+)
+
+type FunctionLayout struct {
+ FP uint32
+ Args []Parameter
+ Rets []Parameter
+}
+
+func (self FunctionLayout) String() string {
+ return self.formatFn()
+}
+
+func (self FunctionLayout) ArgSize() uint32 {
+ size := uintptr(0)
+ for _, arg := range self.Args {
+ size += arg.Type.Size()
+ }
+ return uint32(size)
+}
+
+type slot struct {
+ p bool
+ m uint32
+}
+
+func (self FunctionLayout) StackMap() *rt.StackMap {
+ var st []slot
+ var mb rt.StackMapBuilder
+
+ /* add arguments */
+ for _, v := range self.Args {
+ st = append(st, slot {
+ m: v.Mem,
+ p: v.IsPointer,
+ })
+ }
+
+ /* add stack-passed return values */
+ for _, v := range self.Rets {
+ if !v.InRegister {
+ st = append(st, slot {
+ m: v.Mem,
+ p: v.IsPointer,
+ })
+ }
+ }
+
+ /* sort by memory offset */
+ sort.Slice(st, func(i int, j int) bool {
+ return st[i].m < st[j].m
+ })
+
+ /* add the bits */
+ for _, v := range st {
+ mb.AddField(v.p)
+ }
+
+ /* build the stack map */
+ return mb.Build()
+}
+
+func (self FunctionLayout) formatFn() string {
+ fp := self.FP
+ return fmt.Sprintf("\n%#04x\nRets:\n%s\nArgs:\n%s", fp, self.formatSeq(self.Rets, &fp), self.formatSeq(self.Args, &fp))
+}
+
+func (self FunctionLayout) formatSeq(v []Parameter, fp *uint32) string {
+ nb := len(v)
+ mm := make([]string, 0, len(v))
+
+ /* convert each part */
+ for i := nb - 1; i >= 0; i-- {
+ *fp -= PtrSize
+ mm = append(mm, fmt.Sprintf("%#04x %s", *fp, v[i].String()))
+ }
+
+ /* join them together */
+ return strings.Join(mm, "\n")
+}
+
+type Frame struct {
+ desc *FunctionLayout
+ locals []bool
+ ccall bool
+}
+
+func NewFrame(desc *FunctionLayout, locals []bool, ccall bool) Frame {
+ fr := Frame{}
+ fr.desc = desc
+ fr.locals = locals
+ fr.ccall = ccall
+ return fr
+}
+
+func (self *Frame) String() string {
+ out := self.desc.String()
+
+ off := -8
+ out += fmt.Sprintf("\n%#4x [Return PC]", off)
+ off -= 8
+ out += fmt.Sprintf("\n%#4x [RBP]", off)
+ off -= 8
+
+ for _, v := range ReservedRegs(self.ccall) {
+ out += fmt.Sprintf("\n%#4x [%v]", off, v)
+ off -= PtrSize
+ }
+
+ for _, b := range self.locals {
+ out += fmt.Sprintf("\n%#4x [%v]", off, b)
+ off -= PtrSize
+ }
+
+ return out
+}
+
+func (self *Frame) Prev() uint32 {
+ return self.Size() + PtrSize
+}
+
+func (self *Frame) Size() uint32 {
+ return uint32(self.Offs() + PtrSize)
+}
+
+func (self *Frame) Offs() uint32 {
+ return uint32(len(ReservedRegs(self.ccall)) * PtrSize + len(self.locals)*PtrSize)
+}
+
+func (self *Frame) ArgPtrs() *rt.StackMap {
+ return self.desc.StackMap()
+}
+
+func (self *Frame) LocalPtrs() *rt.StackMap {
+ var m rt.StackMapBuilder
+ for _, b := range self.locals {
+ m.AddFields(len(ReservedRegs(self.ccall)), b)
+ }
+ return m.Build()
+}
+
+func alignUp(n uint32, a int) uint32 {
+ return (uint32(n) + uint32(a) - 1) &^ (uint32(a) - 1)
+}
+
+func isPointer(vt reflect.Type) bool {
+ switch vt.Kind() {
+ case reflect.Bool : fallthrough
+ case reflect.Int : fallthrough
+ case reflect.Int8 : fallthrough
+ case reflect.Int16 : fallthrough
+ case reflect.Int32 : fallthrough
+ case reflect.Int64 : fallthrough
+ case reflect.Uint : fallthrough
+ case reflect.Uint8 : fallthrough
+ case reflect.Uint16 : fallthrough
+ case reflect.Uint32 : fallthrough
+ case reflect.Uint64 : fallthrough
+ case reflect.Float32 : fallthrough
+ case reflect.Float64 : fallthrough
+ case reflect.Uintptr : return false
+ case reflect.Chan : fallthrough
+ case reflect.Func : fallthrough
+ case reflect.Map : fallthrough
+ case reflect.Ptr : fallthrough
+ case reflect.UnsafePointer : return true
+ case reflect.Complex64 : fallthrough
+ case reflect.Complex128 : fallthrough
+ case reflect.Array : fallthrough
+ case reflect.Struct : panic("abi: unsupported types")
+ default : panic("abi: invalid value type")
+ }
+}
\ No newline at end of file
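
A quick usage sketch (my illustration, not part of the diff): NewFunctionLayout, defined per-ABI by the files added below, computes a FunctionLayout for a Go signature, and StackMap() turns that layout into the pointer bitmap the runtime uses to scan the frame. The snippet assumes it sits inside the abi package itself.

    package abi

    import (
        "fmt"
        "reflect"
    )

    // Sketch: lay out func(*int, int) *int and dump its argument stack map.
    func ExampleFunctionLayout() {
        ft := reflect.TypeOf(func(p *int, n int) *int { return p })
        layout := NewFunctionLayout(ft) // per-ABI assignment of args and rets
        fmt.Println(layout)             // every slot offset, FP first
        fmt.Println(layout.StackMap())  // one bit per slot: pointer or scalar
    }
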
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_amd64.go b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_amd64.go
new file mode 100644
index 000000000..c2b45a8e1
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_amd64.go
@@ -0,0 +1,282 @@
+/*
+ * Copyright 2022 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package abi
+
+import (
+ `fmt`
+ `reflect`
+ `unsafe`
+
+ . `github.com/cloudwego/iasm/x86_64`
+)
+
+const (
+ PtrSize = 8 // pointer size
+ PtrAlign = 8 // pointer alignment
+)
+
+var iregOrderC = []Register{
+ RDI,
+ RSI,
+ RDX,
+ RCX,
+ R8,
+ R9,
+}
+
+var xregOrderC = []Register{
+ XMM0,
+ XMM1,
+ XMM2,
+ XMM3,
+ XMM4,
+ XMM5,
+ XMM6,
+ XMM7,
+}
+
+var (
+ intType = reflect.TypeOf(0)
+ ptrType = reflect.TypeOf(unsafe.Pointer(nil))
+)
+
+func (self *Frame) argv(i int) *MemoryOperand {
+ return Ptr(RSP, int32(self.Prev() + self.desc.Args[i].Mem))
+}
+
+// spillv is used for growstack spill registers
+func (self *Frame) spillv(i int) *MemoryOperand {
+ // remain one slot for caller return pc
+ return Ptr(RSP, PtrSize + int32(self.desc.Args[i].Mem))
+}
+
+func (self *Frame) retv(i int) *MemoryOperand {
+ return Ptr(RSP, int32(self.Prev() + self.desc.Rets[i].Mem))
+}
+
+func (self *Frame) resv(i int) *MemoryOperand {
+ return Ptr(RSP, int32(self.Offs() - uint32((i+1) * PtrSize)))
+}
+
+func (self *Frame) emitGrowStack(p *Program, entry *Label) {
+ // spill all register arguments
+ for i, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat == floatKind64 {
+ p.MOVSD(v.Reg, self.spillv(i))
+ } else if v.IsFloat == floatKind32 {
+ p.MOVSS(v.Reg, self.spillv(i))
+ } else {
+ p.MOVQ(v.Reg, self.spillv(i))
+ }
+ }
+ }
+
+ // call runtime.morestack_noctxt
+ p.MOVQ(F_morestack_noctxt, R12)
+ p.CALLQ(R12)
+ // load all register arguments
+ for i, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat == floatKind64 {
+ p.MOVSD(self.spillv(i), v.Reg)
+ } else if v.IsFloat == floatKind32 {
+ p.MOVSS(self.spillv(i), v.Reg)
+ } else {
+ p.MOVQ(self.spillv(i), v.Reg)
+ }
+ }
+ }
+
+ // jump back to the function entry
+ p.JMP(entry)
+}
+
+func (self *Frame) GrowStackTextSize() uint32 {
+ p := DefaultArch.CreateProgram()
+ // spill all register arguments
+ for i, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat == floatKind64 {
+ p.MOVSD(v.Reg, self.spillv(i))
+ } else if v.IsFloat == floatKind32 {
+ p.MOVSS(v.Reg, self.spillv(i))
+ } else {
+ p.MOVQ(v.Reg, self.spillv(i))
+ }
+ }
+ }
+
+ // call runtime.morestack_noctxt
+ p.MOVQ(F_morestack_noctxt, R12)
+ p.CALLQ(R12)
+ // load all register arguments
+ for i, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat == floatKind64 {
+ p.MOVSD(self.spillv(i), v.Reg)
+ } else if v.IsFloat == floatKind32 {
+ p.MOVSS(self.spillv(i), v.Reg)
+ } else {
+ p.MOVQ(self.spillv(i), v.Reg)
+ }
+ }
+ }
+
+ // jump back to the function entry
+ l := CreateLabel("")
+ p.Link(l)
+ p.JMP(l)
+
+ return uint32(len(p.Assemble(0)))
+}
+
+func (self *Frame) emitPrologue(p *Program) {
+ p.SUBQ(self.Size(), RSP)
+ p.MOVQ(RBP, Ptr(RSP, int32(self.Offs())))
+ p.LEAQ(Ptr(RSP, int32(self.Offs())), RBP)
+}
+
+func (self *Frame) emitEpilogue(p *Program) {
+ p.MOVQ(Ptr(RSP, int32(self.Offs())), RBP)
+ p.ADDQ(self.Size(), RSP)
+ p.RET()
+}
+
+func (self *Frame) emitReserveRegs(p *Program) {
+ // spill reserved registers
+ for i, r := range ReservedRegs(self.ccall) {
+ switch r.(type) {
+ case Register64:
+ p.MOVQ(r, self.resv(i))
+ case XMMRegister:
+ p.MOVSD(r, self.resv(i))
+ default:
+ panic(fmt.Sprintf("unsupported register type %T to reserve", r))
+ }
+ }
+}
+
+func (self *Frame) emitSpillPtrs(p *Program) {
+ // spill pointer argument registers
+ for i, r := range self.desc.Args {
+ if r.InRegister && r.IsPointer {
+ p.MOVQ(r.Reg, self.argv(i))
+ }
+ }
+}
+
+func (self *Frame) emitClearPtrs(p *Program) {
+ // spill pointer argument registers
+ for i, r := range self.desc.Args {
+ if r.InRegister && r.IsPointer {
+ p.MOVQ(int64(0), self.argv(i))
+ }
+ }
+}
+
+func (self *Frame) emitCallC(p *Program, addr uintptr) {
+ p.MOVQ(addr, RAX)
+ p.CALLQ(RAX)
+}
+
+type floatKind uint8
+
+const (
+ notFloatKind floatKind = iota
+ floatKind32
+ floatKind64
+)
+
+type Parameter struct {
+ InRegister bool
+ IsPointer bool
+ IsFloat floatKind
+ Reg Register
+ Mem uint32
+ Type reflect.Type
+}
+
+func mkIReg(vt reflect.Type, reg Register64) (p Parameter) {
+ p.Reg = reg
+ p.Type = vt
+ p.InRegister = true
+ p.IsPointer = isPointer(vt)
+ return
+}
+
+func isFloat(vt reflect.Type) floatKind {
+ switch vt.Kind() {
+ case reflect.Float32:
+ return floatKind32
+ case reflect.Float64:
+ return floatKind64
+ default:
+ return notFloatKind
+ }
+}
+
+func mkXReg(vt reflect.Type, reg XMMRegister) (p Parameter) {
+ p.Reg = reg
+ p.Type = vt
+ p.InRegister = true
+ p.IsFloat = isFloat(vt)
+ return
+}
+
+func mkStack(vt reflect.Type, mem uint32) (p Parameter) {
+ p.Mem = mem
+ p.Type = vt
+ p.InRegister = false
+ p.IsPointer = isPointer(vt)
+ p.IsFloat = isFloat(vt)
+ return
+}
+
+func (self Parameter) String() string {
+ if self.InRegister {
+ return fmt.Sprintf("[%%%s, Pointer(%v), Float(%v)]", self.Reg, self.IsPointer, self.IsFloat)
+ } else {
+ return fmt.Sprintf("[%d(FP), Pointer(%v), Float(%v)]", self.Mem, self.IsPointer, self.IsFloat)
+ }
+}
+
+func CallC(addr uintptr, fr Frame, maxStack uintptr) []byte {
+ p := DefaultArch.CreateProgram()
+
+ stack := CreateLabel("_stack_grow")
+ entry := CreateLabel("_entry")
+ p.Link(entry)
+ fr.emitStackCheck(p, stack, maxStack)
+ fr.emitPrologue(p)
+ fr.emitReserveRegs(p)
+ fr.emitSpillPtrs(p)
+ fr.emitExchangeArgs(p)
+ fr.emitCallC(p, addr)
+ fr.emitExchangeRets(p)
+ fr.emitRestoreRegs(p)
+ fr.emitEpilogue(p)
+ p.Link(stack)
+ fr.emitGrowStack(p, entry)
+
+ return p.Assemble(0)
+}
+
+
+func (self *Frame) emitDebug(p *Program) {
+ p.INT(3)
+}
\ No newline at end of file
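
To show how CallC is meant to be driven, here is a minimal sketch under stated assumptions: cfn and the 128-byte C stack bound are invented for illustration, and the snippet again assumes it lives in the abi package. The pipeline is to lay out the Go signature, wrap it in a Frame with ccall=true, then assemble the trampoline.

    package abi

    import (
        "reflect"
        "unsafe"
    )

    // Sketch: assemble a Go-to-C bridge for func(unsafe.Pointer, int) int.
    func makeCBridge(cfn uintptr) []byte {
        ft := reflect.TypeOf(func(p unsafe.Pointer, n int) int { return 0 })
        desc := NewFunctionLayout(ft)
        fr := NewFrame(&desc, nil, true) // ccall=true: no Go reserved registers
        return CallC(cfn, fr, 128)       // 128 = assumed C-side stack usage bound
    }
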
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_legacy_amd64.go b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_legacy_amd64.go
new file mode 100644
index 000000000..298c48178
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_legacy_amd64.go
@@ -0,0 +1,182 @@
+//go:build !go1.17
+// +build !go1.17
+
+/*
+ * Copyright 2022 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package abi
+
+import (
+ `fmt`
+ `reflect`
+ `runtime`
+
+ . `github.com/cloudwego/iasm/x86_64`
+)
+
+func ReservedRegs(callc bool) []Register {
+ return nil
+}
+
+func salloc(p []Parameter, sp uint32, vt reflect.Type) (uint32, []Parameter) {
+ switch vt.Kind() {
+ case reflect.Bool : return sp + 8, append(p, mkStack(reflect.TypeOf(false), sp))
+ case reflect.Int : return sp + 8, append(p, mkStack(intType, sp))
+ case reflect.Int8 : return sp + 8, append(p, mkStack(reflect.TypeOf(int8(0)), sp))
+ case reflect.Int16 : return sp + 8, append(p, mkStack(reflect.TypeOf(int16(0)), sp))
+ case reflect.Int32 : return sp + 8, append(p, mkStack(reflect.TypeOf(int32(0)), sp))
+ case reflect.Int64 : return sp + 8, append(p, mkStack(reflect.TypeOf(int64(0)), sp))
+ case reflect.Uint : return sp + 8, append(p, mkStack(reflect.TypeOf(uint(0)), sp))
+ case reflect.Uint8 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint8(0)), sp))
+ case reflect.Uint16 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint16(0)), sp))
+ case reflect.Uint32 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint32(0)), sp))
+ case reflect.Uint64 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint64(0)), sp))
+ case reflect.Uintptr : return sp + 8, append(p, mkStack(reflect.TypeOf(uintptr(0)), sp))
+ case reflect.Float32 : return sp + 8, append(p, mkStack(reflect.TypeOf(float32(0)), sp))
+ case reflect.Float64 : return sp + 8, append(p, mkStack(reflect.TypeOf(float64(0)), sp))
+ case reflect.Complex64 : panic("abi: go116: not implemented: complex64")
+ case reflect.Complex128 : panic("abi: go116: not implemented: complex128")
+ case reflect.Array : panic("abi: go116: not implemented: arrays")
+ case reflect.Chan : return sp + 8, append(p, mkStack(reflect.TypeOf((chan int)(nil)), sp))
+ case reflect.Func : return sp + 8, append(p, mkStack(reflect.TypeOf((func())(nil)), sp))
+ case reflect.Map : return sp + 8, append(p, mkStack(reflect.TypeOf((map[int]int)(nil)), sp))
+ case reflect.Ptr : return sp + 8, append(p, mkStack(reflect.TypeOf((*int)(nil)), sp))
+ case reflect.UnsafePointer : return sp + 8, append(p, mkStack(ptrType, sp))
+ case reflect.Interface : return sp + 16, append(p, mkStack(ptrType, sp), mkStack(ptrType, sp + 8))
+ case reflect.Slice : return sp + 24, append(p, mkStack(ptrType, sp), mkStack(intType, sp + 8), mkStack(intType, sp + 16))
+ case reflect.String : return sp + 16, append(p, mkStack(ptrType, sp), mkStack(intType, sp + 8))
+ case reflect.Struct : panic("abi: go116: not implemented: structs")
+ default : panic("abi: invalid value type")
+ }
+}
+
+func NewFunctionLayout(ft reflect.Type) FunctionLayout {
+ var sp uint32
+ var fn FunctionLayout
+
+ /* assign every argument */
+ for i := 0; i < ft.NumIn(); i++ {
+ sp, fn.Args = salloc(fn.Args, sp, ft.In(i))
+ }
+
+ /* assign every return value */
+ for i := 0; i < ft.NumOut(); i++ {
+ sp, fn.Rets = salloc(fn.Rets, sp, ft.Out(i))
+ }
+
+ /* record the final frame-pointer offset */
+ fn.FP = sp
+ return fn
+}
+
+func (self *Frame) emitExchangeArgs(p *Program) {
+ iregArgs, xregArgs := 0, 0
+ for _, v := range self.desc.Args {
+ if v.IsFloat != notFloatKind {
+ xregArgs += 1
+ } else {
+ iregArgs += 1
+ }
+ }
+
+ if iregArgs > len(iregOrderC) {
+ panic("too many arguments, only support at most 6 integer arguments now")
+ }
+ if xregArgs > len(xregOrderC) {
+ panic("too many arguments, only support at most 8 float arguments now")
+ }
+
+ ic, xc := iregArgs, xregArgs
+ for i := 0; i < len(self.desc.Args); i++ {
+ arg := self.desc.Args[i]
+ if arg.IsFloat == floatKind64 {
+ p.MOVSD(self.argv(i), xregOrderC[xregArgs - xc])
+ xc -= 1
+ } else if arg.IsFloat == floatKind32 {
+ p.MOVSS(self.argv(i), xregOrderC[xregArgs - xc])
+ xc -= 1
+ } else {
+ p.MOVQ(self.argv(i), iregOrderC[iregArgs - ic])
+ ic -= 1
+ }
+ }
+}
+
+func (self *Frame) emitStackCheck(p *Program, to *Label, maxStack uintptr) {
+ // get the current goroutine
+ switch runtime.GOOS {
+ case "linux" : p.MOVQ(Abs(-8), R14).FS()
+ case "darwin" : p.MOVQ(Abs(0x30), R14).GS()
+ case "windows": break // windows always stores G pointer at R14
+ default : panic("unsupported operating system")
+ }
+
+ // check the stack guard
+ p.LEAQ(Ptr(RSP, -int32(self.Size() + uint32(maxStack))), RAX)
+ p.CMPQ(Ptr(R14, _G_stackguard0), RAX)
+ p.JBE(to)
+}
+
+func (self *Frame) StackCheckTextSize() uint32 {
+ p := DefaultArch.CreateProgram()
+
+ // get the current goroutine
+ switch runtime.GOOS {
+ case "linux" : p.MOVQ(Abs(-8), R14).FS()
+ case "darwin" : p.MOVQ(Abs(0x30), R14).GS()
+ case "windows": break // windows always stores G pointer at R14
+ default : panic("unsupported operating system")
+ }
+
+ // check the stack guard
+ p.LEAQ(Ptr(RSP, -int32(self.Size())), RAX)
+ p.CMPQ(Ptr(R14, _G_stackguard0), RAX)
+ l := CreateLabel("")
+ p.Link(l)
+ p.JBE(l)
+
+ return uint32(len(p.Assemble(0)))
+}
+
+func (self *Frame) emitExchangeRets(p *Program) {
+ if len(self.desc.Rets) > 1 {
+ panic("too many results, only support one result now")
+ }
+ // store result
+ if len(self.desc.Rets) == 1 {
+ if self.desc.Rets[0].IsFloat == floatKind64 {
+ p.MOVSD(xregOrderC[0], self.retv(0))
+ } else if self.desc.Rets[0].IsFloat == floatKind32 {
+ p.MOVSS(xregOrderC[0], self.retv(0))
+ } else {
+ p.MOVQ(RAX, self.retv(0))
+ }
+ }
+}
+
+func (self *Frame) emitRestoreRegs(p *Program) {
+ // load reserved registers
+ for i, r := range ReservedRegs(self.ccall) {
+ switch r.(type) {
+ case Register64:
+ p.MOVQ(self.resv(i), r)
+ case XMMRegister:
+ p.MOVSD(self.resv(i), r)
+ default:
+ panic(fmt.Sprintf("unsupported register type %T to reserve", r))
+ }
+ }
+}
\ No newline at end of file
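
Under this pre-go1.17 ABI everything is stack-assigned in 8-byte slots, so layouts can be computed by hand. A worked example from my reading of salloc above (illustrative, not asserted by the diff; assumes the abi package context and a `reflect` import):

    // func(x int, s string) bool on the stack-only ABI:
    //   x   -> 0(FP)                  one 8-byte slot
    //   s   -> ptr 8(FP), len 16(FP)  a string takes two slots
    //   ret -> 24(FP)                 bool still occupies a full slot
    //   FP  =  32
    func exampleLegacyLayout() FunctionLayout {
        // the returned layout has FP == 32 under this file's rules
        return NewFunctionLayout(reflect.TypeOf(func(int, string) bool { return false }))
    }
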
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_regabi_amd64.go b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_regabi_amd64.go
new file mode 100644
index 000000000..5a31dea89
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_regabi_amd64.go
@@ -0,0 +1,316 @@
+//go:build go1.17
+// +build go1.17
+
+/*
+ * Copyright 2022 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Go Internal ABI implementation
+ *
+ * This module implements the function layout algorithm described by the Go internal ABI.
+ * See https://github.com/golang/go/blob/master/src/cmd/compile/abi-internal.md for more info.
+ */
+
+package abi
+
+import (
+ `fmt`
+ `reflect`
+
+ . `github.com/cloudwego/iasm/x86_64`
+)
+
+/** Frame Structure of the Generated Function
+    FP  +------------------------------+
+        |             . . .            |
+        | 2nd reg argument spill space |
+        + 1st reg argument spill space |
+        |    <pointer-sized alignment> |
+        |             . . .            |
+        |   2nd stack-assigned result  |
+        +   1st stack-assigned result  |
+        |    <pointer-sized alignment> |
+        |             . . .            |
+        |  2nd stack-assigned argument |
+        |  1st stack-assigned argument |
+        |    stack-assigned receiver   |
+ prev() +------------------------------+ (Previous Frame)
+        |           Return PC          |
+ size() +------------------------------+
+        |           Saved RBP          |
+ offs() +------------------------------+
+        |     1st Reserved Register    |
+        +------------------------------+
+        |     2nd Reserved Register    |
+        +------------------------------+
+        |        Local Variables       |
+    RSP +------------------------------+ ↓ lower addresses
+*/
+
+const zeroRegGo = XMM15
+
+var iregOrderGo = [...]Register64 {
+ RAX, // RDI
+ RBX, // RSI
+ RCX, // RDX
+ RDI, // RCX
+ RSI, // R8
+ R8,  // R9
+ R9,
+ R10,
+ R11,
+}
+
+var xregOrderGo = [...]XMMRegister {
+ XMM0,
+ XMM1,
+ XMM2,
+ XMM3,
+ XMM4,
+ XMM5,
+ XMM6,
+ XMM7,
+ XMM8,
+ XMM9,
+ XMM10,
+ XMM11,
+ XMM12,
+ XMM13,
+ XMM14,
+}
+
+func ReservedRegs(callc bool) []Register {
+ if callc {
+ return nil
+ }
+ return []Register {
+ R14, // current goroutine
+ R15, // GOT reference
+ }
+}
+
+type stackAlloc struct {
+ s uint32
+ i int
+ x int
+}
+
+func (self *stackAlloc) reset() {
+ self.i, self.x = 0, 0
+}
+
+func (self *stackAlloc) ireg(vt reflect.Type) (p Parameter) {
+ p = mkIReg(vt, iregOrderGo[self.i])
+ self.i++
+ return
+}
+
+func (self *stackAlloc) xreg(vt reflect.Type) (p Parameter) {
+ p = mkXReg(vt, xregOrderGo[self.x])
+ self.x++
+ return
+}
+
+func (self *stackAlloc) stack(vt reflect.Type) (p Parameter) {
+ p = mkStack(vt, self.s)
+ self.s += uint32(vt.Size())
+ return
+}
+
+func (self *stackAlloc) spill(n uint32, a int) uint32 {
+ self.s = alignUp(self.s, a) + n
+ return self.s
+}
+
+func (self *stackAlloc) alloc(p []Parameter, vt reflect.Type) []Parameter {
+ nb := vt.Size()
+ vk := vt.Kind()
+
+ /* zero-sized objects are allocated on stack */
+ if nb == 0 {
+ return append(p, mkStack(intType, self.s))
+ }
+
+ /* check for value type */
+ switch vk {
+ case reflect.Bool : return self.valloc(p, reflect.TypeOf(false))
+ case reflect.Int : return self.valloc(p, intType)
+ case reflect.Int8 : return self.valloc(p, reflect.TypeOf(int8(0)))
+ case reflect.Int16 : return self.valloc(p, reflect.TypeOf(int16(0)))
+ case reflect.Int32 : return self.valloc(p, reflect.TypeOf(int32(0)))
+ case reflect.Int64 : return self.valloc(p, reflect.TypeOf(int64(0)))
+ case reflect.Uint : return self.valloc(p, reflect.TypeOf(uint(0)))
+ case reflect.Uint8 : return self.valloc(p, reflect.TypeOf(uint8(0)))
+ case reflect.Uint16 : return self.valloc(p, reflect.TypeOf(uint16(0)))
+ case reflect.Uint32 : return self.valloc(p, reflect.TypeOf(uint32(0)))
+ case reflect.Uint64 : return self.valloc(p, reflect.TypeOf(uint64(0)))
+ case reflect.Uintptr : return self.valloc(p, reflect.TypeOf(uintptr(0)))
+ case reflect.Float32 : return self.valloc(p, reflect.TypeOf(float32(0)))
+ case reflect.Float64 : return self.valloc(p, reflect.TypeOf(float64(0)))
+ case reflect.Complex64 : panic("abi: go117: not implemented: complex64")
+ case reflect.Complex128 : panic("abi: go117: not implemented: complex128")
+ case reflect.Array : panic("abi: go117: not implemented: arrays")
+ case reflect.Chan : return self.valloc(p, reflect.TypeOf((chan int)(nil)))
+ case reflect.Func : return self.valloc(p, reflect.TypeOf((func())(nil)))
+ case reflect.Map : return self.valloc(p, reflect.TypeOf((map[int]int)(nil)))
+ case reflect.Ptr : return self.valloc(p, reflect.TypeOf((*int)(nil)))
+ case reflect.UnsafePointer : return self.valloc(p, ptrType)
+ case reflect.Interface : return self.valloc(p, ptrType, ptrType)
+ case reflect.Slice : return self.valloc(p, ptrType, intType, intType)
+ case reflect.String : return self.valloc(p, ptrType, intType)
+ case reflect.Struct : panic("abi: go117: not implemented: structs")
+ default : panic("abi: invalid value type")
+ }
+}
+
+func (self *stackAlloc) valloc(p []Parameter, vts ...reflect.Type) []Parameter {
+ for _, vt := range vts {
+ enum := isFloat(vt)
+ if enum != notFloatKind && self.x < len(xregOrderGo) {
+ p = append(p, self.xreg(vt))
+ } else if enum == notFloatKind && self.i < len(iregOrderGo) {
+ p = append(p, self.ireg(vt))
+ } else {
+ p = append(p, self.stack(vt))
+ }
+ }
+ return p
+}
+
+func NewFunctionLayout(ft reflect.Type) FunctionLayout {
+ var sa stackAlloc
+ var fn FunctionLayout
+
+ /* assign every argument */
+ for i := 0; i < ft.NumIn(); i++ {
+ fn.Args = sa.alloc(fn.Args, ft.In(i))
+ }
+
+ /* reset the register counter, and add a pointer alignment field */
+ sa.reset()
+
+ /* assign every return value */
+ for i := 0; i < ft.NumOut(); i++ {
+ fn.Rets = sa.alloc(fn.Rets, ft.Out(i))
+ }
+
+ sa.spill(0, PtrAlign)
+
+ /* assign spill slots */
+ for i := 0; i < len(fn.Args); i++ {
+ if fn.Args[i].InRegister {
+ fn.Args[i].Mem = sa.spill(PtrSize, PtrAlign) - PtrSize
+ }
+ }
+
+ /* add the final pointer alignment field */
+ fn.FP = sa.spill(0, PtrAlign)
+ return fn
+}
+
+func (self *Frame) emitExchangeArgs(p *Program) {
+ iregArgs := make([]Parameter, 0, len(self.desc.Args))
+ xregArgs := 0
+ for _, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat != notFloatKind {
+ xregArgs += 1
+ } else {
+ iregArgs = append(iregArgs, v)
+ }
+ } else {
+ panic("not support stack-assgined arguments now")
+ }
+ }
+ if xregArgs > len(xregOrderC) {
+ panic("too many arguments, only support at most 8 integer register arguments now")
+ }
+
+ switch len(iregArgs) {
+ case 0, 1, 2, 3: {
+ // Fast-Path: with fewer than four integer arguments, just exchange the registers
+ for i := 0; i < len(iregArgs); i++ {
+ p.MOVQ(iregOrderGo[i], iregOrderC[i])
+ }
+ }
+ case 4, 5, 6: {
+ // need to spill the 4th and later integer arguments before the exchange
+ for i := 3; i < len(iregArgs); i++ {
+ arg := iregArgs[i]
+ // pointer args have already been spilled
+ if !arg.IsPointer {
+ p.MOVQ(iregOrderGo[i], Ptr(RSP, int32(self.Prev() + arg.Mem)))
+ }
+ }
+ p.MOVQ(iregOrderGo[0], iregOrderC[0])
+ p.MOVQ(iregOrderGo[1], iregOrderC[1])
+ p.MOVQ(iregOrderGo[2], iregOrderC[2])
+ for i := 3; i < len(iregArgs); i++ {
+ arg := iregArgs[i]
+ p.MOVQ(Ptr(RSP, int32(self.Prev() + arg.Mem)), iregOrderC[i])
+ }
+ }
+ default:
+ panic("too many arguments, only support at most 6 integer register arguments now")
+ }
+}
+
+func (self *Frame) emitStackCheck(p *Program, to *Label, maxStack uintptr) {
+ p.LEAQ(Ptr(RSP, int32(-(self.Size() + uint32(maxStack)))), R12)
+ p.CMPQ(Ptr(R14, _G_stackguard0), R12)
+ p.JBE(to)
+}
+
+func (self *Frame) StackCheckTextSize() uint32 {
+ p := DefaultArch.CreateProgram()
+ p.LEAQ(Ptr(RSP, int32(-(self.Size()))), R12)
+ p.CMPQ(Ptr(R14, _G_stackguard0), R12)
+ to := CreateLabel("")
+ p.Link(to)
+ p.JBE(to)
+ return uint32(len(p.Assemble(0)))
+}
+
+func (self *Frame) emitExchangeRets(p *Program) {
+ if len(self.desc.Rets) > 1 {
+ panic("too many results, only support one result now")
+ }
+ // store result
+ if len(self.desc.Rets) == 1 && !self.desc.Rets[0].InRegister {
+ if self.desc.Rets[0].IsFloat == floatKind64 {
+ p.MOVSD(xregOrderC[0], self.retv(0))
+ } else if self.desc.Rets[0].IsFloat == floatKind32 {
+ p.MOVSS(xregOrderC[0], self.retv(0))
+ } else {
+ p.MOVQ(RAX, self.retv(0))
+ }
+ }
+}
+
+func (self *Frame) emitRestoreRegs(p *Program) {
+ // load reserved registers
+ for i, r := range ReservedRegs(self.ccall) {
+ switch r.(type) {
+ case Register64:
+ p.MOVQ(self.resv(i), r)
+ case XMMRegister:
+ p.MOVSD(self.resv(i), r)
+ default:
+ panic(fmt.Sprintf("unsupported register type %T to reserve", r))
+ }
+ }
+ // zero xmm15 for go abi
+ p.XORPS(zeroRegGo, zeroRegGo)
+}
\ No newline at end of file
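
For contrast with the legacy file, the same signature under the register ABI, again from my reading of stackAlloc and valloc above (illustrative, same package assumptions as before):

    // func(int, string) bool on go1.17+:
    //   args: int -> RAX, string -> RBX (ptr) + RCX (len)
    //   counters reset, then ret: bool -> RAX
    //   each register argument still reserves an 8-byte spill slot
    func exampleRegabiLayout() FunctionLayout {
        // three spill slots of 8 bytes each make the returned layout's FP == 24
        return NewFunctionLayout(reflect.TypeOf(func(int, string) bool { return false }))
    }
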
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/abi/stubs.go b/vendor/github.com/bytedance/sonic/loader/internal/abi/stubs.go
new file mode 100644
index 000000000..af9930156
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/abi/stubs.go
@@ -0,0 +1,35 @@
+/**
+ * Copyright 2023 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package abi
+
+import (
+ _ `unsafe`
+
+ `github.com/bytedance/sonic/loader/internal/rt`
+)
+
+const (
+ _G_stackguard0 = 0x10
+)
+
+var (
+ F_morestack_noctxt = uintptr(rt.FuncAddr(morestack_noctxt))
+)
+
+//go:linkname morestack_noctxt runtime.morestack_noctxt
+func morestack_noctxt()
+
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/rt/fastmem.go b/vendor/github.com/bytedance/sonic/loader/internal/rt/fastmem.go
new file mode 100644
index 000000000..3bc24c4e4
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/rt/fastmem.go
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package rt
+
+import (
+ `unsafe`
+ `reflect`
+)
+
+//go:nosplit
+func Mem2Str(v []byte) (s string) {
+ (*GoString)(unsafe.Pointer(&s)).Len = (*GoSlice)(unsafe.Pointer(&v)).Len
+ (*GoString)(unsafe.Pointer(&s)).Ptr = (*GoSlice)(unsafe.Pointer(&v)).Ptr
+ return
+}
+
+//go:nosplit
+func Str2Mem(s string) (v []byte) {
+ (*GoSlice)(unsafe.Pointer(&v)).Cap = (*GoString)(unsafe.Pointer(&s)).Len
+ (*GoSlice)(unsafe.Pointer(&v)).Len = (*GoString)(unsafe.Pointer(&s)).Len
+ (*GoSlice)(unsafe.Pointer(&v)).Ptr = (*GoString)(unsafe.Pointer(&s)).Ptr
+ return
+}
+
+func BytesFrom(p unsafe.Pointer, n int, c int) (r []byte) {
+ (*GoSlice)(unsafe.Pointer(&r)).Ptr = p
+ (*GoSlice)(unsafe.Pointer(&r)).Len = n
+ (*GoSlice)(unsafe.Pointer(&r)).Cap = c
+ return
+}
+
+func FuncAddr(f interface{}) unsafe.Pointer {
+ if vv := UnpackEface(f); vv.Type.Kind() != reflect.Func {
+ panic("f is not a function")
+ } else {
+ return *(*unsafe.Pointer)(vv.Value)
+ }
+}
+
+//go:nocheckptr
+func IndexChar(src string, index int) unsafe.Pointer {
+ return unsafe.Pointer(uintptr((*GoString)(unsafe.Pointer(&src)).Ptr) + uintptr(index))
+}
+
+//go:nocheckptr
+func IndexByte(ptr []byte, index int) unsafe.Pointer {
+ return unsafe.Pointer(uintptr((*GoSlice)(unsafe.Pointer(&ptr)).Ptr) + uintptr(index))
+}
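
Usage sketch for the converters above (my example; it glosses over the fact that an internal package cannot be imported from outside its module): both directions reinterpret the slice/string header in place, so the bytes and the string alias the same memory. Cheap, but the caller must not mutate the bytes while the string is reachable.

    package main

    import (
        "fmt"

        "github.com/bytedance/sonic/loader/internal/rt"
    )

    func main() {
        buf := []byte("hello")
        s := rt.Mem2Str(buf) // zero-copy: s aliases buf
        b := rt.Str2Mem(s)   // zero-copy back: len == cap == 5
        fmt.Println(s, len(b), cap(b))
    }
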
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/rt/fastvalue.go b/vendor/github.com/bytedance/sonic/loader/internal/rt/fastvalue.go
new file mode 100644
index 000000000..e6c5bc869
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/rt/fastvalue.go
@@ -0,0 +1,183 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package rt
+
+import (
+ `reflect`
+ `unsafe`
+)
+
+var (
+ reflectRtypeItab = findReflectRtypeItab()
+)
+
+// GoType.KindFlags const
+const (
+ F_direct = 1 << 5
+ F_kind_mask = (1 << 5) - 1
+)
+
+// GoType.Flags const
+const (
+ tflagUncommon uint8 = 1 << 0
+ tflagExtraStar uint8 = 1 << 1
+ tflagNamed uint8 = 1 << 2
+ tflagRegularMemory uint8 = 1 << 3
+)
+
+type GoType struct {
+ Size uintptr
+ PtrData uintptr
+ Hash uint32
+ Flags uint8
+ Align uint8
+ FieldAlign uint8
+ KindFlags uint8
+ Traits unsafe.Pointer
+ GCData *byte
+ Str int32
+ PtrToSelf int32
+}
+
+func (self *GoType) IsNamed() bool {
+ return (self.Flags & tflagNamed) != 0
+}
+
+func (self *GoType) Kind() reflect.Kind {
+ return reflect.Kind(self.KindFlags & F_kind_mask)
+}
+
+func (self *GoType) Pack() (t reflect.Type) {
+ (*GoIface)(unsafe.Pointer(&t)).Itab = reflectRtypeItab
+ (*GoIface)(unsafe.Pointer(&t)).Value = unsafe.Pointer(self)
+ return
+}
+
+func (self *GoType) String() string {
+ return self.Pack().String()
+}
+
+func (self *GoType) Indirect() bool {
+ return self.KindFlags & F_direct == 0
+}
+
+type GoItab struct {
+ it unsafe.Pointer
+ Vt *GoType
+ hv uint32
+ _ [4]byte
+ fn [1]uintptr
+}
+
+type GoIface struct {
+ Itab *GoItab
+ Value unsafe.Pointer
+}
+
+type GoEface struct {
+ Type *GoType
+ Value unsafe.Pointer
+}
+
+func (self GoEface) Pack() (v interface{}) {
+ *(*GoEface)(unsafe.Pointer(&v)) = self
+ return
+}
+
+type GoPtrType struct {
+ GoType
+ Elem *GoType
+}
+
+type GoMapType struct {
+ GoType
+ Key *GoType
+ Elem *GoType
+ Bucket *GoType
+ Hasher func(unsafe.Pointer, uintptr) uintptr
+ KeySize uint8
+ ElemSize uint8
+ BucketSize uint16
+ Flags uint32
+}
+
+func (self *GoMapType) IndirectElem() bool {
+ return self.Flags & 2 != 0
+}
+
+type GoStructType struct {
+ GoType
+ Pkg *byte
+ Fields []GoStructField
+}
+
+type GoStructField struct {
+ Name *byte
+ Type *GoType
+ OffEmbed uintptr
+}
+
+type GoInterfaceType struct {
+ GoType
+ PkgPath *byte
+ Methods []GoInterfaceMethod
+}
+
+type GoInterfaceMethod struct {
+ Name int32
+ Type int32
+}
+
+type GoSlice struct {
+ Ptr unsafe.Pointer
+ Len int
+ Cap int
+}
+
+type GoString struct {
+ Ptr unsafe.Pointer
+ Len int
+}
+
+func PtrElem(t *GoType) *GoType {
+ return (*GoPtrType)(unsafe.Pointer(t)).Elem
+}
+
+func MapType(t *GoType) *GoMapType {
+ return (*GoMapType)(unsafe.Pointer(t))
+}
+
+func IfaceType(t *GoType) *GoInterfaceType {
+ return (*GoInterfaceType)(unsafe.Pointer(t))
+}
+
+func UnpackType(t reflect.Type) *GoType {
+ return (*GoType)((*GoIface)(unsafe.Pointer(&t)).Value)
+}
+
+func UnpackEface(v interface{}) GoEface {
+ return *(*GoEface)(unsafe.Pointer(&v))
+}
+
+func UnpackIface(v interface{}) GoIface {
+ return *(*GoIface)(unsafe.Pointer(&v))
+}
+
+func findReflectRtypeItab() *GoItab {
+ v := reflect.TypeOf(struct{}{})
+ return (*GoIface)(unsafe.Pointer(&v)).Itab
+}
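
A sketch of what these mirrored layouts buy you (illustrative, with the same internal-import caveat as above, plus assumed `fmt` and rt imports): UnpackEface exposes the runtime type word of an interface value, and Pack converts a *GoType back into a reflect.Type.

    func inspectEface() {
        ef := rt.UnpackEface(42)
        fmt.Println(ef.Type.Kind()) // int
        fmt.Println(ef.Type.Pack()) // the reflect.Type for int
        fmt.Println(ef.Type.Size)   // 8 on amd64
    }
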
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/rt/stackmap.go b/vendor/github.com/bytedance/sonic/loader/internal/rt/stackmap.go
new file mode 100644
index 000000000..84ed9a95f
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/rt/stackmap.go
@@ -0,0 +1,181 @@
+/**
+ * Copyright 2023 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package rt
+
+import (
+ `fmt`
+ `strings`
+ `unsafe`
+)
+
+type Bitmap struct {
+ N int
+ B []byte
+}
+
+func (self *Bitmap) grow() {
+ if self.N >= len(self.B) * 8 {
+ self.B = append(self.B, 0)
+ }
+}
+
+func (self *Bitmap) mark(i int, bv int) {
+ if bv != 0 {
+ self.B[i / 8] |= 1 << (i % 8)
+ } else {
+ self.B[i / 8] &^= 1 << (i % 8)
+ }
+}
+
+func (self *Bitmap) Set(i int, bv int) {
+ if i >= self.N {
+ panic("bitmap: invalid bit position")
+ } else {
+ self.mark(i, bv)
+ }
+}
+
+func (self *Bitmap) Append(bv int) {
+ self.grow()
+ self.mark(self.N, bv)
+ self.N++
+}
+
+func (self *Bitmap) AppendMany(n int, bv int) {
+ for i := 0; i < n; i++ {
+ self.Append(bv)
+ }
+}
+
+// var (
+// _stackMapLock = sync.Mutex{}
+// _stackMapCache = make(map[*StackMap]struct{})
+// )
+
+type BitVec struct {
+ N uintptr
+ B unsafe.Pointer
+}
+
+func (self BitVec) Bit(i uintptr) byte {
+ return (*(*byte)(unsafe.Pointer(uintptr(self.B) + i / 8)) >> (i % 8)) & 1
+}
+
+func (self BitVec) String() string {
+ var i uintptr
+ var v []string
+
+ /* add each bit */
+ for i = 0; i < self.N; i++ {
+ v = append(v, fmt.Sprintf("%d", self.Bit(i)))
+ }
+
+ /* join them together */
+ return fmt.Sprintf(
+ "BitVec { %s }",
+ strings.Join(v, ", "),
+ )
+}
+
+type StackMap struct {
+ N int32
+ L int32
+ B [1]byte
+}
+
+// func (self *StackMap) add() {
+// _stackMapLock.Lock()
+// _stackMapCache[self] = struct{}{}
+// _stackMapLock.Unlock()
+// }
+
+func (self *StackMap) Pin() uintptr {
+ // self.add()
+ return uintptr(unsafe.Pointer(self))
+}
+
+func (self *StackMap) Get(i int32) BitVec {
+ return BitVec {
+ N: uintptr(self.L),
+ B: unsafe.Pointer(uintptr(unsafe.Pointer(&self.B)) + uintptr(i * ((self.L + 7) >> 3))),
+ }
+}
+
+func (self *StackMap) String() string {
+ sb := strings.Builder{}
+ sb.WriteString("StackMap {")
+
+ /* dump every stack map */
+ for i := int32(0); i < self.N; i++ {
+ sb.WriteRune('\n')
+ sb.WriteString(" " + self.Get(i).String())
+ }
+
+ /* close the stackmap */
+ sb.WriteString("\n}")
+ return sb.String()
+}
+
+func (self *StackMap) MarshalBinary() ([]byte, error) {
+ size := int(self.N) * int(self.L) + int(unsafe.Sizeof(self.L)) + int(unsafe.Sizeof(self.N))
+ return BytesFrom(unsafe.Pointer(self), size, size), nil
+}
+
+var (
+ byteType = UnpackEface(byte(0)).Type
+)
+
+const (
+ _StackMapSize = unsafe.Sizeof(StackMap{})
+)
+
+//go:linkname mallocgc runtime.mallocgc
+//goland:noinspection GoUnusedParameter
+func mallocgc(nb uintptr, vt *GoType, zero bool) unsafe.Pointer
+
+type StackMapBuilder struct {
+ b Bitmap
+}
+
+//go:nocheckptr
+func (self *StackMapBuilder) Build() (p *StackMap) {
+ nb := len(self.b.B)
+ bm := mallocgc(_StackMapSize + uintptr(nb) - 1, byteType, false)
+
+ /* initialize as 1 bitmap of N bits */
+ p = (*StackMap)(bm)
+ p.N, p.L = 1, int32(self.b.N)
+ copy(BytesFrom(unsafe.Pointer(&p.B), nb, nb), self.b.B)
+ return
+}
+
+func (self *StackMapBuilder) AddField(ptr bool) {
+ if ptr {
+ self.b.Append(1)
+ } else {
+ self.b.Append(0)
+ }
+}
+
+func (self *StackMapBuilder) AddFields(n int, ptr bool) {
+ if ptr {
+ self.b.AppendMany(n, 1)
+ } else {
+ self.b.AppendMany(n, 0)
+ }
+}
\ No newline at end of file
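
Finally, a builder sketch (mine, not from the diff; assumes `fmt` and rt imports): a frame with three slots where only the middle one holds a pointer yields a one-row stack map.

    func buildExampleMap() {
        var b rt.StackMapBuilder
        b.AddField(false) // slot 0: scalar
        b.AddField(true)  // slot 1: pointer
        b.AddField(false) // slot 2: scalar
        sm := b.Build()   // allocated via mallocgc so the bitmap stays GC-visible
        fmt.Println(sm)   // one row: BitVec { 0, 1, 0 }
    }
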
diff --git a/vendor/github.com/bytedance/sonic/loader/loader_latest.go b/vendor/github.com/bytedance/sonic/loader/loader_latest.go
index 3664c04ba..0534541aa 100644
--- a/vendor/github.com/bytedance/sonic/loader/loader_latest.go
+++ b/vendor/github.com/bytedance/sonic/loader/loader_latest.go
@@ -19,7 +19,7 @@
package loader
import (
- `github.com/bytedance/sonic/internal/rt`
+ `github.com/bytedance/sonic/loader/internal/rt`
)
// LoadFuncs loads only one function as module, and returns the function pointer
diff --git a/vendor/github.com/bytedance/sonic/loader/wrapper.go b/vendor/github.com/bytedance/sonic/loader/wrapper.go
index 73ebc3518..b581a460f 100644
--- a/vendor/github.com/bytedance/sonic/loader/wrapper.go
+++ b/vendor/github.com/bytedance/sonic/loader/wrapper.go
@@ -20,8 +20,8 @@ import (
`reflect`
`unsafe`
- `github.com/bytedance/sonic/internal/abi`
- `github.com/bytedance/sonic/internal/rt`
+ `github.com/bytedance/sonic/loader/internal/abi`
+ `github.com/bytedance/sonic/loader/internal/rt`
)
var _C_Redzone = []bool{false, false, false, false}