author    dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>    2024-05-06 08:50:47 +0000
committer GitHub <noreply@github.com>    2024-05-06 08:50:47 +0000
commit    a5f28fe0c923984c263592e82bbce99b0032b794 (patch)
tree      403544ad5305eb171a85d2b4c59559f83abd87a7 /vendor/github.com/bytedance/sonic/loader/internal
parent    [chore]: Bump golang.org/x/image from 0.15.0 to 0.16.0 (#2898) (diff)
download  gotosocial-a5f28fe0c923984c263592e82bbce99b0032b794.tar.xz
[chore]: Bump github.com/gin-contrib/gzip from 1.0.0 to 1.0.1 (#2899)
Bumps [github.com/gin-contrib/gzip](https://github.com/gin-contrib/gzip) from 1.0.0 to 1.0.1.
- [Release notes](https://github.com/gin-contrib/gzip/releases)
- [Changelog](https://github.com/gin-contrib/gzip/blob/master/.goreleaser.yaml)
- [Commits](https://github.com/gin-contrib/gzip/compare/v1.0.0...v1.0.1)

---
updated-dependencies:
- dependency-name: github.com/gin-contrib/gzip
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Diffstat (limited to 'vendor/github.com/bytedance/sonic/loader/internal')
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/abi/abi.go              | 197
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/abi/abi_amd64.go        | 282
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/abi/abi_legacy_amd64.go | 182
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/abi/abi_regabi_amd64.go | 316
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/abi/stubs.go            |  35
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/rt/fastmem.go           |  62
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/rt/fastvalue.go         | 183
-rw-r--r--  vendor/github.com/bytedance/sonic/loader/internal/rt/stackmap.go          | 181
8 files changed, 1438 insertions, 0 deletions
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/abi/abi.go b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi.go
new file mode 100644
index 000000000..524ab5330
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi.go
@@ -0,0 +1,197 @@
+/*
+ * Copyright 2022 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package abi
+
+import (
+ `fmt`
+ `reflect`
+ `sort`
+ `strings`
+
+ `github.com/bytedance/sonic/loader/internal/rt`
+)
+
+type FunctionLayout struct {
+ FP uint32
+ Args []Parameter
+ Rets []Parameter
+}
+
+func (self FunctionLayout) String() string {
+ return self.formatFn()
+}
+
+func (self FunctionLayout) ArgSize() uint32 {
+ size := uintptr(0)
+ for _, arg := range self.Args {
+ size += arg.Type.Size()
+ }
+ return uint32(size)
+}
+
+type slot struct {
+ p bool
+ m uint32
+}
+
+func (self FunctionLayout) StackMap() *rt.StackMap {
+ var st []slot
+ var mb rt.StackMapBuilder
+
+ /* add arguments */
+ for _, v := range self.Args {
+ st = append(st, slot {
+ m: v.Mem,
+ p: v.IsPointer,
+ })
+ }
+
+ /* add stack-passed return values */
+ for _, v := range self.Rets {
+ if !v.InRegister {
+ st = append(st, slot {
+ m: v.Mem,
+ p: v.IsPointer,
+ })
+ }
+ }
+
+ /* sort by memory offset */
+ sort.Slice(st, func(i int, j int) bool {
+ return st[i].m < st[j].m
+ })
+
+ /* add the bits */
+ for _, v := range st {
+ mb.AddField(v.p)
+ }
+
+ /* build the stack map */
+ return mb.Build()
+}
+
+func (self FunctionLayout) formatFn() string {
+ fp := self.FP
+ return fmt.Sprintf("\n%#04x\nRets:\n%s\nArgs:\n%s", fp, self.formatSeq(self.Rets, &fp), self.formatSeq(self.Args, &fp))
+}
+
+func (self FunctionLayout) formatSeq(v []Parameter, fp *uint32) string {
+ nb := len(v)
+ mm := make([]string, 0, len(v))
+
+ /* convert each part */
+ for i := nb - 1; i >= 0; i-- {
+ *fp -= PtrSize
+ mm = append(mm, fmt.Sprintf("%#04x %s", *fp, v[i].String()))
+ }
+
+ /* join them together */
+ return strings.Join(mm, "\n")
+}
+
+type Frame struct {
+ desc *FunctionLayout
+ locals []bool
+ ccall bool
+}
+
+func NewFrame(desc *FunctionLayout, locals []bool, ccall bool) Frame {
+ fr := Frame{}
+ fr.desc = desc
+ fr.locals = locals
+ fr.ccall = ccall
+ return fr
+}
+
+func (self *Frame) String() string {
+ out := self.desc.String()
+
+ off := -8
+ out += fmt.Sprintf("\n%#4x [Return PC]", off)
+ off -= 8
+ out += fmt.Sprintf("\n%#4x [RBP]", off)
+ off -= 8
+
+ for _, v := range ReservedRegs(self.ccall) {
+ out += fmt.Sprintf("\n%#4x [%v]", off, v)
+ off -= PtrSize
+ }
+
+ for _, b := range self.locals {
+ out += fmt.Sprintf("\n%#4x [%v]", off, b)
+ off -= PtrSize
+ }
+
+ return out
+}
+
+func (self *Frame) Prev() uint32 {
+ return self.Size() + PtrSize
+}
+
+func (self *Frame) Size() uint32 {
+ return uint32(self.Offs() + PtrSize)
+}
+
+func (self *Frame) Offs() uint32 {
+ return uint32(len(ReservedRegs(self.ccall)) * PtrSize + len(self.locals)*PtrSize)
+}
+
+func (self *Frame) ArgPtrs() *rt.StackMap {
+ return self.desc.StackMap()
+}
+
+func (self *Frame) LocalPtrs() *rt.StackMap {
+ var m rt.StackMapBuilder
+ for _, b := range self.locals {
+ m.AddFields(len(ReservedRegs(self.ccall)), b)
+ }
+ return m.Build()
+}
+
+func alignUp(n uint32, a int) uint32 {
+ return (n + uint32(a) - 1) &^ (uint32(a) - 1)
+}
+
+func isPointer(vt reflect.Type) bool {
+ switch vt.Kind() {
+ case reflect.Bool : fallthrough
+ case reflect.Int : fallthrough
+ case reflect.Int8 : fallthrough
+ case reflect.Int16 : fallthrough
+ case reflect.Int32 : fallthrough
+ case reflect.Int64 : fallthrough
+ case reflect.Uint : fallthrough
+ case reflect.Uint8 : fallthrough
+ case reflect.Uint16 : fallthrough
+ case reflect.Uint32 : fallthrough
+ case reflect.Uint64 : fallthrough
+ case reflect.Float32 : fallthrough
+ case reflect.Float64 : fallthrough
+ case reflect.Uintptr : return false
+ case reflect.Chan : fallthrough
+ case reflect.Func : fallthrough
+ case reflect.Map : fallthrough
+ case reflect.Ptr : fallthrough
+ case reflect.UnsafePointer : return true
+ case reflect.Complex64 : fallthrough
+ case reflect.Complex128 : fallthrough
+ case reflect.Array : fallthrough
+ case reflect.Struct : panic("abi: unsupported types")
+ default : panic("abi: invalid value type")
+ }
+}
\ No newline at end of file
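
The StackMap construction above boils down to: gather (offset, is-pointer) pairs, sort by offset, and emit one bit per slot. Below is a minimal standalone sketch of that idea in plain Go, standing in for the vendored rt.StackMapBuilder — the slot type and the example offsets are hypothetical.

package main

import (
	"fmt"
	"sort"
)

type slot struct {
	off uint32 // stack offset of the slot
	ptr bool   // whether the slot holds a pointer
}

func main() {
	// Hypothetical layout: three slots; only the one at offset 8 is a pointer.
	slots := []slot{{16, false}, {0, false}, {8, true}}

	// Mirror FunctionLayout.StackMap: order slots by offset, then emit one bit each.
	sort.Slice(slots, func(i, j int) bool { return slots[i].off < slots[j].off })

	var bits uint64
	for i, s := range slots {
		if s.ptr {
			bits |= 1 << i
		}
	}
	fmt.Printf("pointer bitmap: %03b\n", bits) // prints 010
}
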
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_amd64.go b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_amd64.go
new file mode 100644
index 000000000..c2b45a8e1
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_amd64.go
@@ -0,0 +1,282 @@
+/*
+ * Copyright 2022 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package abi
+
+import (
+ `fmt`
+ `reflect`
+ `unsafe`
+
+ . `github.com/cloudwego/iasm/x86_64`
+)
+
+const (
+ PtrSize = 8 // pointer size
+ PtrAlign = 8 // pointer alignment
+)
+
+var iregOrderC = []Register{
+ RDI,
+ RSI,
+ RDX,
+ RCX,
+ R8,
+ R9,
+}
+
+var xregOrderC = []Register{
+ XMM0,
+ XMM1,
+ XMM2,
+ XMM3,
+ XMM4,
+ XMM5,
+ XMM6,
+ XMM7,
+}
+
+var (
+ intType = reflect.TypeOf(0)
+ ptrType = reflect.TypeOf(unsafe.Pointer(nil))
+)
+
+func (self *Frame) argv(i int) *MemoryOperand {
+ return Ptr(RSP, int32(self.Prev() + self.desc.Args[i].Mem))
+}
+
+// spillv returns the stack slot used to spill register argument i while growing the stack
+func (self *Frame) spillv(i int) *MemoryOperand {
+ // leave one slot for the caller's return PC
+ return Ptr(RSP, PtrSize + int32(self.desc.Args[i].Mem))
+}
+
+func (self *Frame) retv(i int) *MemoryOperand {
+ return Ptr(RSP, int32(self.Prev() + self.desc.Rets[i].Mem))
+}
+
+func (self *Frame) resv(i int) *MemoryOperand {
+ return Ptr(RSP, int32(self.Offs() - uint32((i+1) * PtrSize)))
+}
+
+func (self *Frame) emitGrowStack(p *Program, entry *Label) {
+ // spill all register arguments
+ for i, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat == floatKind64 {
+ p.MOVSD(v.Reg, self.spillv(i))
+ } else if v.IsFloat == floatKind32 {
+ p.MOVSS(v.Reg, self.spillv(i))
+ } else {
+ p.MOVQ(v.Reg, self.spillv(i))
+ }
+ }
+ }
+
+ // call runtime.morestack_noctxt
+ p.MOVQ(F_morestack_noctxt, R12)
+ p.CALLQ(R12)
+ // load all register arguments
+ for i, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat == floatKind64 {
+ p.MOVSD(self.spillv(i), v.Reg)
+ } else if v.IsFloat == floatKind32 {
+ p.MOVSS(self.spillv(i), v.Reg)
+ } else {
+ p.MOVQ(self.spillv(i), v.Reg)
+ }
+ }
+ }
+
+ // jump back to the function entry
+ p.JMP(entry)
+}
+
+func (self *Frame) GrowStackTextSize() uint32 {
+ p := DefaultArch.CreateProgram()
+ // spill all register arguments
+ for i, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat == floatKind64 {
+ p.MOVSD(v.Reg, self.spillv(i))
+ } else if v.IsFloat == floatKind32 {
+ p.MOVSS(v.Reg, self.spillv(i))
+ } else {
+ p.MOVQ(v.Reg, self.spillv(i))
+ }
+ }
+ }
+
+ // call runtime.morestack_noctxt
+ p.MOVQ(F_morestack_noctxt, R12)
+ p.CALLQ(R12)
+ // load all register arguments
+ for i, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat == floatKind64 {
+ p.MOVSD(self.spillv(i), v.Reg)
+ } else if v.IsFloat == floatKind32 {
+ p.MOVSS(self.spillv(i), v.Reg)
+ } else {
+ p.MOVQ(self.spillv(i), v.Reg)
+ }
+ }
+ }
+
+ // jump back to the function entry
+ l := CreateLabel("")
+ p.Link(l)
+ p.JMP(l)
+
+ return uint32(len(p.Assemble(0)))
+}
+
+func (self *Frame) emitPrologue(p *Program) {
+ p.SUBQ(self.Size(), RSP)
+ p.MOVQ(RBP, Ptr(RSP, int32(self.Offs())))
+ p.LEAQ(Ptr(RSP, int32(self.Offs())), RBP)
+}
+
+func (self *Frame) emitEpilogue(p *Program) {
+ p.MOVQ(Ptr(RSP, int32(self.Offs())), RBP)
+ p.ADDQ(self.Size(), RSP)
+ p.RET()
+}
+
+func (self *Frame) emitReserveRegs(p *Program) {
+ // spill reserved registers
+ for i, r := range ReservedRegs(self.ccall) {
+ switch r.(type) {
+ case Register64:
+ p.MOVQ(r, self.resv(i))
+ case XMMRegister:
+ p.MOVSD(r, self.resv(i))
+ default:
+ panic(fmt.Sprintf("unsupported register type %T to reserve", r))
+ }
+ }
+}
+
+func (self *Frame) emitSpillPtrs(p *Program) {
+ // spill pointer argument registers
+ for i, r := range self.desc.Args {
+ if r.InRegister && r.IsPointer {
+ p.MOVQ(r.Reg, self.argv(i))
+ }
+ }
+}
+
+func (self *Frame) emitClearPtrs(p *Program) {
+ // zero the stack slots of pointer arguments
+ for i, r := range self.desc.Args {
+ if r.InRegister && r.IsPointer {
+ p.MOVQ(int64(0), self.argv(i))
+ }
+ }
+}
+
+func (self *Frame) emitCallC(p *Program, addr uintptr) {
+ p.MOVQ(addr, RAX)
+ p.CALLQ(RAX)
+}
+
+type floatKind uint8
+
+const (
+ notFloatKind floatKind = iota
+ floatKind32
+ floatKind64
+)
+
+type Parameter struct {
+ InRegister bool
+ IsPointer bool
+ IsFloat floatKind
+ Reg Register
+ Mem uint32
+ Type reflect.Type
+}
+
+func mkIReg(vt reflect.Type, reg Register64) (p Parameter) {
+ p.Reg = reg
+ p.Type = vt
+ p.InRegister = true
+ p.IsPointer = isPointer(vt)
+ return
+}
+
+func isFloat(vt reflect.Type) floatKind {
+ switch vt.Kind() {
+ case reflect.Float32:
+ return floatKind32
+ case reflect.Float64:
+ return floatKind64
+ default:
+ return notFloatKind
+ }
+}
+
+func mkXReg(vt reflect.Type, reg XMMRegister) (p Parameter) {
+ p.Reg = reg
+ p.Type = vt
+ p.InRegister = true
+ p.IsFloat = isFloat(vt)
+ return
+}
+
+func mkStack(vt reflect.Type, mem uint32) (p Parameter) {
+ p.Mem = mem
+ p.Type = vt
+ p.InRegister = false
+ p.IsPointer = isPointer(vt)
+ p.IsFloat = isFloat(vt)
+ return
+}
+
+func (self Parameter) String() string {
+ if self.InRegister {
+ return fmt.Sprintf("[%%%s, Pointer(%v), Float(%v)]", self.Reg, self.IsPointer, self.IsFloat)
+ } else {
+ return fmt.Sprintf("[%d(FP), Pointer(%v), Float(%v)]", self.Mem, self.IsPointer, self.IsFloat)
+ }
+}
+
+func CallC(addr uintptr, fr Frame, maxStack uintptr) []byte {
+ p := DefaultArch.CreateProgram()
+
+ stack := CreateLabel("_stack_grow")
+ entry := CreateLabel("_entry")
+ p.Link(entry)
+ fr.emitStackCheck(p, stack, maxStack)
+ fr.emitPrologue(p)
+ fr.emitReserveRegs(p)
+ fr.emitSpillPtrs(p)
+ fr.emitExchangeArgs(p)
+ fr.emitCallC(p, addr)
+ fr.emitExchangeRets(p)
+ fr.emitRestoreRegs(p)
+ fr.emitEpilogue(p)
+ p.Link(stack)
+ fr.emitGrowStack(p, entry)
+
+ return p.Assemble(0)
+}
+
+func (self *Frame) emitDebug(p *Program) {
+ p.INT(3)
+}
\ No newline at end of file
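
Offs/Size/Prev above define the frame geometry that emitPrologue and the stack check rely on: the reserved-register and local spill area, plus the saved RBP slot, plus the return PC pushed by CALL. A back-of-the-envelope sketch with assumed counts (two reserved registers, one local), not the vendored Frame type:

package main

import "fmt"

const ptrSize = 8 // matches PtrSize above

func main() {
	reserved, locals := 2, 1                      // assumed frame contents
	offs := uint32((reserved + locals) * ptrSize) // spill area below the saved RBP
	size := offs + ptrSize                        // plus the saved RBP slot
	prev := size + ptrSize                        // plus the return PC pushed by CALL
	fmt.Println(offs, size, prev)                 // 24 32 40
}
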
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_legacy_amd64.go b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_legacy_amd64.go
new file mode 100644
index 000000000..298c48178
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_legacy_amd64.go
@@ -0,0 +1,182 @@
+//go:build !go1.17
+// +build !go1.17
+
+/*
+ * Copyright 2022 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package abi
+
+import (
+ `fmt`
+ `reflect`
+ `runtime`
+
+ . `github.com/cloudwego/iasm/x86_64`
+)
+
+func ReservedRegs(callc bool) []Register {
+ return nil
+}
+
+func salloc(p []Parameter, sp uint32, vt reflect.Type) (uint32, []Parameter) {
+ switch vt.Kind() {
+ case reflect.Bool : return sp + 8, append(p, mkStack(reflect.TypeOf(false), sp))
+ case reflect.Int : return sp + 8, append(p, mkStack(intType, sp))
+ case reflect.Int8 : return sp + 8, append(p, mkStack(reflect.TypeOf(int8(0)), sp))
+ case reflect.Int16 : return sp + 8, append(p, mkStack(reflect.TypeOf(int16(0)), sp))
+ case reflect.Int32 : return sp + 8, append(p, mkStack(reflect.TypeOf(int32(0)), sp))
+ case reflect.Int64 : return sp + 8, append(p, mkStack(reflect.TypeOf(int64(0)), sp))
+ case reflect.Uint : return sp + 8, append(p, mkStack(reflect.TypeOf(uint(0)), sp))
+ case reflect.Uint8 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint8(0)), sp))
+ case reflect.Uint16 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint16(0)), sp))
+ case reflect.Uint32 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint32(0)), sp))
+ case reflect.Uint64 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint64(0)), sp))
+ case reflect.Uintptr : return sp + 8, append(p, mkStack(reflect.TypeOf(uintptr(0)), sp))
+ case reflect.Float32 : return sp + 8, append(p, mkStack(reflect.TypeOf(float32(0)), sp))
+ case reflect.Float64 : return sp + 8, append(p, mkStack(reflect.TypeOf(float64(0)), sp))
+ case reflect.Complex64 : panic("abi: go116: not implemented: complex64")
+ case reflect.Complex128 : panic("abi: go116: not implemented: complex128")
+ case reflect.Array : panic("abi: go116: not implemented: arrays")
+ case reflect.Chan : return sp + 8, append(p, mkStack(reflect.TypeOf((chan int)(nil)), sp))
+ case reflect.Func : return sp + 8, append(p, mkStack(reflect.TypeOf((func())(nil)), sp))
+ case reflect.Map : return sp + 8, append(p, mkStack(reflect.TypeOf((map[int]int)(nil)), sp))
+ case reflect.Ptr : return sp + 8, append(p, mkStack(reflect.TypeOf((*int)(nil)), sp))
+ case reflect.UnsafePointer : return sp + 8, append(p, mkStack(ptrType, sp))
+ case reflect.Interface : return sp + 16, append(p, mkStack(ptrType, sp), mkStack(ptrType, sp + 8))
+ case reflect.Slice : return sp + 24, append(p, mkStack(ptrType, sp), mkStack(intType, sp + 8), mkStack(intType, sp + 16))
+ case reflect.String : return sp + 16, append(p, mkStack(ptrType, sp), mkStack(intType, sp + 8))
+ case reflect.Struct : panic("abi: go116: not implemented: structs")
+ default : panic("abi: invalid value type")
+ }
+}
+
+func NewFunctionLayout(ft reflect.Type) FunctionLayout {
+ var sp uint32
+ var fn FunctionLayout
+
+ /* assign every argument */
+ for i := 0; i < ft.NumIn(); i++ {
+ sp, fn.Args = salloc(fn.Args, sp, ft.In(i))
+ }
+
+ /* assign every return value */
+ for i := 0; i < ft.NumOut(); i++ {
+ sp, fn.Rets = salloc(fn.Rets, sp, ft.Out(i))
+ }
+
+ /* record the final stack offset in FP */
+ fn.FP = sp
+ return fn
+}
+
+func (self *Frame) emitExchangeArgs(p *Program) {
+ iregArgs, xregArgs := 0, 0
+ for _, v := range self.desc.Args {
+ if v.IsFloat != notFloatKind {
+ xregArgs += 1
+ } else {
+ iregArgs += 1
+ }
+ }
+
+ if iregArgs > len(iregOrderC) {
+ panic("too many arguments, only support at most 6 integer arguments now")
+ }
+ if xregArgs > len(xregOrderC) {
+ panic("too many arguments, only support at most 8 float arguments now")
+ }
+
+ ic, xc := iregArgs, xregArgs
+ for i := 0; i < len(self.desc.Args); i++ {
+ arg := self.desc.Args[i]
+ if arg.IsFloat == floatKind64 {
+ p.MOVSD(self.argv(i), xregOrderC[xregArgs - xc])
+ xc -= 1
+ } else if arg.IsFloat == floatKind32 {
+ p.MOVSS(self.argv(i), xregOrderC[xregArgs - xc])
+ xc -= 1
+ } else {
+ p.MOVQ(self.argv(i), iregOrderC[iregArgs - ic])
+ ic -= 1
+ }
+ }
+}
+
+func (self *Frame) emitStackCheck(p *Program, to *Label, maxStack uintptr) {
+ // get the current goroutine
+ switch runtime.GOOS {
+ case "linux" : p.MOVQ(Abs(-8), R14).FS()
+ case "darwin" : p.MOVQ(Abs(0x30), R14).GS()
+ case "windows": break // windows always stores G pointer at R14
+ default : panic("unsupported operating system")
+ }
+
+ // check the stack guard
+ p.LEAQ(Ptr(RSP, -int32(self.Size() + uint32(maxStack))), RAX)
+ p.CMPQ(Ptr(R14, _G_stackguard0), RAX)
+ p.JBE(to)
+}
+
+func (self *Frame) StackCheckTextSize() uint32 {
+ p := DefaultArch.CreateProgram()
+
+ // get the current goroutine
+ switch runtime.GOOS {
+ case "linux" : p.MOVQ(Abs(-8), R14).FS()
+ case "darwin" : p.MOVQ(Abs(0x30), R14).GS()
+ case "windows": break // windows always stores G pointer at R14
+ default : panic("unsupported operating system")
+ }
+
+ // check the stack guard
+ p.LEAQ(Ptr(RSP, -int32(self.Size())), RAX)
+ p.CMPQ(Ptr(R14, _G_stackguard0), RAX)
+ l := CreateLabel("")
+ p.Link(l)
+ p.JBE(l)
+
+ return uint32(len(p.Assemble(0)))
+}
+
+func (self *Frame) emitExchangeRets(p *Program) {
+ if len(self.desc.Rets) > 1 {
+ panic("too many results, only support one result now")
+ }
+ // store result
+ if len(self.desc.Rets) == 1 {
+ if self.desc.Rets[0].IsFloat == floatKind64 {
+ p.MOVSD(xregOrderC[0], self.retv(0))
+ } else if self.desc.Rets[0].IsFloat == floatKind32 {
+ p.MOVSS(xregOrderC[0], self.retv(0))
+ } else {
+ p.MOVQ(RAX, self.retv(0))
+ }
+ }
+}
+
+func (self *Frame) emitRestoreRegs(p *Program) {
+ // load reserved registers
+ for i, r := range ReservedRegs(self.ccall) {
+ switch r.(type) {
+ case Register64:
+ p.MOVQ(self.resv(i), r)
+ case XMMRegister:
+ p.MOVSD(self.resv(i), r)
+ default:
+ panic(fmt.Sprintf("unsupported register type %T to reserve", r))
+ }
+ }
+}
\ No newline at end of file
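
In this pre-go1.17 layout everything is stack-assigned: each scalar takes one 8-byte slot, strings two (pointer + length), slices three (pointer + length + capacity). A simplified sketch of that accounting for a hypothetical signature — it covers only a few kinds, unlike salloc above:

package main

import (
	"fmt"
	"reflect"
)

// words returns how many 8-byte stack slots a value of type t occupies.
func words(t reflect.Type) uint32 {
	switch t.Kind() {
	case reflect.String:
		return 2 // data pointer + length
	case reflect.Slice:
		return 3 // data pointer + length + capacity
	default:
		return 1 // scalars, pointers, maps, chans, funcs
	}
}

func main() {
	ft := reflect.TypeOf(func(int, string, []byte) {})
	var sp uint32
	for i := 0; i < ft.NumIn(); i++ {
		fmt.Printf("arg %d at offset %d\n", i, sp*8)
		sp += words(ft.In(i))
	}
	fmt.Println("FP =", sp*8) // 48
}
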
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_regabi_amd64.go b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_regabi_amd64.go
new file mode 100644
index 000000000..5a31dea89
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_regabi_amd64.go
@@ -0,0 +1,316 @@
+//go:build go1.17
+// +build go1.17
+
+/*
+ * Copyright 2022 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Go Internal ABI implementation
+ *
+ * This module implements the function layout algorithm described by the Go internal ABI.
+ * See https://github.com/golang/go/blob/master/src/cmd/compile/abi-internal.md for more info.
+ */
+
+package abi
+
+import (
+ `fmt`
+ `reflect`
+
+ . `github.com/cloudwego/iasm/x86_64`
+)
+
+/** Frame Structure of the Generated Function
+ FP +------------------------------+
+ | . . . |
+ | 2nd reg argument spill space |
+ | 1st reg argument spill space |
+ | <pointer-sized alignment> |
+ | . . . |
+ | 2nd stack-assigned result |
+ | 1st stack-assigned result |
+ | <pointer-sized alignment> |
+ | . . . |
+ | 2nd stack-assigned argument |
+ | 1st stack-assigned argument |
+ | stack-assigned receiver |
+prev() +------------------------------+ (Previous Frame)
+ Return PC |
+size() -------------------------------|
+ Saved RBP |
+offs() -------------------------------|
+ 1st Reserved Register |
+ -------------------------------|
+ 2nd Reserved Register |
+ -------------------------------|
+ Local Variables |
+ RSP -------------------------------|↓ lower addresses
+*/
+
+const zeroRegGo = XMM15
+
+var iregOrderGo = [...]Register64 {
+ RAX, // RDI
+ RBX, // RSI
+ RCX, // RDX
+ RDI, // RCX
+ RSI, // R8
+ R8, // R9
+ R9,
+ R10,
+ R11,
+}
+
+var xregOrderGo = [...]XMMRegister {
+ XMM0,
+ XMM1,
+ XMM2,
+ XMM3,
+ XMM4,
+ XMM5,
+ XMM6,
+ XMM7,
+ XMM8,
+ XMM9,
+ XMM10,
+ XMM11,
+ XMM12,
+ XMM13,
+ XMM14,
+}
+
+func ReservedRegs(callc bool) []Register {
+ if callc {
+ return nil
+ }
+ return []Register {
+ R14, // current goroutine
+ R15, // GOT reference
+ }
+}
+
+type stackAlloc struct {
+ s uint32
+ i int
+ x int
+}
+
+func (self *stackAlloc) reset() {
+ self.i, self.x = 0, 0
+}
+
+func (self *stackAlloc) ireg(vt reflect.Type) (p Parameter) {
+ p = mkIReg(vt, iregOrderGo[self.i])
+ self.i++
+ return
+}
+
+func (self *stackAlloc) xreg(vt reflect.Type) (p Parameter) {
+ p = mkXReg(vt, xregOrderGo[self.x])
+ self.x++
+ return
+}
+
+func (self *stackAlloc) stack(vt reflect.Type) (p Parameter) {
+ p = mkStack(vt, self.s)
+ self.s += uint32(vt.Size())
+ return
+}
+
+func (self *stackAlloc) spill(n uint32, a int) uint32 {
+ self.s = alignUp(self.s, a) + n
+ return self.s
+}
+
+func (self *stackAlloc) alloc(p []Parameter, vt reflect.Type) []Parameter {
+ nb := vt.Size()
+ vk := vt.Kind()
+
+ /* zero-sized objects are allocated on stack */
+ if nb == 0 {
+ return append(p, mkStack(intType, self.s))
+ }
+
+ /* check for value type */
+ switch vk {
+ case reflect.Bool : return self.valloc(p, reflect.TypeOf(false))
+ case reflect.Int : return self.valloc(p, intType)
+ case reflect.Int8 : return self.valloc(p, reflect.TypeOf(int8(0)))
+ case reflect.Int16 : return self.valloc(p, reflect.TypeOf(int16(0)))
+ case reflect.Int32 : return self.valloc(p, reflect.TypeOf(int32(0)))
+ case reflect.Int64 : return self.valloc(p, reflect.TypeOf(int64(0)))
+ case reflect.Uint : return self.valloc(p, reflect.TypeOf(uint(0)))
+ case reflect.Uint8 : return self.valloc(p, reflect.TypeOf(uint8(0)))
+ case reflect.Uint16 : return self.valloc(p, reflect.TypeOf(uint16(0)))
+ case reflect.Uint32 : return self.valloc(p, reflect.TypeOf(uint32(0)))
+ case reflect.Uint64 : return self.valloc(p, reflect.TypeOf(uint64(0)))
+ case reflect.Uintptr : return self.valloc(p, reflect.TypeOf(uintptr(0)))
+ case reflect.Float32 : return self.valloc(p, reflect.TypeOf(float32(0)))
+ case reflect.Float64 : return self.valloc(p, reflect.TypeOf(float64(0)))
+ case reflect.Complex64 : panic("abi: go117: not implemented: complex64")
+ case reflect.Complex128 : panic("abi: go117: not implemented: complex128")
+ case reflect.Array : panic("abi: go117: not implemented: arrays")
+ case reflect.Chan : return self.valloc(p, reflect.TypeOf((chan int)(nil)))
+ case reflect.Func : return self.valloc(p, reflect.TypeOf((func())(nil)))
+ case reflect.Map : return self.valloc(p, reflect.TypeOf((map[int]int)(nil)))
+ case reflect.Ptr : return self.valloc(p, reflect.TypeOf((*int)(nil)))
+ case reflect.UnsafePointer : return self.valloc(p, ptrType)
+ case reflect.Interface : return self.valloc(p, ptrType, ptrType)
+ case reflect.Slice : return self.valloc(p, ptrType, intType, intType)
+ case reflect.String : return self.valloc(p, ptrType, intType)
+ case reflect.Struct : panic("abi: go117: not implemented: structs")
+ default : panic("abi: invalid value type")
+ }
+}
+
+func (self *stackAlloc) valloc(p []Parameter, vts ...reflect.Type) []Parameter {
+ for _, vt := range vts {
+ enum := isFloat(vt)
+ if enum != notFloatKind && self.x < len(xregOrderGo) {
+ p = append(p, self.xreg(vt))
+ } else if enum == notFloatKind && self.i < len(iregOrderGo) {
+ p = append(p, self.ireg(vt))
+ } else {
+ p = append(p, self.stack(vt))
+ }
+ }
+ return p
+}
+
+func NewFunctionLayout(ft reflect.Type) FunctionLayout {
+ var sa stackAlloc
+ var fn FunctionLayout
+
+ /* assign every argument */
+ for i := 0; i < ft.NumIn(); i++ {
+ fn.Args = sa.alloc(fn.Args, ft.In(i))
+ }
+
+ /* reset the register counter, and add a pointer alignment field */
+ sa.reset()
+
+ /* assign every return value */
+ for i := 0; i < ft.NumOut(); i++ {
+ fn.Rets = sa.alloc(fn.Rets, ft.Out(i))
+ }
+
+ sa.spill(0, PtrAlign)
+
+ /* assign spill slots */
+ for i := 0; i < len(fn.Args); i++ {
+ if fn.Args[i].InRegister {
+ fn.Args[i].Mem = sa.spill(PtrSize, PtrAlign) - PtrSize
+ }
+ }
+
+ /* add the final pointer alignment field */
+ fn.FP = sa.spill(0, PtrAlign)
+ return fn
+}
+
+func (self *Frame) emitExchangeArgs(p *Program) {
+ iregArgs := make([]Parameter, 0, len(self.desc.Args))
+ xregArgs := 0
+ for _, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat != notFloatKind {
+ xregArgs += 1
+ } else {
+ iregArgs = append(iregArgs, v)
+ }
+ } else {
+ panic("not support stack-assgined arguments now")
+ }
+ }
+ if xregArgs > len(xregOrderC) {
+ panic("too many arguments, only support at most 8 integer register arguments now")
+ }
+
+ switch len(iregArgs) {
+ case 0, 1, 2, 3: {
+ // fast path: when there are fewer than four integer arguments, just exchange the registers
+ for i := 0; i < len(iregArgs); i++ {
+ p.MOVQ(iregOrderGo[i], iregOrderC[i])
+ }
+ }
+ case 4, 5, 6: {
+ // need to spill the 4th and later (index 3+) registers before the exchange
+ for i := 3; i < len(iregArgs); i++ {
+ arg := iregArgs[i]
+ // pointer args have already been spilled
+ if !arg.IsPointer {
+ p.MOVQ(iregOrderGo[i], Ptr(RSP, int32(self.Prev() + arg.Mem)))
+ }
+ }
+ p.MOVQ(iregOrderGo[0], iregOrderC[0])
+ p.MOVQ(iregOrderGo[1], iregOrderC[1])
+ p.MOVQ(iregOrderGo[2], iregOrderC[2])
+ for i := 3; i < len(iregArgs); i++ {
+ arg := iregArgs[i]
+ p.MOVQ(Ptr(RSP, int32(self.Prev() + arg.Mem)), iregOrderC[i])
+ }
+ }
+ default:
+ panic("too many arguments, only support at most 6 integer register arguments now")
+ }
+}
+
+func (self *Frame) emitStackCheck(p *Program, to *Label, maxStack uintptr) {
+ p.LEAQ(Ptr(RSP, int32(-(self.Size() + uint32(maxStack)))), R12)
+ p.CMPQ(Ptr(R14, _G_stackguard0), R12)
+ p.JBE(to)
+}
+
+func (self *Frame) StackCheckTextSize() uint32 {
+ p := DefaultArch.CreateProgram()
+ p.LEAQ(Ptr(RSP, int32(-(self.Size()))), R12)
+ p.CMPQ(Ptr(R14, _G_stackguard0), R12)
+ to := CreateLabel("")
+ p.Link(to)
+ p.JBE(to)
+ return uint32(len(p.Assemble(0)))
+}
+
+func (self *Frame) emitExchangeRets(p *Program) {
+ if len(self.desc.Rets) > 1 {
+ panic("too many results, only support one result now")
+ }
+ // store result
+ if len(self.desc.Rets) == 1 && !self.desc.Rets[0].InRegister {
+ if self.desc.Rets[0].IsFloat == floatKind64 {
+ p.MOVSD(xregOrderC[0], self.retv(0))
+ } else if self.desc.Rets[0].IsFloat == floatKind32 {
+ p.MOVSS(xregOrderC[0], self.retv(0))
+ } else {
+ p.MOVQ(RAX, self.retv(0))
+ }
+ }
+}
+
+func (self *Frame) emitRestoreRegs(p *Program) {
+ // load reserved registers
+ for i, r := range ReservedRegs(self.ccall) {
+ switch r.(type) {
+ case Register64:
+ p.MOVQ(self.resv(i), r)
+ case XMMRegister:
+ p.MOVSD(self.resv(i), r)
+ default:
+ panic(fmt.Sprintf("unsupported register type %T to reserve", r))
+ }
+ }
+ // zero xmm15 for go abi
+ p.XORPS(zeroRegGo, zeroRegGo)
+}
\ No newline at end of file
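
The register walk in valloc above assigns integer-class values to the Go ABI integer sequence (RAX, RBX, RCX, ...) and floats to XMM0..XMM14, falling back to the stack when a sequence runs out. A sketch of just that assignment order for an assumed signature — register names are plain strings here, not iasm registers:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	iregs := []string{"RAX", "RBX", "RCX", "RDI", "RSI", "R8", "R9", "R10", "R11"}
	ft := reflect.TypeOf(func(int, float64, *int, bool) {})

	i, x := 0, 0
	for k := 0; k < ft.NumIn(); k++ {
		switch ft.In(k).Kind() {
		case reflect.Float32, reflect.Float64:
			fmt.Printf("arg %d -> XMM%d\n", k, x) // float class
			x++
		default:
			fmt.Printf("arg %d -> %s\n", k, iregs[i]) // integer class
			i++
		}
	}
	// prints: arg 0 -> RAX, arg 1 -> XMM0, arg 2 -> RBX, arg 3 -> RCX
}
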
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/abi/stubs.go b/vendor/github.com/bytedance/sonic/loader/internal/abi/stubs.go
new file mode 100644
index 000000000..af9930156
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/abi/stubs.go
@@ -0,0 +1,35 @@
+/**
+ * Copyright 2023 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package abi
+
+import (
+ _ `unsafe`
+
+ `github.com/bytedance/sonic/loader/internal/rt`
+)
+
+const (
+ _G_stackguard0 = 0x10
+)
+
+var (
+ F_morestack_noctxt = uintptr(rt.FuncAddr(morestack_noctxt))
+)
+
+//go:linkname morestack_noctxt runtime.morestack_noctxt
+func morestack_noctxt()
+
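
F_morestack_noctxt above works because a Go func value is a pointer to a funcval whose first word is the code address; rt.FuncAddr dereferences exactly that. A sketch of the same trick on an ordinary function — the eface layout used here is a runtime implementation detail, not a stable API:

package main

import (
	"fmt"
	"reflect"
	"unsafe"
)

func addr(f interface{}) uintptr {
	if reflect.TypeOf(f).Kind() != reflect.Func {
		panic("not a function")
	}
	type eface struct {
		typ, val unsafe.Pointer
	}
	e := (*eface)(unsafe.Pointer(&f))
	// the interface data word points at the funcval; its first word is the code address
	return *(*uintptr)(e.val)
}

func main() {
	fmt.Printf("%#x\n", addr(main)) // entry address of main.main
}
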
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/rt/fastmem.go b/vendor/github.com/bytedance/sonic/loader/internal/rt/fastmem.go
new file mode 100644
index 000000000..3bc24c4e4
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/rt/fastmem.go
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package rt
+
+import (
+ `reflect`
+ `unsafe`
+)
+
+//go:nosplit
+func Mem2Str(v []byte) (s string) {
+ (*GoString)(unsafe.Pointer(&s)).Len = (*GoSlice)(unsafe.Pointer(&v)).Len
+ (*GoString)(unsafe.Pointer(&s)).Ptr = (*GoSlice)(unsafe.Pointer(&v)).Ptr
+ return
+}
+
+//go:nosplit
+func Str2Mem(s string) (v []byte) {
+ (*GoSlice)(unsafe.Pointer(&v)).Cap = (*GoString)(unsafe.Pointer(&s)).Len
+ (*GoSlice)(unsafe.Pointer(&v)).Len = (*GoString)(unsafe.Pointer(&s)).Len
+ (*GoSlice)(unsafe.Pointer(&v)).Ptr = (*GoString)(unsafe.Pointer(&s)).Ptr
+ return
+}
+
+func BytesFrom(p unsafe.Pointer, n int, c int) (r []byte) {
+ (*GoSlice)(unsafe.Pointer(&r)).Ptr = p
+ (*GoSlice)(unsafe.Pointer(&r)).Len = n
+ (*GoSlice)(unsafe.Pointer(&r)).Cap = c
+ return
+}
+
+func FuncAddr(f interface{}) unsafe.Pointer {
+ if vv := UnpackEface(f); vv.Type.Kind() != reflect.Func {
+ panic("f is not a function")
+ } else {
+ return *(*unsafe.Pointer)(vv.Value)
+ }
+}
+
+//go:nocheckptr
+func IndexChar(src string, index int) unsafe.Pointer {
+ return unsafe.Pointer(uintptr((*GoString)(unsafe.Pointer(&src)).Ptr) + uintptr(index))
+}
+
+//go:nocheckptr
+func IndexByte(ptr []byte, index int) unsafe.Pointer {
+ return unsafe.Pointer(uintptr((*GoSlice)(unsafe.Pointer(&ptr)).Ptr) + uintptr(index))
+}
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/rt/fastvalue.go b/vendor/github.com/bytedance/sonic/loader/internal/rt/fastvalue.go
new file mode 100644
index 000000000..e6c5bc869
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/rt/fastvalue.go
@@ -0,0 +1,183 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package rt
+
+import (
+ `reflect`
+ `unsafe`
+)
+
+var (
+ reflectRtypeItab = findReflectRtypeItab()
+)
+
+// GoType.KindFlags const
+const (
+ F_direct = 1 << 5
+ F_kind_mask = (1 << 5) - 1
+)
+
+// GoType.Flags const
+const (
+ tflagUncommon uint8 = 1 << 0
+ tflagExtraStar uint8 = 1 << 1
+ tflagNamed uint8 = 1 << 2
+ tflagRegularMemory uint8 = 1 << 3
+)
+
+type GoType struct {
+ Size uintptr
+ PtrData uintptr
+ Hash uint32
+ Flags uint8
+ Align uint8
+ FieldAlign uint8
+ KindFlags uint8
+ Traits unsafe.Pointer
+ GCData *byte
+ Str int32
+ PtrToSelf int32
+}
+
+func (self *GoType) IsNamed() bool {
+ return (self.Flags & tflagNamed) != 0
+}
+
+func (self *GoType) Kind() reflect.Kind {
+ return reflect.Kind(self.KindFlags & F_kind_mask)
+}
+
+func (self *GoType) Pack() (t reflect.Type) {
+ (*GoIface)(unsafe.Pointer(&t)).Itab = reflectRtypeItab
+ (*GoIface)(unsafe.Pointer(&t)).Value = unsafe.Pointer(self)
+ return
+}
+
+func (self *GoType) String() string {
+ return self.Pack().String()
+}
+
+func (self *GoType) Indirect() bool {
+ return self.KindFlags & F_direct == 0
+}
+
+type GoItab struct {
+ it unsafe.Pointer
+ Vt *GoType
+ hv uint32
+ _ [4]byte
+ fn [1]uintptr
+}
+
+type GoIface struct {
+ Itab *GoItab
+ Value unsafe.Pointer
+}
+
+type GoEface struct {
+ Type *GoType
+ Value unsafe.Pointer
+}
+
+func (self GoEface) Pack() (v interface{}) {
+ *(*GoEface)(unsafe.Pointer(&v)) = self
+ return
+}
+
+type GoPtrType struct {
+ GoType
+ Elem *GoType
+}
+
+type GoMapType struct {
+ GoType
+ Key *GoType
+ Elem *GoType
+ Bucket *GoType
+ Hasher func(unsafe.Pointer, uintptr) uintptr
+ KeySize uint8
+ ElemSize uint8
+ BucketSize uint16
+ Flags uint32
+}
+
+func (self *GoMapType) IndirectElem() bool {
+ return self.Flags & 2 != 0
+}
+
+type GoStructType struct {
+ GoType
+ Pkg *byte
+ Fields []GoStructField
+}
+
+type GoStructField struct {
+ Name *byte
+ Type *GoType
+ OffEmbed uintptr
+}
+
+type GoInterfaceType struct {
+ GoType
+ PkgPath *byte
+ Methods []GoInterfaceMethod
+}
+
+type GoInterfaceMethod struct {
+ Name int32
+ Type int32
+}
+
+type GoSlice struct {
+ Ptr unsafe.Pointer
+ Len int
+ Cap int
+}
+
+type GoString struct {
+ Ptr unsafe.Pointer
+ Len int
+}
+
+func PtrElem(t *GoType) *GoType {
+ return (*GoPtrType)(unsafe.Pointer(t)).Elem
+}
+
+func MapType(t *GoType) *GoMapType {
+ return (*GoMapType)(unsafe.Pointer(t))
+}
+
+func IfaceType(t *GoType) *GoInterfaceType {
+ return (*GoInterfaceType)(unsafe.Pointer(t))
+}
+
+func UnpackType(t reflect.Type) *GoType {
+ return (*GoType)((*GoIface)(unsafe.Pointer(&t)).Value)
+}
+
+func UnpackEface(v interface{}) GoEface {
+ return *(*GoEface)(unsafe.Pointer(&v))
+}
+
+func UnpackIface(v interface{}) GoIface {
+ return *(*GoIface)(unsafe.Pointer(&v))
+}
+
+func findReflectRtypeItab() *GoItab {
+ v := reflect.TypeOf(struct{}{})
+ return (*GoIface)(unsafe.Pointer(&v)).Itab
+}
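
UnpackEface above relies on interface{} being a (type descriptor, data pointer) pair, with Size as the descriptor's first field. A self-contained sketch of that inspection — these struct layouts mirror the runtime and may change between Go releases:

package main

import (
	"fmt"
	"unsafe"
)

type gotype struct {
	size uintptr // first field of runtime._type, as in GoType.Size
}

type eface struct {
	typ  *gotype
	data unsafe.Pointer
}

func main() {
	var v interface{} = int64(42)
	e := *(*eface)(unsafe.Pointer(&v))
	fmt.Println("size:", e.typ.size, "value:", *(*int64)(e.data)) // size: 8 value: 42
}
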
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/rt/stackmap.go b/vendor/github.com/bytedance/sonic/loader/internal/rt/stackmap.go
new file mode 100644
index 000000000..84ed9a95f
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/loader/internal/rt/stackmap.go
@@ -0,0 +1,181 @@
+/**
+ * Copyright 2023 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package rt
+
+import (
+ `fmt`
+ `strings`
+ `unsafe`
+)
+
+type Bitmap struct {
+ N int
+ B []byte
+}
+
+func (self *Bitmap) grow() {
+ if self.N >= len(self.B) * 8 {
+ self.B = append(self.B, 0)
+ }
+}
+
+func (self *Bitmap) mark(i int, bv int) {
+ if bv != 0 {
+ self.B[i / 8] |= 1 << (i % 8)
+ } else {
+ self.B[i / 8] &^= 1 << (i % 8)
+ }
+}
+
+func (self *Bitmap) Set(i int, bv int) {
+ if i >= self.N {
+ panic("bitmap: invalid bit position")
+ } else {
+ self.mark(i, bv)
+ }
+}
+
+func (self *Bitmap) Append(bv int) {
+ self.grow()
+ self.mark(self.N, bv)
+ self.N++
+}
+
+func (self *Bitmap) AppendMany(n int, bv int) {
+ for i := 0; i < n; i++ {
+ self.Append(bv)
+ }
+}
+
+// var (
+// _stackMapLock = sync.Mutex{}
+// _stackMapCache = make(map[*StackMap]struct{})
+// )
+
+type BitVec struct {
+ N uintptr
+ B unsafe.Pointer
+}
+
+func (self BitVec) Bit(i uintptr) byte {
+ return (*(*byte)(unsafe.Pointer(uintptr(self.B) + i / 8)) >> (i % 8)) & 1
+}
+
+func (self BitVec) String() string {
+ var i uintptr
+ var v []string
+
+ /* add each bit */
+ for i = 0; i < self.N; i++ {
+ v = append(v, fmt.Sprintf("%d", self.Bit(i)))
+ }
+
+ /* join them together */
+ return fmt.Sprintf(
+ "BitVec { %s }",
+ strings.Join(v, ", "),
+ )
+}
+
+type StackMap struct {
+ N int32
+ L int32
+ B [1]byte
+}
+
+// func (self *StackMap) add() {
+// _stackMapLock.Lock()
+// _stackMapCache[self] = struct{}{}
+// _stackMapLock.Unlock()
+// }
+
+func (self *StackMap) Pin() uintptr {
+ // self.add()
+ return uintptr(unsafe.Pointer(self))
+}
+
+func (self *StackMap) Get(i int32) BitVec {
+ return BitVec {
+ N: uintptr(self.L),
+ B: unsafe.Pointer(uintptr(unsafe.Pointer(&self.B)) + uintptr(i * ((self.L + 7) >> 3))),
+ }
+}
+
+func (self *StackMap) String() string {
+ sb := strings.Builder{}
+ sb.WriteString("StackMap {")
+
+ /* dump every stack map */
+ for i := int32(0); i < self.N; i++ {
+ sb.WriteRune('\n')
+ sb.WriteString(" " + self.Get(i).String())
+ }
+
+ /* close the stackmap */
+ sb.WriteString("\n}")
+ return sb.String()
+}
+
+func (self *StackMap) MarshalBinary() ([]byte, error) {
+ size := int(self.N) * int(self.L) + int(unsafe.Sizeof(self.L)) + int(unsafe.Sizeof(self.N))
+ return BytesFrom(unsafe.Pointer(self), size, size), nil
+}
+
+var (
+ byteType = UnpackEface(byte(0)).Type
+)
+
+const (
+ _StackMapSize = unsafe.Sizeof(StackMap{})
+)
+
+//go:linkname mallocgc runtime.mallocgc
+//goland:noinspection GoUnusedParameter
+func mallocgc(nb uintptr, vt *GoType, zero bool) unsafe.Pointer
+
+type StackMapBuilder struct {
+ b Bitmap
+}
+
+//go:nocheckptr
+func (self *StackMapBuilder) Build() (p *StackMap) {
+ nb := len(self.b.B)
+ bm := mallocgc(_StackMapSize + uintptr(nb) - 1, byteType, false)
+
+ /* initialize as 1 bitmap of N bits */
+ p = (*StackMap)(bm)
+ p.N, p.L = 1, int32(self.b.N)
+ copy(BytesFrom(unsafe.Pointer(&p.B), nb, nb), self.b.B)
+ return
+}
+
+func (self *StackMapBuilder) AddField(ptr bool) {
+ if ptr {
+ self.b.Append(1)
+ } else {
+ self.b.Append(0)
+ }
+}
+
+func (self *StackMapBuilder) AddFields(n int, ptr bool) {
+ if ptr {
+ self.b.AppendMany(n, 1)
+ } else {
+ self.b.AppendMany(n, 0)
+ }
+}
\ No newline at end of file
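
The Bitmap above packs one flag per slot, least-significant bit first within each byte, which is exactly what StackMapBuilder.AddField feeds into Build. A standalone copy of that packing logic (Build itself needs runtime.mallocgc via go:linkname, so it is omitted here):

package main

import "fmt"

type bitmap struct {
	n int    // number of bits appended so far
	b []byte // backing bytes, LSB-first within each byte
}

func (m *bitmap) append1(bv int) {
	if m.n >= len(m.b)*8 {
		m.b = append(m.b, 0) // grow by one byte, as Bitmap.grow does
	}
	if bv != 0 {
		m.b[m.n/8] |= 1 << (m.n % 8)
	}
	m.n++
}

func main() {
	var m bitmap
	for _, ptr := range []int{0, 1, 0, 1} { // AddField(false), AddField(true), ...
		m.append1(ptr)
	}
	fmt.Printf("%d bits: %08b\n", m.n, m.b[0]) // 4 bits: 00001010
}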