author	dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>	2025-01-27 11:06:46 +0000
committer	GitHub <noreply@github.com>	2025-01-27 11:06:46 +0000
commit	5c96702cb5d9461b35c232858a3c91ab699dec7d (patch)
tree	d11da1c2140aadb19c5888c545af81ab3d9f6081 /vendor/github.com/bytedance/sonic/loader/internal/abi
parent	[chore] Allow suppressing trusted-proxies warning by disabling rate limiting ... (diff)
download	gotosocial-5c96702cb5d9461b35c232858a3c91ab699dec7d.tar.xz
[chore]: Bump github.com/gin-contrib/gzip from 1.1.0 to 1.2.2 (#3693)
Bumps [github.com/gin-contrib/gzip](https://github.com/gin-contrib/gzip) from 1.1.0 to 1.2.2.
- [Release notes](https://github.com/gin-contrib/gzip/releases)
- [Changelog](https://github.com/gin-contrib/gzip/blob/master/.goreleaser.yaml)
- [Commits](https://github.com/gin-contrib/gzip/compare/v1.1.0...v1.2.2)

---
updated-dependencies:
- dependency-name: github.com/gin-contrib/gzip
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
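For context, a minimal sketch (not from this commit) of how the bumped middleware is typically wired into a gin router, using the standard gin-contrib/gzip entry point; the route and listen address are illustrative:

package main

import (
	"github.com/gin-contrib/gzip"
	"github.com/gin-gonic/gin"
)

func main() {
	r := gin.Default()
	// Compress responses at the default level; this is the API surface
	// affected by the 1.1.0 -> 1.2.2 bump.
	r.Use(gzip.Gzip(gzip.DefaultCompression))
	r.GET("/ping", func(c *gin.Context) {
		c.String(200, "pong")
	})
	_ = r.Run(":8080")
}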
Diffstat (limited to 'vendor/github.com/bytedance/sonic/loader/internal/abi')
-rw-r--r--	vendor/github.com/bytedance/sonic/loader/internal/abi/abi_amd64.go	| 391
-rw-r--r--	vendor/github.com/bytedance/sonic/loader/internal/abi/abi_legacy_amd64.go	| 305
-rw-r--r--	vendor/github.com/bytedance/sonic/loader/internal/abi/abi_regabi_amd64.go	| 451
3 files changed, 614 insertions(+), 533 deletions(-)
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_amd64.go b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_amd64.go
index c2b45a8e1..2969c3bba 100644
--- a/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_amd64.go
+++ b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_amd64.go
@@ -17,266 +17,285 @@
package abi
import (
- `fmt`
- `reflect`
- `unsafe`
+ "fmt"
+ "reflect"
+ "unsafe"
- . `github.com/cloudwego/iasm/x86_64`
+ x64 "github.com/bytedance/sonic/loader/internal/iasm/x86_64"
+)
+
+type (
+ Register = x64.Register
+ Register64 = x64.Register64
+ XMMRegister = x64.XMMRegister
+ Program = x64.Program
+ MemoryOperand = x64.MemoryOperand
+ Label = x64.Label
+)
+
+var (
+ Ptr = x64.Ptr
+ DefaultArch = x64.DefaultArch
+ CreateLabel = x64.CreateLabel
)
const (
- PtrSize = 8 // pointer size
- PtrAlign = 8 // pointer alignment
+ RAX = x64.RAX
+ RSP = x64.RSP
+ RBP = x64.RBP
+ R12 = x64.R12
+ R14 = x64.R14
+ R15 = x64.R15
+)
+
+const (
+ PtrSize = 8 // pointer size
+ PtrAlign = 8 // pointer alignment
)
var iregOrderC = []Register{
- RDI,
- RSI,
- RDX,
- RCX,
- R8,
- R9,
+ x64.RDI,
+ x64.RSI,
+ x64.RDX,
+ x64.RCX,
+ x64.R8,
+ x64.R9,
}
var xregOrderC = []Register{
- XMM0,
- XMM1,
- XMM2,
- XMM3,
- XMM4,
- XMM5,
- XMM6,
- XMM7,
+ x64.XMM0,
+ x64.XMM1,
+ x64.XMM2,
+ x64.XMM3,
+ x64.XMM4,
+ x64.XMM5,
+ x64.XMM6,
+ x64.XMM7,
}
var (
- intType = reflect.TypeOf(0)
- ptrType = reflect.TypeOf(unsafe.Pointer(nil))
+ intType = reflect.TypeOf(0)
+ ptrType = reflect.TypeOf(unsafe.Pointer(nil))
)
func (self *Frame) argv(i int) *MemoryOperand {
- return Ptr(RSP, int32(self.Prev() + self.desc.Args[i].Mem))
+ return Ptr(RSP, int32(self.Prev()+self.desc.Args[i].Mem))
}
// spillv is used for growstack spill registers
func (self *Frame) spillv(i int) *MemoryOperand {
- // remain one slot for caller return pc
- return Ptr(RSP, PtrSize + int32(self.desc.Args[i].Mem))
+	// reserve one slot for the caller's return pc
+ return Ptr(RSP, PtrSize+int32(self.desc.Args[i].Mem))
}
func (self *Frame) retv(i int) *MemoryOperand {
- return Ptr(RSP, int32(self.Prev() + self.desc.Rets[i].Mem))
+ return Ptr(RSP, int32(self.Prev()+self.desc.Rets[i].Mem))
}
func (self *Frame) resv(i int) *MemoryOperand {
- return Ptr(RSP, int32(self.Offs() - uint32((i+1) * PtrSize)))
+ return Ptr(RSP, int32(self.Offs()-uint32((i+1)*PtrSize)))
}
func (self *Frame) emitGrowStack(p *Program, entry *Label) {
- // spill all register arguments
- for i, v := range self.desc.Args {
- if v.InRegister {
- if v.IsFloat == floatKind64 {
- p.MOVSD(v.Reg, self.spillv(i))
- } else if v.IsFloat == floatKind32 {
- p.MOVSS(v.Reg, self.spillv(i))
- }else {
- p.MOVQ(v.Reg, self.spillv(i))
- }
- }
- }
-
- // call runtime.morestack_noctxt
- p.MOVQ(F_morestack_noctxt, R12)
- p.CALLQ(R12)
- // load all register arguments
- for i, v := range self.desc.Args {
- if v.InRegister {
- if v.IsFloat == floatKind64 {
- p.MOVSD(self.spillv(i), v.Reg)
- } else if v.IsFloat == floatKind32 {
- p.MOVSS(self.spillv(i), v.Reg)
- }else {
- p.MOVQ(self.spillv(i), v.Reg)
- }
- }
- }
-
- // jump back to the function entry
- p.JMP(entry)
+ // spill all register arguments
+ for i, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat == floatKind64 {
+ p.MOVSD(v.Reg, self.spillv(i))
+ } else if v.IsFloat == floatKind32 {
+ p.MOVSS(v.Reg, self.spillv(i))
+ } else {
+ p.MOVQ(v.Reg, self.spillv(i))
+ }
+ }
+ }
+
+ // call runtime.morestack_noctxt
+ p.MOVQ(F_morestack_noctxt, R12)
+ p.CALLQ(R12)
+ // load all register arguments
+ for i, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat == floatKind64 {
+ p.MOVSD(self.spillv(i), v.Reg)
+ } else if v.IsFloat == floatKind32 {
+ p.MOVSS(self.spillv(i), v.Reg)
+ } else {
+ p.MOVQ(self.spillv(i), v.Reg)
+ }
+ }
+ }
+
+ // jump back to the function entry
+ p.JMP(entry)
}
func (self *Frame) GrowStackTextSize() uint32 {
- p := DefaultArch.CreateProgram()
- // spill all register arguments
- for i, v := range self.desc.Args {
- if v.InRegister {
- if v.IsFloat == floatKind64 {
- p.MOVSD(v.Reg, self.spillv(i))
- } else if v.IsFloat == floatKind32 {
- p.MOVSS(v.Reg, self.spillv(i))
- }else {
- p.MOVQ(v.Reg, self.spillv(i))
- }
- }
- }
-
- // call runtime.morestack_noctxt
- p.MOVQ(F_morestack_noctxt, R12)
- p.CALLQ(R12)
- // load all register arguments
- for i, v := range self.desc.Args {
- if v.InRegister {
- if v.IsFloat == floatKind64 {
- p.MOVSD(self.spillv(i), v.Reg)
- } else if v.IsFloat == floatKind32 {
- p.MOVSS(self.spillv(i), v.Reg)
- } else {
- p.MOVQ(self.spillv(i), v.Reg)
- }
- }
- }
-
- // jump back to the function entry
- l := CreateLabel("")
- p.Link(l)
- p.JMP(l)
-
- return uint32(len(p.Assemble(0)))
+ p := DefaultArch.CreateProgram()
+ // spill all register arguments
+ for i, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat == floatKind64 {
+ p.MOVSD(v.Reg, self.spillv(i))
+ } else if v.IsFloat == floatKind32 {
+ p.MOVSS(v.Reg, self.spillv(i))
+ } else {
+ p.MOVQ(v.Reg, self.spillv(i))
+ }
+ }
+ }
+
+ // call runtime.morestack_noctxt
+ p.MOVQ(F_morestack_noctxt, R12)
+ p.CALLQ(R12)
+ // load all register arguments
+ for i, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat == floatKind64 {
+ p.MOVSD(self.spillv(i), v.Reg)
+ } else if v.IsFloat == floatKind32 {
+ p.MOVSS(self.spillv(i), v.Reg)
+ } else {
+ p.MOVQ(self.spillv(i), v.Reg)
+ }
+ }
+ }
+
+ // jump back to the function entry
+ l := CreateLabel("")
+ p.Link(l)
+ p.JMP(l)
+
+ return uint32(len(p.Assemble(0)))
}
func (self *Frame) emitPrologue(p *Program) {
- p.SUBQ(self.Size(), RSP)
- p.MOVQ(RBP, Ptr(RSP, int32(self.Offs())))
- p.LEAQ(Ptr(RSP, int32(self.Offs())), RBP)
+ p.SUBQ(self.Size(), RSP)
+ p.MOVQ(RBP, Ptr(RSP, int32(self.Offs())))
+ p.LEAQ(Ptr(RSP, int32(self.Offs())), RBP)
}
func (self *Frame) emitEpilogue(p *Program) {
- p.MOVQ(Ptr(RSP, int32(self.Offs())), RBP)
- p.ADDQ(self.Size(), RSP)
- p.RET()
+ p.MOVQ(Ptr(RSP, int32(self.Offs())), RBP)
+ p.ADDQ(self.Size(), RSP)
+ p.RET()
}
func (self *Frame) emitReserveRegs(p *Program) {
- // spill reserved registers
- for i, r := range ReservedRegs(self.ccall) {
- switch r.(type) {
- case Register64:
- p.MOVQ(r, self.resv(i))
- case XMMRegister:
- p.MOVSD(r, self.resv(i))
- default:
- panic(fmt.Sprintf("unsupported register type %t to reserve", r))
- }
- }
+ // spill reserved registers
+ for i, r := range ReservedRegs(self.ccall) {
+ switch r.(type) {
+ case Register64:
+ p.MOVQ(r, self.resv(i))
+ case XMMRegister:
+ p.MOVSD(r, self.resv(i))
+ default:
+			panic(fmt.Sprintf("unsupported register type %T to reserve", r))
+ }
+ }
}
func (self *Frame) emitSpillPtrs(p *Program) {
- // spill pointer argument registers
- for i, r := range self.desc.Args {
- if r.InRegister && r.IsPointer {
- p.MOVQ(r.Reg, self.argv(i))
- }
- }
+ // spill pointer argument registers
+ for i, r := range self.desc.Args {
+ if r.InRegister && r.IsPointer {
+ p.MOVQ(r.Reg, self.argv(i))
+ }
+ }
}
func (self *Frame) emitClearPtrs(p *Program) {
- // spill pointer argument registers
- for i, r := range self.desc.Args {
- if r.InRegister && r.IsPointer {
- p.MOVQ(int64(0), self.argv(i))
- }
- }
+ // spill pointer argument registers
+ for i, r := range self.desc.Args {
+ if r.InRegister && r.IsPointer {
+ p.MOVQ(int64(0), self.argv(i))
+ }
+ }
}
func (self *Frame) emitCallC(p *Program, addr uintptr) {
- p.MOVQ(addr, RAX)
- p.CALLQ(RAX)
+ p.MOVQ(addr, RAX)
+ p.CALLQ(RAX)
}
type floatKind uint8
const (
- notFloatKind floatKind = iota
- floatKind32
- floatKind64
+ notFloatKind floatKind = iota
+ floatKind32
+ floatKind64
)
type Parameter struct {
- InRegister bool
- IsPointer bool
- IsFloat floatKind
- Reg Register
- Mem uint32
- Type reflect.Type
+ InRegister bool
+ IsPointer bool
+ IsFloat floatKind
+ Reg Register
+ Mem uint32
+ Type reflect.Type
}
func mkIReg(vt reflect.Type, reg Register64) (p Parameter) {
- p.Reg = reg
- p.Type = vt
- p.InRegister = true
- p.IsPointer = isPointer(vt)
- return
+ p.Reg = reg
+ p.Type = vt
+ p.InRegister = true
+ p.IsPointer = isPointer(vt)
+ return
}
func isFloat(vt reflect.Type) floatKind {
- switch vt.Kind() {
- case reflect.Float32:
- return floatKind32
- case reflect.Float64:
- return floatKind64
- default:
- return notFloatKind
- }
+ switch vt.Kind() {
+ case reflect.Float32:
+ return floatKind32
+ case reflect.Float64:
+ return floatKind64
+ default:
+ return notFloatKind
+ }
}
func mkXReg(vt reflect.Type, reg XMMRegister) (p Parameter) {
- p.Reg = reg
- p.Type = vt
- p.InRegister = true
- p.IsFloat = isFloat(vt)
- return
+ p.Reg = reg
+ p.Type = vt
+ p.InRegister = true
+ p.IsFloat = isFloat(vt)
+ return
}
func mkStack(vt reflect.Type, mem uint32) (p Parameter) {
- p.Mem = mem
- p.Type = vt
- p.InRegister = false
- p.IsPointer = isPointer(vt)
- p.IsFloat = isFloat(vt)
- return
+ p.Mem = mem
+ p.Type = vt
+ p.InRegister = false
+ p.IsPointer = isPointer(vt)
+ p.IsFloat = isFloat(vt)
+ return
}
func (self Parameter) String() string {
- if self.InRegister {
- return fmt.Sprintf("[%%%s, Pointer(%v), Float(%v)]", self.Reg, self.IsPointer, self.IsFloat)
- } else {
- return fmt.Sprintf("[%d(FP), Pointer(%v), Float(%v)]", self.Mem, self.IsPointer, self.IsFloat)
- }
+ if self.InRegister {
+ return fmt.Sprintf("[%%%s, Pointer(%v), Float(%v)]", self.Reg, self.IsPointer, self.IsFloat)
+ } else {
+ return fmt.Sprintf("[%d(FP), Pointer(%v), Float(%v)]", self.Mem, self.IsPointer, self.IsFloat)
+ }
}
func CallC(addr uintptr, fr Frame, maxStack uintptr) []byte {
- p := DefaultArch.CreateProgram()
-
- stack := CreateLabel("_stack_grow")
- entry := CreateLabel("_entry")
- p.Link(entry)
- fr.emitStackCheck(p, stack, maxStack)
- fr.emitPrologue(p)
- fr.emitReserveRegs(p)
- fr.emitSpillPtrs(p)
- fr.emitExchangeArgs(p)
- fr.emitCallC(p, addr)
- fr.emitExchangeRets(p)
- fr.emitRestoreRegs(p)
- fr.emitEpilogue(p)
- p.Link(stack)
- fr.emitGrowStack(p, entry)
-
- return p.Assemble(0)
+ p := DefaultArch.CreateProgram()
+
+ stack := CreateLabel("_stack_grow")
+ entry := CreateLabel("_entry")
+ p.Link(entry)
+ fr.emitStackCheck(p, stack, maxStack)
+ fr.emitPrologue(p)
+ fr.emitReserveRegs(p)
+ fr.emitSpillPtrs(p)
+ fr.emitExchangeArgs(p)
+ fr.emitCallC(p, addr)
+ fr.emitExchangeRets(p)
+ fr.emitRestoreRegs(p)
+ fr.emitEpilogue(p)
+ p.Link(stack)
+ fr.emitGrowStack(p, entry)
+
+ return p.Assemble(0)
}
-
-
-func (self *Frame) emitDebug(p *Program) {
- p.INT(3)
-}
\ No newline at end of file
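The substantive change in abi_amd64.go is replacing the dot-import of github.com/cloudwego/iasm/x86_64 with an aliased import of sonic's vendored copy, re-exporting the handful of names the package relies on. A self-contained sketch of the same alias/re-export pattern, with the stdlib bytes package standing in for the assembler package:

package main

import (
	b "bytes" // aliased import, as the diff does with its x64 import
	"fmt"
)

// Type alias: Buffer is identical to bytes.Buffer, so call sites that used
// the bare name (as they did under the old dot-import) keep compiling.
type Buffer = b.Buffer

// Re-exported constructor, mirroring `var Ptr = x64.Ptr` above.
var NewBuffer = b.NewBuffer

func main() {
	buf := NewBuffer(nil)
	buf.WriteString("alias pattern")
	fmt.Println(buf.String()) // prints: alias pattern
}

This keeps the package's call sites unchanged while making every external dependency explicit at the import site.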
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_legacy_amd64.go b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_legacy_amd64.go
index 298c48178..722c0696f 100644
--- a/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_legacy_amd64.go
+++ b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_legacy_amd64.go
@@ -20,163 +20,196 @@
package abi
import (
- `fmt`
- `reflect`
- `runtime`
-
- . `github.com/cloudwego/iasm/x86_64`
+ "fmt"
+ "reflect"
+ "runtime"
)
func ReservedRegs(callc bool) []Register {
- return nil
+ return nil
}
func salloc(p []Parameter, sp uint32, vt reflect.Type) (uint32, []Parameter) {
- switch vt.Kind() {
- case reflect.Bool : return sp + 8, append(p, mkStack(reflect.TypeOf(false), sp))
- case reflect.Int : return sp + 8, append(p, mkStack(intType, sp))
- case reflect.Int8 : return sp + 8, append(p, mkStack(reflect.TypeOf(int8(0)), sp))
- case reflect.Int16 : return sp + 8, append(p, mkStack(reflect.TypeOf(int16(0)), sp))
- case reflect.Int32 : return sp + 8, append(p, mkStack(reflect.TypeOf(int32(0)), sp))
- case reflect.Int64 : return sp + 8, append(p, mkStack(reflect.TypeOf(int64(0)), sp))
- case reflect.Uint : return sp + 8, append(p, mkStack(reflect.TypeOf(uint(0)), sp))
- case reflect.Uint8 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint8(0)), sp))
- case reflect.Uint16 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint16(0)), sp))
- case reflect.Uint32 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint32(0)), sp))
- case reflect.Uint64 : return sp + 8, append(p, mkStack(reflect.TypeOf(uint64(0)), sp))
- case reflect.Uintptr : return sp + 8, append(p, mkStack(reflect.TypeOf(uintptr(0)), sp))
- case reflect.Float32 : return sp + 8, append(p, mkStack(reflect.TypeOf(float32(0)), sp))
- case reflect.Float64 : return sp + 8, append(p, mkStack(reflect.TypeOf(float64(0)), sp))
- case reflect.Complex64 : panic("abi: go116: not implemented: complex64")
- case reflect.Complex128 : panic("abi: go116: not implemented: complex128")
- case reflect.Array : panic("abi: go116: not implemented: arrays")
- case reflect.Chan : return sp + 8, append(p, mkStack(reflect.TypeOf((chan int)(nil)), sp))
- case reflect.Func : return sp + 8, append(p, mkStack(reflect.TypeOf((func())(nil)), sp))
- case reflect.Map : return sp + 8, append(p, mkStack(reflect.TypeOf((map[int]int)(nil)), sp))
- case reflect.Ptr : return sp + 8, append(p, mkStack(reflect.TypeOf((*int)(nil)), sp))
- case reflect.UnsafePointer : return sp + 8, append(p, mkStack(ptrType, sp))
- case reflect.Interface : return sp + 16, append(p, mkStack(ptrType, sp), mkStack(ptrType, sp + 8))
- case reflect.Slice : return sp + 24, append(p, mkStack(ptrType, sp), mkStack(intType, sp + 8), mkStack(intType, sp + 16))
- case reflect.String : return sp + 16, append(p, mkStack(ptrType, sp), mkStack(intType, sp + 8))
- case reflect.Struct : panic("abi: go116: not implemented: structs")
- default : panic("abi: invalid value type")
- }
+ switch vt.Kind() {
+ case reflect.Bool:
+ return sp + 8, append(p, mkStack(reflect.TypeOf(false), sp))
+ case reflect.Int:
+ return sp + 8, append(p, mkStack(intType, sp))
+ case reflect.Int8:
+ return sp + 8, append(p, mkStack(reflect.TypeOf(int8(0)), sp))
+ case reflect.Int16:
+ return sp + 8, append(p, mkStack(reflect.TypeOf(int16(0)), sp))
+ case reflect.Int32:
+ return sp + 8, append(p, mkStack(reflect.TypeOf(int32(0)), sp))
+ case reflect.Int64:
+ return sp + 8, append(p, mkStack(reflect.TypeOf(int64(0)), sp))
+ case reflect.Uint:
+ return sp + 8, append(p, mkStack(reflect.TypeOf(uint(0)), sp))
+ case reflect.Uint8:
+ return sp + 8, append(p, mkStack(reflect.TypeOf(uint8(0)), sp))
+ case reflect.Uint16:
+ return sp + 8, append(p, mkStack(reflect.TypeOf(uint16(0)), sp))
+ case reflect.Uint32:
+ return sp + 8, append(p, mkStack(reflect.TypeOf(uint32(0)), sp))
+ case reflect.Uint64:
+ return sp + 8, append(p, mkStack(reflect.TypeOf(uint64(0)), sp))
+ case reflect.Uintptr:
+ return sp + 8, append(p, mkStack(reflect.TypeOf(uintptr(0)), sp))
+ case reflect.Float32:
+ return sp + 8, append(p, mkStack(reflect.TypeOf(float32(0)), sp))
+ case reflect.Float64:
+ return sp + 8, append(p, mkStack(reflect.TypeOf(float64(0)), sp))
+ case reflect.Complex64:
+ panic("abi: go116: not implemented: complex64")
+ case reflect.Complex128:
+ panic("abi: go116: not implemented: complex128")
+ case reflect.Array:
+ panic("abi: go116: not implemented: arrays")
+ case reflect.Chan:
+ return sp + 8, append(p, mkStack(reflect.TypeOf((chan int)(nil)), sp))
+ case reflect.Func:
+ return sp + 8, append(p, mkStack(reflect.TypeOf((func())(nil)), sp))
+ case reflect.Map:
+ return sp + 8, append(p, mkStack(reflect.TypeOf((map[int]int)(nil)), sp))
+ case reflect.Ptr:
+ return sp + 8, append(p, mkStack(reflect.TypeOf((*int)(nil)), sp))
+ case reflect.UnsafePointer:
+ return sp + 8, append(p, mkStack(ptrType, sp))
+ case reflect.Interface:
+ return sp + 16, append(p, mkStack(ptrType, sp), mkStack(ptrType, sp+8))
+ case reflect.Slice:
+ return sp + 24, append(p, mkStack(ptrType, sp), mkStack(intType, sp+8), mkStack(intType, sp+16))
+ case reflect.String:
+ return sp + 16, append(p, mkStack(ptrType, sp), mkStack(intType, sp+8))
+ case reflect.Struct:
+ panic("abi: go116: not implemented: structs")
+ default:
+ panic("abi: invalid value type")
+ }
}
func NewFunctionLayout(ft reflect.Type) FunctionLayout {
- var sp uint32
- var fn FunctionLayout
-
- /* assign every arguments */
- for i := 0; i < ft.NumIn(); i++ {
- sp, fn.Args = salloc(fn.Args, sp, ft.In(i))
- }
-
- /* assign every return value */
- for i := 0; i < ft.NumOut(); i++ {
- sp, fn.Rets = salloc(fn.Rets, sp, ft.Out(i))
- }
-
- /* update function ID and stack pointer */
- fn.FP = sp
- return fn
+ var sp uint32
+ var fn FunctionLayout
+
+	/* assign every argument */
+ for i := 0; i < ft.NumIn(); i++ {
+ sp, fn.Args = salloc(fn.Args, sp, ft.In(i))
+ }
+
+ /* assign every return value */
+ for i := 0; i < ft.NumOut(); i++ {
+ sp, fn.Rets = salloc(fn.Rets, sp, ft.Out(i))
+ }
+
+ /* update function ID and stack pointer */
+ fn.FP = sp
+ return fn
}
func (self *Frame) emitExchangeArgs(p *Program) {
- iregArgs, xregArgs := 0, 0
- for _, v := range self.desc.Args {
- if v.IsFloat != notFloatKind {
- xregArgs += 1
- } else {
- iregArgs += 1
- }
- }
-
- if iregArgs > len(iregOrderC) {
- panic("too many arguments, only support at most 6 integer arguments now")
- }
- if xregArgs > len(xregOrderC) {
- panic("too many arguments, only support at most 8 float arguments now")
- }
-
- ic, xc := iregArgs, xregArgs
- for i := 0; i < len(self.desc.Args); i++ {
- arg := self.desc.Args[i]
- if arg.IsFloat == floatKind64 {
- p.MOVSD(self.argv(i), xregOrderC[xregArgs - xc])
- xc -= 1
- } else if arg.IsFloat == floatKind32 {
- p.MOVSS(self.argv(i), xregOrderC[xregArgs - xc])
- xc -= 1
- } else {
- p.MOVQ(self.argv(i), iregOrderC[iregArgs - ic])
- ic -= 1
- }
- }
+ iregArgs, xregArgs := 0, 0
+ for _, v := range self.desc.Args {
+ if v.IsFloat != notFloatKind {
+ xregArgs += 1
+ } else {
+ iregArgs += 1
+ }
+ }
+
+ if iregArgs > len(iregOrderC) {
+ panic("too many arguments, only support at most 6 integer arguments now")
+ }
+ if xregArgs > len(xregOrderC) {
+ panic("too many arguments, only support at most 8 float arguments now")
+ }
+
+ ic, xc := iregArgs, xregArgs
+ for i := 0; i < len(self.desc.Args); i++ {
+ arg := self.desc.Args[i]
+ if arg.IsFloat == floatKind64 {
+ p.MOVSD(self.argv(i), xregOrderC[xregArgs-xc])
+ xc -= 1
+ } else if arg.IsFloat == floatKind32 {
+ p.MOVSS(self.argv(i), xregOrderC[xregArgs-xc])
+ xc -= 1
+ } else {
+ p.MOVQ(self.argv(i), iregOrderC[iregArgs-ic])
+ ic -= 1
+ }
+ }
}
func (self *Frame) emitStackCheck(p *Program, to *Label, maxStack uintptr) {
- // get the current goroutine
- switch runtime.GOOS {
- case "linux" : p.MOVQ(Abs(-8), R14).FS()
- case "darwin" : p.MOVQ(Abs(0x30), R14).GS()
- case "windows": break // windows always stores G pointer at R14
- default : panic("unsupported operating system")
- }
-
- // check the stack guard
- p.LEAQ(Ptr(RSP, -int32(self.Size() + uint32(maxStack))), RAX)
- p.CMPQ(Ptr(R14, _G_stackguard0), RAX)
- p.JBE(to)
+ // get the current goroutine
+ switch runtime.GOOS {
+ case "linux":
+ p.MOVQ(Abs(-8), R14).FS()
+ case "darwin":
+ p.MOVQ(Abs(0x30), R14).GS()
+ case "windows":
+ break // windows always stores G pointer at R14
+ default:
+ panic("unsupported operating system")
+ }
+
+ // check the stack guard
+ p.LEAQ(Ptr(RSP, -int32(self.Size()+uint32(maxStack))), RAX)
+ p.CMPQ(Ptr(R14, _G_stackguard0), RAX)
+ p.JBE(to)
}
func (self *Frame) StackCheckTextSize() uint32 {
- p := DefaultArch.CreateProgram()
-
- // get the current goroutine
- switch runtime.GOOS {
- case "linux" : p.MOVQ(Abs(-8), R14).FS()
- case "darwin" : p.MOVQ(Abs(0x30), R14).GS()
- case "windows": break // windows always stores G pointer at R14
- default : panic("unsupported operating system")
- }
-
- // check the stack guard
- p.LEAQ(Ptr(RSP, -int32(self.Size())), RAX)
- p.CMPQ(Ptr(R14, _G_stackguard0), RAX)
- l := CreateLabel("")
- p.Link(l)
- p.JBE(l)
-
- return uint32(len(p.Assemble(0)))
+ p := DefaultArch.CreateProgram()
+
+ // get the current goroutine
+ switch runtime.GOOS {
+ case "linux":
+ p.MOVQ(Abs(-8), R14).FS()
+ case "darwin":
+ p.MOVQ(Abs(0x30), R14).GS()
+ case "windows":
+ break // windows always stores G pointer at R14
+ default:
+ panic("unsupported operating system")
+ }
+
+ // check the stack guard
+ p.LEAQ(Ptr(RSP, -int32(self.Size())), RAX)
+ p.CMPQ(Ptr(R14, _G_stackguard0), RAX)
+ l := CreateLabel("")
+ p.Link(l)
+ p.JBE(l)
+
+ return uint32(len(p.Assemble(0)))
}
func (self *Frame) emitExchangeRets(p *Program) {
- if len(self.desc.Rets) > 1 {
- panic("too many results, only support one result now")
- }
- // store result
- if len(self.desc.Rets) ==1 {
- if self.desc.Rets[0].IsFloat == floatKind64 {
- p.MOVSD(xregOrderC[0], self.retv(0))
- } else if self.desc.Rets[0].IsFloat == floatKind32 {
- p.MOVSS(xregOrderC[0], self.retv(0))
- } else {
- p.MOVQ(RAX, self.retv(0))
- }
- }
+ if len(self.desc.Rets) > 1 {
+ panic("too many results, only support one result now")
+ }
+ // store result
+ if len(self.desc.Rets) == 1 {
+ if self.desc.Rets[0].IsFloat == floatKind64 {
+ p.MOVSD(xregOrderC[0], self.retv(0))
+ } else if self.desc.Rets[0].IsFloat == floatKind32 {
+ p.MOVSS(xregOrderC[0], self.retv(0))
+ } else {
+ p.MOVQ(RAX, self.retv(0))
+ }
+ }
}
func (self *Frame) emitRestoreRegs(p *Program) {
- // load reserved registers
- for i, r := range ReservedRegs(self.ccall) {
- switch r.(type) {
- case Register64:
- p.MOVQ(self.resv(i), r)
- case XMMRegister:
- p.MOVSD(self.resv(i), r)
- default:
- panic(fmt.Sprintf("unsupported register type %t to reserve", r))
- }
- }
-}
\ No newline at end of file
+ // load reserved registers
+ for i, r := range ReservedRegs(self.ccall) {
+ switch r.(type) {
+ case Register64:
+ p.MOVQ(self.resv(i), r)
+ case XMMRegister:
+ p.MOVSD(self.resv(i), r)
+ default:
+			panic(fmt.Sprintf("unsupported register type %T to reserve", r))
+ }
+ }
+}
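To make the legacy (pre-regabi) layout rule concrete: salloc above gives every machine word its own 8-byte slot, so a string argument takes 16 bytes (pointer + length) and a slice 24 (pointer + length + capacity). A hedged, standalone sketch that mirrors, but does not call, that rule:

package main

import (
	"fmt"
	"reflect"
)

// slots mirrors the slot sizes salloc assigns in the go1.16 stack ABI.
func slots(t reflect.Type) uint32 {
	switch t.Kind() {
	case reflect.String, reflect.Interface:
		return 16 // two words
	case reflect.Slice:
		return 24 // three words
	default:
		return 8 // scalars, pointers, chan/map/func headers
	}
}

func main() {
	ft := reflect.TypeOf(func(string, []byte, int) {})
	var sp uint32
	for i := 0; i < ft.NumIn(); i++ {
		fmt.Printf("arg %d at FP offset %d\n", i, sp) // offsets 0, 16, 40
		sp += slots(ft.In(i))
	}
	fmt.Println("frame pointer (FP):", sp) // 48
}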
diff --git a/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_regabi_amd64.go b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_regabi_amd64.go
index 5a31dea89..d4c940de3 100644
--- a/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_regabi_amd64.go
+++ b/vendor/github.com/bytedance/sonic/loader/internal/abi/abi_regabi_amd64.go
@@ -26,10 +26,10 @@
package abi
import (
- `fmt`
- `reflect`
+ "fmt"
+ "reflect"
- . `github.com/cloudwego/iasm/x86_64`
+ x64 "github.com/bytedance/sonic/loader/internal/iasm/x86_64"
)
/** Frame Structure of the Generated Function
@@ -59,258 +59,287 @@ offs() -------------------------------|
RSP -------------------------------|↓ lower addresses
*/
-const zeroRegGo = XMM15
-
-var iregOrderGo = [...]Register64 {
- RAX,// RDI
- RBX,// RSI
- RCX,// RDX
- RDI,// RCX
- RSI,// R8
- R8, // R9
- R9,
- R10,
- R11,
+const zeroRegGo = x64.XMM15
+
+var iregOrderGo = [...]Register64{
+ x64.RAX, // RDI
+ x64.RBX, // RSI
+ x64.RCX, // RDX
+ x64.RDI, // RCX
+ x64.RSI, // R8
+ x64.R8, // R9
+ x64.R9,
+ x64.R10,
+ x64.R11,
}
-var xregOrderGo = [...]XMMRegister {
- XMM0,
- XMM1,
- XMM2,
- XMM3,
- XMM4,
- XMM5,
- XMM6,
- XMM7,
- XMM8,
- XMM9,
- XMM10,
- XMM11,
- XMM12,
- XMM13,
- XMM14,
+var xregOrderGo = [...]XMMRegister{
+ x64.XMM0,
+ x64.XMM1,
+ x64.XMM2,
+ x64.XMM3,
+ x64.XMM4,
+ x64.XMM5,
+ x64.XMM6,
+ x64.XMM7,
+ x64.XMM8,
+ x64.XMM9,
+ x64.XMM10,
+ x64.XMM11,
+ x64.XMM12,
+ x64.XMM13,
+ x64.XMM14,
}
func ReservedRegs(callc bool) []Register {
- if callc {
- return nil
- }
- return []Register {
- R14, // current goroutine
- R15, // GOT reference
- }
+ if callc {
+ return nil
+ }
+ return []Register{
+ R14, // current goroutine
+ R15, // GOT reference
+ }
}
type stackAlloc struct {
- s uint32
- i int
- x int
+ s uint32
+ i int
+ x int
}
func (self *stackAlloc) reset() {
- self.i, self.x = 0, 0
+ self.i, self.x = 0, 0
}
func (self *stackAlloc) ireg(vt reflect.Type) (p Parameter) {
- p = mkIReg(vt, iregOrderGo[self.i])
- self.i++
- return
+ p = mkIReg(vt, iregOrderGo[self.i])
+ self.i++
+ return
}
func (self *stackAlloc) xreg(vt reflect.Type) (p Parameter) {
- p = mkXReg(vt, xregOrderGo[self.x])
- self.x++
- return
+ p = mkXReg(vt, xregOrderGo[self.x])
+ self.x++
+ return
}
func (self *stackAlloc) stack(vt reflect.Type) (p Parameter) {
- p = mkStack(vt, self.s)
- self.s += uint32(vt.Size())
- return
+ p = mkStack(vt, self.s)
+ self.s += uint32(vt.Size())
+ return
}
func (self *stackAlloc) spill(n uint32, a int) uint32 {
- self.s = alignUp(self.s, a) + n
- return self.s
+ self.s = alignUp(self.s, a) + n
+ return self.s
}
func (self *stackAlloc) alloc(p []Parameter, vt reflect.Type) []Parameter {
- nb := vt.Size()
- vk := vt.Kind()
-
- /* zero-sized objects are allocated on stack */
- if nb == 0 {
- return append(p, mkStack(intType, self.s))
- }
-
- /* check for value type */
- switch vk {
- case reflect.Bool : return self.valloc(p, reflect.TypeOf(false))
- case reflect.Int : return self.valloc(p, intType)
- case reflect.Int8 : return self.valloc(p, reflect.TypeOf(int8(0)))
- case reflect.Int16 : return self.valloc(p, reflect.TypeOf(int16(0)))
- case reflect.Int32 : return self.valloc(p, reflect.TypeOf(uint32(0)))
- case reflect.Int64 : return self.valloc(p, reflect.TypeOf(int64(0)))
- case reflect.Uint : return self.valloc(p, reflect.TypeOf(uint(0)))
- case reflect.Uint8 : return self.valloc(p, reflect.TypeOf(uint8(0)))
- case reflect.Uint16 : return self.valloc(p, reflect.TypeOf(uint16(0)))
- case reflect.Uint32 : return self.valloc(p, reflect.TypeOf(uint32(0)))
- case reflect.Uint64 : return self.valloc(p, reflect.TypeOf(uint64(0)))
- case reflect.Uintptr : return self.valloc(p, reflect.TypeOf(uintptr(0)))
- case reflect.Float32 : return self.valloc(p, reflect.TypeOf(float32(0)))
- case reflect.Float64 : return self.valloc(p, reflect.TypeOf(float64(0)))
- case reflect.Complex64 : panic("abi: go117: not implemented: complex64")
- case reflect.Complex128 : panic("abi: go117: not implemented: complex128")
- case reflect.Array : panic("abi: go117: not implemented: arrays")
- case reflect.Chan : return self.valloc(p, reflect.TypeOf((chan int)(nil)))
- case reflect.Func : return self.valloc(p, reflect.TypeOf((func())(nil)))
- case reflect.Map : return self.valloc(p, reflect.TypeOf((map[int]int)(nil)))
- case reflect.Ptr : return self.valloc(p, reflect.TypeOf((*int)(nil)))
- case reflect.UnsafePointer : return self.valloc(p, ptrType)
- case reflect.Interface : return self.valloc(p, ptrType, ptrType)
- case reflect.Slice : return self.valloc(p, ptrType, intType, intType)
- case reflect.String : return self.valloc(p, ptrType, intType)
- case reflect.Struct : panic("abi: go117: not implemented: structs")
- default : panic("abi: invalid value type")
- }
+ nb := vt.Size()
+ vk := vt.Kind()
+
+ /* zero-sized objects are allocated on stack */
+ if nb == 0 {
+ return append(p, mkStack(intType, self.s))
+ }
+
+ /* check for value type */
+ switch vk {
+ case reflect.Bool:
+ return self.valloc(p, reflect.TypeOf(false))
+ case reflect.Int:
+ return self.valloc(p, intType)
+ case reflect.Int8:
+ return self.valloc(p, reflect.TypeOf(int8(0)))
+ case reflect.Int16:
+ return self.valloc(p, reflect.TypeOf(int16(0)))
+ case reflect.Int32:
+ return self.valloc(p, reflect.TypeOf(uint32(0)))
+ case reflect.Int64:
+ return self.valloc(p, reflect.TypeOf(int64(0)))
+ case reflect.Uint:
+ return self.valloc(p, reflect.TypeOf(uint(0)))
+ case reflect.Uint8:
+ return self.valloc(p, reflect.TypeOf(uint8(0)))
+ case reflect.Uint16:
+ return self.valloc(p, reflect.TypeOf(uint16(0)))
+ case reflect.Uint32:
+ return self.valloc(p, reflect.TypeOf(uint32(0)))
+ case reflect.Uint64:
+ return self.valloc(p, reflect.TypeOf(uint64(0)))
+ case reflect.Uintptr:
+ return self.valloc(p, reflect.TypeOf(uintptr(0)))
+ case reflect.Float32:
+ return self.valloc(p, reflect.TypeOf(float32(0)))
+ case reflect.Float64:
+ return self.valloc(p, reflect.TypeOf(float64(0)))
+ case reflect.Complex64:
+ panic("abi: go117: not implemented: complex64")
+ case reflect.Complex128:
+ panic("abi: go117: not implemented: complex128")
+ case reflect.Array:
+ panic("abi: go117: not implemented: arrays")
+ case reflect.Chan:
+ return self.valloc(p, reflect.TypeOf((chan int)(nil)))
+ case reflect.Func:
+ return self.valloc(p, reflect.TypeOf((func())(nil)))
+ case reflect.Map:
+ return self.valloc(p, reflect.TypeOf((map[int]int)(nil)))
+ case reflect.Ptr:
+ return self.valloc(p, reflect.TypeOf((*int)(nil)))
+ case reflect.UnsafePointer:
+ return self.valloc(p, ptrType)
+ case reflect.Interface:
+ return self.valloc(p, ptrType, ptrType)
+ case reflect.Slice:
+ return self.valloc(p, ptrType, intType, intType)
+ case reflect.String:
+ return self.valloc(p, ptrType, intType)
+ case reflect.Struct:
+ panic("abi: go117: not implemented: structs")
+ default:
+ panic("abi: invalid value type")
+ }
}
func (self *stackAlloc) valloc(p []Parameter, vts ...reflect.Type) []Parameter {
- for _, vt := range vts {
- enum := isFloat(vt)
- if enum != notFloatKind && self.x < len(xregOrderGo) {
- p = append(p, self.xreg(vt))
- } else if enum == notFloatKind && self.i < len(iregOrderGo) {
- p = append(p, self.ireg(vt))
- } else {
- p = append(p, self.stack(vt))
- }
- }
- return p
+ for _, vt := range vts {
+ enum := isFloat(vt)
+ if enum != notFloatKind && self.x < len(xregOrderGo) {
+ p = append(p, self.xreg(vt))
+ } else if enum == notFloatKind && self.i < len(iregOrderGo) {
+ p = append(p, self.ireg(vt))
+ } else {
+ p = append(p, self.stack(vt))
+ }
+ }
+ return p
}
func NewFunctionLayout(ft reflect.Type) FunctionLayout {
- var sa stackAlloc
- var fn FunctionLayout
-
- /* assign every arguments */
- for i := 0; i < ft.NumIn(); i++ {
- fn.Args = sa.alloc(fn.Args, ft.In(i))
- }
-
- /* reset the register counter, and add a pointer alignment field */
- sa.reset()
-
- /* assign every return value */
- for i := 0; i < ft.NumOut(); i++ {
- fn.Rets = sa.alloc(fn.Rets, ft.Out(i))
- }
-
- sa.spill(0, PtrAlign)
-
- /* assign spill slots */
- for i := 0; i < len(fn.Args); i++ {
- if fn.Args[i].InRegister {
- fn.Args[i].Mem = sa.spill(PtrSize, PtrAlign) - PtrSize
- }
- }
-
- /* add the final pointer alignment field */
- fn.FP = sa.spill(0, PtrAlign)
- return fn
+ var sa stackAlloc
+ var fn FunctionLayout
+
+	/* assign every argument */
+ for i := 0; i < ft.NumIn(); i++ {
+ fn.Args = sa.alloc(fn.Args, ft.In(i))
+ }
+
+ /* reset the register counter, and add a pointer alignment field */
+ sa.reset()
+
+ /* assign every return value */
+ for i := 0; i < ft.NumOut(); i++ {
+ fn.Rets = sa.alloc(fn.Rets, ft.Out(i))
+ }
+
+ sa.spill(0, PtrAlign)
+
+ /* assign spill slots */
+ for i := 0; i < len(fn.Args); i++ {
+ if fn.Args[i].InRegister {
+ fn.Args[i].Mem = sa.spill(PtrSize, PtrAlign) - PtrSize
+ }
+ }
+
+ /* add the final pointer alignment field */
+ fn.FP = sa.spill(0, PtrAlign)
+ return fn
}
func (self *Frame) emitExchangeArgs(p *Program) {
- iregArgs := make([]Parameter, 0, len(self.desc.Args))
- xregArgs := 0
- for _, v := range self.desc.Args {
- if v.InRegister {
- if v.IsFloat != notFloatKind {
- xregArgs += 1
- } else {
- iregArgs = append(iregArgs, v)
- }
- } else {
- panic("not support stack-assgined arguments now")
- }
- }
- if xregArgs > len(xregOrderC) {
- panic("too many arguments, only support at most 8 integer register arguments now")
- }
-
- switch len(iregArgs) {
- case 0, 1, 2, 3: {
- //Fast-Path: when arguments count are less than four, just exchange the registers
- for i := 0; i < len(iregArgs); i++ {
- p.MOVQ(iregOrderGo[i], iregOrderC[i])
- }
- }
- case 4, 5, 6: {
- // need to spill 3th ~ regArgs registers before exchange
- for i := 3; i < len(iregArgs); i++ {
- arg := iregArgs[i]
- // pointer args have already been spilled
- if !arg.IsPointer {
- p.MOVQ(iregOrderGo[i], Ptr(RSP, int32(self.Prev() + arg.Mem)))
- }
- }
- p.MOVQ(iregOrderGo[0], iregOrderC[0])
- p.MOVQ(iregOrderGo[1], iregOrderC[1])
- p.MOVQ(iregOrderGo[2], iregOrderC[2])
- for i := 3; i < len(iregArgs); i++ {
- arg := iregArgs[i]
- p.MOVQ(Ptr(RSP, int32(self.Prev() + arg.Mem)), iregOrderC[i])
- }
- }
- default:
- panic("too many arguments, only support at most 6 integer register arguments now")
- }
+ iregArgs := make([]Parameter, 0, len(self.desc.Args))
+ xregArgs := 0
+ for _, v := range self.desc.Args {
+ if v.InRegister {
+ if v.IsFloat != notFloatKind {
+ xregArgs += 1
+ } else {
+ iregArgs = append(iregArgs, v)
+ }
+ } else {
+ panic("not support stack-assgined arguments now")
+ }
+ }
+ if xregArgs > len(xregOrderC) {
+ panic("too many arguments, only support at most 8 integer register arguments now")
+ }
+
+ switch len(iregArgs) {
+ case 0, 1, 2, 3:
+ {
+			// Fast path: with fewer than four integer arguments, just exchange the registers
+ for i := 0; i < len(iregArgs); i++ {
+ p.MOVQ(iregOrderGo[i], iregOrderC[i])
+ }
+ }
+ case 4, 5, 6:
+ {
+			// need to spill argument registers 3 and up before the exchange
+ for i := 3; i < len(iregArgs); i++ {
+ arg := iregArgs[i]
+ // pointer args have already been spilled
+ if !arg.IsPointer {
+ p.MOVQ(iregOrderGo[i], Ptr(RSP, int32(self.Prev()+arg.Mem)))
+ }
+ }
+ p.MOVQ(iregOrderGo[0], iregOrderC[0])
+ p.MOVQ(iregOrderGo[1], iregOrderC[1])
+ p.MOVQ(iregOrderGo[2], iregOrderC[2])
+ for i := 3; i < len(iregArgs); i++ {
+ arg := iregArgs[i]
+ p.MOVQ(Ptr(RSP, int32(self.Prev()+arg.Mem)), iregOrderC[i])
+ }
+ }
+ default:
+ panic("too many arguments, only support at most 6 integer register arguments now")
+ }
}
func (self *Frame) emitStackCheck(p *Program, to *Label, maxStack uintptr) {
- p.LEAQ(Ptr(RSP, int32(-(self.Size() + uint32(maxStack)))), R12)
- p.CMPQ(Ptr(R14, _G_stackguard0), R12)
- p.JBE(to)
+ p.LEAQ(Ptr(RSP, int32(-(self.Size()+uint32(maxStack)))), R12)
+ p.CMPQ(Ptr(R14, _G_stackguard0), R12)
+ p.JBE(to)
}
func (self *Frame) StackCheckTextSize() uint32 {
- p := DefaultArch.CreateProgram()
- p.LEAQ(Ptr(RSP, int32(-(self.Size()))), R12)
- p.CMPQ(Ptr(R14, _G_stackguard0), R12)
- to := CreateLabel("")
- p.Link(to)
- p.JBE(to)
- return uint32(len(p.Assemble(0)))
+ p := DefaultArch.CreateProgram()
+ p.LEAQ(Ptr(RSP, int32(-(self.Size()))), R12)
+ p.CMPQ(Ptr(R14, _G_stackguard0), R12)
+ to := CreateLabel("")
+ p.Link(to)
+ p.JBE(to)
+ return uint32(len(p.Assemble(0)))
}
func (self *Frame) emitExchangeRets(p *Program) {
- if len(self.desc.Rets) > 1 {
- panic("too many results, only support one result now")
- }
- // store result
- if len(self.desc.Rets) == 1 && !self.desc.Rets[0].InRegister {
- if self.desc.Rets[0].IsFloat == floatKind64 {
- p.MOVSD(xregOrderC[0], self.retv(0))
- } else if self.desc.Rets[0].IsFloat == floatKind32 {
- p.MOVSS(xregOrderC[0], self.retv(0))
- } else {
- p.MOVQ(RAX, self.retv(0))
- }
- }
+ if len(self.desc.Rets) > 1 {
+ panic("too many results, only support one result now")
+ }
+ // store result
+ if len(self.desc.Rets) == 1 && !self.desc.Rets[0].InRegister {
+ if self.desc.Rets[0].IsFloat == floatKind64 {
+ p.MOVSD(xregOrderC[0], self.retv(0))
+ } else if self.desc.Rets[0].IsFloat == floatKind32 {
+ p.MOVSS(xregOrderC[0], self.retv(0))
+ } else {
+ p.MOVQ(RAX, self.retv(0))
+ }
+ }
}
func (self *Frame) emitRestoreRegs(p *Program) {
- // load reserved registers
- for i, r := range ReservedRegs(self.ccall) {
- switch r.(type) {
- case Register64:
- p.MOVQ(self.resv(i), r)
- case XMMRegister:
- p.MOVSD(self.resv(i), r)
- default:
- panic(fmt.Sprintf("unsupported register type %t to reserve", r))
- }
- }
- // zero xmm15 for go abi
- p.XORPS(zeroRegGo, zeroRegGo)
-}
\ No newline at end of file
+ // load reserved registers
+ for i, r := range ReservedRegs(self.ccall) {
+ switch r.(type) {
+ case Register64:
+ p.MOVQ(self.resv(i), r)
+ case XMMRegister:
+ p.MOVSD(self.resv(i), r)
+ default:
+			panic(fmt.Sprintf("unsupported register type %T to reserve", r))
+ }
+ }
+ // zero xmm15 for go abi
+ p.XORPS(zeroRegGo, zeroRegGo)
+}
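For reference, the shuffle emitExchangeArgs performs maps Go's go1.17+ integer-argument registers onto the System V C order. A small illustrative sketch of the fast path; with four or more integer arguments, RDI, RSI, and RCX appear on both sides of the move, which is exactly why the code above spills arguments 3 and up to the stack first:

package main

import "fmt"

func main() {
	// Integer argument registers, per the tables in this diff.
	goOrder := []string{"RAX", "RBX", "RCX", "RDI", "RSI", "R8", "R9", "R10", "R11"} // Go regabi
	cOrder := []string{"RDI", "RSI", "RDX", "RCX", "R8", "R9"}                       // System V C
	// Fast path (<= 3 integer args): direct moves are safe because the
	// destinations RDI, RSI, RDX are not among Go argument registers 0..2.
	for i := 0; i < 3; i++ {
		fmt.Printf("MOVQ %s, %s  // Go arg %d -> C arg %d\n", goOrder[i], cOrder[i], i, i)
	}
}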