/*
 * Copyright 2022 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/** Go Internal ABI implementation
 *
 *  This module implements the function layout algorithm described by the Go internal ABI.
 *  See https://github.com/golang/go/blob/master/src/cmd/compile/abi-internal.md for more info.
 */
import (
    `fmt`
    `reflect`

    . `github.com/chenzhuoyu/iasm/x86_64`
)
/** Frame Structure of the Generated Function
 *
 *                 FP  +------------------------------+
 *                     |             . . .            |
 *                     | 2nd reg argument spill space |
 *                     + 1st reg argument spill space |
 *                     |    <pointer-sized alignment> |
 *                     |             . . .            |
 *                     |    2nd stack-assigned result |
 *                     +    1st stack-assigned result |
 *                     |    <pointer-sized alignment> |
 *                     |             . . .            |
 *                     |  2nd stack-assigned argument |
 *                     |  1st stack-assigned argument |
 *                     |      stack-assigned receiver |
 *             prev()  +------------------------------+ (Previous Frame)
 *                              Return PC             |
 *             size()  -------------------------------|
 *                             Saved RBP              |
 *             offs()  -------------------------------|
 *                         1th Reserved Registers     |
 *                     -------------------------------|
 *                         2th Reserved Registers     |
 *                     -------------------------------|
 *                             Local Variables        |
 *                RSP  -------------------------------|↓ lower addresses
 */
const zeroRegGo = XMM15
64
var iregOrderGo = [...]Register64 {
76
var xregOrderGo = [...]XMMRegister {
94
func ReservedRegs(callc bool) []Register {
99
R14, // current goroutine
100
R15, // GOT reference
104
/* stackAlloc tracks the running state of the parameter-assignment algorithm:
 *   s — current stack offset in bytes,
 *   i — number of integer registers consumed,
 *   x — number of floating-point registers consumed. */
type stackAlloc struct {
    s uint32
    i int
    x int
}
func (self *stackAlloc) reset() {
111
self.i, self.x = 0, 0
114
func (self *stackAlloc) ireg(vt reflect.Type) (p Parameter) {
115
p = mkIReg(vt, iregOrderGo[self.i])
120
func (self *stackAlloc) xreg(vt reflect.Type) (p Parameter) {
121
p = mkXReg(vt, xregOrderGo[self.x])
126
func (self *stackAlloc) stack(vt reflect.Type) (p Parameter) {
127
p = mkStack(vt, self.s)
128
self.s += uint32(vt.Size())
132
func (self *stackAlloc) spill(n uint32, a int) uint32 {
133
self.s = alignUp(self.s, a) + n
137
func (self *stackAlloc) alloc(p []Parameter, vt reflect.Type) []Parameter {
141
/* zero-sized objects are allocated on stack */
143
return append(p, mkStack(intType, self.s))
146
/* check for value type */
148
case reflect.Bool : return self.valloc(p, reflect.TypeOf(false))
149
case reflect.Int : return self.valloc(p, intType)
150
case reflect.Int8 : return self.valloc(p, reflect.TypeOf(int8(0)))
151
case reflect.Int16 : return self.valloc(p, reflect.TypeOf(int16(0)))
152
case reflect.Int32 : return self.valloc(p, reflect.TypeOf(uint32(0)))
153
case reflect.Int64 : return self.valloc(p, reflect.TypeOf(int64(0)))
154
case reflect.Uint : return self.valloc(p, reflect.TypeOf(uint(0)))
155
case reflect.Uint8 : return self.valloc(p, reflect.TypeOf(uint8(0)))
156
case reflect.Uint16 : return self.valloc(p, reflect.TypeOf(uint16(0)))
157
case reflect.Uint32 : return self.valloc(p, reflect.TypeOf(uint32(0)))
158
case reflect.Uint64 : return self.valloc(p, reflect.TypeOf(uint64(0)))
159
case reflect.Uintptr : return self.valloc(p, reflect.TypeOf(uintptr(0)))
160
case reflect.Float32 : return self.valloc(p, reflect.TypeOf(float32(0)))
161
case reflect.Float64 : return self.valloc(p, reflect.TypeOf(float64(0)))
162
case reflect.Complex64 : panic("abi: go117: not implemented: complex64")
163
case reflect.Complex128 : panic("abi: go117: not implemented: complex128")
164
case reflect.Array : panic("abi: go117: not implemented: arrays")
165
case reflect.Chan : return self.valloc(p, reflect.TypeOf((chan int)(nil)))
166
case reflect.Func : return self.valloc(p, reflect.TypeOf((func())(nil)))
167
case reflect.Map : return self.valloc(p, reflect.TypeOf((map[int]int)(nil)))
168
case reflect.Ptr : return self.valloc(p, reflect.TypeOf((*int)(nil)))
169
case reflect.UnsafePointer : return self.valloc(p, ptrType)
170
case reflect.Interface : return self.valloc(p, ptrType, ptrType)
171
case reflect.Slice : return self.valloc(p, ptrType, intType, intType)
172
case reflect.String : return self.valloc(p, ptrType, intType)
173
case reflect.Struct : panic("abi: go117: not implemented: structs")
174
default : panic("abi: invalid value type")
178
func (self *stackAlloc) valloc(p []Parameter, vts ...reflect.Type) []Parameter {
179
for _, vt := range vts {
181
if enum != notFloatKind && self.x < len(xregOrderGo) {
182
p = append(p, self.xreg(vt))
183
} else if enum == notFloatKind && self.i < len(iregOrderGo) {
184
p = append(p, self.ireg(vt))
186
p = append(p, self.stack(vt))
192
func NewFunctionLayout(ft reflect.Type) FunctionLayout {
194
var fn FunctionLayout
196
/* assign every arguments */
197
for i := 0; i < ft.NumIn(); i++ {
198
fn.Args = sa.alloc(fn.Args, ft.In(i))
201
/* reset the register counter, and add a pointer alignment field */
204
/* assign every return value */
205
for i := 0; i < ft.NumOut(); i++ {
206
fn.Rets = sa.alloc(fn.Rets, ft.Out(i))
209
sa.spill(0, PtrAlign)
211
/* assign spill slots */
212
for i := 0; i < len(fn.Args); i++ {
213
if fn.Args[i].InRegister {
214
fn.Args[i].Mem = sa.spill(PtrSize, PtrAlign) - PtrSize
218
/* add the final pointer alignment field */
219
fn.FP = sa.spill(0, PtrAlign)
223
func (self *Frame) emitExchangeArgs(p *Program) {
224
iregArgs := make([]Parameter, 0, len(self.desc.Args))
226
for _, v := range self.desc.Args {
228
if v.IsFloat != notFloatKind {
231
iregArgs = append(iregArgs, v)
234
panic("not support stack-assgined arguments now")
237
if xregArgs > len(xregOrderC) {
238
panic("too many arguments, only support at most 8 integer register arguments now")
241
switch len(iregArgs) {
243
//Fast-Path: when arguments count are less than four, just exchange the registers
244
for i := 0; i < len(iregArgs); i++ {
245
p.MOVQ(iregOrderGo[i], iregOrderC[i])
249
// need to spill 3th ~ regArgs registers before exchange
250
for i := 3; i < len(iregArgs); i++ {
252
// pointer args have already been spilled
254
p.MOVQ(iregOrderGo[i], Ptr(RSP, int32(self.Prev() + arg.Mem)))
257
p.MOVQ(iregOrderGo[0], iregOrderC[0])
258
p.MOVQ(iregOrderGo[1], iregOrderC[1])
259
p.MOVQ(iregOrderGo[2], iregOrderC[2])
260
for i := 3; i < len(iregArgs); i++ {
262
p.MOVQ(Ptr(RSP, int32(self.Prev() + arg.Mem)), iregOrderC[i])
266
panic("too many arguments, only support at most 6 integer register arguments now")
270
func (self *Frame) emitStackCheck(p *Program, to *Label, maxStack uintptr) {
271
p.LEAQ(Ptr(RSP, int32(-(self.Size() + uint32(maxStack)))), R12)
272
p.CMPQ(Ptr(R14, _G_stackguard0), R12)
276
func (self *Frame) StackCheckTextSize() uint32 {
277
p := DefaultArch.CreateProgram()
278
p.LEAQ(Ptr(RSP, int32(-(self.Size()))), R12)
279
p.CMPQ(Ptr(R14, _G_stackguard0), R12)
280
to := CreateLabel("")
283
return uint32(len(p.Assemble(0)))
286
func (self *Frame) emitExchangeRets(p *Program) {
287
if len(self.desc.Rets) > 1 {
288
panic("too many results, only support one result now")
291
if len(self.desc.Rets) == 1 && !self.desc.Rets[0].InRegister {
292
if self.desc.Rets[0].IsFloat == floatKind64 {
293
p.MOVSD(xregOrderC[0], self.retv(0))
294
} else if self.desc.Rets[0].IsFloat == floatKind32 {
295
p.MOVSS(xregOrderC[0], self.retv(0))
297
p.MOVQ(RAX, self.retv(0))
302
func (self *Frame) emitRestoreRegs(p *Program) {
303
// load reserved registers
304
for i, r := range ReservedRegs(self.ccall) {
307
p.MOVQ(self.resv(i), r)
309
p.MOVSD(self.resv(i), r)
311
panic(fmt.Sprintf("unsupported register type %t to reserve", r))
314
// zero xmm15 for go abi
315
p.XORPS(zeroRegGo, zeroRegGo)