//go:build go1.17
// +build go1.17

/*
 * Copyright 2022 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/** Go Internal ABI implementation
 *
 *  This module implements the function layout algorithm described by the Go internal ABI.
 *  See https://github.com/golang/go/blob/master/src/cmd/compile/abi-internal.md for more info.
 */

package abi

import (
    `fmt`
    `reflect`

    . `github.com/chenzhuoyu/iasm/x86_64`
)

/** Frame Structure of the Generated Function
    FP  +------------------------------+
        |             . . .            |
        | 2nd reg argument spill space |
        | 1st reg argument spill space |
        | <pointer-sized alignment>    |
        |             . . .            |
        | 2nd stack-assigned result    |
        | 1st stack-assigned result    |
        | <pointer-sized alignment>    |
        |             . . .            |
        | 2nd stack-assigned argument  |
        | 1st stack-assigned argument  |
        | stack-assigned receiver      |
prev()  +------------------------------+ (Previous Frame)
        |           Return PC          |
size()  +------------------------------+
        |           Saved RBP          |
offs()  +------------------------------+
        |     1st Reserved Register    |
        +------------------------------+
        |     2nd Reserved Register    |
        +------------------------------+
        |        Local Variables       |
    RSP +------------------------------+ ↓ lower addresses
*/

// zeroRegGo is the fixed zero register of the Go internal ABI on amd64
// (X15 is required to hold zero across Go code).
const zeroRegGo = XMM15

// iregOrderGo lists the integer argument registers in Go internal ABI order;
// the comments give the corresponding C (System V) argument registers.
var iregOrderGo = [...]Register64 {
    RAX, // RDI
    RBX, // RSI
    RCX, // RDX
    RDI, // RCX
    RSI, // R8
    R8,  // R9
    R9,
    R10,
    R11,
}

// xregOrderGo lists the SSE argument registers in Go internal ABI order
// (XMM15 is excluded: it is reserved as the zero register).
var xregOrderGo = [...]XMMRegister {
    XMM0,
    XMM1,
    XMM2,
    XMM3,
    XMM4,
    XMM5,
    XMM6,
    XMM7,
    XMM8,
    XMM9,
    XMM10,
    XMM11,
    XMM12,
    XMM13,
    XMM14,
}

// ReservedRegs returns the registers that must be preserved across the
// generated code: none when calling C, otherwise R14 (current goroutine)
// and R15 (GOT reference).
func ReservedRegs(callc bool) []Register {
    if callc {
        return nil
    }
    return []Register {
        R14, // current goroutine
        R15, // GOT reference
    }
}

// stackAlloc tracks the current stack offset and the next free integer and
// SSE argument registers while parameters are being assigned.
type stackAlloc struct {
    s uint32 // current stack offset
    i int    // next integer register index
    x int    // next SSE register index
}

// reset rewinds the register counters; the stack offset is kept.
func (self *stackAlloc) reset() {
    self.i, self.x = 0, 0
}

func (self *stackAlloc) ireg(vt reflect.Type) (p Parameter) {
    p = mkIReg(vt, iregOrderGo[self.i])
    self.i++
    return
}

func (self *stackAlloc) xreg(vt reflect.Type) (p Parameter) {
    p = mkXReg(vt, xregOrderGo[self.x])
    self.x++
    return
}

func (self *stackAlloc) stack(vt reflect.Type) (p Parameter) {
    p = mkStack(vt, self.s)
    self.s += uint32(vt.Size())
    return
}

// spill aligns the current stack offset to a, then reserves n more bytes,
// returning the new offset.
func (self *stackAlloc) spill(n uint32, a int) uint32 {
    self.s = alignUp(self.s, a) + n
    return self.s
}
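
// Illustrative arithmetic only: with self.s == 12, spill(8, 8) first rounds
// 12 up to 16, then reserves 8 more bytes, so self.s becomes 24 and 24 is
// returned. A call of the form spill(0, PtrAlign), as used below, merely
// realigns the offset without reserving any space.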

func (self *stackAlloc) alloc(p []Parameter, vt reflect.Type) []Parameter {
    nb := vt.Size()
    vk := vt.Kind()

    /* zero-sized objects are allocated on the stack */
    if nb == 0 {
        return append(p, mkStack(intType, self.s))
    }

    /* check for value type */
    switch vk {
        case reflect.Bool          : return self.valloc(p, reflect.TypeOf(false))
        case reflect.Int           : return self.valloc(p, intType)
        case reflect.Int8          : return self.valloc(p, reflect.TypeOf(int8(0)))
        case reflect.Int16         : return self.valloc(p, reflect.TypeOf(int16(0)))
        case reflect.Int32         : return self.valloc(p, reflect.TypeOf(int32(0)))
        case reflect.Int64         : return self.valloc(p, reflect.TypeOf(int64(0)))
        case reflect.Uint          : return self.valloc(p, reflect.TypeOf(uint(0)))
        case reflect.Uint8         : return self.valloc(p, reflect.TypeOf(uint8(0)))
        case reflect.Uint16        : return self.valloc(p, reflect.TypeOf(uint16(0)))
        case reflect.Uint32        : return self.valloc(p, reflect.TypeOf(uint32(0)))
        case reflect.Uint64        : return self.valloc(p, reflect.TypeOf(uint64(0)))
        case reflect.Uintptr       : return self.valloc(p, reflect.TypeOf(uintptr(0)))
        case reflect.Float32       : return self.valloc(p, reflect.TypeOf(float32(0)))
        case reflect.Float64       : return self.valloc(p, reflect.TypeOf(float64(0)))
        case reflect.Complex64     : panic("abi: go117: not implemented: complex64")
        case reflect.Complex128    : panic("abi: go117: not implemented: complex128")
        case reflect.Array         : panic("abi: go117: not implemented: arrays")
        case reflect.Chan          : return self.valloc(p, reflect.TypeOf((chan int)(nil)))
        case reflect.Func          : return self.valloc(p, reflect.TypeOf((func())(nil)))
        case reflect.Map           : return self.valloc(p, reflect.TypeOf((map[int]int)(nil)))
        case reflect.Ptr           : return self.valloc(p, reflect.TypeOf((*int)(nil)))
        case reflect.UnsafePointer : return self.valloc(p, ptrType)
        case reflect.Interface     : return self.valloc(p, ptrType, ptrType)
        case reflect.Slice         : return self.valloc(p, ptrType, intType, intType)
        case reflect.String        : return self.valloc(p, ptrType, intType)
        case reflect.Struct        : panic("abi: go117: not implemented: structs")
        default                    : panic("abi: invalid value type")
    }
}
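
// For example, a string argument expands to (ptrType, intType) and an
// interface to (ptrType, ptrType); valloc below then places each word in
// the next free register or on the stack.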

// valloc assigns each component type to the next free SSE register (for
// floats), the next free integer register (for everything else), or a
// stack slot once the registers of that class are exhausted.
func (self *stackAlloc) valloc(p []Parameter, vts ...reflect.Type) []Parameter {
    for _, vt := range vts {
        enum := isFloat(vt)
        if enum != notFloatKind && self.x < len(xregOrderGo) {
            p = append(p, self.xreg(vt))
        } else if enum == notFloatKind && self.i < len(iregOrderGo) {
            p = append(p, self.ireg(vt))
        } else {
            p = append(p, self.stack(vt))
        }
    }
    return p
}

func NewFunctionLayout(ft reflect.Type) FunctionLayout {
    var sa stackAlloc
    var fn FunctionLayout

    /* assign every argument */
    for i := 0; i < ft.NumIn(); i++ {
        fn.Args = sa.alloc(fn.Args, ft.In(i))
    }

    /* reset the register counters before assigning results */
    sa.reset()

    /* assign every return value */
    for i := 0; i < ft.NumOut(); i++ {
        fn.Rets = sa.alloc(fn.Rets, ft.Out(i))
    }

    /* add a pointer alignment field before the spill area */
    sa.spill(0, PtrAlign)

    /* assign spill slots for register arguments */
    for i := 0; i < len(fn.Args); i++ {
        if fn.Args[i].InRegister {
            fn.Args[i].Mem = sa.spill(PtrSize, PtrAlign) - PtrSize
        }
    }

    /* add the final pointer alignment field */
    fn.FP = sa.spill(0, PtrAlign)
    return fn
}
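
// A minimal usage sketch (illustrative only; the field meanings follow the
// frame diagram above):
//
//     ft := reflect.TypeOf(func(int, string) error { return nil })
//     fn := NewFunctionLayout(ft)
//     // fn.Args now holds the assignments for the int (one integer
//     // register) and the string (two integer registers: data pointer and
//     // length); fn.Rets holds the two pointer words of the error
//     // interface; fn.FP is the total frame-pointer offset, including the
//     // spill slots reserved for every register argument.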

// emitExchangeArgs moves the register arguments from their Go internal ABI
// registers into the C (System V) argument registers.
func (self *Frame) emitExchangeArgs(p *Program) {
    iregArgs := make([]Parameter, 0, len(self.desc.Args))
    xregArgs := 0
    for _, v := range self.desc.Args {
        if v.InRegister {
            if v.IsFloat != notFloatKind {
                xregArgs += 1
            } else {
                iregArgs = append(iregArgs, v)
            }
        } else {
            panic("stack-assigned arguments are not supported for now")
        }
    }
    if xregArgs > len(xregOrderC) {
        panic("too many floating-point arguments, at most 8 XMM register arguments are supported for now")
    }

    switch len(iregArgs) {
    case 0, 1, 2, 3: {
        // Fast path: the first three Go argument registers (RAX, RBX, RCX)
        // do not overlap the first three C argument registers (RDI, RSI,
        // RDX), so the values can be moved directly.
        for i := 0; i < len(iregArgs); i++ {
            p.MOVQ(iregOrderGo[i], iregOrderC[i])
        }
    }
    case 4, 5, 6: {
        // From the 4th argument on the register sets overlap (e.g. Go's
        // 4th argument register is RDI, which is C's 1st), so spill the
        // 4th and later arguments to their slots before exchanging.
        for i := 3; i < len(iregArgs); i++ {
            arg := iregArgs[i]
            // pointer arguments have already been spilled
            if !arg.IsPointer {
                p.MOVQ(iregOrderGo[i], Ptr(RSP, int32(self.Prev() + arg.Mem)))
            }
        }
        p.MOVQ(iregOrderGo[0], iregOrderC[0])
        p.MOVQ(iregOrderGo[1], iregOrderC[1])
        p.MOVQ(iregOrderGo[2], iregOrderC[2])
        for i := 3; i < len(iregArgs); i++ {
            arg := iregArgs[i]
            p.MOVQ(Ptr(RSP, int32(self.Prev() + arg.Mem)), iregOrderC[i])
        }
    }
    default:
        panic("too many integer arguments, at most 6 integer register arguments are supported for now")
    }
}

// emitStackCheck emits a prologue stack check: if this frame plus maxStack
// additional bytes would cross the goroutine's stack guard, jump to `to`
// (the stack-growth path). R14 holds the current goroutine.
func (self *Frame) emitStackCheck(p *Program, to *Label, maxStack uintptr) {
    p.LEAQ(Ptr(RSP, int32(-(self.Size() + uint32(maxStack)))), R12)
    p.CMPQ(Ptr(R14, _G_stackguard0), R12)
    p.JBE(to)
}
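
// A sketch of the emitted sequence in assembler notation (a minimal
// illustration, assuming AT&T operand order; offsets are symbolic):
//
//     LEAQ -(size+maxStack)(SP), R12
//     CMPQ stackguard0(R14), R12
//     JBE  to
//
// i.e. branch to the stack-growth path whenever the prospective stack
// pointer would fall at or below the current goroutine's stack guard.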

// StackCheckTextSize assembles a representative stack-check sequence and
// returns its encoded size in bytes.
func (self *Frame) StackCheckTextSize() uint32 {
    p := DefaultArch.CreateProgram()
    p.LEAQ(Ptr(RSP, int32(-(self.Size()))), R12)
    p.CMPQ(Ptr(R14, _G_stackguard0), R12)
    to := CreateLabel("")
    p.Link(to)
    p.JBE(to)
    return uint32(len(p.Assemble(0)))
}

// emitExchangeRets stores the C return value into the Go stack-assigned
// result slot.
func (self *Frame) emitExchangeRets(p *Program) {
    if len(self.desc.Rets) > 1 {
        panic("too many results, only one result is supported for now")
    }
    // store the result
    if len(self.desc.Rets) == 1 && !self.desc.Rets[0].InRegister {
        if self.desc.Rets[0].IsFloat == floatKind64 {
            p.MOVSD(xregOrderC[0], self.retv(0))
        } else if self.desc.Rets[0].IsFloat == floatKind32 {
            p.MOVSS(xregOrderC[0], self.retv(0))
        } else {
            p.MOVQ(RAX, self.retv(0))
        }
    }
}

// emitRestoreRegs reloads the reserved registers from their save slots and
// re-zeroes XMM15 as the Go internal ABI requires.
func (self *Frame) emitRestoreRegs(p *Program) {
    // load the reserved registers
    for i, r := range ReservedRegs(self.ccall) {
        switch r.(type) {
        case Register64:
            p.MOVQ(self.resv(i), r)
        case XMMRegister:
            p.MOVSD(self.resv(i), r)
        default:
            panic(fmt.Sprintf("unsupported register type %T to reserve", r))
        }
    }
    // zero XMM15 for the Go ABI
    p.XORPS(zeroRegGo, zeroRegGo)
}
